diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 393b765af7..f1b8489baa 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -4,8 +4,13 @@ updates: - package-ecosystem: "gomod" directory: "/" schedule: - # Check for updates to GitHub Actions every weekday - interval: "daily" + interval: "weekly" + groups: + # Group all minor/patch go dependencies into a single PR. + go-dependencies: + update-types: + - "minor" + - "patch" labels: - "dependencies" - "go" @@ -15,8 +20,7 @@ updates: - package-ecosystem: "github-actions" directory: "/" schedule: - # Check for updates to GitHub Actions every weekday - interval: "daily" + interval: "weekly" labels: - "dependencies" - "github_actions" diff --git a/.github/workflows/actions/release-notes/action.yml b/.github/workflows/actions/release-notes/action.yml index 6a25fa3132..cd1aa20d01 100644 --- a/.github/workflows/actions/release-notes/action.yml +++ b/.github/workflows/actions/release-notes/action.yml @@ -15,5 +15,5 @@ outputs: contents: description: The contents of the release notes. runs: - using: 'node12' + using: 'node16' main: 'dist/index.js' \ No newline at end of file diff --git a/.github/workflows/actions/release-notes/dist/index.js b/.github/workflows/actions/release-notes/dist/index.js index 10a9d60858..0e27c46541 100644 --- a/.github/workflows/actions/release-notes/dist/index.js +++ b/.github/workflows/actions/release-notes/dist/index.js @@ -38,14 +38,27 @@ try { "use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.issue = exports.issueCommand = void 0; const os = __importStar(__webpack_require__(2087)); const utils_1 = __webpack_require__(5278); /** @@ -124,6 +137,25 @@ function escapeProperty(s) { "use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { @@ -133,19 +165,14 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0; const command_1 = __webpack_require__(7351); const file_command_1 = __webpack_require__(717); const utils_1 = __webpack_require__(5278); const os = __importStar(__webpack_require__(2087)); const path = __importStar(__webpack_require__(5622)); +const oidc_utils_1 = __webpack_require__(8041); /** * The code to exit an action */ @@ -174,13 +201,9 @@ function exportVariable(name, val) { process.env[name] = convertedVal; const filePath = process.env['GITHUB_ENV'] || ''; if (filePath) { - const delimiter = '_GitHubActionsFileCommandDelimeter_'; - const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`; - file_command_1.issueCommand('ENV', commandValue); - } - else { - command_1.issueCommand('set-env', { name }, convertedVal); + return file_command_1.issueFileCommand('ENV', file_command_1.prepareKeyValueMessage(name, val)); } + command_1.issueCommand('set-env', { name }, 
convertedVal); } exports.exportVariable = exportVariable; /** @@ -198,7 +221,7 @@ exports.setSecret = setSecret; function addPath(inputPath) { const filePath = process.env['GITHUB_PATH'] || ''; if (filePath) { - file_command_1.issueCommand('PATH', inputPath); + file_command_1.issueFileCommand('PATH', inputPath); } else { command_1.issueCommand('add-path', {}, inputPath); @@ -207,7 +230,9 @@ function addPath(inputPath) { } exports.addPath = addPath; /** - * Gets the value of an input. The value is also trimmed. + * Gets the value of an input. + * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed. + * Returns an empty string if the value is not defined. * * @param name name of the input to get * @param options optional. See InputOptions. @@ -218,9 +243,52 @@ function getInput(name, options) { if (options && options.required && !val) { throw new Error(`Input required and not supplied: ${name}`); } + if (options && options.trimWhitespace === false) { + return val; + } return val.trim(); } exports.getInput = getInput; +/** + * Gets the values of an multiline input. Each value is also trimmed. + * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns string[] + * + */ +function getMultilineInput(name, options) { + const inputs = getInput(name, options) + .split('\n') + .filter(x => x !== ''); + if (options && options.trimWhitespace === false) { + return inputs; + } + return inputs.map(input => input.trim()); +} +exports.getMultilineInput = getMultilineInput; +/** + * Gets the input value of the boolean type in the YAML 1.2 "core schema" specification. + * Support boolean input list: `true | True | TRUE | false | False | FALSE` . + * The return value is also in boolean type. + * ref: https://yaml.org/spec/1.2/spec.html#id2804923 + * + * @param name name of the input to get + * @param options optional. See InputOptions. 
+ * @returns boolean + */ +function getBooleanInput(name, options) { + const trueValue = ['true', 'True', 'TRUE']; + const falseValue = ['false', 'False', 'FALSE']; + const val = getInput(name, options); + if (trueValue.includes(val)) + return true; + if (falseValue.includes(val)) + return false; + throw new TypeError(`Input does not meet YAML 1.2 "Core Schema" specification: ${name}\n` + + `Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); +} +exports.getBooleanInput = getBooleanInput; /** * Sets the value of an output. * @@ -229,7 +297,12 @@ exports.getInput = getInput; */ // eslint-disable-next-line @typescript-eslint/no-explicit-any function setOutput(name, value) { - command_1.issueCommand('set-output', { name }, value); + const filePath = process.env['GITHUB_OUTPUT'] || ''; + if (filePath) { + return file_command_1.issueFileCommand('OUTPUT', file_command_1.prepareKeyValueMessage(name, value)); + } + process.stdout.write(os.EOL); + command_1.issueCommand('set-output', { name }, utils_1.toCommandValue(value)); } exports.setOutput = setOutput; /** @@ -275,19 +348,30 @@ exports.debug = debug; /** * Adds an error issue * @param message error issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. */ -function error(message) { - command_1.issue('error', message instanceof Error ? message.toString() : message); +function error(message, properties = {}) { + command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); } exports.error = error; /** - * Adds an warning issue + * Adds a warning issue * @param message warning issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. */ -function warning(message) { - command_1.issue('warning', message instanceof Error ? 
message.toString() : message); +function warning(message, properties = {}) { + command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); } exports.warning = warning; +/** + * Adds a notice issue + * @param message notice issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. + */ +function notice(message, properties = {}) { + command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); +} +exports.notice = notice; /** * Writes info to log with console.log. * @param message info message @@ -347,7 +431,11 @@ exports.group = group; */ // eslint-disable-next-line @typescript-eslint/no-explicit-any function saveState(name, value) { - command_1.issueCommand('save-state', { name }, value); + const filePath = process.env['GITHUB_STATE'] || ''; + if (filePath) { + return file_command_1.issueFileCommand('STATE', file_command_1.prepareKeyValueMessage(name, value)); + } + command_1.issueCommand('save-state', { name }, utils_1.toCommandValue(value)); } exports.saveState = saveState; /** @@ -360,6 +448,29 @@ function getState(name) { return process.env[`STATE_${name}`] || ''; } exports.getState = getState; +function getIDToken(aud) { + return __awaiter(this, void 0, void 0, function* () { + return yield oidc_utils_1.OidcClient.getIDToken(aud); + }); +} +exports.getIDToken = getIDToken; +/** + * Summary exports + */ +var summary_1 = __webpack_require__(1327); +Object.defineProperty(exports, "summary", ({ enumerable: true, get: function () { return summary_1.summary; } })); +/** + * @deprecated use core.summary + */ +var summary_2 = __webpack_require__(1327); +Object.defineProperty(exports, "markdownSummary", ({ enumerable: true, get: function () { return summary_2.markdownSummary; } })); +/** + * Path exports + */ +var path_utils_1 = __webpack_require__(2981); 
+Object.defineProperty(exports, "toPosixPath", ({ enumerable: true, get: function () { return path_utils_1.toPosixPath; } })); +Object.defineProperty(exports, "toWin32Path", ({ enumerable: true, get: function () { return path_utils_1.toWin32Path; } })); +Object.defineProperty(exports, "toPlatformPath", ({ enumerable: true, get: function () { return path_utils_1.toPlatformPath; } })); //# sourceMappingURL=core.js.map /***/ }), @@ -370,20 +481,34 @@ exports.getState = getState; "use strict"; // For internal use, subject to change. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.prepareKeyValueMessage = exports.issueFileCommand = void 0; // We use any as a valid input type /* eslint-disable @typescript-eslint/no-explicit-any */ const fs = __importStar(__webpack_require__(5747)); const os = __importStar(__webpack_require__(2087)); +const uuid_1 = __webpack_require__(4552); const utils_1 = __webpack_require__(5278); -function issueCommand(command, message) { +function issueFileCommand(command, message) { const 
filePath = process.env[`GITHUB_${command}`]; if (!filePath) { throw new Error(`Unable to find environment variable for file command ${command}`); @@ -395,95 +520,111 @@ function issueCommand(command, message) { encoding: 'utf8' }); } -exports.issueCommand = issueCommand; -//# sourceMappingURL=file-command.js.map - -/***/ }), - -/***/ 5278: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -// We use any as a valid input type -/* eslint-disable @typescript-eslint/no-explicit-any */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -/** - * Sanitizes an input into a string so it can be passed into issueCommand safely - * @param input input to sanitize into a string - */ -function toCommandValue(input) { - if (input === null || input === undefined) { - return ''; +exports.issueFileCommand = issueFileCommand; +function prepareKeyValueMessage(key, value) { + const delimiter = `ghadelimiter_${uuid_1.v4()}`; + const convertedValue = utils_1.toCommandValue(value); + // These should realistically never happen, but just in case someone finds a + // way to exploit uuid generation let's not allow keys or values that contain + // the delimiter. 
+ if (key.includes(delimiter)) { + throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`); } - else if (typeof input === 'string' || input instanceof String) { - return input; + if (convertedValue.includes(delimiter)) { + throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`); } - return JSON.stringify(input); + return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`; } -exports.toCommandValue = toCommandValue; -//# sourceMappingURL=utils.js.map +exports.prepareKeyValueMessage = prepareKeyValueMessage; +//# sourceMappingURL=file-command.js.map /***/ }), -/***/ 4087: -/***/ ((__unused_webpack_module, exports, __webpack_require__) => { +/***/ 8041: +/***/ (function(__unused_webpack_module, exports, __webpack_require__) { "use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Context = void 0; -const fs_1 = __webpack_require__(5747); -const os_1 = __webpack_require__(2087); -class Context { - /** - * Hydrate the context from the environment - */ - constructor() { - this.payload = {}; - if (process.env.GITHUB_EVENT_PATH) { - if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) { - this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' })); - } - else { - const path = process.env.GITHUB_EVENT_PATH; - process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`); - } - } - this.eventName = process.env.GITHUB_EVENT_NAME; - this.sha = process.env.GITHUB_SHA; - this.ref = process.env.GITHUB_REF; - this.workflow = process.env.GITHUB_WORKFLOW; - this.action = process.env.GITHUB_ACTION; - this.actor = process.env.GITHUB_ACTOR; - this.job = process.env.GITHUB_JOB; - this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10); - this.runId = parseInt(process.env.GITHUB_RUN_ID, 10); - } - get issue() { - const payload = this.payload; - return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number }); +exports.OidcClient = void 0; +const http_client_1 = __webpack_require__(1404); +const auth_1 = __webpack_require__(6758); +const core_1 = __webpack_require__(2186); +class OidcClient { + static createHttpClient(allowRetry = true, maxRetry = 10) { + const requestOptions = { + allowRetries: allowRetry, + maxRetries: maxRetry + }; + return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions); } - get repo() { - if (process.env.GITHUB_REPOSITORY) { - const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/'); - return { owner, repo }; + 
static getRequestToken() { + const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN']; + if (!token) { + throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable'); } - if (this.payload.repository) { - return { - owner: this.payload.repository.owner.login, - repo: this.payload.repository.name - }; + return token; + } + static getIDTokenUrl() { + const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL']; + if (!runtimeUrl) { + throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable'); } - throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'"); + return runtimeUrl; + } + static getCall(id_token_url) { + var _a; + return __awaiter(this, void 0, void 0, function* () { + const httpclient = OidcClient.createHttpClient(); + const res = yield httpclient + .getJson(id_token_url) + .catch(error => { + throw new Error(`Failed to get ID Token. \n + Error Code : ${error.statusCode}\n + Error Message: ${error.result.message}`); + }); + const id_token = (_a = res.result) === null || _a === void 0 ? 
void 0 : _a.value; + if (!id_token) { + throw new Error('Response json body do not have ID Token field'); + } + return id_token; + }); + } + static getIDToken(audience) { + return __awaiter(this, void 0, void 0, function* () { + try { + // New ID Token is requested from action service + let id_token_url = OidcClient.getIDTokenUrl(); + if (audience) { + const encodedAudience = encodeURIComponent(audience); + id_token_url = `${id_token_url}&audience=${encodedAudience}`; + } + core_1.debug(`ID token url is ${id_token_url}`); + const id_token = yield OidcClient.getCall(id_token_url); + core_1.setSecret(id_token); + return id_token; + } + catch (error) { + throw new Error(`Error message: ${error.message}`); + } + }); } } -exports.Context = Context; -//# sourceMappingURL=context.js.map +exports.OidcClient = OidcClient; +//# sourceMappingURL=oidc-utils.js.map /***/ }), -/***/ 5438: +/***/ 2981: /***/ (function(__unused_webpack_module, exports, __webpack_require__) { "use strict"; @@ -503,84 +644,482 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
( var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getOctokit = exports.context = void 0; -const Context = __importStar(__webpack_require__(4087)); -const utils_1 = __webpack_require__(3030); -exports.context = new Context.Context(); +exports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0; +const path = __importStar(__webpack_require__(5622)); /** - * Returns a hydrated octokit ready to use for GitHub Actions + * toPosixPath converts the given path to the posix form. On Windows, \\ will be + * replaced with /. * - * @param token the repo PAT or GITHUB_TOKEN - * @param options other options to set + * @param pth. Path to transform. + * @return string Posix path. */ -function getOctokit(token, options) { - return new utils_1.GitHub(utils_1.getOctokitOptions(token, options)); +function toPosixPath(pth) { + return pth.replace(/[\\]/g, '/'); } -exports.getOctokit = getOctokit; -//# sourceMappingURL=github.js.map +exports.toPosixPath = toPosixPath; +/** + * toWin32Path converts the given path to the win32 form. On Linux, / will be + * replaced with \\. + * + * @param pth. Path to transform. + * @return string Win32 path. + */ +function toWin32Path(pth) { + return pth.replace(/[/]/g, '\\'); +} +exports.toWin32Path = toWin32Path; +/** + * toPlatformPath converts the given path to a platform-specific path. It does + * this by replacing instances of / and \ with the platform-specific path + * separator. + * + * @param pth The path to platformize. + * @return string The platform-specific path. 
+ */ +function toPlatformPath(pth) { + return pth.replace(/[/\\]/g, path.sep); +} +exports.toPlatformPath = toPlatformPath; +//# sourceMappingURL=path-utils.js.map /***/ }), -/***/ 7914: +/***/ 1327: /***/ (function(__unused_webpack_module, exports, __webpack_require__) { "use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0; -const httpClient = __importStar(__webpack_require__(9925)); -function getAuthString(token, options) { - if (!token && !options.auth) { - throw new Error('Parameter token or opts.auth is required'); +exports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0; +const os_1 = __webpack_require__(2087); +const fs_1 = __webpack_require__(5747); +const { access, appendFile, writeFile } = fs_1.promises; +exports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY'; +exports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary'; +class Summary { + constructor() { + this._buffer = ''; } - else if (token && options.auth) { - throw new Error('Parameters token and opts.auth may not both be specified'); + /** + * Finds the summary file path from the environment, rejects if env var is not found or file does not exist + * Also checks r/w permissions. + * + * @returns step summary file path + */ + filePath() { + return __awaiter(this, void 0, void 0, function* () { + if (this._filePath) { + return this._filePath; + } + const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR]; + if (!pathFromEnv) { + throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`); + } + try { + yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK); + } + catch (_a) { + throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`); + } + this._filePath = pathFromEnv; + return this._filePath; + }); } - return typeof options.auth === 'string' ? 
options.auth : `token ${token}`; -} -exports.getAuthString = getAuthString; -function getProxyAgent(destinationUrl) { - const hc = new httpClient.HttpClient(); - return hc.getAgent(destinationUrl); -} -exports.getProxyAgent = getProxyAgent; -function getApiBaseUrl() { - return process.env['GITHUB_API_URL'] || 'https://api.github.com'; -} -exports.getApiBaseUrl = getApiBaseUrl; -//# sourceMappingURL=utils.js.map - + /** + * Wraps content in an HTML tag, adding any HTML attributes + * + * @param {string} tag HTML tag to wrap + * @param {string | null} content content within the tag + * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add + * + * @returns {string} content wrapped in HTML element + */ + wrap(tag, content, attrs = {}) { + const htmlAttrs = Object.entries(attrs) + .map(([key, value]) => ` ${key}="${value}"`) + .join(''); + if (!content) { + return `<${tag}${htmlAttrs}>`; + } + return `<${tag}${htmlAttrs}>${content}`; + } + /** + * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default. + * + * @param {SummaryWriteOptions} [options] (optional) options for write operation + * + * @returns {Promise} summary instance + */ + write(options) { + return __awaiter(this, void 0, void 0, function* () { + const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite); + const filePath = yield this.filePath(); + const writeFunc = overwrite ? 
writeFile : appendFile; + yield writeFunc(filePath, this._buffer, { encoding: 'utf8' }); + return this.emptyBuffer(); + }); + } + /** + * Clears the summary buffer and wipes the summary file + * + * @returns {Summary} summary instance + */ + clear() { + return __awaiter(this, void 0, void 0, function* () { + return this.emptyBuffer().write({ overwrite: true }); + }); + } + /** + * Returns the current summary buffer as a string + * + * @returns {string} string of summary buffer + */ + stringify() { + return this._buffer; + } + /** + * If the summary buffer is empty + * + * @returns {boolen} true if the buffer is empty + */ + isEmptyBuffer() { + return this._buffer.length === 0; + } + /** + * Resets the summary buffer without writing to summary file + * + * @returns {Summary} summary instance + */ + emptyBuffer() { + this._buffer = ''; + return this; + } + /** + * Adds raw text to the summary buffer + * + * @param {string} text content to add + * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false) + * + * @returns {Summary} summary instance + */ + addRaw(text, addEOL = false) { + this._buffer += text; + return addEOL ? 
this.addEOL() : this; + } + /** + * Adds the operating system-specific end-of-line marker to the buffer + * + * @returns {Summary} summary instance + */ + addEOL() { + return this.addRaw(os_1.EOL); + } + /** + * Adds an HTML codeblock to the summary buffer + * + * @param {string} code content to render within fenced code block + * @param {string} lang (optional) language to syntax highlight code + * + * @returns {Summary} summary instance + */ + addCodeBlock(code, lang) { + const attrs = Object.assign({}, (lang && { lang })); + const element = this.wrap('pre', this.wrap('code', code), attrs); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML list to the summary buffer + * + * @param {string[]} items list of items to render + * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false) + * + * @returns {Summary} summary instance + */ + addList(items, ordered = false) { + const tag = ordered ? 'ol' : 'ul'; + const listItems = items.map(item => this.wrap('li', item)).join(''); + const element = this.wrap(tag, listItems); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML table to the summary buffer + * + * @param {SummaryTableCell[]} rows table rows + * + * @returns {Summary} summary instance + */ + addTable(rows) { + const tableBody = rows + .map(row => { + const cells = row + .map(cell => { + if (typeof cell === 'string') { + return this.wrap('td', cell); + } + const { header, data, colspan, rowspan } = cell; + const tag = header ? 
'th' : 'td'; + const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan })); + return this.wrap(tag, data, attrs); + }) + .join(''); + return this.wrap('tr', cells); + }) + .join(''); + const element = this.wrap('table', tableBody); + return this.addRaw(element).addEOL(); + } + /** + * Adds a collapsable HTML details element to the summary buffer + * + * @param {string} label text for the closed state + * @param {string} content collapsable content + * + * @returns {Summary} summary instance + */ + addDetails(label, content) { + const element = this.wrap('details', this.wrap('summary', label) + content); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML image tag to the summary buffer + * + * @param {string} src path to the image you to embed + * @param {string} alt text description of the image + * @param {SummaryImageOptions} options (optional) addition image attributes + * + * @returns {Summary} summary instance + */ + addImage(src, alt, options) { + const { width, height } = options || {}; + const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height })); + const element = this.wrap('img', null, Object.assign({ src, alt }, attrs)); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML section heading element + * + * @param {string} text heading text + * @param {number | string} [level=1] (optional) the heading level, default: 1 + * + * @returns {Summary} summary instance + */ + addHeading(text, level) { + const tag = `h${level}`; + const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag) + ? tag + : 'h1'; + const element = this.wrap(allowedTag, text); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML thematic break (
) to the summary buffer + * + * @returns {Summary} summary instance + */ + addSeparator() { + const element = this.wrap('hr', null); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML line break (
) to the summary buffer + * + * @returns {Summary} summary instance + */ + addBreak() { + const element = this.wrap('br', null); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML blockquote to the summary buffer + * + * @param {string} text quote text + * @param {string} cite (optional) citation url + * + * @returns {Summary} summary instance + */ + addQuote(text, cite) { + const attrs = Object.assign({}, (cite && { cite })); + const element = this.wrap('blockquote', text, attrs); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML anchor tag to the summary buffer + * + * @param {string} text link text/content + * @param {string} href hyperlink + * + * @returns {Summary} summary instance + */ + addLink(text, href) { + const element = this.wrap('a', text, { href }); + return this.addRaw(element).addEOL(); + } +} +const _summary = new Summary(); +/** + * @deprecated use `core.summary` + */ +exports.markdownSummary = _summary; +exports.summary = _summary; +//# sourceMappingURL=summary.js.map + /***/ }), -/***/ 3030: +/***/ 5278: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// We use any as a valid input type +/* eslint-disable @typescript-eslint/no-explicit-any */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toCommandProperties = exports.toCommandValue = void 0; +/** + * Sanitizes an input into a string so it can be passed into issueCommand safely + * @param input input to sanitize into a string + */ +function toCommandValue(input) { + if (input === null || input === undefined) { + return ''; + } + else if (typeof input === 'string' || input instanceof String) { + return input; + } + return JSON.stringify(input); +} +exports.toCommandValue = toCommandValue; +/** + * + * @param annotationProperties + * @returns The command properties to send with the actual annotation command + * See IssueCommandProperties: 
https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646 + */ +function toCommandProperties(annotationProperties) { + if (!Object.keys(annotationProperties).length) { + return {}; + } + return { + title: annotationProperties.title, + file: annotationProperties.file, + line: annotationProperties.startLine, + endLine: annotationProperties.endLine, + col: annotationProperties.startColumn, + endColumn: annotationProperties.endColumn + }; +} +exports.toCommandProperties = toCommandProperties; +//# sourceMappingURL=utils.js.map + +/***/ }), + +/***/ 6758: +/***/ (function(__unused_webpack_module, exports) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0; +class BasicCredentialHandler { + constructor(username, password) { + this.username = username; + this.password = password; + } + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.BasicCredentialHandler = BasicCredentialHandler; +class BearerCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Bearer ${this.token}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.BearerCredentialHandler = BearerCredentialHandler; +class PersonalAccessTokenCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Basic 
${Buffer.from(`PAT:${this.token}`).toString('base64')}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler; +//# sourceMappingURL=auth.js.map + +/***/ }), + +/***/ 1404: /***/ (function(__unused_webpack_module, exports, __webpack_require__) { "use strict"; +/* eslint-disable @typescript-eslint/no-explicit-any */ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); @@ -596,58 +1135,25 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? ( var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getOctokitOptions = exports.GitHub = exports.context = void 0; -const Context = __importStar(__webpack_require__(4087)); -const Utils = __importStar(__webpack_require__(7914)); -// octokit + plugins -const core_1 = __webpack_require__(6762); -const plugin_rest_endpoint_methods_1 = __webpack_require__(3044); -const plugin_paginate_rest_1 = __webpack_require__(4193); -exports.context = new Context.Context(); -const baseUrl = Utils.getApiBaseUrl(); -const defaults = { - baseUrl, - request: { - agent: Utils.getProxyAgent(baseUrl) - } +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, 
generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); }; -exports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(defaults); -/** - * Convience function to correctly format Octokit Options to pass into the constructor. - * - * @param token the repo PAT or GITHUB_TOKEN - * @param options other options to set - */ -function getOctokitOptions(token, options) { - const opts = Object.assign({}, options || {}); // Shallow clone - don't mutate the object provided by the caller - // Auth - const auth = Utils.getAuthString(token, opts); - if (auth) { - opts.auth = auth; - } - return opts; -} -exports.getOctokitOptions = getOctokitOptions; -//# sourceMappingURL=utils.js.map - -/***/ }), - -/***/ 9925: -/***/ ((__unused_webpack_module, exports, __webpack_require__) => { - -"use strict"; - Object.defineProperty(exports, "__esModule", ({ value: true })); -const url = __webpack_require__(8835); -const http = __webpack_require__(8605); -const https = __webpack_require__(7211); -const pm = __webpack_require__(6443); -let tunnel; +exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0; +const http = __importStar(__webpack_require__(8605)); +const https = __importStar(__webpack_require__(7211)); +const pm = __importStar(__webpack_require__(2843)); +const tunnel = 
__importStar(__webpack_require__(4294)); var HttpCodes; (function (HttpCodes) { HttpCodes[HttpCodes["OK"] = 200] = "OK"; @@ -692,7 +1198,7 @@ var MediaTypes; * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com */ function getProxyUrl(serverUrl) { - let proxyUrl = pm.getProxyUrl(url.parse(serverUrl)); + const proxyUrl = pm.getProxyUrl(new URL(serverUrl)); return proxyUrl ? proxyUrl.href : ''; } exports.getProxyUrl = getProxyUrl; @@ -711,25 +1217,36 @@ const HttpResponseRetryCodes = [ const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; const ExponentialBackoffCeiling = 10; const ExponentialBackoffTimeSlice = 5; +class HttpClientError extends Error { + constructor(message, statusCode) { + super(message); + this.name = 'HttpClientError'; + this.statusCode = statusCode; + Object.setPrototypeOf(this, HttpClientError.prototype); + } +} +exports.HttpClientError = HttpClientError; class HttpClientResponse { constructor(message) { this.message = message; } readBody() { - return new Promise(async (resolve, reject) => { - let output = Buffer.alloc(0); - this.message.on('data', (chunk) => { - output = Buffer.concat([output, chunk]); - }); - this.message.on('end', () => { - resolve(output.toString()); - }); + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { + let output = Buffer.alloc(0); + this.message.on('data', (chunk) => { + output = Buffer.concat([output, chunk]); + }); + this.message.on('end', () => { + resolve(output.toString()); + }); + })); }); } } exports.HttpClientResponse = HttpClientResponse; function isHttps(requestUrl) { - let parsedUrl = url.parse(requestUrl); + const parsedUrl = new URL(requestUrl); return parsedUrl.protocol === 'https:'; } exports.isHttps = isHttps; @@ -772,141 +1289,169 @@ class HttpClient { } } options(requestUrl, additionalHeaders) { - return this.request('OPTIONS', requestUrl, null, 
additionalHeaders || {}); + return __awaiter(this, void 0, void 0, function* () { + return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); + }); } get(requestUrl, additionalHeaders) { - return this.request('GET', requestUrl, null, additionalHeaders || {}); + return __awaiter(this, void 0, void 0, function* () { + return this.request('GET', requestUrl, null, additionalHeaders || {}); + }); } del(requestUrl, additionalHeaders) { - return this.request('DELETE', requestUrl, null, additionalHeaders || {}); + return __awaiter(this, void 0, void 0, function* () { + return this.request('DELETE', requestUrl, null, additionalHeaders || {}); + }); } post(requestUrl, data, additionalHeaders) { - return this.request('POST', requestUrl, data, additionalHeaders || {}); + return __awaiter(this, void 0, void 0, function* () { + return this.request('POST', requestUrl, data, additionalHeaders || {}); + }); } patch(requestUrl, data, additionalHeaders) { - return this.request('PATCH', requestUrl, data, additionalHeaders || {}); + return __awaiter(this, void 0, void 0, function* () { + return this.request('PATCH', requestUrl, data, additionalHeaders || {}); + }); } put(requestUrl, data, additionalHeaders) { - return this.request('PUT', requestUrl, data, additionalHeaders || {}); + return __awaiter(this, void 0, void 0, function* () { + return this.request('PUT', requestUrl, data, additionalHeaders || {}); + }); } head(requestUrl, additionalHeaders) { - return this.request('HEAD', requestUrl, null, additionalHeaders || {}); + return __awaiter(this, void 0, void 0, function* () { + return this.request('HEAD', requestUrl, null, additionalHeaders || {}); + }); } sendStream(verb, requestUrl, stream, additionalHeaders) { - return this.request(verb, requestUrl, stream, additionalHeaders); + return __awaiter(this, void 0, void 0, function* () { + return this.request(verb, requestUrl, stream, additionalHeaders); + }); } /** * Gets a typed object from an endpoint * Be aware 
that not found returns a null. Other errors (4xx, 5xx) reject the promise */ - async getJson(requestUrl, additionalHeaders = {}) { - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - let res = await this.get(requestUrl, additionalHeaders); - return this._processResponse(res, this.requestOptions); + getJson(requestUrl, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + const res = yield this.get(requestUrl, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); } - async postJson(requestUrl, obj, additionalHeaders = {}) { - let data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - let res = await this.post(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); + postJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.post(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); } - async putJson(requestUrl, obj, additionalHeaders = {}) { - let data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = 
this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - let res = await this.put(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); + putJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.put(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); } - async patchJson(requestUrl, obj, additionalHeaders = {}) { - let data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - let res = await this.patch(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); + patchJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.patch(requestUrl, data, additionalHeaders); + return this._processResponse(res, 
this.requestOptions); + }); } /** * Makes a raw http request. * All other methods such as get, post, patch, and request ultimately call this. * Prefer get, del, post and patch */ - async request(verb, requestUrl, data, headers) { - if (this._disposed) { - throw new Error('Client has already been disposed.'); - } - let parsedUrl = url.parse(requestUrl); - let info = this._prepareRequest(verb, parsedUrl, headers); - // Only perform retries on reads since writes may not be idempotent. - let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1 - ? this._maxRetries + 1 - : 1; - let numTries = 0; - let response; - while (numTries < maxTries) { - response = await this.requestRaw(info, data); - // Check if it's an authentication challenge - if (response && - response.message && - response.message.statusCode === HttpCodes.Unauthorized) { - let authenticationHandler; - for (let i = 0; i < this.handlers.length; i++) { - if (this.handlers[i].canHandleAuthentication(response)) { - authenticationHandler = this.handlers[i]; - break; + request(verb, requestUrl, data, headers) { + return __awaiter(this, void 0, void 0, function* () { + if (this._disposed) { + throw new Error('Client has already been disposed.'); + } + const parsedUrl = new URL(requestUrl); + let info = this._prepareRequest(verb, parsedUrl, headers); + // Only perform retries on reads since writes may not be idempotent. + const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb) + ? 
this._maxRetries + 1 + : 1; + let numTries = 0; + let response; + do { + response = yield this.requestRaw(info, data); + // Check if it's an authentication challenge + if (response && + response.message && + response.message.statusCode === HttpCodes.Unauthorized) { + let authenticationHandler; + for (const handler of this.handlers) { + if (handler.canHandleAuthentication(response)) { + authenticationHandler = handler; + break; + } + } + if (authenticationHandler) { + return authenticationHandler.handleAuthentication(this, info, data); + } + else { + // We have received an unauthorized response but have no handlers to handle it. + // Let the response return to the caller. + return response; } } - if (authenticationHandler) { - return authenticationHandler.handleAuthentication(this, info, data); + let redirectsRemaining = this._maxRedirects; + while (response.message.statusCode && + HttpRedirectCodes.includes(response.message.statusCode) && + this._allowRedirects && + redirectsRemaining > 0) { + const redirectUrl = response.message.headers['location']; + if (!redirectUrl) { + // if there's no location to redirect to, we won't + break; + } + const parsedRedirectUrl = new URL(redirectUrl); + if (parsedUrl.protocol === 'https:' && + parsedUrl.protocol !== parsedRedirectUrl.protocol && + !this._allowRedirectDowngrade) { + throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); + } + // we need to finish reading the response before reassigning response + // which will leak the open socket. 
+ yield response.readBody(); + // strip authorization header if redirected to a different hostname + if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { + for (const header in headers) { + // header names are case insensitive + if (header.toLowerCase() === 'authorization') { + delete headers[header]; + } + } + } + // let's make the request with the new redirectUrl + info = this._prepareRequest(verb, parsedRedirectUrl, headers); + response = yield this.requestRaw(info, data); + redirectsRemaining--; } - else { - // We have received an unauthorized response but have no handlers to handle it. - // Let the response return to the caller. + if (!response.message.statusCode || + !HttpResponseRetryCodes.includes(response.message.statusCode)) { + // If not a retry code, return immediately instead of retrying return response; } - } - let redirectsRemaining = this._maxRedirects; - while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 && - this._allowRedirects && - redirectsRemaining > 0) { - const redirectUrl = response.message.headers['location']; - if (!redirectUrl) { - // if there's no location to redirect to, we won't - break; - } - let parsedRedirectUrl = url.parse(redirectUrl); - if (parsedUrl.protocol == 'https:' && - parsedUrl.protocol != parsedRedirectUrl.protocol && - !this._allowRedirectDowngrade) { - throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); - } - // we need to finish reading the response before reassigning response - // which will leak the open socket. 
- await response.readBody(); - // strip authorization header if redirected to a different hostname - if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { - for (let header in headers) { - // header names are case insensitive - if (header.toLowerCase() === 'authorization') { - delete headers[header]; - } - } + numTries += 1; + if (numTries < maxTries) { + yield response.readBody(); + yield this._performExponentialBackoff(numTries); } - // let's make the request with the new redirectUrl - info = this._prepareRequest(verb, parsedRedirectUrl, headers); - response = await this.requestRaw(info, data); - redirectsRemaining--; - } - if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) { - // If not a retry code, return immediately instead of retrying - return response; - } - numTries += 1; - if (numTries < maxTries) { - await response.readBody(); - await this._performExponentialBackoff(numTries); - } - } - return response; + } while (numTries < maxTries); + return response; + }); } /** * Needs to be called if keepAlive is set to true in request options. @@ -923,14 +1468,22 @@ class HttpClient { * @param data */ requestRaw(info, data) { - return new Promise((resolve, reject) => { - let callbackForResult = function (err, res) { - if (err) { - reject(err); + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve, reject) => { + function callbackForResult(err, res) { + if (err) { + reject(err); + } + else if (!res) { + // If `err` is not passed, then `res` must be passed. 
+ reject(new Error('Unknown error')); + } + else { + resolve(res); + } } - resolve(res); - }; - this.requestRawWithCallback(info, data, callbackForResult); + this.requestRawWithCallback(info, data, callbackForResult); + }); }); } /** @@ -940,21 +1493,24 @@ class HttpClient { * @param onResult */ requestRawWithCallback(info, data, onResult) { - let socket; if (typeof data === 'string') { + if (!info.options.headers) { + info.options.headers = {}; + } info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); } let callbackCalled = false; - let handleResult = (err, res) => { + function handleResult(err, res) { if (!callbackCalled) { callbackCalled = true; onResult(err, res); } - }; - let req = info.httpModule.request(info.options, (msg) => { - let res = new HttpClientResponse(msg); - handleResult(null, res); + } + const req = info.httpModule.request(info.options, (msg) => { + const res = new HttpClientResponse(msg); + handleResult(undefined, res); }); + let socket; req.on('socket', sock => { socket = sock; }); @@ -963,12 +1519,12 @@ class HttpClient { if (socket) { socket.end(); } - handleResult(new Error('Request timeout: ' + info.options.path), null); + handleResult(new Error(`Request timeout: ${info.options.path}`)); }); req.on('error', function (err) { // err has statusCode property // res should have headers - handleResult(err, null); + handleResult(err); }); if (data && typeof data === 'string') { req.write(data, 'utf8'); @@ -989,7 +1545,7 @@ class HttpClient { * @param serverUrl The server URL where the request will be sent. 
For example, https://api.github.com */ getAgent(serverUrl) { - let parsedUrl = url.parse(serverUrl); + const parsedUrl = new URL(serverUrl); return this._getAgent(parsedUrl); } _prepareRequest(method, requestUrl, headers) { @@ -1013,21 +1569,19 @@ class HttpClient { info.options.agent = this._getAgent(info.parsedUrl); // gives handlers an opportunity to participate if (this.handlers) { - this.handlers.forEach(handler => { + for (const handler of this.handlers) { handler.prepareRequest(info.options); - }); + } } return info; } _mergeHeaders(headers) { - const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); if (this.requestOptions && this.requestOptions.headers) { - return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers)); + return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {})); } return lowercaseKeys(headers || {}); } _getExistingOrDefaultHeader(additionalHeaders, header, _default) { - const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); let clientHeader; if (this.requestOptions && this.requestOptions.headers) { clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; @@ -1036,8 +1590,8 @@ class HttpClient { } _getAgent(parsedUrl) { let agent; - let proxyUrl = pm.getProxyUrl(parsedUrl); - let useProxy = proxyUrl && proxyUrl.hostname; + const proxyUrl = pm.getProxyUrl(parsedUrl); + const useProxy = proxyUrl && proxyUrl.hostname; if (this._keepAlive && useProxy) { agent = this._proxyAgent; } @@ -1045,27 +1599,22 @@ class HttpClient { agent = this._agent; } // if agent is already assigned use that agent. 
- if (!!agent) { + if (agent) { return agent; } const usingSsl = parsedUrl.protocol === 'https:'; let maxSockets = 100; - if (!!this.requestOptions) { + if (this.requestOptions) { maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets; } - if (useProxy) { - // If using proxy, need tunnel - if (!tunnel) { - tunnel = __webpack_require__(4294); - } + // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis. + if (proxyUrl && proxyUrl.hostname) { const agentOptions = { - maxSockets: maxSockets, + maxSockets, keepAlive: this._keepAlive, - proxy: { - proxyAuth: proxyUrl.auth, - host: proxyUrl.hostname, - port: proxyUrl.port - } + proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && { + proxyAuth: `${proxyUrl.username}:${proxyUrl.password}` + })), { host: proxyUrl.hostname, port: proxyUrl.port }) }; let tunnelAgent; const overHttps = proxyUrl.protocol === 'https:'; @@ -1080,7 +1629,7 @@ class HttpClient { } // if reusing agent across request and tunneling agent isn't assigned create a new agent if (this._keepAlive && !agent) { - const options = { keepAlive: this._keepAlive, maxSockets: maxSockets }; + const options = { keepAlive: this._keepAlive, maxSockets }; agent = usingSsl ? 
new https.Agent(options) : new http.Agent(options); this._agent = agent; } @@ -1099,114 +1648,117 @@ class HttpClient { return agent; } _performExponentialBackoff(retryNumber) { - retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); - const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); - return new Promise(resolve => setTimeout(() => resolve(), ms)); - } - static dateTimeDeserializer(key, value) { - if (typeof value === 'string') { - let a = new Date(value); - if (!isNaN(a.valueOf())) { - return a; - } - } - return value; + return __awaiter(this, void 0, void 0, function* () { + retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); + const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); + return new Promise(resolve => setTimeout(() => resolve(), ms)); + }); } - async _processResponse(res, options) { - return new Promise(async (resolve, reject) => { - const statusCode = res.message.statusCode; - const response = { - statusCode: statusCode, - result: null, - headers: {} - }; - // not found leads to null obj returned - if (statusCode == HttpCodes.NotFound) { - resolve(response); - } - let obj; - let contents; - // get the result from the body - try { - contents = await res.readBody(); - if (contents && contents.length > 0) { - if (options && options.deserializeDates) { - obj = JSON.parse(contents, HttpClient.dateTimeDeserializer); + _processResponse(res, options) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { + const statusCode = res.message.statusCode || 0; + const response = { + statusCode, + result: null, + headers: {} + }; + // not found leads to null obj returned + if (statusCode === HttpCodes.NotFound) { + resolve(response); + } + // get the result from the body + function dateTimeDeserializer(key, value) { + if (typeof value === 'string') { + const a = new Date(value); + if (!isNaN(a.valueOf())) { + return a; + } } 
- else { - obj = JSON.parse(contents); + return value; + } + let obj; + let contents; + try { + contents = yield res.readBody(); + if (contents && contents.length > 0) { + if (options && options.deserializeDates) { + obj = JSON.parse(contents, dateTimeDeserializer); + } + else { + obj = JSON.parse(contents); + } + response.result = obj; } - response.result = obj; + response.headers = res.message.headers; } - response.headers = res.message.headers; - } - catch (err) { - // Invalid resource (contents not json); leaving result obj null - } - // note that 3xx redirects are handled by the http layer. - if (statusCode > 299) { - let msg; - // if exception/error in body, attempt to get better error - if (obj && obj.message) { - msg = obj.message; + catch (err) { + // Invalid resource (contents not json); leaving result obj null } - else if (contents && contents.length > 0) { - // it may be the case that the exception is in the body message as string - msg = contents; + // note that 3xx redirects are handled by the http layer. 
+ if (statusCode > 299) { + let msg; + // if exception/error in body, attempt to get better error + if (obj && obj.message) { + msg = obj.message; + } + else if (contents && contents.length > 0) { + // it may be the case that the exception is in the body message as string + msg = contents; + } + else { + msg = `Failed request: (${statusCode})`; + } + const err = new HttpClientError(msg, statusCode); + err.result = response.result; + reject(err); } else { - msg = 'Failed request: (' + statusCode + ')'; - } - let err = new Error(msg); - // attach statusCode and body obj (if available) to the error object - err['statusCode'] = statusCode; - if (response.result) { - err['result'] = response.result; + resolve(response); } - reject(err); - } - else { - resolve(response); - } + })); }); } } exports.HttpClient = HttpClient; - +const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); +//# sourceMappingURL=index.js.map /***/ }), -/***/ 6443: -/***/ ((__unused_webpack_module, exports, __webpack_require__) => { +/***/ 2843: +/***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -const url = __webpack_require__(8835); +exports.checkBypass = exports.getProxyUrl = void 0; function getProxyUrl(reqUrl) { - let usingSsl = reqUrl.protocol === 'https:'; - let proxyUrl; + const usingSsl = reqUrl.protocol === 'https:'; if (checkBypass(reqUrl)) { - return proxyUrl; + return undefined; } - let proxyVar; - if (usingSsl) { - proxyVar = process.env['https_proxy'] || process.env['HTTPS_PROXY']; + const proxyVar = (() => { + if (usingSsl) { + return process.env['https_proxy'] || process.env['HTTPS_PROXY']; + } + else { + return process.env['http_proxy'] || process.env['HTTP_PROXY']; + } + })(); + if (proxyVar) { + return new URL(proxyVar); } else { - proxyVar = process.env['http_proxy'] || process.env['HTTP_PROXY']; + return undefined; } - if (proxyVar) { - proxyUrl = 
url.parse(proxyVar); - } - return proxyUrl; } exports.getProxyUrl = getProxyUrl; function checkBypass(reqUrl) { if (!reqUrl.hostname) { return false; } - let noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; + const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; if (!noProxy) { return false; } @@ -1222,12 +1774,12 @@ function checkBypass(reqUrl) { reqPort = 443; } // Format the request hostname and hostname with port - let upperReqHosts = [reqUrl.hostname.toUpperCase()]; + const upperReqHosts = [reqUrl.hostname.toUpperCase()]; if (typeof reqPort === 'number') { upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); } // Compare request host against noproxy - for (let upperNoProxyItem of noProxy + for (const upperNoProxyItem of noProxy .split(',') .map(x => x.trim().toUpperCase()) .filter(x => x)) { @@ -1238,6064 +1790,8141 @@ function checkBypass(reqUrl) { return false; } exports.checkBypass = checkBypass; - +//# sourceMappingURL=proxy.js.map /***/ }), -/***/ 334: -/***/ ((__unused_webpack_module, exports) => { +/***/ 4087: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; - Object.defineProperty(exports, "__esModule", ({ value: true })); - -async function auth(token) { - const tokenType = token.split(/\./).length === 3 ? "app" : /^v\d+\./.test(token) ? 
"installation" : "oauth"; - return { - type: "token", - token: token, - tokenType - }; -} - -/** - * Prefix token for usage in the Authorization header - * - * @param token OAuth token or JSON Web Token - */ -function withAuthorizationPrefix(token) { - if (token.split(/\./).length === 3) { - return `bearer ${token}`; - } - - return `token ${token}`; -} - -async function hook(token, request, route, parameters) { - const endpoint = request.endpoint.merge(route, parameters); - endpoint.headers.authorization = withAuthorizationPrefix(token); - return request(endpoint); -} - -const createTokenAuth = function createTokenAuth(token) { - if (!token) { - throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); - } - - if (typeof token !== "string") { - throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string"); - } - - token = token.replace(/^(token|bearer) +/i, ""); - return Object.assign(auth.bind(null, token), { - hook: hook.bind(null, token) - }); -}; - -exports.createTokenAuth = createTokenAuth; -//# sourceMappingURL=index.js.map - +exports.Context = void 0; +const fs_1 = __webpack_require__(5747); +const os_1 = __webpack_require__(2087); +class Context { + /** + * Hydrate the context from the environment + */ + constructor() { + this.payload = {}; + if (process.env.GITHUB_EVENT_PATH) { + if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) { + this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' })); + } + else { + const path = process.env.GITHUB_EVENT_PATH; + process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`); + } + } + this.eventName = process.env.GITHUB_EVENT_NAME; + this.sha = process.env.GITHUB_SHA; + this.ref = process.env.GITHUB_REF; + this.workflow = process.env.GITHUB_WORKFLOW; + this.action = process.env.GITHUB_ACTION; + this.actor = process.env.GITHUB_ACTOR; + this.job = process.env.GITHUB_JOB; + this.runNumber = 
parseInt(process.env.GITHUB_RUN_NUMBER, 10); + this.runId = parseInt(process.env.GITHUB_RUN_ID, 10); + } + get issue() { + const payload = this.payload; + return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number }); + } + get repo() { + if (process.env.GITHUB_REPOSITORY) { + const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/'); + return { owner, repo }; + } + if (this.payload.repository) { + return { + owner: this.payload.repository.owner.login, + repo: this.payload.repository.name + }; + } + throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'"); + } +} +exports.Context = Context; +//# sourceMappingURL=context.js.map /***/ }), -/***/ 6762: -/***/ ((__unused_webpack_module, exports, __webpack_require__) => { +/***/ 5438: +/***/ (function(__unused_webpack_module, exports, __webpack_require__) { "use strict"; - +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; Object.defineProperty(exports, "__esModule", ({ value: true })); - -var universalUserAgent = __webpack_require__(5030); -var beforeAfterHook = __webpack_require__(3682); -var request = __webpack_require__(6234); -var graphql = __webpack_require__(8467); -var authToken = __webpack_require__(334); - -function _defineProperty(obj, key, value) { - if (key in obj) { - Object.defineProperty(obj, key, { - value: value, - enumerable: true, - configurable: true, - writable: true - }); - } else { - obj[key] = value; - } - - return obj; +exports.getOctokit = exports.context = void 0; +const Context = __importStar(__webpack_require__(4087)); +const utils_1 = __webpack_require__(3030); +exports.context = new Context.Context(); +/** + * Returns a hydrated octokit ready to use for GitHub Actions + * + * @param token the repo PAT or GITHUB_TOKEN + * @param options other options to set + */ +function getOctokit(token, options) { + return new utils_1.GitHub(utils_1.getOctokitOptions(token, options)); } +exports.getOctokit = getOctokit; +//# sourceMappingURL=github.js.map -function ownKeys(object, enumerableOnly) { - var keys = Object.keys(object); - - if (Object.getOwnPropertySymbols) { - var symbols = Object.getOwnPropertySymbols(object); - if (enumerableOnly) symbols = symbols.filter(function (sym) { - return Object.getOwnPropertyDescriptor(object, sym).enumerable; - }); - keys.push.apply(keys, symbols); - } +/***/ }), - return keys; -} +/***/ 7914: +/***/ (function(__unused_webpack_module, exports, __webpack_require__) { -function _objectSpread2(target) { 
- for (var i = 1; i < arguments.length; i++) { - var source = arguments[i] != null ? arguments[i] : {}; +"use strict"; - if (i % 2) { - ownKeys(Object(source), true).forEach(function (key) { - _defineProperty(target, key, source[key]); - }); - } else if (Object.getOwnPropertyDescriptors) { - Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); - } else { - ownKeys(Object(source)).forEach(function (key) { - Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); - }); +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0; +const httpClient = __importStar(__webpack_require__(9925)); +function getAuthString(token, options) { + if (!token && !options.auth) { + throw new Error('Parameter token or opts.auth is required'); } - } - - return target; + else if (token && options.auth) { + throw new Error('Parameters token and opts.auth may not both be specified'); + } + return typeof options.auth === 'string' ? 
options.auth : `token ${token}`; } +exports.getAuthString = getAuthString; +function getProxyAgent(destinationUrl) { + const hc = new httpClient.HttpClient(); + return hc.getAgent(destinationUrl); +} +exports.getProxyAgent = getProxyAgent; +function getApiBaseUrl() { + return process.env['GITHUB_API_URL'] || 'https://api.github.com'; +} +exports.getApiBaseUrl = getApiBaseUrl; +//# sourceMappingURL=utils.js.map -const VERSION = "3.1.0"; - -class Octokit { - constructor(options = {}) { - const hook = new beforeAfterHook.Collection(); - const requestDefaults = { - baseUrl: request.request.endpoint.DEFAULTS.baseUrl, - headers: {}, - request: Object.assign({}, options.request, { - hook: hook.bind(null, "request") - }), - mediaType: { - previews: [], - format: "" - } - }; // prepend default user agent with `options.userAgent` if set - - requestDefaults.headers["user-agent"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(" "); +/***/ }), - if (options.baseUrl) { - requestDefaults.baseUrl = options.baseUrl; - } +/***/ 3030: +/***/ (function(__unused_webpack_module, exports, __webpack_require__) { - if (options.previews) { - requestDefaults.mediaType.previews = options.previews; - } +"use strict"; - if (options.timeZone) { - requestDefaults.headers["time-zone"] = options.timeZone; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getOctokitOptions = exports.GitHub = exports.context = void 0; +const Context = __importStar(__webpack_require__(4087)); +const Utils = __importStar(__webpack_require__(7914)); +// octokit + plugins +const core_1 = __webpack_require__(6762); +const plugin_rest_endpoint_methods_1 = __webpack_require__(3044); +const plugin_paginate_rest_1 = __webpack_require__(4193); +exports.context = new Context.Context(); +const baseUrl = Utils.getApiBaseUrl(); +const defaults = { + baseUrl, + request: { + agent: Utils.getProxyAgent(baseUrl) } - - this.request = request.request.defaults(requestDefaults); - this.graphql = graphql.withCustomRequest(this.request).defaults(_objectSpread2(_objectSpread2({}, requestDefaults), {}, { - baseUrl: requestDefaults.baseUrl.replace(/\/api\/v3$/, "/api") - })); - this.log = Object.assign({ - debug: () => {}, - info: () => {}, - warn: console.warn.bind(console), - error: console.error.bind(console) - }, options.log); - this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance - // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registred. - // (2) If only `options.auth` is set, use the default token authentication strategy. - // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance. - // TODO: type `options.auth` based on `options.authStrategy`. 
- - if (!options.authStrategy) { - if (!options.auth) { - // (1) - this.auth = async () => ({ - type: "unauthenticated" - }); - } else { - // (2) - const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\_(ツ)_/¯ - - hook.wrap("request", auth.hook); - this.auth = auth; - } - } else { - const auth = options.authStrategy(Object.assign({ - request: this.request - }, options.auth)); // @ts-ignore ¯\_(ツ)_/¯ - - hook.wrap("request", auth.hook); - this.auth = auth; - } // apply plugins - // https://stackoverflow.com/a/16345172 - - - const classConstructor = this.constructor; - classConstructor.plugins.forEach(plugin => { - Object.assign(this, plugin(this, options)); - }); - } - - static defaults(defaults) { - const OctokitWithDefaults = class extends this { - constructor(...args) { - const options = args[0] || {}; - - if (typeof defaults === "function") { - super(defaults(options)); - return; - } - - super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? { - userAgent: `${options.userAgent} ${defaults.userAgent}` - } : null)); - } - - }; - return OctokitWithDefaults; - } - /** - * Attach a plugin (or many) to your Octokit instance. - * - * @example - * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) - */ - - - static plugin(...newPlugins) { - var _a; - - const currentPlugins = this.plugins; - const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a); - return NewOctokit; - } - +}; +exports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(defaults); +/** + * Convience function to correctly format Octokit Options to pass into the constructor. 
+ * + * @param token the repo PAT or GITHUB_TOKEN + * @param options other options to set + */ +function getOctokitOptions(token, options) { + const opts = Object.assign({}, options || {}); // Shallow clone - don't mutate the object provided by the caller + // Auth + const auth = Utils.getAuthString(token, opts); + if (auth) { + opts.auth = auth; + } + return opts; } -Octokit.VERSION = VERSION; -Octokit.plugins = []; - -exports.Octokit = Octokit; -//# sourceMappingURL=index.js.map - +exports.getOctokitOptions = getOctokitOptions; +//# sourceMappingURL=utils.js.map /***/ }), -/***/ 9440: +/***/ 9925: /***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; - Object.defineProperty(exports, "__esModule", ({ value: true })); - -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } - -var isPlainObject = _interopDefault(__webpack_require__(8840)); -var universalUserAgent = __webpack_require__(5030); - -function lowercaseKeys(object) { - if (!object) { - return {}; - } - - return Object.keys(object).reduce((newObj, key) => { - newObj[key.toLowerCase()] = object[key]; - return newObj; - }, {}); +const url = __webpack_require__(8835); +const http = __webpack_require__(8605); +const https = __webpack_require__(7211); +const pm = __webpack_require__(6443); +let tunnel; +var HttpCodes; +(function (HttpCodes) { + HttpCodes[HttpCodes["OK"] = 200] = "OK"; + HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices"; + HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently"; + HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved"; + HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther"; + HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified"; + HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy"; + HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy"; + HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect"; + 
HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect"; + HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest"; + HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized"; + HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired"; + HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden"; + HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound"; + HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed"; + HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable"; + HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired"; + HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; + HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; + HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; + HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests"; + HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; + HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented"; + HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; + HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; + HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; +})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {})); +var Headers; +(function (Headers) { + Headers["Accept"] = "accept"; + Headers["ContentType"] = "content-type"; +})(Headers = exports.Headers || (exports.Headers = {})); +var MediaTypes; +(function (MediaTypes) { + MediaTypes["ApplicationJson"] = "application/json"; +})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {})); +/** + * Returns the proxy URL, depending upon the supplied url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com + */ +function getProxyUrl(serverUrl) { + let proxyUrl = pm.getProxyUrl(url.parse(serverUrl)); + return proxyUrl ? 
proxyUrl.href : ''; } - -function mergeDeep(defaults, options) { - const result = Object.assign({}, defaults); - Object.keys(options).forEach(key => { - if (isPlainObject(options[key])) { - if (!(key in defaults)) Object.assign(result, { - [key]: options[key] - });else result[key] = mergeDeep(defaults[key], options[key]); - } else { - Object.assign(result, { - [key]: options[key] - }); +exports.getProxyUrl = getProxyUrl; +const HttpRedirectCodes = [ + HttpCodes.MovedPermanently, + HttpCodes.ResourceMoved, + HttpCodes.SeeOther, + HttpCodes.TemporaryRedirect, + HttpCodes.PermanentRedirect +]; +const HttpResponseRetryCodes = [ + HttpCodes.BadGateway, + HttpCodes.ServiceUnavailable, + HttpCodes.GatewayTimeout +]; +const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; +const ExponentialBackoffCeiling = 10; +const ExponentialBackoffTimeSlice = 5; +class HttpClientResponse { + constructor(message) { + this.message = message; } - }); - return result; -} - -function merge(defaults, route, options) { - if (typeof route === "string") { - let [method, url] = route.split(" "); - options = Object.assign(url ? { - method, - url - } : { - url: method - }, options); - } else { - options = Object.assign({}, route); - } // lowercase header names before merging with defaults to avoid duplicates - - - options.headers = lowercaseKeys(options.headers); - const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten - - if (defaults && defaults.mediaType.previews.length) { - mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews); - } - - mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, "")); - return mergedOptions; -} - -function addQueryParameters(url, parameters) { - const separator = /\?/.test(url) ? 
"&" : "?"; - const names = Object.keys(parameters); - - if (names.length === 0) { - return url; - } - - return url + separator + names.map(name => { - if (name === "q") { - return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+"); + readBody() { + return new Promise(async (resolve, reject) => { + let output = Buffer.alloc(0); + this.message.on('data', (chunk) => { + output = Buffer.concat([output, chunk]); + }); + this.message.on('end', () => { + resolve(output.toString()); + }); + }); } - - return `${name}=${encodeURIComponent(parameters[name])}`; - }).join("&"); } - -const urlVariableRegex = /\{[^}]+\}/g; - -function removeNonChars(variableName) { - return variableName.replace(/^\W+|\W+$/g, "").split(/,/); -} - -function extractUrlVariableNames(url) { - const matches = url.match(urlVariableRegex); - - if (!matches) { - return []; - } - - return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); -} - -function omit(object, keysToOmit) { - return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => { - obj[key] = object[key]; - return obj; - }, {}); -} - -// Based on https://github.com/bramstein/url-template, licensed under BSD -// TODO: create separate package. -// -// Copyright (c) 2012-2014, Bram Stein -// All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// 1. Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// 2. Redistributions in binary form must reproduce the above copyright -// notice, this list of conditions and the following disclaimer in the -// documentation and/or other materials provided with the distribution. -// 3. The name of the author may not be used to endorse or promote products -// derived from this software without specific prior written permission. 
-// THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED -// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF -// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO -// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, -// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY -// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, -// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -/* istanbul ignore file */ -function encodeReserved(str) { - return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) { - if (!/%[0-9A-Fa-f]/.test(part)) { - part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); - } - - return part; - }).join(""); -} - -function encodeUnreserved(str) { - return encodeURIComponent(str).replace(/[!'()*]/g, function (c) { - return "%" + c.charCodeAt(0).toString(16).toUpperCase(); - }); -} - -function encodeValue(operator, value, key) { - value = operator === "+" || operator === "#" ? 
encodeReserved(value) : encodeUnreserved(value); - - if (key) { - return encodeUnreserved(key) + "=" + value; - } else { - return value; - } -} - -function isDefined(value) { - return value !== undefined && value !== null; -} - -function isKeyOperator(operator) { - return operator === ";" || operator === "&" || operator === "?"; +exports.HttpClientResponse = HttpClientResponse; +function isHttps(requestUrl) { + let parsedUrl = url.parse(requestUrl); + return parsedUrl.protocol === 'https:'; } - -function getValues(context, operator, key, modifier) { - var value = context[key], - result = []; - - if (isDefined(value) && value !== "") { - if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { - value = value.toString(); - - if (modifier && modifier !== "*") { - value = value.substring(0, parseInt(modifier, 10)); - } - - result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : "")); - } else { - if (modifier === "*") { - if (Array.isArray(value)) { - value.filter(isDefined).forEach(function (value) { - result.push(encodeValue(operator, value, isKeyOperator(operator) ? 
key : "")); - }); - } else { - Object.keys(value).forEach(function (k) { - if (isDefined(value[k])) { - result.push(encodeValue(operator, value[k], k)); +exports.isHttps = isHttps; +class HttpClient { + constructor(userAgent, handlers, requestOptions) { + this._ignoreSslError = false; + this._allowRedirects = true; + this._allowRedirectDowngrade = false; + this._maxRedirects = 50; + this._allowRetries = false; + this._maxRetries = 1; + this._keepAlive = false; + this._disposed = false; + this.userAgent = userAgent; + this.handlers = handlers || []; + this.requestOptions = requestOptions; + if (requestOptions) { + if (requestOptions.ignoreSslError != null) { + this._ignoreSslError = requestOptions.ignoreSslError; } - }); - } - } else { - const tmp = []; - - if (Array.isArray(value)) { - value.filter(isDefined).forEach(function (value) { - tmp.push(encodeValue(operator, value)); - }); - } else { - Object.keys(value).forEach(function (k) { - if (isDefined(value[k])) { - tmp.push(encodeUnreserved(k)); - tmp.push(encodeValue(operator, value[k].toString())); + this._socketTimeout = requestOptions.socketTimeout; + if (requestOptions.allowRedirects != null) { + this._allowRedirects = requestOptions.allowRedirects; + } + if (requestOptions.allowRedirectDowngrade != null) { + this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade; + } + if (requestOptions.maxRedirects != null) { + this._maxRedirects = Math.max(requestOptions.maxRedirects, 0); + } + if (requestOptions.keepAlive != null) { + this._keepAlive = requestOptions.keepAlive; + } + if (requestOptions.allowRetries != null) { + this._allowRetries = requestOptions.allowRetries; + } + if (requestOptions.maxRetries != null) { + this._maxRetries = requestOptions.maxRetries; } - }); - } - - if (isKeyOperator(operator)) { - result.push(encodeUnreserved(key) + "=" + tmp.join(",")); - } else if (tmp.length !== 0) { - result.push(tmp.join(",")); } - } } - } else { - if (operator === ";") { - if (isDefined(value)) { 
- result.push(encodeUnreserved(key)); - } - } else if (value === "" && (operator === "&" || operator === "?")) { - result.push(encodeUnreserved(key) + "="); - } else if (value === "") { - result.push(""); + options(requestUrl, additionalHeaders) { + return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); } - } - - return result; -} - -function parseUrl(template) { - return { - expand: expand.bind(null, template) - }; -} - -function expand(template, context) { - var operators = ["+", "#", ".", "/", ";", "?", "&"]; - return template.replace(/\{([^\{\}]+)\}|([^\{\}]+)/g, function (_, expression, literal) { - if (expression) { - let operator = ""; - const values = []; - - if (operators.indexOf(expression.charAt(0)) !== -1) { - operator = expression.charAt(0); - expression = expression.substr(1); - } - - expression.split(/,/g).forEach(function (variable) { - var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); - values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3])); - }); - - if (operator && operator !== "+") { - var separator = ","; - - if (operator === "?") { - separator = "&"; - } else if (operator !== "#") { - separator = operator; - } - - return (values.length !== 0 ? 
operator : "") + values.join(separator); - } else { - return values.join(","); - } - } else { - return encodeReserved(literal); + get(requestUrl, additionalHeaders) { + return this.request('GET', requestUrl, null, additionalHeaders || {}); } - }); -} - -function parse(options) { - // https://fetch.spec.whatwg.org/#methods - let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible - - let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{+$1}"); - let headers = Object.assign({}, options.headers); - let body; - let parameters = omit(options, ["method", "baseUrl", "url", "headers", "request", "mediaType"]); // extract variable names from URL to calculate remaining variables later - - const urlVariableNames = extractUrlVariableNames(url); - url = parseUrl(url).expand(parameters); - - if (!/^http/.test(url)) { - url = options.baseUrl + url; - } - - const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat("baseUrl"); - const remainingParameters = omit(parameters, omittedParameters); - const isBinaryRequset = /application\/octet-stream/i.test(headers.accept); - - if (!isBinaryRequset) { - if (options.mediaType.format) { - // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw - headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(","); + del(requestUrl, additionalHeaders) { + return this.request('DELETE', requestUrl, null, additionalHeaders || {}); } - - if (options.mediaType.previews.length) { - const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; - headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => { - const format = options.mediaType.format ? 
`.${options.mediaType.format}` : "+json"; - return `application/vnd.github.${preview}-preview${format}`; - }).join(","); + post(requestUrl, data, additionalHeaders) { + return this.request('POST', requestUrl, data, additionalHeaders || {}); } - } // for GET/HEAD requests, set URL query parameters from remaining parameters - // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters - - - if (["GET", "HEAD"].includes(method)) { - url = addQueryParameters(url, remainingParameters); - } else { - if ("data" in remainingParameters) { - body = remainingParameters.data; - } else { - if (Object.keys(remainingParameters).length) { - body = remainingParameters; - } else { - headers["content-length"] = 0; - } + patch(requestUrl, data, additionalHeaders) { + return this.request('PATCH', requestUrl, data, additionalHeaders || {}); } - } // default content-type for JSON if body is set - - - if (!headers["content-type"] && typeof body !== "undefined") { - headers["content-type"] = "application/json; charset=utf-8"; - } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body. - // fetch does not allow to set `content-length` header, but we can set body to an empty string - - - if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { - body = ""; - } // Only return body/request keys if present - - - return Object.assign({ - method, - url, - headers - }, typeof body !== "undefined" ? { - body - } : null, options.request ? 
{ - request: options.request - } : null); -} - -function endpointWithDefaults(defaults, route, options) { - return parse(merge(defaults, route, options)); -} - -function withDefaults(oldDefaults, newDefaults) { - const DEFAULTS = merge(oldDefaults, newDefaults); - const endpoint = endpointWithDefaults.bind(null, DEFAULTS); - return Object.assign(endpoint, { - DEFAULTS, - defaults: withDefaults.bind(null, DEFAULTS), - merge: merge.bind(null, DEFAULTS), - parse - }); -} - -const VERSION = "6.0.3"; - -const userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url. -// So we use RequestParameters and add method as additional required property. - -const DEFAULTS = { - method: "GET", - baseUrl: "https://api.github.com", - headers: { - accept: "application/vnd.github.v3+json", - "user-agent": userAgent - }, - mediaType: { - format: "", - previews: [] - } -}; - -const endpoint = withDefaults(null, DEFAULTS); - -exports.endpoint = endpoint; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 8467: -/***/ ((__unused_webpack_module, exports, __webpack_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -var request = __webpack_require__(6234); -var universalUserAgent = __webpack_require__(5030); - -const VERSION = "4.5.1"; - -class GraphqlError extends Error { - constructor(request, response) { - const message = response.data.errors[0].message; - super(message); - Object.assign(this, response.data); - this.name = "GraphqlError"; - this.request = request; // Maintains proper stack trace (only available on V8) - - /* istanbul ignore next */ - - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); + put(requestUrl, data, additionalHeaders) { + return this.request('PUT', requestUrl, data, additionalHeaders || {}); } - } - -} - -const NON_VARIABLE_OPTIONS = ["method", "baseUrl", "url", "headers", 
"request", "query", "mediaType"]; -function graphql(request, query, options) { - options = typeof query === "string" ? options = Object.assign({ - query - }, options) : options = query; - const requestOptions = Object.keys(options).reduce((result, key) => { - if (NON_VARIABLE_OPTIONS.includes(key)) { - result[key] = options[key]; - return result; + head(requestUrl, additionalHeaders) { + return this.request('HEAD', requestUrl, null, additionalHeaders || {}); } - - if (!result.variables) { - result.variables = {}; + sendStream(verb, requestUrl, stream, additionalHeaders) { + return this.request(verb, requestUrl, stream, additionalHeaders); } - - result.variables[key] = options[key]; - return result; - }, {}); - return request(requestOptions).then(response => { - if (response.data.errors) { - throw new GraphqlError(requestOptions, { - data: response.data - }); + /** + * Gets a typed object from an endpoint + * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise + */ + async getJson(requestUrl, additionalHeaders = {}) { + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + let res = await this.get(requestUrl, additionalHeaders); + return this._processResponse(res, this.requestOptions); } - - return response.data.data; - }); -} - -function withDefaults(request$1, newDefaults) { - const newRequest = request$1.defaults(newDefaults); - - const newApi = (query, options) => { - return graphql(newRequest, query, options); - }; - - return Object.assign(newApi, { - defaults: withDefaults.bind(null, newRequest), - endpoint: request.request.endpoint - }); -} - -const graphql$1 = withDefaults(request.request, { - headers: { - "user-agent": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}` - }, - method: "POST", - url: "/graphql" -}); -function withCustomRequest(customRequest) { - return withDefaults(customRequest, { - method: "POST", - url: 
"/graphql" - }); -} - -exports.graphql = graphql$1; -exports.withCustomRequest = withCustomRequest; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 4193: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -const VERSION = "2.2.3"; - -/** - * Some “list” response that can be paginated have a different response structure - * - * They have a `total_count` key in the response (search also has `incomplete_results`, - * /installation/repositories also has `repository_selection`), as well as a key with - * the list of the items which name varies from endpoint to endpoint. - * - * Octokit normalizes these responses so that paginated results are always returned following - * the same structure. One challenge is that if the list response has only one page, no Link - * header is provided, so this header alone is not sufficient to check wether a response is - * paginated or not. - * - * We check if a "total_count" key is present in the response data, but also make sure that - * a "url" property is not, as the "Get the combined status for a specific ref" endpoint would - * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref - */ -function normalizePaginatedListResponse(response) { - const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data); - if (!responseNeedsNormalization) return response; // keep the additional properties intact as there is currently no other way - // to retrieve the same information. 
- - const incompleteResults = response.data.incomplete_results; - const repositorySelection = response.data.repository_selection; - const totalCount = response.data.total_count; - delete response.data.incomplete_results; - delete response.data.repository_selection; - delete response.data.total_count; - const namespaceKey = Object.keys(response.data)[0]; - const data = response.data[namespaceKey]; - response.data = data; - - if (typeof incompleteResults !== "undefined") { - response.data.incomplete_results = incompleteResults; - } - - if (typeof repositorySelection !== "undefined") { - response.data.repository_selection = repositorySelection; - } - - response.data.total_count = totalCount; - return response; -} - -function iterator(octokit, route, parameters) { - const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters); - const requestMethod = typeof route === "function" ? route : octokit.request; - const method = options.method; - const headers = options.headers; - let url = options.url; - return { - [Symbol.asyncIterator]: () => ({ - next() { - if (!url) { - return Promise.resolve({ - done: true - }); + async postJson(requestUrl, obj, additionalHeaders = {}) { + let data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + let res = await this.post(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + } + async putJson(requestUrl, obj, additionalHeaders = {}) { + let data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = 
this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + let res = await this.put(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + } + async patchJson(requestUrl, obj, additionalHeaders = {}) { + let data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + let res = await this.patch(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + } + /** + * Makes a raw http request. + * All other methods such as get, post, patch, and request ultimately call this. + * Prefer get, del, post and patch + */ + async request(verb, requestUrl, data, headers) { + if (this._disposed) { + throw new Error('Client has already been disposed.'); } - - return requestMethod({ - method, - url, - headers - }).then(normalizePaginatedListResponse).then(response => { - // `response.headers.link` format: - // '; rel="next", ; rel="last"' - // sets `url` to undefined if "next" URL is not present or `link` header is not set - url = ((response.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1]; - return { - value: response - }; + let parsedUrl = url.parse(requestUrl); + let info = this._prepareRequest(verb, parsedUrl, headers); + // Only perform retries on reads since writes may not be idempotent. + let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1 + ? 
this._maxRetries + 1 + : 1; + let numTries = 0; + let response; + while (numTries < maxTries) { + response = await this.requestRaw(info, data); + // Check if it's an authentication challenge + if (response && + response.message && + response.message.statusCode === HttpCodes.Unauthorized) { + let authenticationHandler; + for (let i = 0; i < this.handlers.length; i++) { + if (this.handlers[i].canHandleAuthentication(response)) { + authenticationHandler = this.handlers[i]; + break; + } + } + if (authenticationHandler) { + return authenticationHandler.handleAuthentication(this, info, data); + } + else { + // We have received an unauthorized response but have no handlers to handle it. + // Let the response return to the caller. + return response; + } + } + let redirectsRemaining = this._maxRedirects; + while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 && + this._allowRedirects && + redirectsRemaining > 0) { + const redirectUrl = response.message.headers['location']; + if (!redirectUrl) { + // if there's no location to redirect to, we won't + break; + } + let parsedRedirectUrl = url.parse(redirectUrl); + if (parsedUrl.protocol == 'https:' && + parsedUrl.protocol != parsedRedirectUrl.protocol && + !this._allowRedirectDowngrade) { + throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); + } + // we need to finish reading the response before reassigning response + // which will leak the open socket. 
+ await response.readBody(); + // strip authorization header if redirected to a different hostname + if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { + for (let header in headers) { + // header names are case insensitive + if (header.toLowerCase() === 'authorization') { + delete headers[header]; + } + } + } + // let's make the request with the new redirectUrl + info = this._prepareRequest(verb, parsedRedirectUrl, headers); + response = await this.requestRaw(info, data); + redirectsRemaining--; + } + if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) { + // If not a retry code, return immediately instead of retrying + return response; + } + numTries += 1; + if (numTries < maxTries) { + await response.readBody(); + await this._performExponentialBackoff(numTries); + } + } + return response; + } + /** + * Needs to be called if keepAlive is set to true in request options. + */ + dispose() { + if (this._agent) { + this._agent.destroy(); + } + this._disposed = true; + } + /** + * Raw request. + * @param info + * @param data + */ + requestRaw(info, data) { + return new Promise((resolve, reject) => { + let callbackForResult = function (err, res) { + if (err) { + reject(err); + } + resolve(res); + }; + this.requestRawWithCallback(info, data, callbackForResult); }); - } - - }) - }; -} - -function paginate(octokit, route, parameters, mapFn) { - if (typeof parameters === "function") { - mapFn = parameters; - parameters = undefined; - } - - return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn); -} - -function gather(octokit, results, iterator, mapFn) { - return iterator.next().then(result => { - if (result.done) { - return results; } - - let earlyExit = false; - - function done() { - earlyExit = true; + /** + * Raw request with callback. 
+ * @param info + * @param data + * @param onResult + */ + requestRawWithCallback(info, data, onResult) { + let socket; + if (typeof data === 'string') { + info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); + } + let callbackCalled = false; + let handleResult = (err, res) => { + if (!callbackCalled) { + callbackCalled = true; + onResult(err, res); + } + }; + let req = info.httpModule.request(info.options, (msg) => { + let res = new HttpClientResponse(msg); + handleResult(null, res); + }); + req.on('socket', sock => { + socket = sock; + }); + // If we ever get disconnected, we want the socket to timeout eventually + req.setTimeout(this._socketTimeout || 3 * 60000, () => { + if (socket) { + socket.end(); + } + handleResult(new Error('Request timeout: ' + info.options.path), null); + }); + req.on('error', function (err) { + // err has statusCode property + // res should have headers + handleResult(err, null); + }); + if (data && typeof data === 'string') { + req.write(data, 'utf8'); + } + if (data && typeof data !== 'string') { + data.on('close', function () { + req.end(); + }); + data.pipe(req); + } + else { + req.end(); + } } - - results = results.concat(mapFn ? mapFn(result.value, done) : result.value.data); - - if (earlyExit) { - return results; + /** + * Gets an http agent. This function is useful when you need an http agent that handles + * routing through a proxy server - depending upon the url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. 
For example, https://api.github.com + */ + getAgent(serverUrl) { + let parsedUrl = url.parse(serverUrl); + return this._getAgent(parsedUrl); } - - return gather(octokit, results, iterator, mapFn); - }); -} - -/** - * @param octokit Octokit instance - * @param options Options passed to Octokit constructor - */ - -function paginateRest(octokit) { - return { - paginate: Object.assign(paginate.bind(null, octokit), { - iterator: iterator.bind(null, octokit) - }) - }; -} -paginateRest.VERSION = VERSION; - -exports.paginateRest = paginateRest; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 3044: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - + _prepareRequest(method, requestUrl, headers) { + const info = {}; + info.parsedUrl = requestUrl; + const usingSsl = info.parsedUrl.protocol === 'https:'; + info.httpModule = usingSsl ? https : http; + const defaultPort = usingSsl ? 443 : 80; + info.options = {}; + info.options.host = info.parsedUrl.hostname; + info.options.port = info.parsedUrl.port + ? 
parseInt(info.parsedUrl.port) + : defaultPort; + info.options.path = + (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); + info.options.method = method; + info.options.headers = this._mergeHeaders(headers); + if (this.userAgent != null) { + info.options.headers['user-agent'] = this.userAgent; + } + info.options.agent = this._getAgent(info.parsedUrl); + // gives handlers an opportunity to participate + if (this.handlers) { + this.handlers.forEach(handler => { + handler.prepareRequest(info.options); + }); + } + return info; + } + _mergeHeaders(headers) { + const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); + if (this.requestOptions && this.requestOptions.headers) { + return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers)); + } + return lowercaseKeys(headers || {}); + } + _getExistingOrDefaultHeader(additionalHeaders, header, _default) { + const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); + let clientHeader; + if (this.requestOptions && this.requestOptions.headers) { + clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; + } + return additionalHeaders[header] || clientHeader || _default; + } + _getAgent(parsedUrl) { + let agent; + let proxyUrl = pm.getProxyUrl(parsedUrl); + let useProxy = proxyUrl && proxyUrl.hostname; + if (this._keepAlive && useProxy) { + agent = this._proxyAgent; + } + if (this._keepAlive && !useProxy) { + agent = this._agent; + } + // if agent is already assigned use that agent. 
+ if (!!agent) { + return agent; + } + const usingSsl = parsedUrl.protocol === 'https:'; + let maxSockets = 100; + if (!!this.requestOptions) { + maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets; + } + if (useProxy) { + // If using proxy, need tunnel + if (!tunnel) { + tunnel = __webpack_require__(4294); + } + const agentOptions = { + maxSockets: maxSockets, + keepAlive: this._keepAlive, + proxy: { + proxyAuth: proxyUrl.auth, + host: proxyUrl.hostname, + port: proxyUrl.port + } + }; + let tunnelAgent; + const overHttps = proxyUrl.protocol === 'https:'; + if (usingSsl) { + tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp; + } + else { + tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp; + } + agent = tunnelAgent(agentOptions); + this._proxyAgent = agent; + } + // if reusing agent across request and tunneling agent isn't assigned create a new agent + if (this._keepAlive && !agent) { + const options = { keepAlive: this._keepAlive, maxSockets: maxSockets }; + agent = usingSsl ? new https.Agent(options) : new http.Agent(options); + this._agent = agent; + } + // if not using private agent and tunnel agent isn't setup then use global agent + if (!agent) { + agent = usingSsl ? 
https.globalAgent : http.globalAgent; + } + if (usingSsl && this._ignoreSslError) { + // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process + // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options + // we have to cast it to any and change it directly + agent.options = Object.assign(agent.options || {}, { + rejectUnauthorized: false + }); + } + return agent; + } + _performExponentialBackoff(retryNumber) { + retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); + const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); + return new Promise(resolve => setTimeout(() => resolve(), ms)); + } + static dateTimeDeserializer(key, value) { + if (typeof value === 'string') { + let a = new Date(value); + if (!isNaN(a.valueOf())) { + return a; + } + } + return value; + } + async _processResponse(res, options) { + return new Promise(async (resolve, reject) => { + const statusCode = res.message.statusCode; + const response = { + statusCode: statusCode, + result: null, + headers: {} + }; + // not found leads to null obj returned + if (statusCode == HttpCodes.NotFound) { + resolve(response); + } + let obj; + let contents; + // get the result from the body + try { + contents = await res.readBody(); + if (contents && contents.length > 0) { + if (options && options.deserializeDates) { + obj = JSON.parse(contents, HttpClient.dateTimeDeserializer); + } + else { + obj = JSON.parse(contents); + } + response.result = obj; + } + response.headers = res.message.headers; + } + catch (err) { + // Invalid resource (contents not json); leaving result obj null + } + // note that 3xx redirects are handled by the http layer. 
+ if (statusCode > 299) { + let msg; + // if exception/error in body, attempt to get better error + if (obj && obj.message) { + msg = obj.message; + } + else if (contents && contents.length > 0) { + // it may be the case that the exception is in the body message as string + msg = contents; + } + else { + msg = 'Failed request: (' + statusCode + ')'; + } + let err = new Error(msg); + // attach statusCode and body obj (if available) to the error object + err['statusCode'] = statusCode; + if (response.result) { + err['result'] = response.result; + } + reject(err); + } + else { + resolve(response); + } + }); + } +} +exports.HttpClient = HttpClient; + + +/***/ }), + +/***/ 6443: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); +const url = __webpack_require__(8835); +function getProxyUrl(reqUrl) { + let usingSsl = reqUrl.protocol === 'https:'; + let proxyUrl; + if (checkBypass(reqUrl)) { + return proxyUrl; + } + let proxyVar; + if (usingSsl) { + proxyVar = process.env['https_proxy'] || process.env['HTTPS_PROXY']; + } + else { + proxyVar = process.env['http_proxy'] || process.env['HTTP_PROXY']; + } + if (proxyVar) { + proxyUrl = url.parse(proxyVar); + } + return proxyUrl; +} +exports.getProxyUrl = getProxyUrl; +function checkBypass(reqUrl) { + if (!reqUrl.hostname) { + return false; + } + let noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; + if (!noProxy) { + return false; + } + // Determine the request port + let reqPort; + if (reqUrl.port) { + reqPort = Number(reqUrl.port); + } + else if (reqUrl.protocol === 'http:') { + reqPort = 80; + } + else if (reqUrl.protocol === 'https:') { + reqPort = 443; + } + // Format the request hostname and hostname with port + let upperReqHosts = [reqUrl.hostname.toUpperCase()]; + if (typeof reqPort === 'number') { + upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); + } + // Compare request host against noproxy + 
for (let upperNoProxyItem of noProxy + .split(',') + .map(x => x.trim().toUpperCase()) + .filter(x => x)) { + if (upperReqHosts.some(x => x === upperNoProxyItem)) { + return true; + } + } + return false; +} +exports.checkBypass = checkBypass; -const Endpoints = { - actions: { - addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], - cancelWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"], - createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"], - createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}"], - createRegistrationTokenForOrg: ["POST /orgs/{org}/actions/runners/registration-token"], - createRegistrationTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/registration-token"], - createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"], - createRemoveTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/remove-token"], - deleteArtifact: ["DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], - deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], - deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}"], - deleteSelfHostedRunnerFromOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}"], - deleteSelfHostedRunnerFromRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}"], - deleteWorkflowRunLogs: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], - downloadArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}"], - downloadJobLogsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs"], - downloadWorkflowRunLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], - getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], - getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"], - getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"], 
- getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"], - getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"], - getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"], - getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"], - getSelfHostedRunnerForRepo: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}"], - getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"], - getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"], - getWorkflowRunUsage: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing"], - getWorkflowUsage: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing"], - listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"], - listJobsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"], - listOrgSecrets: ["GET /orgs/{org}/actions/secrets"], - listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"], - listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"], - listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"], - listRunnerApplicationsForRepo: ["GET /repos/{owner}/{repo}/actions/runners/downloads"], - listSelectedReposForOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}/repositories"], - listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"], - listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"], - listWorkflowRunArtifacts: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"], - listWorkflowRuns: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"], - listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"], - reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"], - removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], - setSelectedReposForOrgSecret: ["PUT 
/orgs/{org}/actions/secrets/{secret_name}/repositories"] - }, - activity: { - checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"], - deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"], - deleteThreadSubscription: ["DELETE /notifications/threads/{thread_id}/subscription"], - getFeeds: ["GET /feeds"], - getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"], - getThread: ["GET /notifications/threads/{thread_id}"], - getThreadSubscriptionForAuthenticatedUser: ["GET /notifications/threads/{thread_id}/subscription"], - listEventsForAuthenticatedUser: ["GET /users/{username}/events"], - listNotificationsForAuthenticatedUser: ["GET /notifications"], - listOrgEventsForAuthenticatedUser: ["GET /users/{username}/events/orgs/{org}"], - listPublicEvents: ["GET /events"], - listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"], - listPublicEventsForUser: ["GET /users/{username}/events/public"], - listPublicOrgEvents: ["GET /orgs/{org}/events"], - listReceivedEventsForUser: ["GET /users/{username}/received_events"], - listReceivedPublicEventsForUser: ["GET /users/{username}/received_events/public"], - listRepoEvents: ["GET /repos/{owner}/{repo}/events"], - listRepoNotificationsForAuthenticatedUser: ["GET /repos/{owner}/{repo}/notifications"], - listReposStarredByAuthenticatedUser: ["GET /user/starred"], - listReposStarredByUser: ["GET /users/{username}/starred"], - listReposWatchedByUser: ["GET /users/{username}/subscriptions"], - listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"], - listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"], - listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"], - markNotificationsAsRead: ["PUT /notifications"], - markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"], - markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"], - setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"], - setThreadSubscription: 
["PUT /notifications/threads/{thread_id}/subscription"], - starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"], - unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"] - }, - apps: { - addRepoToInstallation: ["PUT /user/installations/{installation_id}/repositories/{repository_id}", { - mediaType: { - previews: ["machine-man"] - } - }], - checkToken: ["POST /applications/{client_id}/token"], - createContentAttachment: ["POST /content_references/{content_reference_id}/attachments", { - mediaType: { - previews: ["corsair"] - } - }], - createFromManifest: ["POST /app-manifests/{code}/conversions"], - createInstallationAccessToken: ["POST /app/installations/{installation_id}/access_tokens", { - mediaType: { - previews: ["machine-man"] - } - }], - deleteAuthorization: ["DELETE /applications/{client_id}/grant"], - deleteInstallation: ["DELETE /app/installations/{installation_id}", { - mediaType: { - previews: ["machine-man"] - } - }], - deleteToken: ["DELETE /applications/{client_id}/token"], - getAuthenticated: ["GET /app", { - mediaType: { - previews: ["machine-man"] - } - }], - getBySlug: ["GET /apps/{app_slug}", { - mediaType: { - previews: ["machine-man"] - } - }], - getInstallation: ["GET /app/installations/{installation_id}", { - mediaType: { - previews: ["machine-man"] - } - }], - getOrgInstallation: ["GET /orgs/{org}/installation", { - mediaType: { - previews: ["machine-man"] - } - }], - getRepoInstallation: ["GET /repos/{owner}/{repo}/installation", { + +/***/ }), + +/***/ 334: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +async function auth(token) { + const tokenType = token.split(/\./).length === 3 ? "app" : /^v\d+\./.test(token) ? 
"installation" : "oauth"; + return { + type: "token", + token: token, + tokenType + }; +} + +/** + * Prefix token for usage in the Authorization header + * + * @param token OAuth token or JSON Web Token + */ +function withAuthorizationPrefix(token) { + if (token.split(/\./).length === 3) { + return `bearer ${token}`; + } + + return `token ${token}`; +} + +async function hook(token, request, route, parameters) { + const endpoint = request.endpoint.merge(route, parameters); + endpoint.headers.authorization = withAuthorizationPrefix(token); + return request(endpoint); +} + +const createTokenAuth = function createTokenAuth(token) { + if (!token) { + throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); + } + + if (typeof token !== "string") { + throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string"); + } + + token = token.replace(/^(token|bearer) +/i, ""); + return Object.assign(auth.bind(null, token), { + hook: hook.bind(null, token) + }); +}; + +exports.createTokenAuth = createTokenAuth; +//# sourceMappingURL=index.js.map + + +/***/ }), + +/***/ 6762: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +var universalUserAgent = __webpack_require__(5030); +var beforeAfterHook = __webpack_require__(3682); +var request = __webpack_require__(6234); +var graphql = __webpack_require__(8467); +var authToken = __webpack_require__(334); + +function _defineProperty(obj, key, value) { + if (key in obj) { + Object.defineProperty(obj, key, { + value: value, + enumerable: true, + configurable: true, + writable: true + }); + } else { + obj[key] = value; + } + + return obj; +} + +function ownKeys(object, enumerableOnly) { + var keys = Object.keys(object); + + if (Object.getOwnPropertySymbols) { + var symbols = Object.getOwnPropertySymbols(object); + if (enumerableOnly) symbols = symbols.filter(function (sym) { + return 
Object.getOwnPropertyDescriptor(object, sym).enumerable; + }); + keys.push.apply(keys, symbols); + } + + return keys; +} + +function _objectSpread2(target) { + for (var i = 1; i < arguments.length; i++) { + var source = arguments[i] != null ? arguments[i] : {}; + + if (i % 2) { + ownKeys(Object(source), true).forEach(function (key) { + _defineProperty(target, key, source[key]); + }); + } else if (Object.getOwnPropertyDescriptors) { + Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); + } else { + ownKeys(Object(source)).forEach(function (key) { + Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); + }); + } + } + + return target; +} + +const VERSION = "3.1.0"; + +class Octokit { + constructor(options = {}) { + const hook = new beforeAfterHook.Collection(); + const requestDefaults = { + baseUrl: request.request.endpoint.DEFAULTS.baseUrl, + headers: {}, + request: Object.assign({}, options.request, { + hook: hook.bind(null, "request") + }), mediaType: { - previews: ["machine-man"] + previews: [], + format: "" } - }], - getSubscriptionPlanForAccount: ["GET /marketplace_listing/accounts/{account_id}"], - getSubscriptionPlanForAccountStubbed: ["GET /marketplace_listing/stubbed/accounts/{account_id}"], - getUserInstallation: ["GET /users/{username}/installation", { - mediaType: { - previews: ["machine-man"] - } - }], - listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"], - listAccountsForPlanStubbed: ["GET /marketplace_listing/stubbed/plans/{plan_id}/accounts"], - listInstallationReposForAuthenticatedUser: ["GET /user/installations/{installation_id}/repositories", { - mediaType: { - previews: ["machine-man"] - } - }], - listInstallations: ["GET /app/installations", { - mediaType: { - previews: ["machine-man"] - } - }], - listInstallationsForAuthenticatedUser: ["GET /user/installations", { - mediaType: { - previews: ["machine-man"] - } - }], - listPlans: ["GET /marketplace_listing/plans"], - 
listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"], - listReposAccessibleToInstallation: ["GET /installation/repositories", { - mediaType: { - previews: ["machine-man"] - } - }], - listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"], - listSubscriptionsForAuthenticatedUserStubbed: ["GET /user/marketplace_purchases/stubbed"], - removeRepoFromInstallation: ["DELETE /user/installations/{installation_id}/repositories/{repository_id}", { - mediaType: { - previews: ["machine-man"] - } - }], - resetToken: ["PATCH /applications/{client_id}/token"], - revokeInstallationAccessToken: ["DELETE /installation/token"], - suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"], - unsuspendInstallation: ["DELETE /app/installations/{installation_id}/suspended"] - }, - checks: { - create: ["POST /repos/{owner}/{repo}/check-runs", { - mediaType: { - previews: ["antiope"] - } - }], - createSuite: ["POST /repos/{owner}/{repo}/check-suites", { - mediaType: { - previews: ["antiope"] - } - }], - get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}", { - mediaType: { - previews: ["antiope"] - } - }], - getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}", { - mediaType: { - previews: ["antiope"] - } - }], - listAnnotations: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", { - mediaType: { - previews: ["antiope"] - } - }], - listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs", { - mediaType: { - previews: ["antiope"] - } - }], - listForSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", { - mediaType: { - previews: ["antiope"] - } - }], - listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites", { - mediaType: { - previews: ["antiope"] - } - }], - rerequestSuite: ["POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest", { - mediaType: { - previews: ["antiope"] - } - }], - setSuitesPreferences: ["PATCH 
/repos/{owner}/{repo}/check-suites/preferences", { - mediaType: { - previews: ["antiope"] - } - }], - update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}", { - mediaType: { - previews: ["antiope"] - } - }] - }, - codeScanning: { - getAlert: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_id}"], - listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"] - }, - codesOfConduct: { - getAllCodesOfConduct: ["GET /codes_of_conduct", { - mediaType: { - previews: ["scarlet-witch"] - } - }], - getConductCode: ["GET /codes_of_conduct/{key}", { - mediaType: { - previews: ["scarlet-witch"] - } - }], - getForRepo: ["GET /repos/{owner}/{repo}/community/code_of_conduct", { - mediaType: { - previews: ["scarlet-witch"] - } - }] - }, - emojis: { - get: ["GET /emojis"] - }, - gists: { - checkIsStarred: ["GET /gists/{gist_id}/star"], - create: ["POST /gists"], - createComment: ["POST /gists/{gist_id}/comments"], - delete: ["DELETE /gists/{gist_id}"], - deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"], - fork: ["POST /gists/{gist_id}/forks"], - get: ["GET /gists/{gist_id}"], - getComment: ["GET /gists/{gist_id}/comments/{comment_id}"], - getRevision: ["GET /gists/{gist_id}/{sha}"], - list: ["GET /gists"], - listComments: ["GET /gists/{gist_id}/comments"], - listCommits: ["GET /gists/{gist_id}/commits"], - listForUser: ["GET /users/{username}/gists"], - listForks: ["GET /gists/{gist_id}/forks"], - listPublic: ["GET /gists/public"], - listStarred: ["GET /gists/starred"], - star: ["PUT /gists/{gist_id}/star"], - unstar: ["DELETE /gists/{gist_id}/star"], - update: ["PATCH /gists/{gist_id}"], - updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"] - }, - git: { - createBlob: ["POST /repos/{owner}/{repo}/git/blobs"], - createCommit: ["POST /repos/{owner}/{repo}/git/commits"], - createRef: ["POST /repos/{owner}/{repo}/git/refs"], - createTag: ["POST /repos/{owner}/{repo}/git/tags"], - createTree: ["POST 
/repos/{owner}/{repo}/git/trees"], - deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"], - getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"], - getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"], - getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"], - getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"], - getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"], - listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"], - updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"] - }, - gitignore: { - getAllTemplates: ["GET /gitignore/templates"], - getTemplate: ["GET /gitignore/templates/{name}"] - }, - interactions: { - getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits", { - mediaType: { - previews: ["sombra"] - } - }], - getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits", { - mediaType: { - previews: ["sombra"] - } - }], - removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits", { - mediaType: { - previews: ["sombra"] + }; // prepend default user agent with `options.userAgent` if set + + requestDefaults.headers["user-agent"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(" "); + + if (options.baseUrl) { + requestDefaults.baseUrl = options.baseUrl; + } + + if (options.previews) { + requestDefaults.mediaType.previews = options.previews; + } + + if (options.timeZone) { + requestDefaults.headers["time-zone"] = options.timeZone; + } + + this.request = request.request.defaults(requestDefaults); + this.graphql = graphql.withCustomRequest(this.request).defaults(_objectSpread2(_objectSpread2({}, requestDefaults), {}, { + baseUrl: requestDefaults.baseUrl.replace(/\/api\/v3$/, "/api") + })); + this.log = Object.assign({ + debug: () => {}, + info: () => {}, + warn: console.warn.bind(console), + error: console.error.bind(console) + }, options.log); + this.hook = hook; // (1) If neither `options.authStrategy` nor 
`options.auth` are set, the `octokit` instance + // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registred. + // (2) If only `options.auth` is set, use the default token authentication strategy. + // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance. + // TODO: type `options.auth` based on `options.authStrategy`. + + if (!options.authStrategy) { + if (!options.auth) { + // (1) + this.auth = async () => ({ + type: "unauthenticated" + }); + } else { + // (2) + const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\_(ツ)_/¯ + + hook.wrap("request", auth.hook); + this.auth = auth; } - }], - removeRestrictionsForRepo: ["DELETE /repos/{owner}/{repo}/interaction-limits", { - mediaType: { - previews: ["sombra"] + } else { + const auth = options.authStrategy(Object.assign({ + request: this.request + }, options.auth)); // @ts-ignore ¯\_(ツ)_/¯ + + hook.wrap("request", auth.hook); + this.auth = auth; + } // apply plugins + // https://stackoverflow.com/a/16345172 + + + const classConstructor = this.constructor; + classConstructor.plugins.forEach(plugin => { + Object.assign(this, plugin(this, options)); + }); + } + + static defaults(defaults) { + const OctokitWithDefaults = class extends this { + constructor(...args) { + const options = args[0] || {}; + + if (typeof defaults === "function") { + super(defaults(options)); + return; + } + + super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? { + userAgent: `${options.userAgent} ${defaults.userAgent}` + } : null)); } - }], - setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits", { - mediaType: { - previews: ["sombra"] + + }; + return OctokitWithDefaults; + } + /** + * Attach a plugin (or many) to your Octokit instance. + * + * @example + * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) 
+ */ + + + static plugin(...newPlugins) { + var _a; + + const currentPlugins = this.plugins; + const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a); + return NewOctokit; + } + +} +Octokit.VERSION = VERSION; +Octokit.plugins = []; + +exports.Octokit = Octokit; +//# sourceMappingURL=index.js.map + + +/***/ }), + +/***/ 9440: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } + +var isPlainObject = _interopDefault(__webpack_require__(8840)); +var universalUserAgent = __webpack_require__(5030); + +function lowercaseKeys(object) { + if (!object) { + return {}; + } + + return Object.keys(object).reduce((newObj, key) => { + newObj[key.toLowerCase()] = object[key]; + return newObj; + }, {}); +} + +function mergeDeep(defaults, options) { + const result = Object.assign({}, defaults); + Object.keys(options).forEach(key => { + if (isPlainObject(options[key])) { + if (!(key in defaults)) Object.assign(result, { + [key]: options[key] + });else result[key] = mergeDeep(defaults[key], options[key]); + } else { + Object.assign(result, { + [key]: options[key] + }); + } + }); + return result; +} + +function merge(defaults, route, options) { + if (typeof route === "string") { + let [method, url] = route.split(" "); + options = Object.assign(url ? 
{ + method, + url + } : { + url: method + }, options); + } else { + options = Object.assign({}, route); + } // lowercase header names before merging with defaults to avoid duplicates + + + options.headers = lowercaseKeys(options.headers); + const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten + + if (defaults && defaults.mediaType.previews.length) { + mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews); + } + + mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, "")); + return mergedOptions; +} + +function addQueryParameters(url, parameters) { + const separator = /\?/.test(url) ? "&" : "?"; + const names = Object.keys(parameters); + + if (names.length === 0) { + return url; + } + + return url + separator + names.map(name => { + if (name === "q") { + return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+"); + } + + return `${name}=${encodeURIComponent(parameters[name])}`; + }).join("&"); +} + +const urlVariableRegex = /\{[^}]+\}/g; + +function removeNonChars(variableName) { + return variableName.replace(/^\W+|\W+$/g, "").split(/,/); +} + +function extractUrlVariableNames(url) { + const matches = url.match(urlVariableRegex); + + if (!matches) { + return []; + } + + return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); +} + +function omit(object, keysToOmit) { + return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => { + obj[key] = object[key]; + return obj; + }, {}); +} + +// Based on https://github.com/bramstein/url-template, licensed under BSD +// TODO: create separate package. +// +// Copyright (c) 2012-2014, Bram Stein +// All rights reserved. 
+// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions +// are met: +// 1. Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// 2. Redistributions in binary form must reproduce the above copyright +// notice, this list of conditions and the following disclaimer in the +// documentation and/or other materials provided with the distribution. +// 3. The name of the author may not be used to endorse or promote products +// derived from this software without specific prior written permission. +// THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED +// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO +// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, +// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY +// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, +// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +/* istanbul ignore file */ +function encodeReserved(str) { + return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) { + if (!/%[0-9A-Fa-f]/.test(part)) { + part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); + } + + return part; + }).join(""); +} + +function encodeUnreserved(str) { + return encodeURIComponent(str).replace(/[!'()*]/g, function (c) { + return "%" + c.charCodeAt(0).toString(16).toUpperCase(); + }); +} + +function encodeValue(operator, value, key) { + value = operator === "+" || operator === "#" ? 
encodeReserved(value) : encodeUnreserved(value); + + if (key) { + return encodeUnreserved(key) + "=" + value; + } else { + return value; + } +} + +function isDefined(value) { + return value !== undefined && value !== null; +} + +function isKeyOperator(operator) { + return operator === ";" || operator === "&" || operator === "?"; +} + +function getValues(context, operator, key, modifier) { + var value = context[key], + result = []; + + if (isDefined(value) && value !== "") { + if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { + value = value.toString(); + + if (modifier && modifier !== "*") { + value = value.substring(0, parseInt(modifier, 10)); } - }], - setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits", { - mediaType: { - previews: ["sombra"] + + result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : "")); + } else { + if (modifier === "*") { + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function (value) { + result.push(encodeValue(operator, value, isKeyOperator(operator) ? 
key : "")); + }); + } else { + Object.keys(value).forEach(function (k) { + if (isDefined(value[k])) { + result.push(encodeValue(operator, value[k], k)); + } + }); + } + } else { + const tmp = []; + + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function (value) { + tmp.push(encodeValue(operator, value)); + }); + } else { + Object.keys(value).forEach(function (k) { + if (isDefined(value[k])) { + tmp.push(encodeUnreserved(k)); + tmp.push(encodeValue(operator, value[k].toString())); + } + }); + } + + if (isKeyOperator(operator)) { + result.push(encodeUnreserved(key) + "=" + tmp.join(",")); + } else if (tmp.length !== 0) { + result.push(tmp.join(",")); + } } - }] - }, - issues: { - addAssignees: ["POST /repos/{owner}/{repo}/issues/{issue_number}/assignees"], - addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"], - checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"], - create: ["POST /repos/{owner}/{repo}/issues"], - createComment: ["POST /repos/{owner}/{repo}/issues/{issue_number}/comments"], - createLabel: ["POST /repos/{owner}/{repo}/labels"], - createMilestone: ["POST /repos/{owner}/{repo}/milestones"], - deleteComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}"], - deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"], - deleteMilestone: ["DELETE /repos/{owner}/{repo}/milestones/{milestone_number}"], - get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"], - getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"], - getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"], - getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"], - getMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}"], - list: ["GET /issues"], - listAssignees: ["GET /repos/{owner}/{repo}/assignees"], - listComments: ["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"], - listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"], - listEvents: ["GET 
/repos/{owner}/{repo}/issues/{issue_number}/events"], - listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"], - listEventsForTimeline: ["GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", { - mediaType: { - previews: ["mockingbird"] - } - }], - listForAuthenticatedUser: ["GET /user/issues"], - listForOrg: ["GET /orgs/{org}/issues"], - listForRepo: ["GET /repos/{owner}/{repo}/issues"], - listLabelsForMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels"], - listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"], - listLabelsOnIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/labels"], - listMilestones: ["GET /repos/{owner}/{repo}/milestones"], - lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"], - removeAllLabels: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels"], - removeAssignees: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees"], - removeLabel: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}"], - setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"], - unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"], - update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"], - updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"], - updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"], - updateMilestone: ["PATCH /repos/{owner}/{repo}/milestones/{milestone_number}"] - }, - licenses: { - get: ["GET /licenses/{license}"], - getAllCommonlyUsed: ["GET /licenses"], - getForRepo: ["GET /repos/{owner}/{repo}/license"] - }, - markdown: { - render: ["POST /markdown"], - renderRaw: ["POST /markdown/raw", { - headers: { - "content-type": "text/plain; charset=utf-8" - } - }] - }, - meta: { - get: ["GET /meta"] - }, - migrations: { - cancelImport: ["DELETE /repos/{owner}/{repo}/import"], - deleteArchiveForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/archive", { - mediaType: { - previews: ["wyandotte"] - 
} - }], - deleteArchiveForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/archive", { - mediaType: { - previews: ["wyandotte"] - } - }], - downloadArchiveForOrg: ["GET /orgs/{org}/migrations/{migration_id}/archive", { - mediaType: { - previews: ["wyandotte"] - } - }], - getArchiveForAuthenticatedUser: ["GET /user/migrations/{migration_id}/archive", { - mediaType: { - previews: ["wyandotte"] - } - }], - getCommitAuthors: ["GET /repos/{owner}/{repo}/import/authors"], - getImportStatus: ["GET /repos/{owner}/{repo}/import"], - getLargeFiles: ["GET /repos/{owner}/{repo}/import/large_files"], - getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}", { - mediaType: { - previews: ["wyandotte"] - } - }], - getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}", { - mediaType: { - previews: ["wyandotte"] - } - }], - listForAuthenticatedUser: ["GET /user/migrations", { - mediaType: { - previews: ["wyandotte"] - } - }], - listForOrg: ["GET /orgs/{org}/migrations", { - mediaType: { - previews: ["wyandotte"] - } - }], - listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories", { - mediaType: { - previews: ["wyandotte"] - } - }], - listReposForUser: ["GET /user/{migration_id}/repositories", { - mediaType: { - previews: ["wyandotte"] + } + } else { + if (operator === ";") { + if (isDefined(value)) { + result.push(encodeUnreserved(key)); } - }], - mapCommitAuthor: ["PATCH /repos/{owner}/{repo}/import/authors/{author_id}"], - setLfsPreference: ["PATCH /repos/{owner}/{repo}/import/lfs"], - startForAuthenticatedUser: ["POST /user/migrations"], - startForOrg: ["POST /orgs/{org}/migrations"], - startImport: ["PUT /repos/{owner}/{repo}/import"], - unlockRepoForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock", { - mediaType: { - previews: ["wyandotte"] + } else if (value === "" && (operator === "&" || operator === "?")) { + result.push(encodeUnreserved(key) + "="); + } else if (value === "") { + 
result.push(""); + } + } + + return result; +} + +function parseUrl(template) { + return { + expand: expand.bind(null, template) + }; +} + +function expand(template, context) { + var operators = ["+", "#", ".", "/", ";", "?", "&"]; + return template.replace(/\{([^\{\}]+)\}|([^\{\}]+)/g, function (_, expression, literal) { + if (expression) { + let operator = ""; + const values = []; + + if (operators.indexOf(expression.charAt(0)) !== -1) { + operator = expression.charAt(0); + expression = expression.substr(1); } - }], - unlockRepoForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock", { - mediaType: { - previews: ["wyandotte"] + + expression.split(/,/g).forEach(function (variable) { + var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); + values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3])); + }); + + if (operator && operator !== "+") { + var separator = ","; + + if (operator === "?") { + separator = "&"; + } else if (operator !== "#") { + separator = operator; + } + + return (values.length !== 0 ? 
operator : "") + values.join(separator); + } else { + return values.join(","); } - }], - updateImport: ["PATCH /repos/{owner}/{repo}/import"] - }, - orgs: { - blockUser: ["PUT /orgs/{org}/blocks/{username}"], - checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"], - checkMembershipForUser: ["GET /orgs/{org}/members/{username}"], - checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"], - convertMemberToOutsideCollaborator: ["PUT /orgs/{org}/outside_collaborators/{username}"], - createInvitation: ["POST /orgs/{org}/invitations"], - createWebhook: ["POST /orgs/{org}/hooks"], - deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"], - get: ["GET /orgs/{org}"], - getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"], - getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"], - getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"], - list: ["GET /organizations"], - listAppInstallations: ["GET /orgs/{org}/installations", { - mediaType: { - previews: ["machine-man"] + } else { + return encodeReserved(literal); + } + }); +} + +function parse(options) { + // https://fetch.spec.whatwg.org/#methods + let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible + + let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{+$1}"); + let headers = Object.assign({}, options.headers); + let body; + let parameters = omit(options, ["method", "baseUrl", "url", "headers", "request", "mediaType"]); // extract variable names from URL to calculate remaining variables later + + const urlVariableNames = extractUrlVariableNames(url); + url = parseUrl(url).expand(parameters); + + if (!/^http/.test(url)) { + url = options.baseUrl + url; + } + + const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat("baseUrl"); + const remainingParameters = omit(parameters, omittedParameters); + const isBinaryRequset = /application\/octet-stream/i.test(headers.accept); + 
+ if (!isBinaryRequset) { + if (options.mediaType.format) { + // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw + headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(","); + } + + if (options.mediaType.previews.length) { + const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; + headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => { + const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json"; + return `application/vnd.github.${preview}-preview${format}`; + }).join(","); + } + } // for GET/HEAD requests, set URL query parameters from remaining parameters + // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters + + + if (["GET", "HEAD"].includes(method)) { + url = addQueryParameters(url, remainingParameters); + } else { + if ("data" in remainingParameters) { + body = remainingParameters.data; + } else { + if (Object.keys(remainingParameters).length) { + body = remainingParameters; + } else { + headers["content-length"] = 0; } - }], - listBlockedUsers: ["GET /orgs/{org}/blocks"], - listForAuthenticatedUser: ["GET /user/orgs"], - listForUser: ["GET /users/{username}/orgs"], - listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"], - listMembers: ["GET /orgs/{org}/members"], - listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"], - listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"], - listPendingInvitations: ["GET /orgs/{org}/invitations"], - listPublicMembers: ["GET /orgs/{org}/public_members"], - listWebhooks: ["GET /orgs/{org}/hooks"], - pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"], - removeMember: ["DELETE /orgs/{org}/members/{username}"], - removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"], - 
removeOutsideCollaborator: ["DELETE /orgs/{org}/outside_collaborators/{username}"], - removePublicMembershipForAuthenticatedUser: ["DELETE /orgs/{org}/public_members/{username}"], - setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"], - setPublicMembershipForAuthenticatedUser: ["PUT /orgs/{org}/public_members/{username}"], - unblockUser: ["DELETE /orgs/{org}/blocks/{username}"], - update: ["PATCH /orgs/{org}"], - updateMembershipForAuthenticatedUser: ["PATCH /user/memberships/orgs/{org}"], - updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"] + } + } // default content-type for JSON if body is set + + + if (!headers["content-type"] && typeof body !== "undefined") { + headers["content-type"] = "application/json; charset=utf-8"; + } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body. + // fetch does not allow to set `content-length` header, but we can set body to an empty string + + + if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { + body = ""; + } // Only return body/request keys if present + + + return Object.assign({ + method, + url, + headers + }, typeof body !== "undefined" ? { + body + } : null, options.request ? { + request: options.request + } : null); +} + +function endpointWithDefaults(defaults, route, options) { + return parse(merge(defaults, route, options)); +} + +function withDefaults(oldDefaults, newDefaults) { + const DEFAULTS = merge(oldDefaults, newDefaults); + const endpoint = endpointWithDefaults.bind(null, DEFAULTS); + return Object.assign(endpoint, { + DEFAULTS, + defaults: withDefaults.bind(null, DEFAULTS), + merge: merge.bind(null, DEFAULTS), + parse + }); +} + +const VERSION = "6.0.3"; + +const userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url. +// So we use RequestParameters and add method as additional required property. 
+ +const DEFAULTS = { + method: "GET", + baseUrl: "https://api.github.com", + headers: { + accept: "application/vnd.github.v3+json", + "user-agent": userAgent }, - projects: { - addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}", { - mediaType: { - previews: ["inertia"] + mediaType: { + format: "", + previews: [] + } +}; + +const endpoint = withDefaults(null, DEFAULTS); + +exports.endpoint = endpoint; +//# sourceMappingURL=index.js.map + + +/***/ }), + +/***/ 8467: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +var request = __webpack_require__(6234); +var universalUserAgent = __webpack_require__(5030); + +const VERSION = "4.5.1"; + +class GraphqlError extends Error { + constructor(request, response) { + const message = response.data.errors[0].message; + super(message); + Object.assign(this, response.data); + this.name = "GraphqlError"; + this.request = request; // Maintains proper stack trace (only available on V8) + + /* istanbul ignore next */ + + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + } + +} + +const NON_VARIABLE_OPTIONS = ["method", "baseUrl", "url", "headers", "request", "query", "mediaType"]; +function graphql(request, query, options) { + options = typeof query === "string" ? 
options = Object.assign({ + query + }, options) : options = query; + const requestOptions = Object.keys(options).reduce((result, key) => { + if (NON_VARIABLE_OPTIONS.includes(key)) { + result[key] = options[key]; + return result; + } + + if (!result.variables) { + result.variables = {}; + } + + result.variables[key] = options[key]; + return result; + }, {}); + return request(requestOptions).then(response => { + if (response.data.errors) { + throw new GraphqlError(requestOptions, { + data: response.data + }); + } + + return response.data.data; + }); +} + +function withDefaults(request$1, newDefaults) { + const newRequest = request$1.defaults(newDefaults); + + const newApi = (query, options) => { + return graphql(newRequest, query, options); + }; + + return Object.assign(newApi, { + defaults: withDefaults.bind(null, newRequest), + endpoint: request.request.endpoint + }); +} + +const graphql$1 = withDefaults(request.request, { + headers: { + "user-agent": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}` + }, + method: "POST", + url: "/graphql" +}); +function withCustomRequest(customRequest) { + return withDefaults(customRequest, { + method: "POST", + url: "/graphql" + }); +} + +exports.graphql = graphql$1; +exports.withCustomRequest = withCustomRequest; +//# sourceMappingURL=index.js.map + + +/***/ }), + +/***/ 4193: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +const VERSION = "2.2.3"; + +/** + * Some “list” response that can be paginated have a different response structure + * + * They have a `total_count` key in the response (search also has `incomplete_results`, + * /installation/repositories also has `repository_selection`), as well as a key with + * the list of the items which name varies from endpoint to endpoint. + * + * Octokit normalizes these responses so that paginated results are always returned following + * the same structure. 
One challenge is that if the list response has only one page, no Link + * header is provided, so this header alone is not sufficient to check wether a response is + * paginated or not. + * + * We check if a "total_count" key is present in the response data, but also make sure that + * a "url" property is not, as the "Get the combined status for a specific ref" endpoint would + * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref + */ +function normalizePaginatedListResponse(response) { + const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data); + if (!responseNeedsNormalization) return response; // keep the additional properties intact as there is currently no other way + // to retrieve the same information. + + const incompleteResults = response.data.incomplete_results; + const repositorySelection = response.data.repository_selection; + const totalCount = response.data.total_count; + delete response.data.incomplete_results; + delete response.data.repository_selection; + delete response.data.total_count; + const namespaceKey = Object.keys(response.data)[0]; + const data = response.data[namespaceKey]; + response.data = data; + + if (typeof incompleteResults !== "undefined") { + response.data.incomplete_results = incompleteResults; + } + + if (typeof repositorySelection !== "undefined") { + response.data.repository_selection = repositorySelection; + } + + response.data.total_count = totalCount; + return response; +} + +function iterator(octokit, route, parameters) { + const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters); + const requestMethod = typeof route === "function" ? 
route : octokit.request; + const method = options.method; + const headers = options.headers; + let url = options.url; + return { + [Symbol.asyncIterator]: () => ({ + next() { + if (!url) { + return Promise.resolve({ + done: true + }); + } + + return requestMethod({ + method, + url, + headers + }).then(normalizePaginatedListResponse).then(response => { + // `response.headers.link` format: + // '; rel="next", ; rel="last"' + // sets `url` to undefined if "next" URL is not present or `link` header is not set + url = ((response.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1]; + return { + value: response + }; + }); } - }], - createCard: ["POST /projects/columns/{column_id}/cards", { - mediaType: { - previews: ["inertia"] + + }) + }; +} + +function paginate(octokit, route, parameters, mapFn) { + if (typeof parameters === "function") { + mapFn = parameters; + parameters = undefined; + } + + return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn); +} + +function gather(octokit, results, iterator, mapFn) { + return iterator.next().then(result => { + if (result.done) { + return results; + } + + let earlyExit = false; + + function done() { + earlyExit = true; + } + + results = results.concat(mapFn ? 
mapFn(result.value, done) : result.value.data); + + if (earlyExit) { + return results; + } + + return gather(octokit, results, iterator, mapFn); + }); +} + +/** + * @param octokit Octokit instance + * @param options Options passed to Octokit constructor + */ + +function paginateRest(octokit) { + return { + paginate: Object.assign(paginate.bind(null, octokit), { + iterator: iterator.bind(null, octokit) + }) + }; +} +paginateRest.VERSION = VERSION; + +exports.paginateRest = paginateRest; +//# sourceMappingURL=index.js.map + + +/***/ }), + +/***/ 3044: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +const Endpoints = { + actions: { + addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], + cancelWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"], + createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"], + createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}"], + createRegistrationTokenForOrg: ["POST /orgs/{org}/actions/runners/registration-token"], + createRegistrationTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/registration-token"], + createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"], + createRemoveTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/remove-token"], + deleteArtifact: ["DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], + deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], + deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}"], + deleteSelfHostedRunnerFromOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}"], + deleteSelfHostedRunnerFromRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}"], + deleteWorkflowRunLogs: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], + downloadArtifact: ["GET 
/repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}"], + downloadJobLogsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs"], + downloadWorkflowRunLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], + getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], + getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"], + getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"], + getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"], + getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"], + getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"], + getSelfHostedRunnerForRepo: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}"], + getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"], + getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"], + getWorkflowRunUsage: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing"], + getWorkflowUsage: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing"], + listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"], + listJobsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"], + listOrgSecrets: ["GET /orgs/{org}/actions/secrets"], + listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"], + listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"], + listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"], + listRunnerApplicationsForRepo: ["GET /repos/{owner}/{repo}/actions/runners/downloads"], + listSelectedReposForOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}/repositories"], + listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"], + listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"], + listWorkflowRunArtifacts: ["GET 
/repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"], + listWorkflowRuns: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"], + listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"], + reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"], + removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], + setSelectedReposForOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories"] + }, + activity: { + checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"], + deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"], + deleteThreadSubscription: ["DELETE /notifications/threads/{thread_id}/subscription"], + getFeeds: ["GET /feeds"], + getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"], + getThread: ["GET /notifications/threads/{thread_id}"], + getThreadSubscriptionForAuthenticatedUser: ["GET /notifications/threads/{thread_id}/subscription"], + listEventsForAuthenticatedUser: ["GET /users/{username}/events"], + listNotificationsForAuthenticatedUser: ["GET /notifications"], + listOrgEventsForAuthenticatedUser: ["GET /users/{username}/events/orgs/{org}"], + listPublicEvents: ["GET /events"], + listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"], + listPublicEventsForUser: ["GET /users/{username}/events/public"], + listPublicOrgEvents: ["GET /orgs/{org}/events"], + listReceivedEventsForUser: ["GET /users/{username}/received_events"], + listReceivedPublicEventsForUser: ["GET /users/{username}/received_events/public"], + listRepoEvents: ["GET /repos/{owner}/{repo}/events"], + listRepoNotificationsForAuthenticatedUser: ["GET /repos/{owner}/{repo}/notifications"], + listReposStarredByAuthenticatedUser: ["GET /user/starred"], + listReposStarredByUser: ["GET /users/{username}/starred"], + listReposWatchedByUser: ["GET /users/{username}/subscriptions"], + listStargazersForRepo: ["GET 
/repos/{owner}/{repo}/stargazers"], + listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"], + listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"], + markNotificationsAsRead: ["PUT /notifications"], + markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"], + markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"], + setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"], + setThreadSubscription: ["PUT /notifications/threads/{thread_id}/subscription"], + starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"], + unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"] + }, + apps: { + addRepoToInstallation: ["PUT /user/installations/{installation_id}/repositories/{repository_id}", { + mediaType: { + previews: ["machine-man"] } }], - createColumn: ["POST /projects/{project_id}/columns", { + checkToken: ["POST /applications/{client_id}/token"], + createContentAttachment: ["POST /content_references/{content_reference_id}/attachments", { mediaType: { - previews: ["inertia"] + previews: ["corsair"] } }], - createForAuthenticatedUser: ["POST /user/projects", { + createFromManifest: ["POST /app-manifests/{code}/conversions"], + createInstallationAccessToken: ["POST /app/installations/{installation_id}/access_tokens", { mediaType: { - previews: ["inertia"] + previews: ["machine-man"] } }], - createForOrg: ["POST /orgs/{org}/projects", { + deleteAuthorization: ["DELETE /applications/{client_id}/grant"], + deleteInstallation: ["DELETE /app/installations/{installation_id}", { mediaType: { - previews: ["inertia"] + previews: ["machine-man"] } }], - createForRepo: ["POST /repos/{owner}/{repo}/projects", { + deleteToken: ["DELETE /applications/{client_id}/token"], + getAuthenticated: ["GET /app", { mediaType: { - previews: ["inertia"] + previews: ["machine-man"] } }], - delete: ["DELETE /projects/{project_id}", { + getBySlug: ["GET /apps/{app_slug}", { mediaType: { - previews: ["inertia"] + 
previews: ["machine-man"] } }], - deleteCard: ["DELETE /projects/columns/cards/{card_id}", { + getInstallation: ["GET /app/installations/{installation_id}", { mediaType: { - previews: ["inertia"] + previews: ["machine-man"] } }], - deleteColumn: ["DELETE /projects/columns/{column_id}", { + getOrgInstallation: ["GET /orgs/{org}/installation", { mediaType: { - previews: ["inertia"] + previews: ["machine-man"] } }], - get: ["GET /projects/{project_id}", { + getRepoInstallation: ["GET /repos/{owner}/{repo}/installation", { mediaType: { - previews: ["inertia"] + previews: ["machine-man"] } }], - getCard: ["GET /projects/columns/cards/{card_id}", { + getSubscriptionPlanForAccount: ["GET /marketplace_listing/accounts/{account_id}"], + getSubscriptionPlanForAccountStubbed: ["GET /marketplace_listing/stubbed/accounts/{account_id}"], + getUserInstallation: ["GET /users/{username}/installation", { mediaType: { - previews: ["inertia"] + previews: ["machine-man"] } }], - getColumn: ["GET /projects/columns/{column_id}", { + listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"], + listAccountsForPlanStubbed: ["GET /marketplace_listing/stubbed/plans/{plan_id}/accounts"], + listInstallationReposForAuthenticatedUser: ["GET /user/installations/{installation_id}/repositories", { mediaType: { - previews: ["inertia"] + previews: ["machine-man"] } }], - getPermissionForUser: ["GET /projects/{project_id}/collaborators/{username}/permission", { + listInstallations: ["GET /app/installations", { mediaType: { - previews: ["inertia"] + previews: ["machine-man"] } }], - listCards: ["GET /projects/columns/{column_id}/cards", { + listInstallationsForAuthenticatedUser: ["GET /user/installations", { mediaType: { - previews: ["inertia"] + previews: ["machine-man"] } }], - listCollaborators: ["GET /projects/{project_id}/collaborators", { + listPlans: ["GET /marketplace_listing/plans"], + listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"], + 
listReposAccessibleToInstallation: ["GET /installation/repositories", { mediaType: { - previews: ["inertia"] + previews: ["machine-man"] } }], - listColumns: ["GET /projects/{project_id}/columns", { + listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"], + listSubscriptionsForAuthenticatedUserStubbed: ["GET /user/marketplace_purchases/stubbed"], + removeRepoFromInstallation: ["DELETE /user/installations/{installation_id}/repositories/{repository_id}", { mediaType: { - previews: ["inertia"] + previews: ["machine-man"] } }], - listForOrg: ["GET /orgs/{org}/projects", { + resetToken: ["PATCH /applications/{client_id}/token"], + revokeInstallationAccessToken: ["DELETE /installation/token"], + suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"], + unsuspendInstallation: ["DELETE /app/installations/{installation_id}/suspended"] + }, + checks: { + create: ["POST /repos/{owner}/{repo}/check-runs", { mediaType: { - previews: ["inertia"] + previews: ["antiope"] } }], - listForRepo: ["GET /repos/{owner}/{repo}/projects", { + createSuite: ["POST /repos/{owner}/{repo}/check-suites", { mediaType: { - previews: ["inertia"] + previews: ["antiope"] } }], - listForUser: ["GET /users/{username}/projects", { + get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}", { mediaType: { - previews: ["inertia"] + previews: ["antiope"] } }], - moveCard: ["POST /projects/columns/cards/{card_id}/moves", { + getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}", { mediaType: { - previews: ["inertia"] + previews: ["antiope"] } }], - moveColumn: ["POST /projects/columns/{column_id}/moves", { + listAnnotations: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", { mediaType: { - previews: ["inertia"] + previews: ["antiope"] } }], - removeCollaborator: ["DELETE /projects/{project_id}/collaborators/{username}", { + listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs", { mediaType: { - previews: ["inertia"] + 
previews: ["antiope"] } }], - update: ["PATCH /projects/{project_id}", { + listForSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", { mediaType: { - previews: ["inertia"] + previews: ["antiope"] } }], - updateCard: ["PATCH /projects/columns/cards/{card_id}", { + listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites", { mediaType: { - previews: ["inertia"] + previews: ["antiope"] } }], - updateColumn: ["PATCH /projects/columns/{column_id}", { + rerequestSuite: ["POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest", { mediaType: { - previews: ["inertia"] + previews: ["antiope"] + } + }], + setSuitesPreferences: ["PATCH /repos/{owner}/{repo}/check-suites/preferences", { + mediaType: { + previews: ["antiope"] + } + }], + update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}", { + mediaType: { + previews: ["antiope"] } }] }, - pulls: { - checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"], - create: ["POST /repos/{owner}/{repo}/pulls"], - createReplyForReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies"], - createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], - createReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments"], - deletePendingReview: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], - deleteReviewComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}"], - dismissReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals"], - get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"], - getReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], - getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"], - list: ["GET /repos/{owner}/{repo}/pulls"], - listCommentsForReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments"], - listCommits: ["GET 
/repos/{owner}/{repo}/pulls/{pull_number}/commits"], - listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"], - listRequestedReviewers: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], - listReviewComments: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/comments"], - listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"], - listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], - merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"], - removeRequestedReviewers: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], - requestReviewers: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], - submitReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events"], - update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"], - updateBranch: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch", { + codeScanning: { + getAlert: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_id}"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"] + }, + codesOfConduct: { + getAllCodesOfConduct: ["GET /codes_of_conduct", { mediaType: { - previews: ["lydian"] + previews: ["scarlet-witch"] } }], - updateReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], - updateReviewComment: ["PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}"] + getConductCode: ["GET /codes_of_conduct/{key}", { + mediaType: { + previews: ["scarlet-witch"] + } + }], + getForRepo: ["GET /repos/{owner}/{repo}/community/code_of_conduct", { + mediaType: { + previews: ["scarlet-witch"] + } + }] }, - rateLimit: { - get: ["GET /rate_limit"] + emojis: { + get: ["GET /emojis"] }, - reactions: { - createForCommitComment: ["POST /repos/{owner}/{repo}/comments/{comment_id}/reactions", { + gists: { + checkIsStarred: ["GET /gists/{gist_id}/star"], + create: ["POST /gists"], + createComment: ["POST /gists/{gist_id}/comments"], + 
delete: ["DELETE /gists/{gist_id}"], + deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"], + fork: ["POST /gists/{gist_id}/forks"], + get: ["GET /gists/{gist_id}"], + getComment: ["GET /gists/{gist_id}/comments/{comment_id}"], + getRevision: ["GET /gists/{gist_id}/{sha}"], + list: ["GET /gists"], + listComments: ["GET /gists/{gist_id}/comments"], + listCommits: ["GET /gists/{gist_id}/commits"], + listForUser: ["GET /users/{username}/gists"], + listForks: ["GET /gists/{gist_id}/forks"], + listPublic: ["GET /gists/public"], + listStarred: ["GET /gists/starred"], + star: ["PUT /gists/{gist_id}/star"], + unstar: ["DELETE /gists/{gist_id}/star"], + update: ["PATCH /gists/{gist_id}"], + updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"] + }, + git: { + createBlob: ["POST /repos/{owner}/{repo}/git/blobs"], + createCommit: ["POST /repos/{owner}/{repo}/git/commits"], + createRef: ["POST /repos/{owner}/{repo}/git/refs"], + createTag: ["POST /repos/{owner}/{repo}/git/tags"], + createTree: ["POST /repos/{owner}/{repo}/git/trees"], + deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"], + getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"], + getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"], + getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"], + getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"], + getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"], + listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"], + updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"] + }, + gitignore: { + getAllTemplates: ["GET /gitignore/templates"], + getTemplate: ["GET /gitignore/templates/{name}"] + }, + interactions: { + getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits", { mediaType: { - previews: ["squirrel-girl"] + previews: ["sombra"] } }], - createForIssue: ["POST /repos/{owner}/{repo}/issues/{issue_number}/reactions", { + getRestrictionsForRepo: ["GET 
/repos/{owner}/{repo}/interaction-limits", { mediaType: { - previews: ["squirrel-girl"] + previews: ["sombra"] } }], - createForIssueComment: ["POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", { + removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits", { mediaType: { - previews: ["squirrel-girl"] + previews: ["sombra"] } }], - createForPullRequestReviewComment: ["POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", { + removeRestrictionsForRepo: ["DELETE /repos/{owner}/{repo}/interaction-limits", { mediaType: { - previews: ["squirrel-girl"] + previews: ["sombra"] } }], - createForTeamDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", { + setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits", { mediaType: { - previews: ["squirrel-girl"] + previews: ["sombra"] } }], - createForTeamDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", { + setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits", { mediaType: { - previews: ["squirrel-girl"] + previews: ["sombra"] + } + }] + }, + issues: { + addAssignees: ["POST /repos/{owner}/{repo}/issues/{issue_number}/assignees"], + addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"], + checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"], + create: ["POST /repos/{owner}/{repo}/issues"], + createComment: ["POST /repos/{owner}/{repo}/issues/{issue_number}/comments"], + createLabel: ["POST /repos/{owner}/{repo}/labels"], + createMilestone: ["POST /repos/{owner}/{repo}/milestones"], + deleteComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}"], + deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"], + deleteMilestone: ["DELETE /repos/{owner}/{repo}/milestones/{milestone_number}"], + get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"], + getComment: ["GET 
/repos/{owner}/{repo}/issues/comments/{comment_id}"], + getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"], + getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"], + getMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}"], + list: ["GET /issues"], + listAssignees: ["GET /repos/{owner}/{repo}/assignees"], + listComments: ["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"], + listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"], + listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"], + listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"], + listEventsForTimeline: ["GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", { + mediaType: { + previews: ["mockingbird"] } }], - deleteForCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}", { + listForAuthenticatedUser: ["GET /user/issues"], + listForOrg: ["GET /orgs/{org}/issues"], + listForRepo: ["GET /repos/{owner}/{repo}/issues"], + listLabelsForMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels"], + listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"], + listLabelsOnIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/labels"], + listMilestones: ["GET /repos/{owner}/{repo}/milestones"], + lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"], + removeAllLabels: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels"], + removeAssignees: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees"], + removeLabel: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}"], + setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"], + unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"], + update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"], + updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"], + updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"], + updateMilestone: 
["PATCH /repos/{owner}/{repo}/milestones/{milestone_number}"] + }, + licenses: { + get: ["GET /licenses/{license}"], + getAllCommonlyUsed: ["GET /licenses"], + getForRepo: ["GET /repos/{owner}/{repo}/license"] + }, + markdown: { + render: ["POST /markdown"], + renderRaw: ["POST /markdown/raw", { + headers: { + "content-type": "text/plain; charset=utf-8" + } + }] + }, + meta: { + get: ["GET /meta"] + }, + migrations: { + cancelImport: ["DELETE /repos/{owner}/{repo}/import"], + deleteArchiveForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/archive", { mediaType: { - previews: ["squirrel-girl"] + previews: ["wyandotte"] } }], - deleteForIssue: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}", { + deleteArchiveForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/archive", { mediaType: { - previews: ["squirrel-girl"] + previews: ["wyandotte"] } }], - deleteForIssueComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}", { + downloadArchiveForOrg: ["GET /orgs/{org}/migrations/{migration_id}/archive", { mediaType: { - previews: ["squirrel-girl"] + previews: ["wyandotte"] } }], - deleteForPullRequestComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}", { + getArchiveForAuthenticatedUser: ["GET /user/migrations/{migration_id}/archive", { mediaType: { - previews: ["squirrel-girl"] + previews: ["wyandotte"] } }], - deleteForTeamDiscussion: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}", { + getCommitAuthors: ["GET /repos/{owner}/{repo}/import/authors"], + getImportStatus: ["GET /repos/{owner}/{repo}/import"], + getLargeFiles: ["GET /repos/{owner}/{repo}/import/large_files"], + getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}", { mediaType: { - previews: ["squirrel-girl"] + previews: ["wyandotte"] } }], - deleteForTeamDiscussionComment: ["DELETE 
/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}", { + getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}", { mediaType: { - previews: ["squirrel-girl"] + previews: ["wyandotte"] } }], - listForCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", { + listForAuthenticatedUser: ["GET /user/migrations", { mediaType: { - previews: ["squirrel-girl"] + previews: ["wyandotte"] } }], - listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", { + listForOrg: ["GET /orgs/{org}/migrations", { mediaType: { - previews: ["squirrel-girl"] + previews: ["wyandotte"] } }], - listForIssueComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", { + listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories", { mediaType: { - previews: ["squirrel-girl"] + previews: ["wyandotte"] } }], - listForPullRequestReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", { + listReposForUser: ["GET /user/{migration_id}/repositories", { mediaType: { - previews: ["squirrel-girl"] + previews: ["wyandotte"] } }], - listForTeamDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", { + mapCommitAuthor: ["PATCH /repos/{owner}/{repo}/import/authors/{author_id}"], + setLfsPreference: ["PATCH /repos/{owner}/{repo}/import/lfs"], + startForAuthenticatedUser: ["POST /user/migrations"], + startForOrg: ["POST /orgs/{org}/migrations"], + startImport: ["PUT /repos/{owner}/{repo}/import"], + unlockRepoForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock", { mediaType: { - previews: ["squirrel-girl"] + previews: ["wyandotte"] } }], - listForTeamDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", { + unlockRepoForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock", { 
mediaType: { - previews: ["squirrel-girl"] + previews: ["wyandotte"] } - }] + }], + updateImport: ["PATCH /repos/{owner}/{repo}/import"] }, - repos: { - acceptInvitation: ["PATCH /user/repository_invitations/{invitation_id}"], - addAppAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { - mapToData: "apps" + orgs: { + blockUser: ["PUT /orgs/{org}/blocks/{username}"], + checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"], + checkMembershipForUser: ["GET /orgs/{org}/members/{username}"], + checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"], + convertMemberToOutsideCollaborator: ["PUT /orgs/{org}/outside_collaborators/{username}"], + createInvitation: ["POST /orgs/{org}/invitations"], + createWebhook: ["POST /orgs/{org}/hooks"], + deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"], + get: ["GET /orgs/{org}"], + getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"], + getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"], + getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"], + list: ["GET /organizations"], + listAppInstallations: ["GET /orgs/{org}/installations", { + mediaType: { + previews: ["machine-man"] + } }], - addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"], - addStatusCheckContexts: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { - mapToData: "contexts" + listBlockedUsers: ["GET /orgs/{org}/blocks"], + listForAuthenticatedUser: ["GET /user/orgs"], + listForUser: ["GET /users/{username}/orgs"], + listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"], + listMembers: ["GET /orgs/{org}/members"], + listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"], + listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"], + listPendingInvitations: ["GET /orgs/{org}/invitations"], + listPublicMembers: ["GET /orgs/{org}/public_members"], + listWebhooks: 
["GET /orgs/{org}/hooks"], + pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"], + removeMember: ["DELETE /orgs/{org}/members/{username}"], + removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"], + removeOutsideCollaborator: ["DELETE /orgs/{org}/outside_collaborators/{username}"], + removePublicMembershipForAuthenticatedUser: ["DELETE /orgs/{org}/public_members/{username}"], + setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"], + setPublicMembershipForAuthenticatedUser: ["PUT /orgs/{org}/public_members/{username}"], + unblockUser: ["DELETE /orgs/{org}/blocks/{username}"], + update: ["PATCH /orgs/{org}"], + updateMembershipForAuthenticatedUser: ["PATCH /user/memberships/orgs/{org}"], + updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"] + }, + projects: { + addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}", { + mediaType: { + previews: ["inertia"] + } }], - addTeamAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { - mapToData: "teams" + createCard: ["POST /projects/columns/{column_id}/cards", { + mediaType: { + previews: ["inertia"] + } }], - addUserAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { - mapToData: "users" + createColumn: ["POST /projects/{project_id}/columns", { + mediaType: { + previews: ["inertia"] + } }], - checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"], - checkVulnerabilityAlerts: ["GET /repos/{owner}/{repo}/vulnerability-alerts", { + createForAuthenticatedUser: ["POST /user/projects", { mediaType: { - previews: ["dorian"] + previews: ["inertia"] } }], - compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"], - createCommitComment: ["POST /repos/{owner}/{repo}/commits/{commit_sha}/comments"], - createCommitSignatureProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { + createForOrg: ["POST 
/orgs/{org}/projects", { mediaType: { - previews: ["zzzax"] + previews: ["inertia"] } }], - createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"], - createDeployKey: ["POST /repos/{owner}/{repo}/keys"], - createDeployment: ["POST /repos/{owner}/{repo}/deployments"], - createDeploymentStatus: ["POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"], - createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"], - createForAuthenticatedUser: ["POST /user/repos"], - createFork: ["POST /repos/{owner}/{repo}/forks"], - createInOrg: ["POST /orgs/{org}/repos"], - createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"], - createPagesSite: ["POST /repos/{owner}/{repo}/pages", { + createForRepo: ["POST /repos/{owner}/{repo}/projects", { mediaType: { - previews: ["switcheroo"] + previews: ["inertia"] } }], - createRelease: ["POST /repos/{owner}/{repo}/releases"], - createUsingTemplate: ["POST /repos/{template_owner}/{template_repo}/generate", { + delete: ["DELETE /projects/{project_id}", { mediaType: { - previews: ["baptiste"] + previews: ["inertia"] } }], - createWebhook: ["POST /repos/{owner}/{repo}/hooks"], - declineInvitation: ["DELETE /user/repository_invitations/{invitation_id}"], - delete: ["DELETE /repos/{owner}/{repo}"], - deleteAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"], - deleteAdminBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], - deleteBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection"], - deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"], - deleteCommitSignatureProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { + deleteCard: ["DELETE /projects/columns/cards/{card_id}", { mediaType: { - previews: ["zzzax"] + previews: ["inertia"] } }], - deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"], - deleteDeployment: 
["DELETE /repos/{owner}/{repo}/deployments/{deployment_id}"], - deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"], - deleteInvitation: ["DELETE /repos/{owner}/{repo}/invitations/{invitation_id}"], - deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages", { + deleteColumn: ["DELETE /projects/columns/{column_id}", { mediaType: { - previews: ["switcheroo"] + previews: ["inertia"] } }], - deletePullRequestReviewProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], - deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"], - deleteReleaseAsset: ["DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}"], - deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"], - disableAutomatedSecurityFixes: ["DELETE /repos/{owner}/{repo}/automated-security-fixes", { + get: ["GET /projects/{project_id}", { mediaType: { - previews: ["london"] + previews: ["inertia"] } }], - disableVulnerabilityAlerts: ["DELETE /repos/{owner}/{repo}/vulnerability-alerts", { + getCard: ["GET /projects/columns/cards/{card_id}", { mediaType: { - previews: ["dorian"] + previews: ["inertia"] } }], - downloadArchive: ["GET /repos/{owner}/{repo}/{archive_format}/{ref}"], - enableAutomatedSecurityFixes: ["PUT /repos/{owner}/{repo}/automated-security-fixes", { + getColumn: ["GET /projects/columns/{column_id}", { mediaType: { - previews: ["london"] + previews: ["inertia"] } }], - enableVulnerabilityAlerts: ["PUT /repos/{owner}/{repo}/vulnerability-alerts", { + getPermissionForUser: ["GET /projects/{project_id}/collaborators/{username}/permission", { mediaType: { - previews: ["dorian"] + previews: ["inertia"] } }], - get: ["GET /repos/{owner}/{repo}"], - getAccessRestrictions: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"], - getAdminBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], - getAllStatusCheckContexts: ["GET 
/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"], - getAllTopics: ["GET /repos/{owner}/{repo}/topics", { + listCards: ["GET /projects/columns/{column_id}/cards", { mediaType: { - previews: ["mercy"] + previews: ["inertia"] } }], - getAppsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"], - getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"], - getBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection"], - getClones: ["GET /repos/{owner}/{repo}/traffic/clones"], - getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"], - getCollaboratorPermissionLevel: ["GET /repos/{owner}/{repo}/collaborators/{username}/permission"], - getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"], - getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"], - getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"], - getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"], - getCommitSignatureProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { + listCollaborators: ["GET /projects/{project_id}/collaborators", { mediaType: { - previews: ["zzzax"] + previews: ["inertia"] } }], - getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"], - getContent: ["GET /repos/{owner}/{repo}/contents/{path}"], - getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"], - getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"], - getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"], - getDeploymentStatus: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}"], - getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"], - getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"], - getPages: ["GET /repos/{owner}/{repo}/pages"], - getPagesBuild: ["GET 
/repos/{owner}/{repo}/pages/builds/{build_id}"], - getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"], - getPullRequestReviewProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], - getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"], - getReadme: ["GET /repos/{owner}/{repo}/readme"], - getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"], - getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"], - getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"], - getStatusChecksProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], - getTeamsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams"], - getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"], - getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"], - getUsersWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users"], - getViews: ["GET /repos/{owner}/{repo}/traffic/views"], - getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"], - listBranches: ["GET /repos/{owner}/{repo}/branches"], - listBranchesForHeadCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head", { + listColumns: ["GET /projects/{project_id}/columns", { mediaType: { - previews: ["groot"] + previews: ["inertia"] } }], - listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"], - listCommentsForCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/comments"], - listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"], - listCommitStatusesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/statuses"], - listCommits: ["GET /repos/{owner}/{repo}/commits"], - listContributors: ["GET /repos/{owner}/{repo}/contributors"], - listDeployKeys: ["GET /repos/{owner}/{repo}/keys"], - listDeploymentStatuses: ["GET 
/repos/{owner}/{repo}/deployments/{deployment_id}/statuses"], - listDeployments: ["GET /repos/{owner}/{repo}/deployments"], - listForAuthenticatedUser: ["GET /user/repos"], - listForOrg: ["GET /orgs/{org}/repos"], - listForUser: ["GET /users/{username}/repos"], - listForks: ["GET /repos/{owner}/{repo}/forks"], - listInvitations: ["GET /repos/{owner}/{repo}/invitations"], - listInvitationsForAuthenticatedUser: ["GET /user/repository_invitations"], - listLanguages: ["GET /repos/{owner}/{repo}/languages"], - listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"], - listPublic: ["GET /repositories"], - listPullRequestsAssociatedWithCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", { + listForOrg: ["GET /orgs/{org}/projects", { mediaType: { - previews: ["groot"] + previews: ["inertia"] } }], - listReleaseAssets: ["GET /repos/{owner}/{repo}/releases/{release_id}/assets"], - listReleases: ["GET /repos/{owner}/{repo}/releases"], - listTags: ["GET /repos/{owner}/{repo}/tags"], - listTeams: ["GET /repos/{owner}/{repo}/teams"], - listWebhooks: ["GET /repos/{owner}/{repo}/hooks"], - merge: ["POST /repos/{owner}/{repo}/merges"], - pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"], - removeAppAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { - mapToData: "apps" + listForRepo: ["GET /repos/{owner}/{repo}/projects", { + mediaType: { + previews: ["inertia"] + } }], - removeCollaborator: ["DELETE /repos/{owner}/{repo}/collaborators/{username}"], - removeStatusCheckContexts: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { - mapToData: "contexts" + listForUser: ["GET /users/{username}/projects", { + mediaType: { + previews: ["inertia"] + } }], - removeStatusCheckProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], - removeTeamAccessRestrictions: ["DELETE 
/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { - mapToData: "teams" + moveCard: ["POST /projects/columns/cards/{card_id}/moves", { + mediaType: { + previews: ["inertia"] + } }], - removeUserAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { - mapToData: "users" + moveColumn: ["POST /projects/columns/{column_id}/moves", { + mediaType: { + previews: ["inertia"] + } }], - replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics", { + removeCollaborator: ["DELETE /projects/{project_id}/collaborators/{username}", { mediaType: { - previews: ["mercy"] + previews: ["inertia"] } }], - requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"], - setAdminBranchProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], - setAppAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { - mapToData: "apps" + update: ["PATCH /projects/{project_id}", { + mediaType: { + previews: ["inertia"] + } }], - setStatusCheckContexts: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { - mapToData: "contexts" + updateCard: ["PATCH /projects/columns/cards/{card_id}", { + mediaType: { + previews: ["inertia"] + } }], - setTeamAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { - mapToData: "teams" - }], - setUserAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { - mapToData: "users" - }], - testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"], - transfer: ["POST /repos/{owner}/{repo}/transfer"], - update: ["PATCH /repos/{owner}/{repo}"], - updateBranchProtection: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection"], - updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"], - updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"], - 
updateInvitation: ["PATCH /repos/{owner}/{repo}/invitations/{invitation_id}"], - updatePullRequestReviewProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], - updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"], - updateReleaseAsset: ["PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}"], - updateStatusCheckPotection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], - updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"], - uploadReleaseAsset: ["POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", { - baseUrl: "https://uploads.github.com" + updateColumn: ["PATCH /projects/columns/{column_id}", { + mediaType: { + previews: ["inertia"] + } }] }, - search: { - code: ["GET /search/code"], - commits: ["GET /search/commits", { + pulls: { + checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"], + create: ["POST /repos/{owner}/{repo}/pulls"], + createReplyForReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies"], + createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], + createReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments"], + deletePendingReview: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], + deleteReviewComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}"], + dismissReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals"], + get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"], + getReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], + getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"], + list: ["GET /repos/{owner}/{repo}/pulls"], + listCommentsForReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments"], + listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"], + 
listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"], + listRequestedReviewers: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], + listReviewComments: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/comments"], + listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"], + listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], + merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"], + removeRequestedReviewers: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], + requestReviewers: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], + submitReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events"], + update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"], + updateBranch: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch", { mediaType: { - previews: ["cloak"] + previews: ["lydian"] } }], - issuesAndPullRequests: ["GET /search/issues"], - labels: ["GET /search/labels"], - repos: ["GET /search/repositories"], - topics: ["GET /search/topics"], - users: ["GET /search/users"] + updateReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], + updateReviewComment: ["PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}"] }, - teams: { - addOrUpdateMembershipForUserInOrg: ["PUT /orgs/{org}/teams/{team_slug}/memberships/{username}"], - addOrUpdateProjectPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}", { + rateLimit: { + get: ["GET /rate_limit"] + }, + reactions: { + createForCommitComment: ["POST /repos/{owner}/{repo}/comments/{comment_id}/reactions", { mediaType: { - previews: ["inertia"] + previews: ["squirrel-girl"] } }], - addOrUpdateRepoPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], - checkPermissionsForProjectInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects/{project_id}", { + createForIssue: ["POST 
/repos/{owner}/{repo}/issues/{issue_number}/reactions", { mediaType: { - previews: ["inertia"] + previews: ["squirrel-girl"] } }], - checkPermissionsForRepoInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], - create: ["POST /orgs/{org}/teams"], - createDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"], - createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"], - deleteDiscussionCommentInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], - deleteDiscussionInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], - deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"], - getByName: ["GET /orgs/{org}/teams/{team_slug}"], - getDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], - getDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], - getMembershipForUserInOrg: ["GET /orgs/{org}/teams/{team_slug}/memberships/{username}"], - list: ["GET /orgs/{org}/teams"], - listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"], - listDiscussionCommentsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"], - listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"], - listForAuthenticatedUser: ["GET /user/teams"], - listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"], - listPendingInvitationsInOrg: ["GET /orgs/{org}/teams/{team_slug}/invitations"], - listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects", { + createForIssueComment: ["POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", { mediaType: { - previews: ["inertia"] + previews: ["squirrel-girl"] } }], - listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"], - removeMembershipForUserInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}"], - 
removeProjectInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}"], - removeRepoInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], - updateDiscussionCommentInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], - updateDiscussionInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], - updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"] + createForPullRequestReviewComment: ["POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + createForTeamDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + createForTeamDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + deleteForCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + deleteForIssue: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + deleteForIssueComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + deleteForPullRequestComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + deleteForTeamDiscussion: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + deleteForTeamDiscussionComment: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}", { + mediaType: { + 
previews: ["squirrel-girl"] + } + }], + listForCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + listForIssueComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + listForPullRequestReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + listForTeamDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + listForTeamDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }] }, - users: { - addEmailForAuthenticated: ["POST /user/emails"], - block: ["PUT /user/blocks/{username}"], - checkBlocked: ["GET /user/blocks/{username}"], - checkFollowingForUser: ["GET /users/{username}/following/{target_user}"], - checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"], - createGpgKeyForAuthenticated: ["POST /user/gpg_keys"], - createPublicSshKeyForAuthenticated: ["POST /user/keys"], - deleteEmailForAuthenticated: ["DELETE /user/emails"], - deleteGpgKeyForAuthenticated: ["DELETE /user/gpg_keys/{gpg_key_id}"], - deletePublicSshKeyForAuthenticated: ["DELETE /user/keys/{key_id}"], - follow: ["PUT /user/following/{username}"], - getAuthenticated: ["GET /user"], - getByUsername: ["GET /users/{username}"], - getContextForUser: ["GET /users/{username}/hovercard"], - getGpgKeyForAuthenticated: ["GET /user/gpg_keys/{gpg_key_id}"], - getPublicSshKeyForAuthenticated: ["GET /user/keys/{key_id}"], - list: ["GET /users"], - listBlockedByAuthenticated: ["GET 
/user/blocks"], - listEmailsForAuthenticated: ["GET /user/emails"], - listFollowedByAuthenticated: ["GET /user/following"], - listFollowersForAuthenticatedUser: ["GET /user/followers"], - listFollowersForUser: ["GET /users/{username}/followers"], - listFollowingForUser: ["GET /users/{username}/following"], - listGpgKeysForAuthenticated: ["GET /user/gpg_keys"], - listGpgKeysForUser: ["GET /users/{username}/gpg_keys"], - listPublicEmailsForAuthenticated: ["GET /user/public_emails"], - listPublicKeysForUser: ["GET /users/{username}/keys"], - listPublicSshKeysForAuthenticated: ["GET /user/keys"], - setPrimaryEmailVisibilityForAuthenticated: ["PATCH /user/email/visibility"], - unblock: ["DELETE /user/blocks/{username}"], - unfollow: ["DELETE /user/following/{username}"], - updateAuthenticated: ["PATCH /user"] + repos: { + acceptInvitation: ["PATCH /user/repository_invitations/{invitation_id}"], + addAppAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { + mapToData: "apps" + }], + addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"], + addStatusCheckContexts: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { + mapToData: "contexts" + }], + addTeamAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { + mapToData: "teams" + }], + addUserAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { + mapToData: "users" + }], + checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"], + checkVulnerabilityAlerts: ["GET /repos/{owner}/{repo}/vulnerability-alerts", { + mediaType: { + previews: ["dorian"] + } + }], + compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"], + createCommitComment: ["POST /repos/{owner}/{repo}/commits/{commit_sha}/comments"], + createCommitSignatureProtection: ["POST 
/repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { + mediaType: { + previews: ["zzzax"] + } + }], + createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"], + createDeployKey: ["POST /repos/{owner}/{repo}/keys"], + createDeployment: ["POST /repos/{owner}/{repo}/deployments"], + createDeploymentStatus: ["POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"], + createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"], + createForAuthenticatedUser: ["POST /user/repos"], + createFork: ["POST /repos/{owner}/{repo}/forks"], + createInOrg: ["POST /orgs/{org}/repos"], + createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"], + createPagesSite: ["POST /repos/{owner}/{repo}/pages", { + mediaType: { + previews: ["switcheroo"] + } + }], + createRelease: ["POST /repos/{owner}/{repo}/releases"], + createUsingTemplate: ["POST /repos/{template_owner}/{template_repo}/generate", { + mediaType: { + previews: ["baptiste"] + } + }], + createWebhook: ["POST /repos/{owner}/{repo}/hooks"], + declineInvitation: ["DELETE /user/repository_invitations/{invitation_id}"], + delete: ["DELETE /repos/{owner}/{repo}"], + deleteAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"], + deleteAdminBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], + deleteBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection"], + deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"], + deleteCommitSignatureProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { + mediaType: { + previews: ["zzzax"] + } + }], + deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"], + deleteDeployment: ["DELETE /repos/{owner}/{repo}/deployments/{deployment_id}"], + deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"], + deleteInvitation: ["DELETE 
/repos/{owner}/{repo}/invitations/{invitation_id}"], + deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages", { + mediaType: { + previews: ["switcheroo"] + } + }], + deletePullRequestReviewProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], + deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"], + deleteReleaseAsset: ["DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}"], + deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"], + disableAutomatedSecurityFixes: ["DELETE /repos/{owner}/{repo}/automated-security-fixes", { + mediaType: { + previews: ["london"] + } + }], + disableVulnerabilityAlerts: ["DELETE /repos/{owner}/{repo}/vulnerability-alerts", { + mediaType: { + previews: ["dorian"] + } + }], + downloadArchive: ["GET /repos/{owner}/{repo}/{archive_format}/{ref}"], + enableAutomatedSecurityFixes: ["PUT /repos/{owner}/{repo}/automated-security-fixes", { + mediaType: { + previews: ["london"] + } + }], + enableVulnerabilityAlerts: ["PUT /repos/{owner}/{repo}/vulnerability-alerts", { + mediaType: { + previews: ["dorian"] + } + }], + get: ["GET /repos/{owner}/{repo}"], + getAccessRestrictions: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"], + getAdminBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], + getAllStatusCheckContexts: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"], + getAllTopics: ["GET /repos/{owner}/{repo}/topics", { + mediaType: { + previews: ["mercy"] + } + }], + getAppsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"], + getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"], + getBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection"], + getClones: ["GET /repos/{owner}/{repo}/traffic/clones"], + getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"], + 
getCollaboratorPermissionLevel: ["GET /repos/{owner}/{repo}/collaborators/{username}/permission"], + getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"], + getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"], + getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"], + getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"], + getCommitSignatureProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { + mediaType: { + previews: ["zzzax"] + } + }], + getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"], + getContent: ["GET /repos/{owner}/{repo}/contents/{path}"], + getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"], + getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"], + getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"], + getDeploymentStatus: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}"], + getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"], + getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"], + getPages: ["GET /repos/{owner}/{repo}/pages"], + getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"], + getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"], + getPullRequestReviewProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], + getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"], + getReadme: ["GET /repos/{owner}/{repo}/readme"], + getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"], + getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"], + getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"], + getStatusChecksProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], + getTeamsWithAccessToProtectedBranch: ["GET 
/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams"], + getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"], + getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"], + getUsersWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users"], + getViews: ["GET /repos/{owner}/{repo}/traffic/views"], + getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"], + listBranches: ["GET /repos/{owner}/{repo}/branches"], + listBranchesForHeadCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head", { + mediaType: { + previews: ["groot"] + } + }], + listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"], + listCommentsForCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/comments"], + listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"], + listCommitStatusesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/statuses"], + listCommits: ["GET /repos/{owner}/{repo}/commits"], + listContributors: ["GET /repos/{owner}/{repo}/contributors"], + listDeployKeys: ["GET /repos/{owner}/{repo}/keys"], + listDeploymentStatuses: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"], + listDeployments: ["GET /repos/{owner}/{repo}/deployments"], + listForAuthenticatedUser: ["GET /user/repos"], + listForOrg: ["GET /orgs/{org}/repos"], + listForUser: ["GET /users/{username}/repos"], + listForks: ["GET /repos/{owner}/{repo}/forks"], + listInvitations: ["GET /repos/{owner}/{repo}/invitations"], + listInvitationsForAuthenticatedUser: ["GET /user/repository_invitations"], + listLanguages: ["GET /repos/{owner}/{repo}/languages"], + listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"], + listPublic: ["GET /repositories"], + listPullRequestsAssociatedWithCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", { + mediaType: { + previews: ["groot"] + } + }], + listReleaseAssets: ["GET 
/repos/{owner}/{repo}/releases/{release_id}/assets"], + listReleases: ["GET /repos/{owner}/{repo}/releases"], + listTags: ["GET /repos/{owner}/{repo}/tags"], + listTeams: ["GET /repos/{owner}/{repo}/teams"], + listWebhooks: ["GET /repos/{owner}/{repo}/hooks"], + merge: ["POST /repos/{owner}/{repo}/merges"], + pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"], + removeAppAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { + mapToData: "apps" + }], + removeCollaborator: ["DELETE /repos/{owner}/{repo}/collaborators/{username}"], + removeStatusCheckContexts: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { + mapToData: "contexts" + }], + removeStatusCheckProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], + removeTeamAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { + mapToData: "teams" + }], + removeUserAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { + mapToData: "users" + }], + replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics", { + mediaType: { + previews: ["mercy"] + } + }], + requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"], + setAdminBranchProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], + setAppAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { + mapToData: "apps" + }], + setStatusCheckContexts: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { + mapToData: "contexts" + }], + setTeamAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { + mapToData: "teams" + }], + setUserAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { + 
mapToData: "users" + }], + testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"], + transfer: ["POST /repos/{owner}/{repo}/transfer"], + update: ["PATCH /repos/{owner}/{repo}"], + updateBranchProtection: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection"], + updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"], + updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"], + updateInvitation: ["PATCH /repos/{owner}/{repo}/invitations/{invitation_id}"], + updatePullRequestReviewProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], + updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"], + updateReleaseAsset: ["PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}"], + updateStatusCheckPotection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], + updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"], + uploadReleaseAsset: ["POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", { + baseUrl: "https://uploads.github.com" + }] + }, + search: { + code: ["GET /search/code"], + commits: ["GET /search/commits", { + mediaType: { + previews: ["cloak"] + } + }], + issuesAndPullRequests: ["GET /search/issues"], + labels: ["GET /search/labels"], + repos: ["GET /search/repositories"], + topics: ["GET /search/topics"], + users: ["GET /search/users"] + }, + teams: { + addOrUpdateMembershipForUserInOrg: ["PUT /orgs/{org}/teams/{team_slug}/memberships/{username}"], + addOrUpdateProjectPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}", { + mediaType: { + previews: ["inertia"] + } + }], + addOrUpdateRepoPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], + checkPermissionsForProjectInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects/{project_id}", { + mediaType: { + previews: ["inertia"] + } + }], + checkPermissionsForRepoInOrg: ["GET 
/orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], + create: ["POST /orgs/{org}/teams"], + createDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"], + createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"], + deleteDiscussionCommentInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], + deleteDiscussionInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], + deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"], + getByName: ["GET /orgs/{org}/teams/{team_slug}"], + getDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], + getDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], + getMembershipForUserInOrg: ["GET /orgs/{org}/teams/{team_slug}/memberships/{username}"], + list: ["GET /orgs/{org}/teams"], + listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"], + listDiscussionCommentsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"], + listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"], + listForAuthenticatedUser: ["GET /user/teams"], + listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"], + listPendingInvitationsInOrg: ["GET /orgs/{org}/teams/{team_slug}/invitations"], + listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects", { + mediaType: { + previews: ["inertia"] + } + }], + listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"], + removeMembershipForUserInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}"], + removeProjectInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}"], + removeRepoInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], + updateDiscussionCommentInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], + 
updateDiscussionInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], + updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"] + }, + users: { + addEmailForAuthenticated: ["POST /user/emails"], + block: ["PUT /user/blocks/{username}"], + checkBlocked: ["GET /user/blocks/{username}"], + checkFollowingForUser: ["GET /users/{username}/following/{target_user}"], + checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"], + createGpgKeyForAuthenticated: ["POST /user/gpg_keys"], + createPublicSshKeyForAuthenticated: ["POST /user/keys"], + deleteEmailForAuthenticated: ["DELETE /user/emails"], + deleteGpgKeyForAuthenticated: ["DELETE /user/gpg_keys/{gpg_key_id}"], + deletePublicSshKeyForAuthenticated: ["DELETE /user/keys/{key_id}"], + follow: ["PUT /user/following/{username}"], + getAuthenticated: ["GET /user"], + getByUsername: ["GET /users/{username}"], + getContextForUser: ["GET /users/{username}/hovercard"], + getGpgKeyForAuthenticated: ["GET /user/gpg_keys/{gpg_key_id}"], + getPublicSshKeyForAuthenticated: ["GET /user/keys/{key_id}"], + list: ["GET /users"], + listBlockedByAuthenticated: ["GET /user/blocks"], + listEmailsForAuthenticated: ["GET /user/emails"], + listFollowedByAuthenticated: ["GET /user/following"], + listFollowersForAuthenticatedUser: ["GET /user/followers"], + listFollowersForUser: ["GET /users/{username}/followers"], + listFollowingForUser: ["GET /users/{username}/following"], + listGpgKeysForAuthenticated: ["GET /user/gpg_keys"], + listGpgKeysForUser: ["GET /users/{username}/gpg_keys"], + listPublicEmailsForAuthenticated: ["GET /user/public_emails"], + listPublicKeysForUser: ["GET /users/{username}/keys"], + listPublicSshKeysForAuthenticated: ["GET /user/keys"], + setPrimaryEmailVisibilityForAuthenticated: ["PATCH /user/email/visibility"], + unblock: ["DELETE /user/blocks/{username}"], + unfollow: ["DELETE /user/following/{username}"], + updateAuthenticated: ["PATCH /user"] + } +}; + +const VERSION = "4.0.0"; 
+ +function endpointsToMethods(octokit, endpointsMap) { + const newMethods = {}; + + for (const [scope, endpoints] of Object.entries(endpointsMap)) { + for (const [methodName, endpoint] of Object.entries(endpoints)) { + const [route, defaults, decorations] = endpoint; + const [method, url] = route.split(/ /); + const endpointDefaults = Object.assign({ + method, + url + }, defaults); + + if (!newMethods[scope]) { + newMethods[scope] = {}; + } + + const scopeMethods = newMethods[scope]; + + if (decorations) { + scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations); + continue; + } + + scopeMethods[methodName] = octokit.request.defaults(endpointDefaults); + } + } + + return newMethods; +} + +function decorate(octokit, scope, methodName, defaults, decorations) { + const requestWithDefaults = octokit.request.defaults(defaults); + /* istanbul ignore next */ + + function withDecorations(...args) { + // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 + let options = requestWithDefaults.endpoint.merge(...args); // There are currently no other decorations than `.mapToData` + + if (decorations.mapToData) { + options = Object.assign({}, options, { + data: options[decorations.mapToData], + [decorations.mapToData]: undefined + }); + return requestWithDefaults(options); + } + + if (decorations.renamed) { + const [newScope, newMethodName] = decorations.renamed; + octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`); + } + + if (decorations.deprecated) { + octokit.log.warn(decorations.deprecated); + } + + if (decorations.renamedParameters) { + // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 + const options = requestWithDefaults.endpoint.merge(...args); + + for (const [name, alias] of Object.entries(decorations.renamedParameters)) { + if (name in options) { + octokit.log.warn(`"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". 
Use "${alias}" instead`); + + if (!(alias in options)) { + options[alias] = options[name]; + } + + delete options[name]; + } + } + + return requestWithDefaults(options); + } // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 + + + return requestWithDefaults(...args); + } + + return Object.assign(withDecorations, requestWithDefaults); +} + +/** + * This plugin is a 1:1 copy of internal @octokit/rest plugins. The primary + * goal is to rebuild @octokit/rest on top of @octokit/core. Once that is + * done, we will remove the registerEndpoints methods and return the methods + * directly as with the other plugins. At that point we will also remove the + * legacy workarounds and deprecations. + * + * See the plan at + * https://github.com/octokit/plugin-rest-endpoint-methods.js/pull/1 + */ + +function restEndpointMethods(octokit) { + return endpointsToMethods(octokit, Endpoints); +} +restEndpointMethods.VERSION = VERSION; + +exports.restEndpointMethods = restEndpointMethods; +//# sourceMappingURL=index.js.map + + +/***/ }), + +/***/ 537: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } + +var deprecation = __webpack_require__(8932); +var once = _interopDefault(__webpack_require__(1223)); + +const logOnce = once(deprecation => console.warn(deprecation)); +/** + * Error with extra properties to help with debugging + */ + +class RequestError extends Error { + constructor(message, statusCode, options) { + super(message); // Maintains proper stack trace (only available on V8) + + /* istanbul ignore next */ + + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + + this.name = "HttpError"; + this.status = statusCode; + Object.defineProperty(this, "code", { + get() { + logOnce(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`.")); + return statusCode; + } + + }); + this.headers = options.headers || {}; // redact request credentials without mutating original request options + + const requestCopy = Object.assign({}, options.request); + + if (options.request.headers.authorization) { + requestCopy.headers = Object.assign({}, options.request.headers, { + authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]") + }); + } + + requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit + // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications + .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended + // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header + .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); + this.request = requestCopy; + } + +} + +exports.RequestError = RequestError; +//# sourceMappingURL=index.js.map + + +/***/ }), + +/***/ 6234: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + 
+function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } + +var endpoint = __webpack_require__(9440); +var universalUserAgent = __webpack_require__(5030); +var isPlainObject = _interopDefault(__webpack_require__(8840)); +var nodeFetch = _interopDefault(__webpack_require__(467)); +var requestError = __webpack_require__(537); + +const VERSION = "5.4.5"; + +function getBufferResponse(response) { + return response.arrayBuffer(); +} + +function fetchWrapper(requestOptions) { + if (isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { + requestOptions.body = JSON.stringify(requestOptions.body); + } + + let headers = {}; + let status; + let url; + const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch; + return fetch(requestOptions.url, Object.assign({ + method: requestOptions.method, + body: requestOptions.body, + headers: requestOptions.headers, + redirect: requestOptions.redirect + }, requestOptions.request)).then(response => { + url = response.url; + status = response.status; + + for (const keyAndValue of response.headers) { + headers[keyAndValue[0]] = keyAndValue[1]; + } + + if (status === 204 || status === 205) { + return; + } // GitHub API returns 200 for HEAD requests + + + if (requestOptions.method === "HEAD") { + if (status < 400) { + return; + } + + throw new requestError.RequestError(response.statusText, status, { + headers, + request: requestOptions + }); + } + + if (status === 304) { + throw new requestError.RequestError("Not modified", status, { + headers, + request: requestOptions + }); + } + + if (status >= 400) { + return response.text().then(message => { + const error = new requestError.RequestError(message, status, { + headers, + request: requestOptions + }); + + try { + let responseBody = JSON.parse(error.message); + Object.assign(error, responseBody); + let errors = responseBody.errors; // Assumption `errors` would always be in Array format + + 
error.message = error.message + ": " + errors.map(JSON.stringify).join(", "); + } catch (e) {// ignore, see octokit/rest.js#684 + } + + throw error; + }); + } + + const contentType = response.headers.get("content-type"); + + if (/application\/json/.test(contentType)) { + return response.json(); + } + + if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { + return response.text(); + } + + return getBufferResponse(response); + }).then(data => { + return { + status, + url, + headers, + data + }; + }).catch(error => { + if (error instanceof requestError.RequestError) { + throw error; + } + + throw new requestError.RequestError(error.message, 500, { + headers, + request: requestOptions + }); + }); +} + +function withDefaults(oldEndpoint, newDefaults) { + const endpoint = oldEndpoint.defaults(newDefaults); + + const newApi = function (route, parameters) { + const endpointOptions = endpoint.merge(route, parameters); + + if (!endpointOptions.request || !endpointOptions.request.hook) { + return fetchWrapper(endpoint.parse(endpointOptions)); + } + + const request = (route, parameters) => { + return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters))); + }; + + Object.assign(request, { + endpoint, + defaults: withDefaults.bind(null, endpoint) + }); + return endpointOptions.request.hook(request, endpointOptions); + }; + + return Object.assign(newApi, { + endpoint, + defaults: withDefaults.bind(null, endpoint) + }); +} + +const request = withDefaults(endpoint.endpoint, { + headers: { + "user-agent": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}` + } +}); + +exports.request = request; +//# sourceMappingURL=index.js.map + + +/***/ }), + +/***/ 3682: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +var register = __webpack_require__(4670) +var addHook = __webpack_require__(5549) +var removeHook = __webpack_require__(6819) + +// bind with array of arguments: https://stackoverflow.com/a/21792913 +var bind = 
Function.bind +var bindable = bind.bind(bind) + +function bindApi (hook, state, name) { + var removeHookRef = bindable(removeHook, null).apply(null, name ? [state, name] : [state]) + hook.api = { remove: removeHookRef } + hook.remove = removeHookRef + + ;['before', 'error', 'after', 'wrap'].forEach(function (kind) { + var args = name ? [state, kind, name] : [state, kind] + hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args) + }) +} + +function HookSingular () { + var singularHookName = 'h' + var singularHookState = { + registry: {} + } + var singularHook = register.bind(null, singularHookState, singularHookName) + bindApi(singularHook, singularHookState, singularHookName) + return singularHook +} + +function HookCollection () { + var state = { + registry: {} + } + + var hook = register.bind(null, state) + bindApi(hook, state) + + return hook +} + +var collectionHookDeprecationMessageDisplayed = false +function Hook () { + if (!collectionHookDeprecationMessageDisplayed) { + console.warn('[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". 
Read more: https://git.io/upgrade-before-after-hook-to-1.4') + collectionHookDeprecationMessageDisplayed = true + } + return HookCollection() +} + +Hook.Singular = HookSingular.bind() +Hook.Collection = HookCollection.bind() + +module.exports = Hook +// expose constructors as a named property for TypeScript +module.exports.Hook = Hook +module.exports.Singular = Hook.Singular +module.exports.Collection = Hook.Collection + + +/***/ }), + +/***/ 5549: +/***/ ((module) => { + +module.exports = addHook + +function addHook (state, kind, name, hook) { + var orig = hook + if (!state.registry[name]) { + state.registry[name] = [] + } + + if (kind === 'before') { + hook = function (method, options) { + return Promise.resolve() + .then(orig.bind(null, options)) + .then(method.bind(null, options)) + } + } + + if (kind === 'after') { + hook = function (method, options) { + var result + return Promise.resolve() + .then(method.bind(null, options)) + .then(function (result_) { + result = result_ + return orig(result, options) + }) + .then(function () { + return result + }) + } + } + + if (kind === 'error') { + hook = function (method, options) { + return Promise.resolve() + .then(method.bind(null, options)) + .catch(function (error) { + return orig(error, options) + }) + } } -}; -const VERSION = "4.0.0"; + state.registry[name].push({ + hook: hook, + orig: orig + }) +} -function endpointsToMethods(octokit, endpointsMap) { - const newMethods = {}; - for (const [scope, endpoints] of Object.entries(endpointsMap)) { - for (const [methodName, endpoint] of Object.entries(endpoints)) { - const [route, defaults, decorations] = endpoint; - const [method, url] = route.split(/ /); - const endpointDefaults = Object.assign({ - method, - url - }, defaults); +/***/ }), - if (!newMethods[scope]) { - newMethods[scope] = {}; - } +/***/ 4670: +/***/ ((module) => { - const scopeMethods = newMethods[scope]; +module.exports = register - if (decorations) { - scopeMethods[methodName] = decorate(octokit, 
scope, methodName, endpointDefaults, decorations); - continue; +function register (state, name, method, options) { + if (typeof method !== 'function') { + throw new Error('method for before hook must be a function') + } + + if (!options) { + options = {} + } + + if (Array.isArray(name)) { + return name.reverse().reduce(function (callback, name) { + return register.bind(null, state, name, callback, options) + }, method)() + } + + return Promise.resolve() + .then(function () { + if (!state.registry[name]) { + return method(options) } - scopeMethods[methodName] = octokit.request.defaults(endpointDefaults); - } + return (state.registry[name]).reduce(function (method, registered) { + return registered.hook.bind(null, method, options) + }, method)() + }) +} + + +/***/ }), + +/***/ 6819: +/***/ ((module) => { + +module.exports = removeHook + +function removeHook (state, name, method) { + if (!state.registry[name]) { + return } - return newMethods; + var index = state.registry[name] + .map(function (registered) { return registered.orig }) + .indexOf(method) + + if (index === -1) { + return + } + + state.registry[name].splice(index, 1) } -function decorate(octokit, scope, methodName, defaults, decorations) { - const requestWithDefaults = octokit.request.defaults(defaults); - /* istanbul ignore next */ - function withDecorations(...args) { - // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 - let options = requestWithDefaults.endpoint.merge(...args); // There are currently no other decorations than `.mapToData` +/***/ }), - if (decorations.mapToData) { - options = Object.assign({}, options, { - data: options[decorations.mapToData], - [decorations.mapToData]: undefined - }); - return requestWithDefaults(options); +/***/ 2746: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +"use strict"; + + +const cp = __webpack_require__(3129); +const parse = __webpack_require__(6855); +const enoent = __webpack_require__(4101); + +function 
spawn(command, args, options) { + // Parse the arguments + const parsed = parse(command, args, options); + + // Spawn the child process + const spawned = cp.spawn(parsed.command, parsed.args, parsed.options); + + // Hook into child process "exit" event to emit an error if the command + // does not exists, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16 + enoent.hookChildProcess(spawned, parsed); + + return spawned; +} + +function spawnSync(command, args, options) { + // Parse the arguments + const parsed = parse(command, args, options); + + // Spawn the child process + const result = cp.spawnSync(parsed.command, parsed.args, parsed.options); + + // Analyze if the command does not exist, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16 + result.error = result.error || enoent.verifyENOENTSync(result.status, parsed); + + return result; +} + +module.exports = spawn; +module.exports.spawn = spawn; +module.exports.sync = spawnSync; + +module.exports._parse = parse; +module.exports._enoent = enoent; + + +/***/ }), + +/***/ 4101: +/***/ ((module) => { + +"use strict"; + + +const isWin = process.platform === 'win32'; + +function notFoundError(original, syscall) { + return Object.assign(new Error(`${syscall} ${original.command} ENOENT`), { + code: 'ENOENT', + errno: 'ENOENT', + syscall: `${syscall} ${original.command}`, + path: original.command, + spawnargs: original.args, + }); +} + +function hookChildProcess(cp, parsed) { + if (!isWin) { + return; + } + + const originalEmit = cp.emit; + + cp.emit = function (name, arg1) { + // If emitting "exit" event and exit code is 1, we need to check if + // the command exists and emit an "error" instead + // See https://github.com/IndigoUnited/node-cross-spawn/issues/16 + if (name === 'exit') { + const err = verifyENOENT(arg1, parsed, 'spawn'); + + if (err) { + return originalEmit.call(cp, 'error', err); + } + } + + return originalEmit.apply(cp, arguments); // eslint-disable-line prefer-rest-params + }; 
+} + +function verifyENOENT(status, parsed) { + if (isWin && status === 1 && !parsed.file) { + return notFoundError(parsed.original, 'spawn'); + } + + return null; +} + +function verifyENOENTSync(status, parsed) { + if (isWin && status === 1 && !parsed.file) { + return notFoundError(parsed.original, 'spawnSync'); + } + + return null; +} + +module.exports = { + hookChildProcess, + verifyENOENT, + verifyENOENTSync, + notFoundError, +}; + + +/***/ }), + +/***/ 6855: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +"use strict"; + + +const path = __webpack_require__(5622); +const niceTry = __webpack_require__(8560); +const resolveCommand = __webpack_require__(7274); +const escape = __webpack_require__(4274); +const readShebang = __webpack_require__(1252); +const semver = __webpack_require__(5911); + +const isWin = process.platform === 'win32'; +const isExecutableRegExp = /\.(?:com|exe)$/i; +const isCmdShimRegExp = /node_modules[\\/].bin[\\/][^\\/]+\.cmd$/i; + +// `options.shell` is supported in Node ^4.8.0, ^5.7.0 and >= 6.0.0 +const supportsShellOption = niceTry(() => semver.satisfies(process.version, '^4.8.0 || ^5.7.0 || >= 6.0.0', true)) || false; + +function detectShebang(parsed) { + parsed.file = resolveCommand(parsed); + + const shebang = parsed.file && readShebang(parsed.file); + + if (shebang) { + parsed.args.unshift(parsed.file); + parsed.command = shebang; + + return resolveCommand(parsed); + } + + return parsed.file; +} + +function parseNonShell(parsed) { + if (!isWin) { + return parsed; } - if (decorations.renamed) { - const [newScope, newMethodName] = decorations.renamed; - octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`); - } + // Detect & add support for shebangs + const commandFile = detectShebang(parsed); + + // We don't need a shell if the command filename is an executable + const needsShell = !isExecutableRegExp.test(commandFile); + + // If a shell is required, use 
cmd.exe and take care of escaping everything correctly + // Note that `forceShell` is an hidden option used only in tests + if (parsed.options.forceShell || needsShell) { + // Need to double escape meta chars if the command is a cmd-shim located in `node_modules/.bin/` + // The cmd-shim simply calls execute the package bin file with NodeJS, proxying any argument + // Because the escape of metachars with ^ gets interpreted when the cmd.exe is first called, + // we need to double escape them + const needsDoubleEscapeMetaChars = isCmdShimRegExp.test(commandFile); + + // Normalize posix paths into OS compatible paths (e.g.: foo/bar -> foo\bar) + // This is necessary otherwise it will always fail with ENOENT in those cases + parsed.command = path.normalize(parsed.command); - if (decorations.deprecated) { - octokit.log.warn(decorations.deprecated); + // Escape command & arguments + parsed.command = escape.command(parsed.command); + parsed.args = parsed.args.map((arg) => escape.argument(arg, needsDoubleEscapeMetaChars)); + + const shellCommand = [parsed.command].concat(parsed.args).join(' '); + + parsed.args = ['/d', '/s', '/c', `"${shellCommand}"`]; + parsed.command = process.env.comspec || 'cmd.exe'; + parsed.options.windowsVerbatimArguments = true; // Tell node's spawn that the arguments are already escaped } - if (decorations.renamedParameters) { - // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 - const options = requestWithDefaults.endpoint.merge(...args); + return parsed; +} - for (const [name, alias] of Object.entries(decorations.renamedParameters)) { - if (name in options) { - octokit.log.warn(`"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". 
Use "${alias}" instead`); +function parseShell(parsed) { + // If node supports the shell option, there's no need to mimic its behavior + if (supportsShellOption) { + return parsed; + } - if (!(alias in options)) { - options[alias] = options[name]; - } + // Mimic node shell option + // See https://github.com/nodejs/node/blob/b9f6a2dc059a1062776133f3d4fd848c4da7d150/lib/child_process.js#L335 + const shellCommand = [parsed.command].concat(parsed.args).join(' '); - delete options[name]; + if (isWin) { + parsed.command = typeof parsed.options.shell === 'string' ? parsed.options.shell : process.env.comspec || 'cmd.exe'; + parsed.args = ['/d', '/s', '/c', `"${shellCommand}"`]; + parsed.options.windowsVerbatimArguments = true; // Tell node's spawn that the arguments are already escaped + } else { + if (typeof parsed.options.shell === 'string') { + parsed.command = parsed.options.shell; + } else if (process.platform === 'android') { + parsed.command = '/system/bin/sh'; + } else { + parsed.command = '/bin/sh'; } - } - return requestWithDefaults(options); - } // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 + parsed.args = ['-c', shellCommand]; + } + return parsed; +} - return requestWithDefaults(...args); - } +function parse(command, args, options) { + // Normalize arguments, similar to nodejs + if (args && !Array.isArray(args)) { + options = args; + args = null; + } - return Object.assign(withDecorations, requestWithDefaults); -} + args = args ? args.slice(0) : []; // Clone array to avoid changing the original + options = Object.assign({}, options); // Clone object to avoid changing the original -/** - * This plugin is a 1:1 copy of internal @octokit/rest plugins. The primary - * goal is to rebuild @octokit/rest on top of @octokit/core. Once that is - * done, we will remove the registerEndpoints methods and return the methods - * directly as with the other plugins. At that point we will also remove the - * legacy workarounds and deprecations. 
- * - * See the plan at - * https://github.com/octokit/plugin-rest-endpoint-methods.js/pull/1 - */ + // Build our parsed object + const parsed = { + command, + args, + options, + file: undefined, + original: { + command, + args, + }, + }; -function restEndpointMethods(octokit) { - return endpointsToMethods(octokit, Endpoints); + // Delegate further parsing to shell or non-shell + return options.shell ? parseShell(parsed) : parseNonShell(parsed); } -restEndpointMethods.VERSION = VERSION; -exports.restEndpointMethods = restEndpointMethods; -//# sourceMappingURL=index.js.map +module.exports = parse; /***/ }), -/***/ 537: -/***/ ((__unused_webpack_module, exports, __webpack_require__) => { +/***/ 4274: +/***/ ((module) => { "use strict"; -Object.defineProperty(exports, "__esModule", ({ value: true })); +// See http://www.robvanderwoude.com/escapechars.php +const metaCharsRegExp = /([()\][%!^"`<>&|;, *?])/g; -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } +function escapeCommand(arg) { + // Escape meta chars + arg = arg.replace(metaCharsRegExp, '^$1'); -var deprecation = __webpack_require__(8932); -var once = _interopDefault(__webpack_require__(1223)); + return arg; +} -const logOnce = once(deprecation => console.warn(deprecation)); -/** - * Error with extra properties to help with debugging - */ +function escapeArgument(arg, doubleEscapeMetaChars) { + // Convert to string + arg = `${arg}`; -class RequestError extends Error { - constructor(message, statusCode, options) { - super(message); // Maintains proper stack trace (only available on V8) + // Algorithm below is based on https://qntm.org/cmd - /* istanbul ignore next */ + // Sequence of backslashes followed by a double quote: + // double up all the backslashes and escape the double quote + arg = arg.replace(/(\\*)"/g, '$1$1\\"'); - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); - } + // Sequence of backslashes followed by the end of the string + // (which will become a double quote later): + // double up all the backslashes + arg = arg.replace(/(\\*)$/, '$1$1'); - this.name = "HttpError"; - this.status = statusCode; - Object.defineProperty(this, "code", { - get() { - logOnce(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`.")); - return statusCode; - } + // All other backslashes occur literally - }); - this.headers = options.headers || {}; // redact request credentials without mutating original request options + // Quote the whole thing: + arg = `"${arg}"`; - const requestCopy = Object.assign({}, options.request); + // Escape meta chars + arg = arg.replace(metaCharsRegExp, '^$1'); - if (options.request.headers.authorization) { - requestCopy.headers = Object.assign({}, options.request.headers, { - authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]") - }); + // Double escape meta chars if necessary + if (doubleEscapeMetaChars) 
{ + arg = arg.replace(metaCharsRegExp, '^$1'); } - requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit - // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications - .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended - // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header - .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); - this.request = requestCopy; - } - + return arg; } -exports.RequestError = RequestError; -//# sourceMappingURL=index.js.map +module.exports.command = escapeCommand; +module.exports.argument = escapeArgument; /***/ }), -/***/ 6234: -/***/ ((__unused_webpack_module, exports, __webpack_require__) => { +/***/ 1252: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { "use strict"; -Object.defineProperty(exports, "__esModule", ({ value: true })); - -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } - -var endpoint = __webpack_require__(9440); -var universalUserAgent = __webpack_require__(5030); -var isPlainObject = _interopDefault(__webpack_require__(8840)); -var nodeFetch = _interopDefault(__webpack_require__(467)); -var requestError = __webpack_require__(537); - -const VERSION = "5.4.5"; - -function getBufferResponse(response) { - return response.arrayBuffer(); -} - -function fetchWrapper(requestOptions) { - if (isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { - requestOptions.body = JSON.stringify(requestOptions.body); - } - - let headers = {}; - let status; - let url; - const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch; - return fetch(requestOptions.url, Object.assign({ - method: requestOptions.method, - body: requestOptions.body, - headers: requestOptions.headers, - redirect: requestOptions.redirect - }, requestOptions.request)).then(response => { - url = response.url; - status = response.status; - - for (const keyAndValue of response.headers) { - headers[keyAndValue[0]] = keyAndValue[1]; - } - - if (status === 204 || status === 205) { - return; - } // GitHub API returns 200 for HEAD requests - - - if (requestOptions.method === "HEAD") { - if (status < 400) { - return; - } +const fs = __webpack_require__(5747); +const shebangCommand = __webpack_require__(7032); - throw new requestError.RequestError(response.statusText, status, { - headers, - request: requestOptions - }); - } +function readShebang(command) { + // Read the first 150 bytes from the file + const size = 150; + let buffer; - if (status === 304) { - throw new requestError.RequestError("Not modified", status, { - headers, - request: requestOptions - }); + if (Buffer.alloc) { + // Node.js v4.5+ / v5.10+ + buffer = Buffer.alloc(size); + } else { + // Old Node.js API + buffer = new Buffer(size); + buffer.fill(0); // zero-fill } - if (status >= 400) { - return response.text().then(message => { - const error = new 
requestError.RequestError(message, status, { - headers, - request: requestOptions - }); - - try { - let responseBody = JSON.parse(error.message); - Object.assign(error, responseBody); - let errors = responseBody.errors; // Assumption `errors` would always be in Array format - - error.message = error.message + ": " + errors.map(JSON.stringify).join(", "); - } catch (e) {// ignore, see octokit/rest.js#684 - } + let fd; - throw error; - }); - } + try { + fd = fs.openSync(command, 'r'); + fs.readSync(fd, buffer, 0, size, 0); + fs.closeSync(fd); + } catch (e) { /* Empty */ } - const contentType = response.headers.get("content-type"); + // Attempt to extract shebang (null is returned if not a shebang) + return shebangCommand(buffer.toString()); +} - if (/application\/json/.test(contentType)) { - return response.json(); - } +module.exports = readShebang; - if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { - return response.text(); - } - return getBufferResponse(response); - }).then(data => { - return { - status, - url, - headers, - data - }; - }).catch(error => { - if (error instanceof requestError.RequestError) { - throw error; - } +/***/ }), - throw new requestError.RequestError(error.message, 500, { - headers, - request: requestOptions - }); - }); -} +/***/ 7274: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { -function withDefaults(oldEndpoint, newDefaults) { - const endpoint = oldEndpoint.defaults(newDefaults); +"use strict"; - const newApi = function (route, parameters) { - const endpointOptions = endpoint.merge(route, parameters); - if (!endpointOptions.request || !endpointOptions.request.hook) { - return fetchWrapper(endpoint.parse(endpointOptions)); +const path = __webpack_require__(5622); +const which = __webpack_require__(4207); +const pathKey = __webpack_require__(539)(); + +function resolveCommandAttempt(parsed, withoutPathExt) { + const cwd = process.cwd(); + const hasCustomCwd = parsed.options.cwd != null; + + // If a 
custom `cwd` was specified, we need to change the process cwd + // because `which` will do stat calls but does not support a custom cwd + if (hasCustomCwd) { + try { + process.chdir(parsed.options.cwd); + } catch (err) { + /* Empty */ + } } - const request = (route, parameters) => { - return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters))); - }; + let resolved; - Object.assign(request, { - endpoint, - defaults: withDefaults.bind(null, endpoint) - }); - return endpointOptions.request.hook(request, endpointOptions); - }; + try { + resolved = which.sync(parsed.command, { + path: (parsed.options.env || process.env)[pathKey], + pathExt: withoutPathExt ? path.delimiter : undefined, + }); + } catch (e) { + /* Empty */ + } finally { + process.chdir(cwd); + } - return Object.assign(newApi, { - endpoint, - defaults: withDefaults.bind(null, endpoint) - }); + // If we successfully resolved, ensure that an absolute path is returned + // Note that when a custom `cwd` was used, we need to resolve to an absolute path based on it + if (resolved) { + resolved = path.resolve(hasCustomCwd ? 
parsed.options.cwd : '', resolved); + } + + return resolved; } -const request = withDefaults(endpoint.endpoint, { - headers: { - "user-agent": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}` - } -}); +function resolveCommand(parsed) { + return resolveCommandAttempt(parsed) || resolveCommandAttempt(parsed, true); +} -exports.request = request; -//# sourceMappingURL=index.js.map +module.exports = resolveCommand; /***/ }), -/***/ 3682: -/***/ ((module, __unused_webpack_exports, __webpack_require__) => { +/***/ 8932: +/***/ ((__unused_webpack_module, exports) => { -var register = __webpack_require__(4670) -var addHook = __webpack_require__(5549) -var removeHook = __webpack_require__(6819) +"use strict"; -// bind with array of arguments: https://stackoverflow.com/a/21792913 -var bind = Function.bind -var bindable = bind.bind(bind) -function bindApi (hook, state, name) { - var removeHookRef = bindable(removeHook, null).apply(null, name ? [state, name] : [state]) - hook.api = { remove: removeHookRef } - hook.remove = removeHookRef +Object.defineProperty(exports, "__esModule", ({ value: true })); - ;['before', 'error', 'after', 'wrap'].forEach(function (kind) { - var args = name ? 
[state, kind, name] : [state, kind] - hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args) - }) -} +class Deprecation extends Error { + constructor(message) { + super(message); // Maintains proper stack trace (only available on V8) -function HookSingular () { - var singularHookName = 'h' - var singularHookState = { - registry: {} - } - var singularHook = register.bind(null, singularHookState, singularHookName) - bindApi(singularHook, singularHookState, singularHookName) - return singularHook -} + /* istanbul ignore next */ -function HookCollection () { - var state = { - registry: {} - } + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } - var hook = register.bind(null, state) - bindApi(hook, state) + this.name = 'Deprecation'; + } - return hook } -var collectionHookDeprecationMessageDisplayed = false -function Hook () { - if (!collectionHookDeprecationMessageDisplayed) { - console.warn('[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". 
Read more: https://git.io/upgrade-before-after-hook-to-1.4') - collectionHookDeprecationMessageDisplayed = true - } - return HookCollection() -} +exports.Deprecation = Deprecation; -Hook.Singular = HookSingular.bind() -Hook.Collection = HookCollection.bind() -module.exports = Hook -// expose constructors as a named property for TypeScript -module.exports.Hook = Hook -module.exports.Singular = Hook.Singular -module.exports.Collection = Hook.Collection +/***/ }), +/***/ 1205: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { -/***/ }), +var once = __webpack_require__(1223); -/***/ 5549: -/***/ ((module) => { +var noop = function() {}; -module.exports = addHook +var isRequest = function(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +}; -function addHook (state, kind, name, hook) { - var orig = hook - if (!state.registry[name]) { - state.registry[name] = [] - } +var isChildProcess = function(stream) { + return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3 +}; - if (kind === 'before') { - hook = function (method, options) { - return Promise.resolve() - .then(orig.bind(null, options)) - .then(method.bind(null, options)) - } - } +var eos = function(stream, opts, callback) { + if (typeof opts === 'function') return eos(stream, null, opts); + if (!opts) opts = {}; - if (kind === 'after') { - hook = function (method, options) { - var result - return Promise.resolve() - .then(method.bind(null, options)) - .then(function (result_) { - result = result_ - return orig(result, options) - }) - .then(function () { - return result - }) - } - } + callback = once(callback || noop); - if (kind === 'error') { - hook = function (method, options) { - return Promise.resolve() - .then(method.bind(null, options)) - .catch(function (error) { - return orig(error, options) - }) - } - } + var ws = stream._writableState; + var rs = stream._readableState; + var readable = opts.readable || (opts.readable !== false && 
stream.readable); + var writable = opts.writable || (opts.writable !== false && stream.writable); + var cancelled = false; - state.registry[name].push({ - hook: hook, - orig: orig - }) -} + var onlegacyfinish = function() { + if (!stream.writable) onfinish(); + }; + + var onfinish = function() { + writable = false; + if (!readable) callback.call(stream); + }; + var onend = function() { + readable = false; + if (!writable) callback.call(stream); + }; -/***/ }), + var onexit = function(exitCode) { + callback.call(stream, exitCode ? new Error('exited with error code: ' + exitCode) : null); + }; -/***/ 4670: -/***/ ((module) => { + var onerror = function(err) { + callback.call(stream, err); + }; -module.exports = register + var onclose = function() { + process.nextTick(onclosenexttick); + }; -function register (state, name, method, options) { - if (typeof method !== 'function') { - throw new Error('method for before hook must be a function') - } + var onclosenexttick = function() { + if (cancelled) return; + if (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close')); + if (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close')); + }; - if (!options) { - options = {} - } + var onrequest = function() { + stream.req.on('finish', onfinish); + }; - if (Array.isArray(name)) { - return name.reverse().reduce(function (callback, name) { - return register.bind(null, state, name, callback, options) - }, method)() - } + if (isRequest(stream)) { + stream.on('complete', onfinish); + stream.on('abort', onclose); + if (stream.req) onrequest(); + else stream.on('request', onrequest); + } else if (writable && !ws) { // legacy streams + stream.on('end', onlegacyfinish); + stream.on('close', onlegacyfinish); + } - return Promise.resolve() - .then(function () { - if (!state.registry[name]) { - return method(options) - } + if (isChildProcess(stream)) stream.on('exit', onexit); - return 
(state.registry[name]).reduce(function (method, registered) { - return registered.hook.bind(null, method, options) - }, method)() - }) -} + stream.on('end', onend); + stream.on('finish', onfinish); + if (opts.error !== false) stream.on('error', onerror); + stream.on('close', onclose); + return function() { + cancelled = true; + stream.removeListener('complete', onfinish); + stream.removeListener('abort', onclose); + stream.removeListener('request', onrequest); + if (stream.req) stream.req.removeListener('finish', onfinish); + stream.removeListener('end', onlegacyfinish); + stream.removeListener('close', onlegacyfinish); + stream.removeListener('finish', onfinish); + stream.removeListener('exit', onexit); + stream.removeListener('end', onend); + stream.removeListener('error', onerror); + stream.removeListener('close', onclose); + }; +}; -/***/ }), +module.exports = eos; -/***/ 6819: -/***/ ((module) => { -module.exports = removeHook +/***/ }), -function removeHook (state, name, method) { - if (!state.registry[name]) { - return - } +/***/ 5447: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { - var index = state.registry[name] - .map(function (registered) { return registered.orig }) - .indexOf(method) +"use strict"; - if (index === -1) { - return - } +const path = __webpack_require__(5622); +const childProcess = __webpack_require__(3129); +const crossSpawn = __webpack_require__(2746); +const stripEof = __webpack_require__(5515); +const npmRunPath = __webpack_require__(502); +const isStream = __webpack_require__(1554); +const _getStream = __webpack_require__(1766); +const pFinally = __webpack_require__(1330); +const onExit = __webpack_require__(4931); +const errname = __webpack_require__(4689); +const stdio = __webpack_require__(166); - state.registry[name].splice(index, 1) -} +const TEN_MEGABYTES = 1000 * 1000 * 10; +function handleArgs(cmd, args, opts) { + let parsed; -/***/ }), + opts = Object.assign({ + extendEnv: true, + env: {} + }, opts); 
-/***/ 2746: -/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + if (opts.extendEnv) { + opts.env = Object.assign({}, process.env, opts.env); + } -"use strict"; + if (opts.__winShell === true) { + delete opts.__winShell; + parsed = { + command: cmd, + args, + options: opts, + file: cmd, + original: { + cmd, + args + } + }; + } else { + parsed = crossSpawn._parse(cmd, args, opts); + } + opts = Object.assign({ + maxBuffer: TEN_MEGABYTES, + buffer: true, + stripEof: true, + preferLocal: true, + localDir: parsed.options.cwd || process.cwd(), + encoding: 'utf8', + reject: true, + cleanup: true + }, parsed.options); -const cp = __webpack_require__(3129); -const parse = __webpack_require__(6855); -const enoent = __webpack_require__(4101); + opts.stdio = stdio(opts); -function spawn(command, args, options) { - // Parse the arguments - const parsed = parse(command, args, options); + if (opts.preferLocal) { + opts.env = npmRunPath.env(Object.assign({}, opts, {cwd: opts.localDir})); + } - // Spawn the child process - const spawned = cp.spawn(parsed.command, parsed.args, parsed.options); + if (opts.detached) { + // #115 + opts.cleanup = false; + } - // Hook into child process "exit" event to emit an error if the command - // does not exists, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16 - enoent.hookChildProcess(spawned, parsed); + if (process.platform === 'win32' && path.basename(parsed.command) === 'cmd.exe') { + // #116 + parsed.args.unshift('/q'); + } - return spawned; + return { + cmd: parsed.command, + args: parsed.args, + opts, + parsed + }; } -function spawnSync(command, args, options) { - // Parse the arguments - const parsed = parse(command, args, options); +function handleInput(spawned, input) { + if (input === null || input === undefined) { + return; + } - // Spawn the child process - const result = cp.spawnSync(parsed.command, parsed.args, parsed.options); + if (isStream(input)) { + input.pipe(spawned.stdin); + } else { + 
spawned.stdin.end(input); + } +} - // Analyze if the command does not exist, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16 - result.error = result.error || enoent.verifyENOENTSync(result.status, parsed); +function handleOutput(opts, val) { + if (val && opts.stripEof) { + val = stripEof(val); + } - return result; + return val; } -module.exports = spawn; -module.exports.spawn = spawn; -module.exports.sync = spawnSync; +function handleShell(fn, cmd, opts) { + let file = '/bin/sh'; + let args = ['-c', cmd]; -module.exports._parse = parse; -module.exports._enoent = enoent; + opts = Object.assign({}, opts); + if (process.platform === 'win32') { + opts.__winShell = true; + file = process.env.comspec || 'cmd.exe'; + args = ['/s', '/c', `"${cmd}"`]; + opts.windowsVerbatimArguments = true; + } -/***/ }), + if (opts.shell) { + file = opts.shell; + delete opts.shell; + } -/***/ 4101: -/***/ ((module) => { + return fn(file, args, opts); +} -"use strict"; +function getStream(process, stream, {encoding, buffer, maxBuffer}) { + if (!process[stream]) { + return null; + } + let ret; -const isWin = process.platform === 'win32'; + if (!buffer) { + // TODO: Use `ret = util.promisify(stream.finished)(process[stream]);` when targeting Node.js 10 + ret = new Promise((resolve, reject) => { + process[stream] + .once('end', resolve) + .once('error', reject); + }); + } else if (encoding) { + ret = _getStream(process[stream], { + encoding, + maxBuffer + }); + } else { + ret = _getStream.buffer(process[stream], {maxBuffer}); + } -function notFoundError(original, syscall) { - return Object.assign(new Error(`${syscall} ${original.command} ENOENT`), { - code: 'ENOENT', - errno: 'ENOENT', - syscall: `${syscall} ${original.command}`, - path: original.command, - spawnargs: original.args, - }); + return ret.catch(err => { + err.stream = stream; + err.message = `${stream} ${err.message}`; + throw err; + }); } -function hookChildProcess(cp, parsed) { - if (!isWin) { - return; - } 
+function makeError(result, options) { + const {stdout, stderr} = result; + + let err = result.error; + const {code, signal} = result; + + const {parsed, joinedCmd} = options; + const timedOut = options.timedOut || false; - const originalEmit = cp.emit; + if (!err) { + let output = ''; - cp.emit = function (name, arg1) { - // If emitting "exit" event and exit code is 1, we need to check if - // the command exists and emit an "error" instead - // See https://github.com/IndigoUnited/node-cross-spawn/issues/16 - if (name === 'exit') { - const err = verifyENOENT(arg1, parsed, 'spawn'); + if (Array.isArray(parsed.opts.stdio)) { + if (parsed.opts.stdio[2] !== 'inherit') { + output += output.length > 0 ? stderr : `\n${stderr}`; + } - if (err) { - return originalEmit.call(cp, 'error', err); - } - } + if (parsed.opts.stdio[1] !== 'inherit') { + output += `\n${stdout}`; + } + } else if (parsed.opts.stdio !== 'inherit') { + output = `\n${stderr}${stdout}`; + } - return originalEmit.apply(cp, arguments); // eslint-disable-line prefer-rest-params - }; -} + err = new Error(`Command failed: ${joinedCmd}${output}`); + err.code = code < 0 ? 
errname(code) : code; + } -function verifyENOENT(status, parsed) { - if (isWin && status === 1 && !parsed.file) { - return notFoundError(parsed.original, 'spawn'); - } + err.stdout = stdout; + err.stderr = stderr; + err.failed = true; + err.signal = signal || null; + err.cmd = joinedCmd; + err.timedOut = timedOut; - return null; + return err; } -function verifyENOENTSync(status, parsed) { - if (isWin && status === 1 && !parsed.file) { - return notFoundError(parsed.original, 'spawnSync'); - } +function joinCmd(cmd, args) { + let joinedCmd = cmd; - return null; -} + if (Array.isArray(args) && args.length > 0) { + joinedCmd += ' ' + args.join(' '); + } -module.exports = { - hookChildProcess, - verifyENOENT, - verifyENOENTSync, - notFoundError, -}; + return joinedCmd; +} +module.exports = (cmd, args, opts) => { + const parsed = handleArgs(cmd, args, opts); + const {encoding, buffer, maxBuffer} = parsed.opts; + const joinedCmd = joinCmd(cmd, args); -/***/ }), + let spawned; + try { + spawned = childProcess.spawn(parsed.cmd, parsed.args, parsed.opts); + } catch (err) { + return Promise.reject(err); + } -/***/ 6855: -/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + let removeExitHandler; + if (parsed.opts.cleanup) { + removeExitHandler = onExit(() => { + spawned.kill(); + }); + } -"use strict"; + let timeoutId = null; + let timedOut = false; + const cleanup = () => { + if (timeoutId) { + clearTimeout(timeoutId); + timeoutId = null; + } -const path = __webpack_require__(5622); -const niceTry = __webpack_require__(8560); -const resolveCommand = __webpack_require__(7274); -const escape = __webpack_require__(4274); -const readShebang = __webpack_require__(1252); -const semver = __webpack_require__(5911); + if (removeExitHandler) { + removeExitHandler(); + } + }; -const isWin = process.platform === 'win32'; -const isExecutableRegExp = /\.(?:com|exe)$/i; -const isCmdShimRegExp = /node_modules[\\/].bin[\\/][^\\/]+\.cmd$/i; + if (parsed.opts.timeout > 0) { + 
timeoutId = setTimeout(() => { + timeoutId = null; + timedOut = true; + spawned.kill(parsed.opts.killSignal); + }, parsed.opts.timeout); + } -// `options.shell` is supported in Node ^4.8.0, ^5.7.0 and >= 6.0.0 -const supportsShellOption = niceTry(() => semver.satisfies(process.version, '^4.8.0 || ^5.7.0 || >= 6.0.0', true)) || false; + const processDone = new Promise(resolve => { + spawned.on('exit', (code, signal) => { + cleanup(); + resolve({code, signal}); + }); -function detectShebang(parsed) { - parsed.file = resolveCommand(parsed); + spawned.on('error', err => { + cleanup(); + resolve({error: err}); + }); - const shebang = parsed.file && readShebang(parsed.file); + if (spawned.stdin) { + spawned.stdin.on('error', err => { + cleanup(); + resolve({error: err}); + }); + } + }); - if (shebang) { - parsed.args.unshift(parsed.file); - parsed.command = shebang; + function destroy() { + if (spawned.stdout) { + spawned.stdout.destroy(); + } - return resolveCommand(parsed); - } + if (spawned.stderr) { + spawned.stderr.destroy(); + } + } - return parsed.file; -} + const handlePromise = () => pFinally(Promise.all([ + processDone, + getStream(spawned, 'stdout', {encoding, buffer, maxBuffer}), + getStream(spawned, 'stderr', {encoding, buffer, maxBuffer}) + ]).then(arr => { + const result = arr[0]; + result.stdout = arr[1]; + result.stderr = arr[2]; -function parseNonShell(parsed) { - if (!isWin) { - return parsed; - } + if (result.error || result.code !== 0 || result.signal !== null) { + const err = makeError(result, { + joinedCmd, + parsed, + timedOut + }); - // Detect & add support for shebangs - const commandFile = detectShebang(parsed); + // TODO: missing some timeout logic for killed + // https://github.com/nodejs/node/blob/master/lib/child_process.js#L203 + // err.killed = spawned.killed || killed; + err.killed = err.killed || spawned.killed; - // We don't need a shell if the command filename is an executable - const needsShell = 
!isExecutableRegExp.test(commandFile); + if (!parsed.opts.reject) { + return err; + } - // If a shell is required, use cmd.exe and take care of escaping everything correctly - // Note that `forceShell` is an hidden option used only in tests - if (parsed.options.forceShell || needsShell) { - // Need to double escape meta chars if the command is a cmd-shim located in `node_modules/.bin/` - // The cmd-shim simply calls execute the package bin file with NodeJS, proxying any argument - // Because the escape of metachars with ^ gets interpreted when the cmd.exe is first called, - // we need to double escape them - const needsDoubleEscapeMetaChars = isCmdShimRegExp.test(commandFile); + throw err; + } - // Normalize posix paths into OS compatible paths (e.g.: foo/bar -> foo\bar) - // This is necessary otherwise it will always fail with ENOENT in those cases - parsed.command = path.normalize(parsed.command); + return { + stdout: handleOutput(parsed.opts, result.stdout), + stderr: handleOutput(parsed.opts, result.stderr), + code: 0, + failed: false, + killed: false, + signal: null, + cmd: joinedCmd, + timedOut: false + }; + }), destroy); - // Escape command & arguments - parsed.command = escape.command(parsed.command); - parsed.args = parsed.args.map((arg) => escape.argument(arg, needsDoubleEscapeMetaChars)); + crossSpawn._enoent.hookChildProcess(spawned, parsed.parsed); - const shellCommand = [parsed.command].concat(parsed.args).join(' '); + handleInput(spawned, parsed.opts.input); - parsed.args = ['/d', '/s', '/c', `"${shellCommand}"`]; - parsed.command = process.env.comspec || 'cmd.exe'; - parsed.options.windowsVerbatimArguments = true; // Tell node's spawn that the arguments are already escaped - } + spawned.then = (onfulfilled, onrejected) => handlePromise().then(onfulfilled, onrejected); + spawned.catch = onrejected => handlePromise().catch(onrejected); - return parsed; -} + return spawned; +}; -function parseShell(parsed) { - // If node supports the shell option, 
there's no need to mimic its behavior - if (supportsShellOption) { - return parsed; - } +// TODO: set `stderr: 'ignore'` when that option is implemented +module.exports.stdout = (...args) => module.exports(...args).then(x => x.stdout); - // Mimic node shell option - // See https://github.com/nodejs/node/blob/b9f6a2dc059a1062776133f3d4fd848c4da7d150/lib/child_process.js#L335 - const shellCommand = [parsed.command].concat(parsed.args).join(' '); +// TODO: set `stdout: 'ignore'` when that option is implemented +module.exports.stderr = (...args) => module.exports(...args).then(x => x.stderr); - if (isWin) { - parsed.command = typeof parsed.options.shell === 'string' ? parsed.options.shell : process.env.comspec || 'cmd.exe'; - parsed.args = ['/d', '/s', '/c', `"${shellCommand}"`]; - parsed.options.windowsVerbatimArguments = true; // Tell node's spawn that the arguments are already escaped - } else { - if (typeof parsed.options.shell === 'string') { - parsed.command = parsed.options.shell; - } else if (process.platform === 'android') { - parsed.command = '/system/bin/sh'; - } else { - parsed.command = '/bin/sh'; - } +module.exports.shell = (cmd, opts) => handleShell(module.exports, cmd, opts); - parsed.args = ['-c', shellCommand]; - } +module.exports.sync = (cmd, args, opts) => { + const parsed = handleArgs(cmd, args, opts); + const joinedCmd = joinCmd(cmd, args); - return parsed; -} + if (isStream(parsed.opts.input)) { + throw new TypeError('The `input` option cannot be a stream in sync mode'); + } -function parse(command, args, options) { - // Normalize arguments, similar to nodejs - if (args && !Array.isArray(args)) { - options = args; - args = null; - } + const result = childProcess.spawnSync(parsed.cmd, parsed.args, parsed.opts); + result.code = result.status; - args = args ? 
args.slice(0) : []; // Clone array to avoid changing the original - options = Object.assign({}, options); // Clone object to avoid changing the original + if (result.error || result.status !== 0 || result.signal !== null) { + const err = makeError(result, { + joinedCmd, + parsed + }); - // Build our parsed object - const parsed = { - command, - args, - options, - file: undefined, - original: { - command, - args, - }, - }; + if (!parsed.opts.reject) { + return err; + } - // Delegate further parsing to shell or non-shell - return options.shell ? parseShell(parsed) : parseNonShell(parsed); -} + throw err; + } -module.exports = parse; + return { + stdout: handleOutput(parsed.opts, result.stdout), + stderr: handleOutput(parsed.opts, result.stderr), + code: 0, + failed: false, + signal: null, + cmd: joinedCmd, + timedOut: false + }; +}; + +module.exports.shellSync = (cmd, opts) => handleShell(module.exports.sync, cmd, opts); /***/ }), -/***/ 4274: -/***/ ((module) => { +/***/ 4689: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { "use strict"; +// Older verions of Node.js might not have `util.getSystemErrorName()`. +// In that case, fall back to a deprecated internal. 
+const util = __webpack_require__(1669); -// See http://www.robvanderwoude.com/escapechars.php -const metaCharsRegExp = /([()\][%!^"`<>&|;, *?])/g; +let uv; -function escapeCommand(arg) { - // Escape meta chars - arg = arg.replace(metaCharsRegExp, '^$1'); +if (typeof util.getSystemErrorName === 'function') { + module.exports = util.getSystemErrorName; +} else { + try { + uv = process.binding('uv'); - return arg; -} + if (typeof uv.errname !== 'function') { + throw new TypeError('uv.errname is not a function'); + } + } catch (err) { + console.error('execa/lib/errname: unable to establish process.binding(\'uv\')', err); + uv = null; + } -function escapeArgument(arg, doubleEscapeMetaChars) { - // Convert to string - arg = `${arg}`; + module.exports = code => errname(uv, code); +} - // Algorithm below is based on https://qntm.org/cmd +// Used for testing the fallback behavior +module.exports.__test__ = errname; - // Sequence of backslashes followed by a double quote: - // double up all the backslashes and escape the double quote - arg = arg.replace(/(\\*)"/g, '$1$1\\"'); +function errname(uv, code) { + if (uv) { + return uv.errname(code); + } - // Sequence of backslashes followed by the end of the string - // (which will become a double quote later): - // double up all the backslashes - arg = arg.replace(/(\\*)$/, '$1$1'); + if (!(code < 0)) { + throw new Error('err >= 0'); + } - // All other backslashes occur literally + return `Unknown system error ${code}`; +} - // Quote the whole thing: - arg = `"${arg}"`; - // Escape meta chars - arg = arg.replace(metaCharsRegExp, '^$1'); - // Double escape meta chars if necessary - if (doubleEscapeMetaChars) { - arg = arg.replace(metaCharsRegExp, '^$1'); - } +/***/ }), - return arg; -} +/***/ 166: +/***/ ((module) => { -module.exports.command = escapeCommand; -module.exports.argument = escapeArgument; +"use strict"; +const alias = ['stdin', 'stdout', 'stderr']; -/***/ }), +const hasAlias = opts => alias.some(x => 
Boolean(opts[x])); -/***/ 1252: -/***/ ((module, __unused_webpack_exports, __webpack_require__) => { +module.exports = opts => { + if (!opts) { + return null; + } -"use strict"; + if (opts.stdio && hasAlias(opts)) { + throw new Error(`It's not possible to provide \`stdio\` in combination with one of ${alias.map(x => `\`${x}\``).join(', ')}`); + } + if (typeof opts.stdio === 'string') { + return opts.stdio; + } -const fs = __webpack_require__(5747); -const shebangCommand = __webpack_require__(7032); + const stdio = opts.stdio || []; -function readShebang(command) { - // Read the first 150 bytes from the file - const size = 150; - let buffer; + if (!Array.isArray(stdio)) { + throw new TypeError(`Expected \`stdio\` to be of type \`string\` or \`Array\`, got \`${typeof stdio}\``); + } - if (Buffer.alloc) { - // Node.js v4.5+ / v5.10+ - buffer = Buffer.alloc(size); - } else { - // Old Node.js API - buffer = new Buffer(size); - buffer.fill(0); // zero-fill - } + const result = []; + const len = Math.max(stdio.length, alias.length); - let fd; + for (let i = 0; i < len; i++) { + let value = null; - try { - fd = fs.openSync(command, 'r'); - fs.readSync(fd, buffer, 0, size, 0); - fs.closeSync(fd); - } catch (e) { /* Empty */ } + if (stdio[i] !== undefined) { + value = stdio[i]; + } else if (opts[alias[i]] !== undefined) { + value = opts[alias[i]]; + } - // Attempt to extract shebang (null is returned if not a shebang) - return shebangCommand(buffer.toString()); -} + result[i] = value; + } -module.exports = readShebang; + return result; +}; /***/ }), -/***/ 7274: +/***/ 1585: /***/ ((module, __unused_webpack_exports, __webpack_require__) => { "use strict"; +const {PassThrough} = __webpack_require__(2413); -const path = __webpack_require__(5622); -const which = __webpack_require__(4207); -const pathKey = __webpack_require__(539)(); - -function resolveCommandAttempt(parsed, withoutPathExt) { - const cwd = process.cwd(); - const hasCustomCwd = parsed.options.cwd != null; - - // 
If a custom `cwd` was specified, we need to change the process cwd - // because `which` will do stat calls but does not support a custom cwd - if (hasCustomCwd) { - try { - process.chdir(parsed.options.cwd); - } catch (err) { - /* Empty */ - } - } +module.exports = options => { + options = Object.assign({}, options); - let resolved; + const {array} = options; + let {encoding} = options; + const buffer = encoding === 'buffer'; + let objectMode = false; - try { - resolved = which.sync(parsed.command, { - path: (parsed.options.env || process.env)[pathKey], - pathExt: withoutPathExt ? path.delimiter : undefined, - }); - } catch (e) { - /* Empty */ - } finally { - process.chdir(cwd); - } + if (array) { + objectMode = !(encoding || buffer); + } else { + encoding = encoding || 'utf8'; + } - // If we successfully resolved, ensure that an absolute path is returned - // Note that when a custom `cwd` was used, we need to resolve to an absolute path based on it - if (resolved) { - resolved = path.resolve(hasCustomCwd ? parsed.options.cwd : '', resolved); - } + if (buffer) { + encoding = null; + } - return resolved; -} + let len = 0; + const ret = []; + const stream = new PassThrough({objectMode}); -function resolveCommand(parsed) { - return resolveCommandAttempt(parsed) || resolveCommandAttempt(parsed, true); -} + if (encoding) { + stream.setEncoding(encoding); + } -module.exports = resolveCommand; + stream.on('data', chunk => { + ret.push(chunk); + if (objectMode) { + len = ret.length; + } else { + len += chunk.length; + } + }); -/***/ }), + stream.getBufferedValue = () => { + if (array) { + return ret; + } -/***/ 8932: -/***/ ((__unused_webpack_module, exports) => { + return buffer ? 
Buffer.concat(ret, len) : ret.join(''); + }; -"use strict"; + stream.getBufferedLength = () => len; + return stream; +}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -class Deprecation extends Error { - constructor(message) { - super(message); // Maintains proper stack trace (only available on V8) +/***/ }), - /* istanbul ignore next */ +/***/ 1766: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); - } +"use strict"; - this.name = 'Deprecation'; - } +const pump = __webpack_require__(8341); +const bufferStream = __webpack_require__(1585); +class MaxBufferError extends Error { + constructor() { + super('maxBuffer exceeded'); + this.name = 'MaxBufferError'; + } } -exports.Deprecation = Deprecation; +function getStream(inputStream, options) { + if (!inputStream) { + return Promise.reject(new Error('Expected a stream')); + } + options = Object.assign({maxBuffer: Infinity}, options); -/***/ }), + const {maxBuffer} = options; -/***/ 1205: -/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + let stream; + return new Promise((resolve, reject) => { + const rejectPromise = error => { + if (error) { // A null check + error.bufferedData = stream.getBufferedValue(); + } + reject(error); + }; -var once = __webpack_require__(1223); + stream = pump(inputStream, bufferStream(options), error => { + if (error) { + rejectPromise(error); + return; + } -var noop = function() {}; + resolve(); + }); -var isRequest = function(stream) { - return stream.setHeader && typeof stream.abort === 'function'; -}; + stream.on('data', () => { + if (stream.getBufferedLength() > maxBuffer) { + rejectPromise(new MaxBufferError()); + } + }); + }).then(() => stream.getBufferedValue()); +} -var isChildProcess = function(stream) { - return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3 -}; +module.exports = getStream; +module.exports.buffer = 
(stream, options) => getStream(stream, Object.assign({}, options, {encoding: 'buffer'})); +module.exports.array = (stream, options) => getStream(stream, Object.assign({}, options, {array: true})); +module.exports.MaxBufferError = MaxBufferError; -var eos = function(stream, opts, callback) { - if (typeof opts === 'function') return eos(stream, null, opts); - if (!opts) opts = {}; - callback = once(callback || noop); +/***/ }), - var ws = stream._writableState; - var rs = stream._readableState; - var readable = opts.readable || (opts.readable !== false && stream.readable); - var writable = opts.writable || (opts.writable !== false && stream.writable); - var cancelled = false; +/***/ 8840: +/***/ ((module) => { - var onlegacyfinish = function() { - if (!stream.writable) onfinish(); - }; +"use strict"; - var onfinish = function() { - writable = false; - if (!readable) callback.call(stream); - }; - var onend = function() { - readable = false; - if (!writable) callback.call(stream); - }; +/*! + * isobject + * + * Copyright (c) 2014-2017, Jon Schlinkert. + * Released under the MIT License. + */ - var onexit = function(exitCode) { - callback.call(stream, exitCode ? new Error('exited with error code: ' + exitCode) : null); - }; +function isObject(val) { + return val != null && typeof val === 'object' && Array.isArray(val) === false; +} - var onerror = function(err) { - callback.call(stream, err); - }; +/*! + * is-plain-object + * + * Copyright (c) 2014-2017, Jon Schlinkert. + * Released under the MIT License. 
+ */ - var onclose = function() { - process.nextTick(onclosenexttick); - }; +function isObjectObject(o) { + return isObject(o) === true + && Object.prototype.toString.call(o) === '[object Object]'; +} - var onclosenexttick = function() { - if (cancelled) return; - if (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close')); - if (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close')); - }; +function isPlainObject(o) { + var ctor,prot; - var onrequest = function() { - stream.req.on('finish', onfinish); - }; + if (isObjectObject(o) === false) return false; - if (isRequest(stream)) { - stream.on('complete', onfinish); - stream.on('abort', onclose); - if (stream.req) onrequest(); - else stream.on('request', onrequest); - } else if (writable && !ws) { // legacy streams - stream.on('end', onlegacyfinish); - stream.on('close', onlegacyfinish); - } + // If has modified constructor + ctor = o.constructor; + if (typeof ctor !== 'function') return false; - if (isChildProcess(stream)) stream.on('exit', onexit); + // If has modified prototype + prot = ctor.prototype; + if (isObjectObject(prot) === false) return false; - stream.on('end', onend); - stream.on('finish', onfinish); - if (opts.error !== false) stream.on('error', onerror); - stream.on('close', onclose); + // If constructor does not have an Object-specific method + if (prot.hasOwnProperty('isPrototypeOf') === false) { + return false; + } - return function() { - cancelled = true; - stream.removeListener('complete', onfinish); - stream.removeListener('abort', onclose); - stream.removeListener('request', onrequest); - if (stream.req) stream.req.removeListener('finish', onfinish); - stream.removeListener('end', onlegacyfinish); - stream.removeListener('close', onlegacyfinish); - stream.removeListener('finish', onfinish); - stream.removeListener('exit', onexit); - stream.removeListener('end', onend); - 
stream.removeListener('error', onerror); - stream.removeListener('close', onclose); - }; -}; + // Most likely a plain Object + return true; +} -module.exports = eos; +module.exports = isPlainObject; /***/ }), -/***/ 5447: -/***/ ((module, __unused_webpack_exports, __webpack_require__) => { +/***/ 1554: +/***/ ((module) => { "use strict"; -const path = __webpack_require__(5622); -const childProcess = __webpack_require__(3129); -const crossSpawn = __webpack_require__(2746); -const stripEof = __webpack_require__(5515); -const npmRunPath = __webpack_require__(502); -const isStream = __webpack_require__(1554); -const _getStream = __webpack_require__(1766); -const pFinally = __webpack_require__(1330); -const onExit = __webpack_require__(4931); -const errname = __webpack_require__(4689); -const stdio = __webpack_require__(166); -const TEN_MEGABYTES = 1000 * 1000 * 10; +var isStream = module.exports = function (stream) { + return stream !== null && typeof stream === 'object' && typeof stream.pipe === 'function'; +}; -function handleArgs(cmd, args, opts) { - let parsed; +isStream.writable = function (stream) { + return isStream(stream) && stream.writable !== false && typeof stream._write === 'function' && typeof stream._writableState === 'object'; +}; - opts = Object.assign({ - extendEnv: true, - env: {} - }, opts); +isStream.readable = function (stream) { + return isStream(stream) && stream.readable !== false && typeof stream._read === 'function' && typeof stream._readableState === 'object'; +}; - if (opts.extendEnv) { - opts.env = Object.assign({}, process.env, opts.env); - } +isStream.duplex = function (stream) { + return isStream.writable(stream) && isStream.readable(stream); +}; - if (opts.__winShell === true) { - delete opts.__winShell; - parsed = { - command: cmd, - args, - options: opts, - file: cmd, - original: { - cmd, - args - } - }; - } else { - parsed = crossSpawn._parse(cmd, args, opts); - } +isStream.transform = function (stream) { + return 
isStream.duplex(stream) && typeof stream._transform === 'function' && typeof stream._transformState === 'object'; +}; - opts = Object.assign({ - maxBuffer: TEN_MEGABYTES, - buffer: true, - stripEof: true, - preferLocal: true, - localDir: parsed.options.cwd || process.cwd(), - encoding: 'utf8', - reject: true, - cleanup: true - }, parsed.options); - opts.stdio = stdio(opts); +/***/ }), - if (opts.preferLocal) { - opts.env = npmRunPath.env(Object.assign({}, opts, {cwd: opts.localDir})); - } +/***/ 7126: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { - if (opts.detached) { - // #115 - opts.cleanup = false; - } +var fs = __webpack_require__(5747) +var core +if (process.platform === 'win32' || global.TESTING_WINDOWS) { + core = __webpack_require__(2001) +} else { + core = __webpack_require__(9728) +} + +module.exports = isexe +isexe.sync = sync + +function isexe (path, options, cb) { + if (typeof options === 'function') { + cb = options + options = {} + } - if (process.platform === 'win32' && path.basename(parsed.command) === 'cmd.exe') { - // #116 - parsed.args.unshift('/q'); - } + if (!cb) { + if (typeof Promise !== 'function') { + throw new TypeError('callback not provided') + } - return { - cmd: parsed.command, - args: parsed.args, - opts, - parsed - }; -} + return new Promise(function (resolve, reject) { + isexe(path, options || {}, function (er, is) { + if (er) { + reject(er) + } else { + resolve(is) + } + }) + }) + } -function handleInput(spawned, input) { - if (input === null || input === undefined) { - return; - } + core(path, options || {}, function (er, is) { + // ignore EACCES because that just means we aren't allowed to run it + if (er) { + if (er.code === 'EACCES' || options && options.ignoreErrors) { + er = null + is = false + } + } + cb(er, is) + }) +} - if (isStream(input)) { - input.pipe(spawned.stdin); - } else { - spawned.stdin.end(input); - } +function sync (path, options) { + // my kingdom for a filtered catch + try { + return 
core.sync(path, options || {}) + } catch (er) { + if (options && options.ignoreErrors || er.code === 'EACCES') { + return false + } else { + throw er + } + } } -function handleOutput(opts, val) { - if (val && opts.stripEof) { - val = stripEof(val); - } - return val; -} +/***/ }), -function handleShell(fn, cmd, opts) { - let file = '/bin/sh'; - let args = ['-c', cmd]; +/***/ 9728: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { - opts = Object.assign({}, opts); +module.exports = isexe +isexe.sync = sync - if (process.platform === 'win32') { - opts.__winShell = true; - file = process.env.comspec || 'cmd.exe'; - args = ['/s', '/c', `"${cmd}"`]; - opts.windowsVerbatimArguments = true; - } +var fs = __webpack_require__(5747) - if (opts.shell) { - file = opts.shell; - delete opts.shell; - } +function isexe (path, options, cb) { + fs.stat(path, function (er, stat) { + cb(er, er ? false : checkStat(stat, options)) + }) +} - return fn(file, args, opts); +function sync (path, options) { + return checkStat(fs.statSync(path), options) } -function getStream(process, stream, {encoding, buffer, maxBuffer}) { - if (!process[stream]) { - return null; - } +function checkStat (stat, options) { + return stat.isFile() && checkMode(stat, options) +} - let ret; +function checkMode (stat, options) { + var mod = stat.mode + var uid = stat.uid + var gid = stat.gid - if (!buffer) { - // TODO: Use `ret = util.promisify(stream.finished)(process[stream]);` when targeting Node.js 10 - ret = new Promise((resolve, reject) => { - process[stream] - .once('end', resolve) - .once('error', reject); - }); - } else if (encoding) { - ret = _getStream(process[stream], { - encoding, - maxBuffer - }); - } else { - ret = _getStream.buffer(process[stream], {maxBuffer}); - } + var myUid = options.uid !== undefined ? + options.uid : process.getuid && process.getuid() + var myGid = options.gid !== undefined ? 
+ options.gid : process.getgid && process.getgid() - return ret.catch(err => { - err.stream = stream; - err.message = `${stream} ${err.message}`; - throw err; - }); -} + var u = parseInt('100', 8) + var g = parseInt('010', 8) + var o = parseInt('001', 8) + var ug = u | g -function makeError(result, options) { - const {stdout, stderr} = result; + var ret = (mod & o) || + (mod & g) && gid === myGid || + (mod & u) && uid === myUid || + (mod & ug) && myUid === 0 - let err = result.error; - const {code, signal} = result; + return ret +} - const {parsed, joinedCmd} = options; - const timedOut = options.timedOut || false; - if (!err) { - let output = ''; +/***/ }), - if (Array.isArray(parsed.opts.stdio)) { - if (parsed.opts.stdio[2] !== 'inherit') { - output += output.length > 0 ? stderr : `\n${stderr}`; - } +/***/ 2001: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { - if (parsed.opts.stdio[1] !== 'inherit') { - output += `\n${stdout}`; - } - } else if (parsed.opts.stdio !== 'inherit') { - output = `\n${stderr}${stdout}`; - } +module.exports = isexe +isexe.sync = sync - err = new Error(`Command failed: ${joinedCmd}${output}`); - err.code = code < 0 ? errname(code) : code; - } +var fs = __webpack_require__(5747) - err.stdout = stdout; - err.stderr = stderr; - err.failed = true; - err.signal = signal || null; - err.cmd = joinedCmd; - err.timedOut = timedOut; +function checkPathExt (path, options) { + var pathext = options.pathExt !== undefined ? 
+ options.pathExt : process.env.PATHEXT - return err; + if (!pathext) { + return true + } + + pathext = pathext.split(';') + if (pathext.indexOf('') !== -1) { + return true + } + for (var i = 0; i < pathext.length; i++) { + var p = pathext[i].toLowerCase() + if (p && path.substr(-p.length).toLowerCase() === p) { + return true + } + } + return false } -function joinCmd(cmd, args) { - let joinedCmd = cmd; +function checkStat (stat, path, options) { + if (!stat.isSymbolicLink() && !stat.isFile()) { + return false + } + return checkPathExt(path, options) +} - if (Array.isArray(args) && args.length > 0) { - joinedCmd += ' ' + args.join(' '); - } +function isexe (path, options, cb) { + fs.stat(path, function (er, stat) { + cb(er, er ? false : checkStat(stat, path, options)) + }) +} - return joinedCmd; +function sync (path, options) { + return checkStat(fs.statSync(path), path, options) } -module.exports = (cmd, args, opts) => { - const parsed = handleArgs(cmd, args, opts); - const {encoding, buffer, maxBuffer} = parsed.opts; - const joinedCmd = joinCmd(cmd, args); - let spawned; - try { - spawned = childProcess.spawn(parsed.cmd, parsed.args, parsed.opts); - } catch (err) { - return Promise.reject(err); - } +/***/ }), - let removeExitHandler; - if (parsed.opts.cleanup) { - removeExitHandler = onExit(() => { - spawned.kill(); - }); - } +/***/ 7493: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { - let timeoutId = null; - let timedOut = false; +"use strict"; - const cleanup = () => { - if (timeoutId) { - clearTimeout(timeoutId); - timeoutId = null; - } +const os = __webpack_require__(2087); + +const nameMap = new Map([ + [20, ['Big Sur', '11']], + [19, ['Catalina', '10.15']], + [18, ['Mojave', '10.14']], + [17, ['High Sierra', '10.13']], + [16, ['Sierra', '10.12']], + [15, ['El Capitan', '10.11']], + [14, ['Yosemite', '10.10']], + [13, ['Mavericks', '10.9']], + [12, ['Mountain Lion', '10.8']], + [11, ['Lion', '10.7']], + [10, ['Snow Leopard', '10.6']], 
+ [9, ['Leopard', '10.5']], + [8, ['Tiger', '10.4']], + [7, ['Panther', '10.3']], + [6, ['Jaguar', '10.2']], + [5, ['Puma', '10.1']] +]); + +const macosRelease = release => { + release = Number((release || os.release()).split('.')[0]); - if (removeExitHandler) { - removeExitHandler(); - } + const [name, version] = nameMap.get(release); + + return { + name, + version }; +}; - if (parsed.opts.timeout > 0) { - timeoutId = setTimeout(() => { - timeoutId = null; - timedOut = true; - spawned.kill(parsed.opts.killSignal); - }, parsed.opts.timeout); - } +module.exports = macosRelease; +// TODO: remove this in the next major version +module.exports.default = macosRelease; - const processDone = new Promise(resolve => { - spawned.on('exit', (code, signal) => { - cleanup(); - resolve({code, signal}); - }); - spawned.on('error', err => { - cleanup(); - resolve({error: err}); - }); +/***/ }), - if (spawned.stdin) { - spawned.stdin.on('error', err => { - cleanup(); - resolve({error: err}); - }); - } - }); +/***/ 8560: +/***/ ((module) => { - function destroy() { - if (spawned.stdout) { - spawned.stdout.destroy(); - } +"use strict"; - if (spawned.stderr) { - spawned.stderr.destroy(); - } - } - const handlePromise = () => pFinally(Promise.all([ - processDone, - getStream(spawned, 'stdout', {encoding, buffer, maxBuffer}), - getStream(spawned, 'stderr', {encoding, buffer, maxBuffer}) - ]).then(arr => { - const result = arr[0]; - result.stdout = arr[1]; - result.stderr = arr[2]; +/** + * Tries to execute a function and discards any error that occurs. + * @param {Function} fn - Function that might or might not throw an error. + * @returns {?*} Return-value of the function when no error occurred. 
+ */ +module.exports = function(fn) { - if (result.error || result.code !== 0 || result.signal !== null) { - const err = makeError(result, { - joinedCmd, - parsed, - timedOut - }); + try { return fn() } catch (e) {} - // TODO: missing some timeout logic for killed - // https://github.com/nodejs/node/blob/master/lib/child_process.js#L203 - // err.killed = spawned.killed || killed; - err.killed = err.killed || spawned.killed; +} - if (!parsed.opts.reject) { - return err; - } +/***/ }), - throw err; - } +/***/ 467: +/***/ ((module, exports, __webpack_require__) => { - return { - stdout: handleOutput(parsed.opts, result.stdout), - stderr: handleOutput(parsed.opts, result.stderr), - code: 0, - failed: false, - killed: false, - signal: null, - cmd: joinedCmd, - timedOut: false - }; - }), destroy); +"use strict"; - crossSpawn._enoent.hookChildProcess(spawned, parsed.parsed); - handleInput(spawned, parsed.opts.input); +Object.defineProperty(exports, "__esModule", ({ value: true })); - spawned.then = (onfulfilled, onrejected) => handlePromise().then(onfulfilled, onrejected); - spawned.catch = onrejected => handlePromise().catch(onrejected); +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } - return spawned; -}; +var Stream = _interopDefault(__webpack_require__(2413)); +var http = _interopDefault(__webpack_require__(8605)); +var Url = _interopDefault(__webpack_require__(8835)); +var whatwgUrl = _interopDefault(__webpack_require__(8665)); +var https = _interopDefault(__webpack_require__(7211)); +var zlib = _interopDefault(__webpack_require__(8761)); -// TODO: set `stderr: 'ignore'` when that option is implemented -module.exports.stdout = (...args) => module.exports(...args).then(x => x.stdout); +// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js -// TODO: set `stdout: 'ignore'` when that option is implemented -module.exports.stderr = (...args) => module.exports(...args).then(x => x.stderr); +// fix for "Readable" isn't a named export issue +const Readable = Stream.Readable; -module.exports.shell = (cmd, opts) => handleShell(module.exports, cmd, opts); +const BUFFER = Symbol('buffer'); +const TYPE = Symbol('type'); -module.exports.sync = (cmd, args, opts) => { - const parsed = handleArgs(cmd, args, opts); - const joinedCmd = joinCmd(cmd, args); +class Blob { + constructor() { + this[TYPE] = ''; - if (isStream(parsed.opts.input)) { - throw new TypeError('The `input` option cannot be a stream in sync mode'); - } + const blobParts = arguments[0]; + const options = arguments[1]; - const result = childProcess.spawnSync(parsed.cmd, parsed.args, parsed.opts); - result.code = result.status; + const buffers = []; + let size = 0; - if (result.error || result.status !== 0 || result.signal !== null) { - const err = makeError(result, { - joinedCmd, - parsed - }); + if (blobParts) { + const a = blobParts; + const length = Number(a.length); + for (let i = 0; i < length; i++) { + const element = a[i]; + let buffer; + if (element instanceof Buffer) { + buffer = element; + } else if (ArrayBuffer.isView(element)) { + buffer = Buffer.from(element.buffer, element.byteOffset, 
element.byteLength); + } else if (element instanceof ArrayBuffer) { + buffer = Buffer.from(element); + } else if (element instanceof Blob) { + buffer = element[BUFFER]; + } else { + buffer = Buffer.from(typeof element === 'string' ? element : String(element)); + } + size += buffer.length; + buffers.push(buffer); + } + } - if (!parsed.opts.reject) { - return err; + this[BUFFER] = Buffer.concat(buffers); + + let type = options && options.type !== undefined && String(options.type).toLowerCase(); + if (type && !/[^\u0020-\u007E]/.test(type)) { + this[TYPE] = type; + } + } + get size() { + return this[BUFFER].length; + } + get type() { + return this[TYPE]; + } + text() { + return Promise.resolve(this[BUFFER].toString()); + } + arrayBuffer() { + const buf = this[BUFFER]; + const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + return Promise.resolve(ab); + } + stream() { + const readable = new Readable(); + readable._read = function () {}; + readable.push(this[BUFFER]); + readable.push(null); + return readable; + } + toString() { + return '[object Blob]'; + } + slice() { + const size = this.size; + + const start = arguments[0]; + const end = arguments[1]; + let relativeStart, relativeEnd; + if (start === undefined) { + relativeStart = 0; + } else if (start < 0) { + relativeStart = Math.max(size + start, 0); + } else { + relativeStart = Math.min(start, size); + } + if (end === undefined) { + relativeEnd = size; + } else if (end < 0) { + relativeEnd = Math.max(size + end, 0); + } else { + relativeEnd = Math.min(end, size); } + const span = Math.max(relativeEnd - relativeStart, 0); - throw err; + const buffer = this[BUFFER]; + const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); + const blob = new Blob([], { type: arguments[2] }); + blob[BUFFER] = slicedBuffer; + return blob; } +} - return { - stdout: handleOutput(parsed.opts, result.stdout), - stderr: handleOutput(parsed.opts, result.stderr), - code: 0, - failed: false, - signal: 
null, - cmd: joinedCmd, - timedOut: false - }; -}; +Object.defineProperties(Blob.prototype, { + size: { enumerable: true }, + type: { enumerable: true }, + slice: { enumerable: true } +}); + +Object.defineProperty(Blob.prototype, Symbol.toStringTag, { + value: 'Blob', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * fetch-error.js + * + * FetchError interface for operational errors + */ + +/** + * Create FetchError instance + * + * @param String message Error message for human + * @param String type Error type for machine + * @param String systemError For Node.js system error + * @return FetchError + */ +function FetchError(message, type, systemError) { + Error.call(this, message); -module.exports.shellSync = (cmd, opts) => handleShell(module.exports.sync, cmd, opts); + this.message = message; + this.type = type; + // when err.type is `system`, err.code contains system error code + if (systemError) { + this.code = this.errno = systemError.code; + } -/***/ }), + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} -/***/ 4689: -/***/ ((module, __unused_webpack_exports, __webpack_require__) => { +FetchError.prototype = Object.create(Error.prototype); +FetchError.prototype.constructor = FetchError; +FetchError.prototype.name = 'FetchError'; -"use strict"; +let convert; +try { + convert = __webpack_require__(2877).convert; +} catch (e) {} -// Older verions of Node.js might not have `util.getSystemErrorName()`. -// In that case, fall back to a deprecated internal. 
-const util = __webpack_require__(1669); +const INTERNALS = Symbol('Body internals'); -let uv; +// fix an issue where "PassThrough" isn't a named export for node <10 +const PassThrough = Stream.PassThrough; -if (typeof util.getSystemErrorName === 'function') { - module.exports = util.getSystemErrorName; -} else { - try { - uv = process.binding('uv'); +/** + * Body mixin + * + * Ref: https://fetch.spec.whatwg.org/#body + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +function Body(body) { + var _this = this; - if (typeof uv.errname !== 'function') { - throw new TypeError('uv.errname is not a function'); - } - } catch (err) { - console.error('execa/lib/errname: unable to establish process.binding(\'uv\')', err); - uv = null; + var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, + _ref$size = _ref.size; + + let size = _ref$size === undefined ? 0 : _ref$size; + var _ref$timeout = _ref.timeout; + let timeout = _ref$timeout === undefined ? 0 : _ref$timeout; + + if (body == null) { + // body is undefined or null + body = null; + } else if (isURLSearchParams(body)) { + // body is a URLSearchParams + body = Buffer.from(body.toString()); + } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { + // body is ArrayBuffer + body = Buffer.from(body); + } else if (ArrayBuffer.isView(body)) { + // body is ArrayBufferView + body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); + } else if (body instanceof Stream) ; else { + // none of the above + // coerce to string then buffer + body = Buffer.from(String(body)); } + this[INTERNALS] = { + body, + disturbed: false, + error: null + }; + this.size = size; + this.timeout = timeout; - module.exports = code => errname(uv, code); + if (body instanceof Stream) { + body.on('error', function (err) { + const error = err.name === 'AbortError' ? 
err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); + _this[INTERNALS].error = error; + }); + } } -// Used for testing the fallback behavior -module.exports.__test__ = errname; +Body.prototype = { + get body() { + return this[INTERNALS].body; + }, -function errname(uv, code) { - if (uv) { - return uv.errname(code); - } + get bodyUsed() { + return this[INTERNALS].disturbed; + }, - if (!(code < 0)) { - throw new Error('err >= 0'); - } + /** + * Decode response as ArrayBuffer + * + * @return Promise + */ + arrayBuffer() { + return consumeBody.call(this).then(function (buf) { + return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + }); + }, - return `Unknown system error ${code}`; -} + /** + * Return raw response as Blob + * + * @return Promise + */ + blob() { + let ct = this.headers && this.headers.get('content-type') || ''; + return consumeBody.call(this).then(function (buf) { + return Object.assign( + // Prevent copying + new Blob([], { + type: ct.toLowerCase() + }), { + [BUFFER]: buf + }); + }); + }, + /** + * Decode response as json + * + * @return Promise + */ + json() { + var _this2 = this; + return consumeBody.call(this).then(function (buffer) { + try { + return JSON.parse(buffer.toString()); + } catch (err) { + return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); + } + }); + }, -/***/ }), + /** + * Decode response as text + * + * @return Promise + */ + text() { + return consumeBody.call(this).then(function (buffer) { + return buffer.toString(); + }); + }, -/***/ 166: -/***/ ((module) => { + /** + * Decode response as buffer (non-spec api) + * + * @return Promise + */ + buffer() { + return consumeBody.call(this); + }, -"use strict"; + /** + * Decode response as text, while automatically detecting the encoding and + * trying to decode to UTF-8 (non-spec api) + * + * @return Promise + */ + textConverted() { 
+ var _this3 = this; -const alias = ['stdin', 'stdout', 'stderr']; + return consumeBody.call(this).then(function (buffer) { + return convertBody(buffer, _this3.headers); + }); + } +}; -const hasAlias = opts => alias.some(x => Boolean(opts[x])); +// In browsers, all properties are enumerable. +Object.defineProperties(Body.prototype, { + body: { enumerable: true }, + bodyUsed: { enumerable: true }, + arrayBuffer: { enumerable: true }, + blob: { enumerable: true }, + json: { enumerable: true }, + text: { enumerable: true } +}); -module.exports = opts => { - if (!opts) { - return null; +Body.mixIn = function (proto) { + for (const name of Object.getOwnPropertyNames(Body.prototype)) { + // istanbul ignore else: future proof + if (!(name in proto)) { + const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); + Object.defineProperty(proto, name, desc); + } } +}; - if (opts.stdio && hasAlias(opts)) { - throw new Error(`It's not possible to provide \`stdio\` in combination with one of ${alias.map(x => `\`${x}\``).join(', ')}`); +/** + * Consume and convert an entire Body to a Buffer. 
+ * + * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body + * + * @return Promise + */ +function consumeBody() { + var _this4 = this; + + if (this[INTERNALS].disturbed) { + return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); } - if (typeof opts.stdio === 'string') { - return opts.stdio; + this[INTERNALS].disturbed = true; + + if (this[INTERNALS].error) { + return Body.Promise.reject(this[INTERNALS].error); } - const stdio = opts.stdio || []; + let body = this.body; - if (!Array.isArray(stdio)) { - throw new TypeError(`Expected \`stdio\` to be of type \`string\` or \`Array\`, got \`${typeof stdio}\``); + // body is null + if (body === null) { + return Body.Promise.resolve(Buffer.alloc(0)); } - const result = []; - const len = Math.max(stdio.length, alias.length); + // body is blob + if (isBlob(body)) { + body = body.stream(); + } - for (let i = 0; i < len; i++) { - let value = null; + // body is buffer + if (Buffer.isBuffer(body)) { + return Body.Promise.resolve(body); + } - if (stdio[i] !== undefined) { - value = stdio[i]; - } else if (opts[alias[i]] !== undefined) { - value = opts[alias[i]]; - } + // istanbul ignore if: should never happen + if (!(body instanceof Stream)) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is stream + // get ready to actually consume the body + let accum = []; + let accumBytes = 0; + let abort = false; - result[i] = value; - } + return new Body.Promise(function (resolve, reject) { + let resTimeout; - return result; -}; + // allow timeout on slow response body + if (_this4.timeout) { + resTimeout = setTimeout(function () { + abort = true; + reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); + }, _this4.timeout); + } + // handle stream errors + body.on('error', function (err) { + if (err.name === 'AbortError') { + // if the request was aborted, reject with this Error + abort = true; + reject(err); + } else { 
+ // other errors, such as incorrect content-encoding + reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); + } + }); -/***/ }), + body.on('data', function (chunk) { + if (abort || chunk === null) { + return; + } -/***/ 1585: -/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + if (_this4.size && accumBytes + chunk.length > _this4.size) { + abort = true; + reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); + return; + } -"use strict"; + accumBytes += chunk.length; + accum.push(chunk); + }); -const {PassThrough} = __webpack_require__(2413); + body.on('end', function () { + if (abort) { + return; + } -module.exports = options => { - options = Object.assign({}, options); + clearTimeout(resTimeout); - const {array} = options; - let {encoding} = options; - const buffer = encoding === 'buffer'; - let objectMode = false; + try { + resolve(Buffer.concat(accum, accumBytes)); + } catch (err) { + // handle streams that have accumulated too much data (issue #414) + reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + }); +} - if (array) { - objectMode = !(encoding || buffer); - } else { - encoding = encoding || 'utf8'; +/** + * Detect buffer encoding and convert to target encoding + * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding + * + * @param Buffer buffer Incoming buffer + * @param String encoding Target encoding + * @return String + */ +function convertBody(buffer, headers) { + if (typeof convert !== 'function') { + throw new Error('The package `encoding` must be installed to use the textConverted() function'); } - if (buffer) { - encoding = null; + const ct = headers.get('content-type'); + let charset = 'utf-8'; + let res, str; + + // header + if (ct) { + res = /charset=([^;]*)/i.exec(ct); } - let len = 0; - const ret = 
[]; - const stream = new PassThrough({objectMode}); + // no charset in content type, peek at response body for at most 1024 bytes + str = buffer.slice(0, 1024).toString(); - if (encoding) { - stream.setEncoding(encoding); + // html5 + if (!res && str) { + res = / { - ret.push(chunk); - - if (objectMode) { - len = ret.length; - } else { - len += chunk.length; + // html4 + if (!res && str) { + res = / { - if (array) { - return ret; + if (res) { + res = /charset=(.*)/i.exec(res.pop()); } + } - return buffer ? Buffer.concat(ret, len) : ret.join(''); - }; + // xml + if (!res && str) { + res = /<\?xml.+?encoding=(['"])(.+?)\1/i.exec(str); + } - stream.getBufferedLength = () => len; + // found charset + if (res) { + charset = res.pop(); - return stream; -}; + // prevent decode issues when sites use incorrect encoding + // ref: https://hsivonen.fi/encoding-menu/ + if (charset === 'gb2312' || charset === 'gbk') { + charset = 'gb18030'; + } + } + // turn raw buffers into a single utf-8 buffer + return convert(buffer, 'UTF-8', charset).toString(); +} -/***/ }), +/** + * Detect a URLSearchParams object + * ref: https://github.com/bitinn/node-fetch/issues/296#issuecomment-307598143 + * + * @param Object obj Object to detect by type or brand + * @return String + */ +function isURLSearchParams(obj) { + // Duck-typing as a necessary condition. + if (typeof obj !== 'object' || typeof obj.append !== 'function' || typeof obj.delete !== 'function' || typeof obj.get !== 'function' || typeof obj.getAll !== 'function' || typeof obj.has !== 'function' || typeof obj.set !== 'function') { + return false; + } -/***/ 1766: -/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + // Brand-checking and more duck-typing as optional condition. 
+ return obj.constructor.name === 'URLSearchParams' || Object.prototype.toString.call(obj) === '[object URLSearchParams]' || typeof obj.sort === 'function'; +} -"use strict"; +/** + * Check if `obj` is a W3C `Blob` object (which `File` inherits from) + * @param {*} obj + * @return {boolean} + */ +function isBlob(obj) { + return typeof obj === 'object' && typeof obj.arrayBuffer === 'function' && typeof obj.type === 'string' && typeof obj.stream === 'function' && typeof obj.constructor === 'function' && typeof obj.constructor.name === 'string' && /^(Blob|File)$/.test(obj.constructor.name) && /^(Blob|File)$/.test(obj[Symbol.toStringTag]); +} -const pump = __webpack_require__(8341); -const bufferStream = __webpack_require__(1585); +/** + * Clone body given Res/Req instance + * + * @param Mixed instance Response or Request instance + * @return Mixed + */ +function clone(instance) { + let p1, p2; + let body = instance.body; -class MaxBufferError extends Error { - constructor() { - super('maxBuffer exceeded'); - this.name = 'MaxBufferError'; + // don't allow cloning a used body + if (instance.bodyUsed) { + throw new Error('cannot clone body after it is used'); } -} -function getStream(inputStream, options) { - if (!inputStream) { - return Promise.reject(new Error('Expected a stream')); + // check that body is a stream and not form-data object + // note: we can't clone the form-data object without having it as a dependency + if (body instanceof Stream && typeof body.getBoundary !== 'function') { + // tee instance body + p1 = new PassThrough(); + p2 = new PassThrough(); + body.pipe(p1); + body.pipe(p2); + // set instance body to teed body and return the other teed body + instance[INTERNALS].body = p1; + body = p2; } - options = Object.assign({maxBuffer: Infinity}, options); - - const {maxBuffer} = options; - - let stream; - return new Promise((resolve, reject) => { - const rejectPromise = error => { - if (error) { // A null check - error.bufferedData = 
stream.getBufferedValue(); - } - reject(error); - }; - - stream = pump(inputStream, bufferStream(options), error => { - if (error) { - rejectPromise(error); - return; - } - - resolve(); - }); + return body; +} - stream.on('data', () => { - if (stream.getBufferedLength() > maxBuffer) { - rejectPromise(new MaxBufferError()); - } - }); - }).then(() => stream.getBufferedValue()); +/** + * Performs the operation "extract a `Content-Type` value from |object|" as + * specified in the specification: + * https://fetch.spec.whatwg.org/#concept-bodyinit-extract + * + * This function assumes that instance.body is present. + * + * @param Mixed instance Any options.body input + */ +function extractContentType(body) { + if (body === null) { + // body is null + return null; + } else if (typeof body === 'string') { + // body is string + return 'text/plain;charset=UTF-8'; + } else if (isURLSearchParams(body)) { + // body is a URLSearchParams + return 'application/x-www-form-urlencoded;charset=UTF-8'; + } else if (isBlob(body)) { + // body is blob + return body.type || null; + } else if (Buffer.isBuffer(body)) { + // body is buffer + return null; + } else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { + // body is ArrayBuffer + return null; + } else if (ArrayBuffer.isView(body)) { + // body is ArrayBufferView + return null; + } else if (typeof body.getBoundary === 'function') { + // detect form data input from form-data module + return `multipart/form-data;boundary=${body.getBoundary()}`; + } else if (body instanceof Stream) { + // body is stream + // can't really do much about this + return null; + } else { + // Body constructor defaults other things to string + return 'text/plain;charset=UTF-8'; + } } -module.exports = getStream; -module.exports.buffer = (stream, options) => getStream(stream, Object.assign({}, options, {encoding: 'buffer'})); -module.exports.array = (stream, options) => getStream(stream, Object.assign({}, options, {array: true})); 
-module.exports.MaxBufferError = MaxBufferError; - - -/***/ }), - -/***/ 8840: -/***/ ((module) => { - -"use strict"; - - -/*! - * isobject +/** + * The Fetch Standard treats this as if "total bytes" is a property on the body. + * For us, we have to explicitly get it with a function. * - * Copyright (c) 2014-2017, Jon Schlinkert. - * Released under the MIT License. + * ref: https://fetch.spec.whatwg.org/#concept-body-total-bytes + * + * @param Body instance Instance of Body + * @return Number? Number of bytes, or null if not possible */ +function getTotalBytes(instance) { + const body = instance.body; -function isObject(val) { - return val != null && typeof val === 'object' && Array.isArray(val) === false; + + if (body === null) { + // body is null + return 0; + } else if (isBlob(body)) { + return body.size; + } else if (Buffer.isBuffer(body)) { + // body is buffer + return body.length; + } else if (body && typeof body.getLengthSync === 'function') { + // detect form data input from form-data module + if (body._lengthRetrievers && body._lengthRetrievers.length == 0 || // 1.x + body.hasKnownLength && body.hasKnownLength()) { + // 2.x + return body.getLengthSync(); + } + return null; + } else { + // body is stream + return null; + } } -/*! - * is-plain-object +/** + * Write a Body to a Node.js WritableStream (e.g. http.Request) object. * - * Copyright (c) 2014-2017, Jon Schlinkert. - * Released under the MIT License. 
+ * @param Body instance Instance of Body + * @return Void */ +function writeToStream(dest, instance) { + const body = instance.body; -function isObjectObject(o) { - return isObject(o) === true - && Object.prototype.toString.call(o) === '[object Object]'; -} - -function isPlainObject(o) { - var ctor,prot; - - if (isObjectObject(o) === false) return false; - - // If has modified constructor - ctor = o.constructor; - if (typeof ctor !== 'function') return false; - - // If has modified prototype - prot = ctor.prototype; - if (isObjectObject(prot) === false) return false; - - // If constructor does not have an Object-specific method - if (prot.hasOwnProperty('isPrototypeOf') === false) { - return false; - } - // Most likely a plain Object - return true; + if (body === null) { + // body is null + dest.end(); + } else if (isBlob(body)) { + body.stream().pipe(dest); + } else if (Buffer.isBuffer(body)) { + // body is buffer + dest.write(body); + dest.end(); + } else { + // body is stream + body.pipe(dest); + } } -module.exports = isPlainObject; - - -/***/ }), +// expose Promise +Body.Promise = global.Promise; -/***/ 1554: -/***/ ((module) => { +/** + * headers.js + * + * Headers class offers convenient helpers + */ -"use strict"; +const invalidTokenRegex = /[^\^_`a-zA-Z\-0-9!#$%&'*+.|~]/; +const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/; +function validateName(name) { + name = `${name}`; + if (invalidTokenRegex.test(name) || name === '') { + throw new TypeError(`${name} is not a legal HTTP header name`); + } +} -var isStream = module.exports = function (stream) { - return stream !== null && typeof stream === 'object' && typeof stream.pipe === 'function'; -}; +function validateValue(value) { + value = `${value}`; + if (invalidHeaderCharRegex.test(value)) { + throw new TypeError(`${value} is not a legal HTTP header value`); + } +} -isStream.writable = function (stream) { - return isStream(stream) && stream.writable !== false && typeof stream._write === 'function' && 
typeof stream._writableState === 'object'; -}; +/** + * Find the key in the map object given a header name. + * + * Returns undefined if not found. + * + * @param String name Header name + * @return String|Undefined + */ +function find(map, name) { + name = name.toLowerCase(); + for (const key in map) { + if (key.toLowerCase() === name) { + return key; + } + } + return undefined; +} -isStream.readable = function (stream) { - return isStream(stream) && stream.readable !== false && typeof stream._read === 'function' && typeof stream._readableState === 'object'; -}; +const MAP = Symbol('map'); +class Headers { + /** + * Headers class + * + * @param Object headers Response headers + * @return Void + */ + constructor() { + let init = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : undefined; -isStream.duplex = function (stream) { - return isStream.writable(stream) && isStream.readable(stream); -}; + this[MAP] = Object.create(null); -isStream.transform = function (stream) { - return isStream.duplex(stream) && typeof stream._transform === 'function' && typeof stream._transformState === 'object'; -}; + if (init instanceof Headers) { + const rawHeaders = init.raw(); + const headerNames = Object.keys(rawHeaders); + for (const headerName of headerNames) { + for (const value of rawHeaders[headerName]) { + this.append(headerName, value); + } + } -/***/ }), + return; + } -/***/ 7126: -/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + // We don't worry about converting prop to ByteString here as append() + // will handle it. 
+ if (init == null) ; else if (typeof init === 'object') { + const method = init[Symbol.iterator]; + if (method != null) { + if (typeof method !== 'function') { + throw new TypeError('Header pairs must be iterable'); + } -var fs = __webpack_require__(5747) -var core -if (process.platform === 'win32' || global.TESTING_WINDOWS) { - core = __webpack_require__(2001) -} else { - core = __webpack_require__(9728) -} + // sequence> + // Note: per spec we have to first exhaust the lists then process them + const pairs = []; + for (const pair of init) { + if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { + throw new TypeError('Each header pair must be iterable'); + } + pairs.push(Array.from(pair)); + } -module.exports = isexe -isexe.sync = sync + for (const pair of pairs) { + if (pair.length !== 2) { + throw new TypeError('Each header pair must be a name/value tuple'); + } + this.append(pair[0], pair[1]); + } + } else { + // record + for (const key of Object.keys(init)) { + const value = init[key]; + this.append(key, value); + } + } + } else { + throw new TypeError('Provided initializer must be an object'); + } + } -function isexe (path, options, cb) { - if (typeof options === 'function') { - cb = options - options = {} - } + /** + * Return combined header value given name + * + * @param String name Header name + * @return Mixed + */ + get(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key === undefined) { + return null; + } - if (!cb) { - if (typeof Promise !== 'function') { - throw new TypeError('callback not provided') - } + return this[MAP][key].join(', '); + } - return new Promise(function (resolve, reject) { - isexe(path, options || {}, function (er, is) { - if (er) { - reject(er) - } else { - resolve(is) - } - }) - }) - } + /** + * Iterate over all headers + * + * @param Function callback Executed for each item with parameters (value, name, thisArg) + * @param Boolean thisArg `this` context for 
callback function + * @return Void + */ + forEach(callback) { + let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined; - core(path, options || {}, function (er, is) { - // ignore EACCES because that just means we aren't allowed to run it - if (er) { - if (er.code === 'EACCES' || options && options.ignoreErrors) { - er = null - is = false - } - } - cb(er, is) - }) -} + let pairs = getHeaders(this); + let i = 0; + while (i < pairs.length) { + var _pairs$i = pairs[i]; + const name = _pairs$i[0], + value = _pairs$i[1]; -function sync (path, options) { - // my kingdom for a filtered catch - try { - return core.sync(path, options || {}) - } catch (er) { - if (options && options.ignoreErrors || er.code === 'EACCES') { - return false - } else { - throw er - } - } -} + callback.call(thisArg, value, name, this); + pairs = getHeaders(this); + i++; + } + } + /** + * Overwrite header values given name + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + set(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + this[MAP][key !== undefined ? 
key : name] = [value]; + } -/***/ }), + /** + * Append a value onto existing header + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + append(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + if (key !== undefined) { + this[MAP][key].push(value); + } else { + this[MAP][name] = [value]; + } + } -/***/ 9728: -/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + /** + * Check for header name existence + * + * @param String name Header name + * @return Boolean + */ + has(name) { + name = `${name}`; + validateName(name); + return find(this[MAP], name) !== undefined; + } -module.exports = isexe -isexe.sync = sync + /** + * Delete all header values given name + * + * @param String name Header name + * @return Void + */ + delete(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key !== undefined) { + delete this[MAP][key]; + } + } -var fs = __webpack_require__(5747) + /** + * Return raw headers (non-spec api) + * + * @return Object + */ + raw() { + return this[MAP]; + } -function isexe (path, options, cb) { - fs.stat(path, function (er, stat) { - cb(er, er ? false : checkStat(stat, options)) - }) -} + /** + * Get an iterator on keys. + * + * @return Iterator + */ + keys() { + return createHeadersIterator(this, 'key'); + } -function sync (path, options) { - return checkStat(fs.statSync(path), options) -} + /** + * Get an iterator on values. + * + * @return Iterator + */ + values() { + return createHeadersIterator(this, 'value'); + } -function checkStat (stat, options) { - return stat.isFile() && checkMode(stat, options) + /** + * Get an iterator on entries. + * + * This is the default iterator of the Headers object. 
+ * + * @return Iterator + */ + [Symbol.iterator]() { + return createHeadersIterator(this, 'key+value'); + } } +Headers.prototype.entries = Headers.prototype[Symbol.iterator]; -function checkMode (stat, options) { - var mod = stat.mode - var uid = stat.uid - var gid = stat.gid +Object.defineProperty(Headers.prototype, Symbol.toStringTag, { + value: 'Headers', + writable: false, + enumerable: false, + configurable: true +}); - var myUid = options.uid !== undefined ? - options.uid : process.getuid && process.getuid() - var myGid = options.gid !== undefined ? - options.gid : process.getgid && process.getgid() +Object.defineProperties(Headers.prototype, { + get: { enumerable: true }, + forEach: { enumerable: true }, + set: { enumerable: true }, + append: { enumerable: true }, + has: { enumerable: true }, + delete: { enumerable: true }, + keys: { enumerable: true }, + values: { enumerable: true }, + entries: { enumerable: true } +}); - var u = parseInt('100', 8) - var g = parseInt('010', 8) - var o = parseInt('001', 8) - var ug = u | g +function getHeaders(headers) { + let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; - var ret = (mod & o) || - (mod & g) && gid === myGid || - (mod & u) && uid === myUid || - (mod & ug) && myUid === 0 + const keys = Object.keys(headers[MAP]).sort(); + return keys.map(kind === 'key' ? function (k) { + return k.toLowerCase(); + } : kind === 'value' ? 
function (k) { + return headers[MAP][k].join(', '); + } : function (k) { + return [k.toLowerCase(), headers[MAP][k].join(', ')]; + }); +} - return ret +const INTERNAL = Symbol('internal'); + +function createHeadersIterator(target, kind) { + const iterator = Object.create(HeadersIteratorPrototype); + iterator[INTERNAL] = { + target, + kind, + index: 0 + }; + return iterator; } +const HeadersIteratorPrototype = Object.setPrototypeOf({ + next() { + // istanbul ignore if + if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { + throw new TypeError('Value of `this` is not a HeadersIterator'); + } -/***/ }), + var _INTERNAL = this[INTERNAL]; + const target = _INTERNAL.target, + kind = _INTERNAL.kind, + index = _INTERNAL.index; -/***/ 2001: -/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + const values = getHeaders(target, kind); + const len = values.length; + if (index >= len) { + return { + value: undefined, + done: true + }; + } -module.exports = isexe -isexe.sync = sync + this[INTERNAL].index = index + 1; + + return { + value: values[index], + done: false + }; + } +}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); -var fs = __webpack_require__(5747) +Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { + value: 'HeadersIterator', + writable: false, + enumerable: false, + configurable: true +}); -function checkPathExt (path, options) { - var pathext = options.pathExt !== undefined ? - options.pathExt : process.env.PATHEXT +/** + * Export the Headers object in a form that Node.js can consume. + * + * @param Headers headers + * @return Object + */ +function exportNodeCompatibleHeaders(headers) { + const obj = Object.assign({ __proto__: null }, headers[MAP]); - if (!pathext) { - return true - } + // http.request() only supports string as Host header. This hack makes + // specifying custom Host header possible. 
+ const hostHeaderKey = find(headers[MAP], 'Host'); + if (hostHeaderKey !== undefined) { + obj[hostHeaderKey] = obj[hostHeaderKey][0]; + } - pathext = pathext.split(';') - if (pathext.indexOf('') !== -1) { - return true - } - for (var i = 0; i < pathext.length; i++) { - var p = pathext[i].toLowerCase() - if (p && path.substr(-p.length).toLowerCase() === p) { - return true - } - } - return false + return obj; } -function checkStat (stat, path, options) { - if (!stat.isSymbolicLink() && !stat.isFile()) { - return false - } - return checkPathExt(path, options) +/** + * Create a Headers object from an object of headers, ignoring those that do + * not conform to HTTP grammar productions. + * + * @param Object obj Object of headers + * @return Headers + */ +function createHeadersLenient(obj) { + const headers = new Headers(); + for (const name of Object.keys(obj)) { + if (invalidTokenRegex.test(name)) { + continue; + } + if (Array.isArray(obj[name])) { + for (const val of obj[name]) { + if (invalidHeaderCharRegex.test(val)) { + continue; + } + if (headers[MAP][name] === undefined) { + headers[MAP][name] = [val]; + } else { + headers[MAP][name].push(val); + } + } + } else if (!invalidHeaderCharRegex.test(obj[name])) { + headers[MAP][name] = [obj[name]]; + } + } + return headers; } -function isexe (path, options, cb) { - fs.stat(path, function (er, stat) { - cb(er, er ? false : checkStat(stat, path, options)) - }) -} +const INTERNALS$1 = Symbol('Response internals'); -function sync (path, options) { - return checkStat(fs.statSync(path), path, options) -} +// fix an issue where "STATUS_CODES" aren't a named export for node <10 +const STATUS_CODES = http.STATUS_CODES; +/** + * Response class + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +class Response { + constructor() { + let body = arguments.length > 0 && arguments[0] !== undefined ? 
arguments[0] : null; + let opts = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; -/***/ }), + Body.call(this, body, opts); -/***/ 7493: -/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + const status = opts.status || 200; + const headers = new Headers(opts.headers); -"use strict"; + if (body != null && !headers.has('Content-Type')) { + const contentType = extractContentType(body); + if (contentType) { + headers.append('Content-Type', contentType); + } + } -const os = __webpack_require__(2087); + this[INTERNALS$1] = { + url: opts.url, + status, + statusText: opts.statusText || STATUS_CODES[status], + headers, + counter: opts.counter + }; + } -const nameMap = new Map([ - [20, ['Big Sur', '11']], - [19, ['Catalina', '10.15']], - [18, ['Mojave', '10.14']], - [17, ['High Sierra', '10.13']], - [16, ['Sierra', '10.12']], - [15, ['El Capitan', '10.11']], - [14, ['Yosemite', '10.10']], - [13, ['Mavericks', '10.9']], - [12, ['Mountain Lion', '10.8']], - [11, ['Lion', '10.7']], - [10, ['Snow Leopard', '10.6']], - [9, ['Leopard', '10.5']], - [8, ['Tiger', '10.4']], - [7, ['Panther', '10.3']], - [6, ['Jaguar', '10.2']], - [5, ['Puma', '10.1']] -]); + get url() { + return this[INTERNALS$1].url || ''; + } -const macosRelease = release => { - release = Number((release || os.release()).split('.')[0]); + get status() { + return this[INTERNALS$1].status; + } - const [name, version] = nameMap.get(release); + /** + * Convenience property representing if the request ended normally + */ + get ok() { + return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; + } - return { - name, - version - }; -}; + get redirected() { + return this[INTERNALS$1].counter > 0; + } -module.exports = macosRelease; -// TODO: remove this in the next major version -module.exports.default = macosRelease; + get statusText() { + return this[INTERNALS$1].statusText; + } + get headers() { + return this[INTERNALS$1].headers; + } -/***/ }), + /** + * Clone 
this response + * + * @return Response + */ + clone() { + return new Response(clone(this), { + url: this.url, + status: this.status, + statusText: this.statusText, + headers: this.headers, + ok: this.ok, + redirected: this.redirected + }); + } +} -/***/ 8560: -/***/ ((module) => { +Body.mixIn(Response.prototype); -"use strict"; +Object.defineProperties(Response.prototype, { + url: { enumerable: true }, + status: { enumerable: true }, + ok: { enumerable: true }, + redirected: { enumerable: true }, + statusText: { enumerable: true }, + headers: { enumerable: true }, + clone: { enumerable: true } +}); + +Object.defineProperty(Response.prototype, Symbol.toStringTag, { + value: 'Response', + writable: false, + enumerable: false, + configurable: true +}); + +const INTERNALS$2 = Symbol('Request internals'); +const URL = Url.URL || whatwgUrl.URL; +// fix an issue where "format", "parse" aren't a named export for node <10 +const parse_url = Url.parse; +const format_url = Url.format; /** - * Tries to execute a function and discards any error that occurs. - * @param {Function} fn - Function that might or might not throw an error. - * @returns {?*} Return-value of the function when no error occurred. 
+ * Wrapper around `new URL` to handle arbitrary URLs + * + * @param {string} urlStr + * @return {void} */ -module.exports = function(fn) { - - try { return fn() } catch (e) {} +function parseURL(urlStr) { + /* + Check whether the URL is absolute or not + Scheme: https://tools.ietf.org/html/rfc3986#section-3.1 + Absolute URL: https://tools.ietf.org/html/rfc3986#section-4.3 + */ + if (/^[a-zA-Z][a-zA-Z\d+\-.]*:/.exec(urlStr)) { + urlStr = new URL(urlStr).toString(); + } + // Fallback to old implementation for arbitrary URLs + return parse_url(urlStr); } -/***/ }), - -/***/ 467: -/***/ ((module, exports, __webpack_require__) => { - -"use strict"; - +const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; -Object.defineProperty(exports, "__esModule", ({ value: true })); +/** + * Check if a value is an instance of Request. + * + * @param Mixed input + * @return Boolean + */ +function isRequest(input) { + return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; +} -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } +function isAbortSignal(signal) { + const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); + return !!(proto && proto.constructor.name === 'AbortSignal'); +} -var Stream = _interopDefault(__webpack_require__(2413)); -var http = _interopDefault(__webpack_require__(8605)); -var Url = _interopDefault(__webpack_require__(8835)); -var https = _interopDefault(__webpack_require__(7211)); -var zlib = _interopDefault(__webpack_require__(8761)); +/** + * Request class + * + * @param Mixed input Url or Request instance + * @param Object init Custom options + * @return Void + */ +class Request { + constructor(input) { + let init = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; -// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js + let parsedURL; -// fix for "Readable" isn't a named export issue -const Readable = Stream.Readable; + // normalize input + if (!isRequest(input)) { + if (input && input.href) { + // in order to support Node.js' Url objects; though WHATWG's URL objects + // will fall into this branch also (since their `toString()` will return + // `href` property anyway) + parsedURL = parseURL(input.href); + } else { + // coerce input to a string before attempting to parse + parsedURL = parseURL(`${input}`); + } + input = {}; + } else { + parsedURL = parseURL(input.url); + } -const BUFFER = Symbol('buffer'); -const TYPE = Symbol('type'); + let method = init.method || input.method || 'GET'; + method = method.toUpperCase(); -class Blob { - constructor() { - this[TYPE] = ''; + if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { + throw new TypeError('Request with GET/HEAD method cannot have body'); + } - const blobParts = arguments[0]; - const options = arguments[1]; + let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null; - const buffers = []; - let size = 0; + Body.call(this, inputBody, { + timeout: init.timeout || input.timeout || 0, + size: init.size || input.size || 0 + }); - if (blobParts) { - const a = blobParts; - const length = Number(a.length); - for (let i = 0; i < length; i++) { - const element = a[i]; - let buffer; - if (element instanceof Buffer) { - buffer = element; - } else if (ArrayBuffer.isView(element)) { - buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); - } else if (element instanceof ArrayBuffer) { - buffer = Buffer.from(element); - } else if (element instanceof Blob) { - buffer = element[BUFFER]; - } else { - buffer = Buffer.from(typeof element === 'string' ? 
element : String(element)); - } - size += buffer.length; - buffers.push(buffer); + const headers = new Headers(init.headers || input.headers || {}); + + if (inputBody != null && !headers.has('Content-Type')) { + const contentType = extractContentType(inputBody); + if (contentType) { + headers.append('Content-Type', contentType); } } - this[BUFFER] = Buffer.concat(buffers); + let signal = isRequest(input) ? input.signal : null; + if ('signal' in init) signal = init.signal; - let type = options && options.type !== undefined && String(options.type).toLowerCase(); - if (type && !/[^\u0020-\u007E]/.test(type)) { - this[TYPE] = type; + if (signal != null && !isAbortSignal(signal)) { + throw new TypeError('Expected signal to be an instanceof AbortSignal'); } + + this[INTERNALS$2] = { + method, + redirect: init.redirect || input.redirect || 'follow', + headers, + parsedURL, + signal + }; + + // node-fetch-only options + this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; + this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? 
input.compress : true; + this.counter = init.counter || input.counter || 0; + this.agent = init.agent || input.agent; } - get size() { - return this[BUFFER].length; - } - get type() { - return this[TYPE]; - } - text() { - return Promise.resolve(this[BUFFER].toString()); + + get method() { + return this[INTERNALS$2].method; } - arrayBuffer() { - const buf = this[BUFFER]; - const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); - return Promise.resolve(ab); + + get url() { + return format_url(this[INTERNALS$2].parsedURL); } - stream() { - const readable = new Readable(); - readable._read = function () {}; - readable.push(this[BUFFER]); - readable.push(null); - return readable; + + get headers() { + return this[INTERNALS$2].headers; } - toString() { - return '[object Blob]'; + + get redirect() { + return this[INTERNALS$2].redirect; } - slice() { - const size = this.size; - const start = arguments[0]; - const end = arguments[1]; - let relativeStart, relativeEnd; - if (start === undefined) { - relativeStart = 0; - } else if (start < 0) { - relativeStart = Math.max(size + start, 0); - } else { - relativeStart = Math.min(start, size); - } - if (end === undefined) { - relativeEnd = size; - } else if (end < 0) { - relativeEnd = Math.max(size + end, 0); - } else { - relativeEnd = Math.min(end, size); - } - const span = Math.max(relativeEnd - relativeStart, 0); + get signal() { + return this[INTERNALS$2].signal; + } - const buffer = this[BUFFER]; - const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); - const blob = new Blob([], { type: arguments[2] }); - blob[BUFFER] = slicedBuffer; - return blob; + /** + * Clone this request + * + * @return Request + */ + clone() { + return new Request(this); } } -Object.defineProperties(Blob.prototype, { - size: { enumerable: true }, - type: { enumerable: true }, - slice: { enumerable: true } -}); +Body.mixIn(Request.prototype); -Object.defineProperty(Blob.prototype, Symbol.toStringTag, { - value: 
'Blob', +Object.defineProperty(Request.prototype, Symbol.toStringTag, { + value: 'Request', writable: false, enumerable: false, configurable: true }); -/** - * fetch-error.js - * - * FetchError interface for operational errors - */ +Object.defineProperties(Request.prototype, { + method: { enumerable: true }, + url: { enumerable: true }, + headers: { enumerable: true }, + redirect: { enumerable: true }, + clone: { enumerable: true }, + signal: { enumerable: true } +}); /** - * Create FetchError instance + * Convert a Request to Node.js http request options. * - * @param String message Error message for human - * @param String type Error type for machine - * @param String systemError For Node.js system error - * @return FetchError + * @param Request A Request instance + * @return Object The options object to be passed to http.request */ -function FetchError(message, type, systemError) { - Error.call(this, message); - - this.message = message; - this.type = type; - - // when err.type is `system`, err.code contains system error code - if (systemError) { - this.code = this.errno = systemError.code; - } - - // hide custom error implementation details from end-users - Error.captureStackTrace(this, this.constructor); -} - -FetchError.prototype = Object.create(Error.prototype); -FetchError.prototype.constructor = FetchError; -FetchError.prototype.name = 'FetchError'; +function getNodeRequestOptions(request) { + const parsedURL = request[INTERNALS$2].parsedURL; + const headers = new Headers(request[INTERNALS$2].headers); -let convert; -try { - convert = __webpack_require__(2877).convert; -} catch (e) {} + // fetch step 1.3 + if (!headers.has('Accept')) { + headers.set('Accept', '*/*'); + } -const INTERNALS = Symbol('Body internals'); + // Basic fetch + if (!parsedURL.protocol || !parsedURL.hostname) { + throw new TypeError('Only absolute URLs are supported'); + } -// fix an issue where "PassThrough" isn't a named export for node <10 -const PassThrough = Stream.PassThrough; + 
if (!/^https?:$/.test(parsedURL.protocol)) { + throw new TypeError('Only HTTP(S) protocols are supported'); + } -/** - * Body mixin - * - * Ref: https://fetch.spec.whatwg.org/#body - * - * @param Stream body Readable stream - * @param Object opts Response options - * @return Void - */ -function Body(body) { - var _this = this; + if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { + throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); + } - var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, - _ref$size = _ref.size; + // HTTP-network-or-cache fetch steps 2.4-2.7 + let contentLengthValue = null; + if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { + contentLengthValue = '0'; + } + if (request.body != null) { + const totalBytes = getTotalBytes(request); + if (typeof totalBytes === 'number') { + contentLengthValue = String(totalBytes); + } + } + if (contentLengthValue) { + headers.set('Content-Length', contentLengthValue); + } - let size = _ref$size === undefined ? 0 : _ref$size; - var _ref$timeout = _ref.timeout; - let timeout = _ref$timeout === undefined ? 
0 : _ref$timeout; + // HTTP-network-or-cache fetch step 2.11 + if (!headers.has('User-Agent')) { + headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); + } - if (body == null) { - // body is undefined or null - body = null; - } else if (isURLSearchParams(body)) { - // body is a URLSearchParams - body = Buffer.from(body.toString()); - } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { - // body is ArrayBuffer - body = Buffer.from(body); - } else if (ArrayBuffer.isView(body)) { - // body is ArrayBufferView - body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); - } else if (body instanceof Stream) ; else { - // none of the above - // coerce to string then buffer - body = Buffer.from(String(body)); + // HTTP-network-or-cache fetch step 2.15 + if (request.compress && !headers.has('Accept-Encoding')) { + headers.set('Accept-Encoding', 'gzip,deflate'); } - this[INTERNALS] = { - body, - disturbed: false, - error: null - }; - this.size = size; - this.timeout = timeout; - if (body instanceof Stream) { - body.on('error', function (err) { - const error = err.name === 'AbortError' ? 
err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); - _this[INTERNALS].error = error; - }); + let agent = request.agent; + if (typeof agent === 'function') { + agent = agent(parsedURL); } -} -Body.prototype = { - get body() { - return this[INTERNALS].body; - }, + if (!headers.has('Connection') && !agent) { + headers.set('Connection', 'close'); + } - get bodyUsed() { - return this[INTERNALS].disturbed; - }, + // HTTP-network fetch step 4.2 + // chunked encoding is handled by Node.js - /** - * Decode response as ArrayBuffer - * - * @return Promise - */ - arrayBuffer() { - return consumeBody.call(this).then(function (buf) { - return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); - }); - }, + return Object.assign({}, parsedURL, { + method: request.method, + headers: exportNodeCompatibleHeaders(headers), + agent + }); +} - /** - * Return raw response as Blob - * - * @return Promise - */ - blob() { - let ct = this.headers && this.headers.get('content-type') || ''; - return consumeBody.call(this).then(function (buf) { - return Object.assign( - // Prevent copying - new Blob([], { - type: ct.toLowerCase() - }), { - [BUFFER]: buf - }); - }); - }, +/** + * abort-error.js + * + * AbortError interface for cancelled requests + */ - /** - * Decode response as json - * - * @return Promise - */ - json() { - var _this2 = this; +/** + * Create AbortError instance + * + * @param String message Error message for human + * @return AbortError + */ +function AbortError(message) { + Error.call(this, message); - return consumeBody.call(this).then(function (buffer) { - try { - return JSON.parse(buffer.toString()); - } catch (err) { - return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); - } - }); - }, + this.type = 'aborted'; + this.message = message; - /** - * Decode response as text - * - * @return Promise - */ - text() { - return 
consumeBody.call(this).then(function (buffer) { - return buffer.toString(); - }); - }, + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} - /** - * Decode response as buffer (non-spec api) - * - * @return Promise - */ - buffer() { - return consumeBody.call(this); - }, +AbortError.prototype = Object.create(Error.prototype); +AbortError.prototype.constructor = AbortError; +AbortError.prototype.name = 'AbortError'; - /** - * Decode response as text, while automatically detecting the encoding and - * trying to decode to UTF-8 (non-spec api) - * - * @return Promise - */ - textConverted() { - var _this3 = this; +const URL$1 = Url.URL || whatwgUrl.URL; - return consumeBody.call(this).then(function (buffer) { - return convertBody(buffer, _this3.headers); - }); - } -}; +// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 +const PassThrough$1 = Stream.PassThrough; -// In browsers, all properties are enumerable. -Object.defineProperties(Body.prototype, { - body: { enumerable: true }, - bodyUsed: { enumerable: true }, - arrayBuffer: { enumerable: true }, - blob: { enumerable: true }, - json: { enumerable: true }, - text: { enumerable: true } -}); +const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) { + const orig = new URL$1(original).hostname; + const dest = new URL$1(destination).hostname; -Body.mixIn = function (proto) { - for (const name of Object.getOwnPropertyNames(Body.prototype)) { - // istanbul ignore else: future proof - if (!(name in proto)) { - const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); - Object.defineProperty(proto, name, desc); - } - } + return orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest); }; /** - * Consume and convert an entire Body to a Buffer. 
- * - * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body + * Fetch function * + * @param Mixed url Absolute url or Request instance + * @param Object opts Fetch options * @return Promise */ -function consumeBody() { - var _this4 = this; +function fetch(url, opts) { - if (this[INTERNALS].disturbed) { - return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); + // allow custom promise + if (!fetch.Promise) { + throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); } - this[INTERNALS].disturbed = true; + Body.Promise = fetch.Promise; - if (this[INTERNALS].error) { - return Body.Promise.reject(this[INTERNALS].error); - } + // wrap http.request into fetch + return new fetch.Promise(function (resolve, reject) { + // build request object + const request = new Request(url, opts); + const options = getNodeRequestOptions(request); - let body = this.body; + const send = (options.protocol === 'https:' ? https : http).request; + const signal = request.signal; - // body is null - if (body === null) { - return Body.Promise.resolve(Buffer.alloc(0)); - } + let response = null; - // body is blob - if (isBlob(body)) { - body = body.stream(); - } + const abort = function abort() { + let error = new AbortError('The user aborted a request.'); + reject(error); + if (request.body && request.body instanceof Stream.Readable) { + request.body.destroy(error); + } + if (!response || !response.body) return; + response.body.emit('error', error); + }; - // body is buffer - if (Buffer.isBuffer(body)) { - return Body.Promise.resolve(body); - } + if (signal && signal.aborted) { + abort(); + return; + } - // istanbul ignore if: should never happen - if (!(body instanceof Stream)) { - return Body.Promise.resolve(Buffer.alloc(0)); - } + const abortAndFinalize = function abortAndFinalize() { + abort(); + finalize(); + }; - // body is stream - // get ready to actually consume the body - let accum = []; - let accumBytes = 0; - let 
abort = false; + // send request + const req = send(options); + let reqTimeout; - return new Body.Promise(function (resolve, reject) { - let resTimeout; + if (signal) { + signal.addEventListener('abort', abortAndFinalize); + } - // allow timeout on slow response body - if (_this4.timeout) { - resTimeout = setTimeout(function () { - abort = true; - reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); - }, _this4.timeout); + function finalize() { + req.abort(); + if (signal) signal.removeEventListener('abort', abortAndFinalize); + clearTimeout(reqTimeout); + } + + if (request.timeout) { + req.once('socket', function (socket) { + reqTimeout = setTimeout(function () { + reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); + finalize(); + }, request.timeout); + }); } - // handle stream errors - body.on('error', function (err) { - if (err.name === 'AbortError') { - // if the request was aborted, reject with this Error - abort = true; - reject(err); - } else { - // other errors, such as incorrect content-encoding - reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); - } + req.on('error', function (err) { + reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); + finalize(); }); - body.on('data', function (chunk) { - if (abort || chunk === null) { - return; - } - - if (_this4.size && accumBytes + chunk.length > _this4.size) { - abort = true; - reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); - return; - } + req.on('response', function (res) { + clearTimeout(reqTimeout); - accumBytes += chunk.length; - accum.push(chunk); - }); + const headers = createHeadersLenient(res.headers); - body.on('end', function () { - if (abort) { - return; - } + // HTTP fetch step 5 + if (fetch.isRedirect(res.statusCode)) { + // HTTP fetch step 5.2 
+ const location = headers.get('Location'); - clearTimeout(resTimeout); + // HTTP fetch step 5.3 + let locationURL = null; + try { + locationURL = location === null ? null : new URL$1(location, request.url).toString(); + } catch (err) { + // error here can only be invalid URL in Location: header + // do not throw when options.redirect == manual + // let the user extract the errorneous redirect URL + if (request.redirect !== 'manual') { + reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect')); + finalize(); + return; + } + } - try { - resolve(Buffer.concat(accum, accumBytes)); - } catch (err) { - // handle streams that have accumulated too much data (issue #414) - reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); - } - }); - }); -} + // HTTP fetch step 5.5 + switch (request.redirect) { + case 'error': + reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect')); + finalize(); + return; + case 'manual': + // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. 
+ if (locationURL !== null) { + // handle corrupted header + try { + headers.set('Location', locationURL); + } catch (err) { + // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request + reject(err); + } + } + break; + case 'follow': + // HTTP-redirect fetch step 2 + if (locationURL === null) { + break; + } -/** - * Detect buffer encoding and convert to target encoding - * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding - * - * @param Buffer buffer Incoming buffer - * @param String encoding Target encoding - * @return String - */ -function convertBody(buffer, headers) { - if (typeof convert !== 'function') { - throw new Error('The package `encoding` must be installed to use the textConverted() function'); - } + // HTTP-redirect fetch step 5 + if (request.counter >= request.follow) { + reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); + finalize(); + return; + } - const ct = headers.get('content-type'); - let charset = 'utf-8'; - let res, str; + // HTTP-redirect fetch step 6 (counter increment) + // Create a new Request object. 
+ const requestOpts = { + headers: new Headers(request.headers), + follow: request.follow, + counter: request.counter + 1, + agent: request.agent, + compress: request.compress, + method: request.method, + body: request.body, + signal: request.signal, + timeout: request.timeout, + size: request.size + }; - // header - if (ct) { - res = /charset=([^;]*)/i.exec(ct); - } + if (!isDomainOrSubdomain(request.url, locationURL)) { + for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) { + requestOpts.headers.delete(name); + } + } - // no charset in content type, peek at response body for at most 1024 bytes - str = buffer.slice(0, 1024).toString(); + // HTTP-redirect fetch step 9 + if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { + reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); + finalize(); + return; + } - // html5 - if (!res && str) { - res = / { +"use strict"; - if (body === null) { - // body is null - return 0; - } else if (isBlob(body)) { - return body.size; - } else if (Buffer.isBuffer(body)) { - // body is buffer - return body.length; - } else if (body && typeof body.getLengthSync === 'function') { - // detect form data input from form-data module - if (body._lengthRetrievers && body._lengthRetrievers.length == 0 || // 1.x - body.hasKnownLength && body.hasKnownLength()) { - // 2.x - return body.getLengthSync(); - } - return null; - } else { - // body is stream - return null; - } -} +const path = __webpack_require__(5622); +const pathKey = __webpack_require__(539); -/** - * Write a Body to a Node.js WritableStream (e.g. http.Request) object. 
- * - * @param Body instance Instance of Body - * @return Void - */ -function writeToStream(dest, instance) { - const body = instance.body; +module.exports = opts => { + opts = Object.assign({ + cwd: process.cwd(), + path: process.env[pathKey()] + }, opts); + let prev; + let pth = path.resolve(opts.cwd); + const ret = []; - if (body === null) { - // body is null - dest.end(); - } else if (isBlob(body)) { - body.stream().pipe(dest); - } else if (Buffer.isBuffer(body)) { - // body is buffer - dest.write(body); - dest.end(); - } else { - // body is stream - body.pipe(dest); + while (prev !== pth) { + ret.push(path.join(pth, 'node_modules/.bin')); + prev = pth; + pth = path.resolve(pth, '..'); } -} -// expose Promise -Body.Promise = global.Promise; + // ensure the running `node` binary is used + ret.push(path.dirname(process.execPath)); -/** - * headers.js - * - * Headers class offers convenient helpers - */ + return ret.concat(opts.path).join(path.delimiter); +}; -const invalidTokenRegex = /[^\^_`a-zA-Z\-0-9!#$%&'*+.|~]/; -const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/; +module.exports.env = opts => { + opts = Object.assign({ + env: process.env + }, opts); -function validateName(name) { - name = `${name}`; - if (invalidTokenRegex.test(name) || name === '') { - throw new TypeError(`${name} is not a legal HTTP header name`); - } -} + const env = Object.assign({}, opts.env); + const path = pathKey({env}); -function validateValue(value) { - value = `${value}`; - if (invalidHeaderCharRegex.test(value)) { - throw new TypeError(`${value} is not a legal HTTP header value`); - } -} + opts.path = env[path]; + env[path] = module.exports(opts); -/** - * Find the key in the map object given a header name. - * - * Returns undefined if not found. 
- * - * @param String name Header name - * @return String|Undefined - */ -function find(map, name) { - name = name.toLowerCase(); - for (const key in map) { - if (key.toLowerCase() === name) { - return key; - } - } - return undefined; -} + return env; +}; -const MAP = Symbol('map'); -class Headers { - /** - * Headers class - * - * @param Object headers Response headers - * @return Void - */ - constructor() { - let init = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : undefined; - this[MAP] = Object.create(null); +/***/ }), + +/***/ 1223: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +var wrappy = __webpack_require__(2940) +module.exports = wrappy(once) +module.exports.strict = wrappy(onceStrict) + +once.proto = once(function () { + Object.defineProperty(Function.prototype, 'once', { + value: function () { + return once(this) + }, + configurable: true + }) + + Object.defineProperty(Function.prototype, 'onceStrict', { + value: function () { + return onceStrict(this) + }, + configurable: true + }) +}) - if (init instanceof Headers) { - const rawHeaders = init.raw(); - const headerNames = Object.keys(rawHeaders); +function once (fn) { + var f = function () { + if (f.called) return f.value + f.called = true + return f.value = fn.apply(this, arguments) + } + f.called = false + return f +} - for (const headerName of headerNames) { - for (const value of rawHeaders[headerName]) { - this.append(headerName, value); - } - } +function onceStrict (fn) { + var f = function () { + if (f.called) + throw new Error(f.onceError) + f.called = true + return f.value = fn.apply(this, arguments) + } + var name = fn.name || 'Function wrapped with `once`' + f.onceError = name + " shouldn't be called more than once" + f.called = false + return f +} - return; - } - // We don't worry about converting prop to ByteString here as append() - // will handle it. 
- if (init == null) ; else if (typeof init === 'object') { - const method = init[Symbol.iterator]; - if (method != null) { - if (typeof method !== 'function') { - throw new TypeError('Header pairs must be iterable'); - } +/***/ }), - // sequence> - // Note: per spec we have to first exhaust the lists then process them - const pairs = []; - for (const pair of init) { - if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { - throw new TypeError('Each header pair must be iterable'); - } - pairs.push(Array.from(pair)); - } +/***/ 4824: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { - for (const pair of pairs) { - if (pair.length !== 2) { - throw new TypeError('Each header pair must be a name/value tuple'); - } - this.append(pair[0], pair[1]); - } - } else { - // record - for (const key of Object.keys(init)) { - const value = init[key]; - this.append(key, value); - } - } - } else { - throw new TypeError('Provided initializer must be an object'); - } - } +"use strict"; - /** - * Return combined header value given name - * - * @param String name Header name - * @return Mixed - */ - get(name) { - name = `${name}`; - validateName(name); - const key = find(this[MAP], name); - if (key === undefined) { - return null; - } +const os = __webpack_require__(2087); +const macosRelease = __webpack_require__(7493); +const winRelease = __webpack_require__(3515); - return this[MAP][key].join(', '); +const osName = (platform, release) => { + if (!platform && release) { + throw new Error('You can\'t specify a `release` without specifying `platform`'); } - /** - * Iterate over all headers - * - * @param Function callback Executed for each item with parameters (value, name, thisArg) - * @param Boolean thisArg `this` context for callback function - * @return Void - */ - forEach(callback) { - let thisArg = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : undefined; + platform = platform || os.platform(); - let pairs = getHeaders(this); - let i = 0; - while (i < pairs.length) { - var _pairs$i = pairs[i]; - const name = _pairs$i[0], - value = _pairs$i[1]; + let id; - callback.call(thisArg, value, name, this); - pairs = getHeaders(this); - i++; + if (platform === 'darwin') { + if (!release && os.platform() === 'darwin') { + release = os.release(); } - } - /** - * Overwrite header values given name - * - * @param String name Header name - * @param String value Header value - * @return Void - */ - set(name, value) { - name = `${name}`; - value = `${value}`; - validateName(name); - validateValue(value); - const key = find(this[MAP], name); - this[MAP][key !== undefined ? key : name] = [value]; + const prefix = release ? (Number(release.split('.')[0]) > 15 ? 'macOS' : 'OS X') : 'macOS'; + id = release ? macosRelease(release).name : ''; + return prefix + (id ? ' ' + id : ''); } - /** - * Append a value onto existing header - * - * @param String name Header name - * @param String value Header value - * @return Void - */ - append(name, value) { - name = `${name}`; - value = `${value}`; - validateName(name); - validateValue(value); - const key = find(this[MAP], name); - if (key !== undefined) { - this[MAP][key].push(value); - } else { - this[MAP][name] = [value]; + if (platform === 'linux') { + if (!release && os.platform() === 'linux') { + release = os.release(); } - } - /** - * Check for header name existence - * - * @param String name Header name - * @return Boolean - */ - has(name) { - name = `${name}`; - validateName(name); - return find(this[MAP], name) !== undefined; + id = release ? release.replace(/^(\d+\.\d+).*/, '$1') : ''; + return 'Linux' + (id ? 
' ' + id : ''); } - /** - * Delete all header values given name - * - * @param String name Header name - * @return Void - */ - delete(name) { - name = `${name}`; - validateName(name); - const key = find(this[MAP], name); - if (key !== undefined) { - delete this[MAP][key]; + if (platform === 'win32') { + if (!release && os.platform() === 'win32') { + release = os.release(); } - } - /** - * Return raw headers (non-spec api) - * - * @return Object - */ - raw() { - return this[MAP]; + id = release ? winRelease(release) : ''; + return 'Windows' + (id ? ' ' + id : ''); } - /** - * Get an iterator on keys. - * - * @return Iterator - */ - keys() { - return createHeadersIterator(this, 'key'); - } + return platform; +}; - /** - * Get an iterator on values. - * - * @return Iterator - */ - values() { - return createHeadersIterator(this, 'value'); - } +module.exports = osName; - /** - * Get an iterator on entries. - * - * This is the default iterator of the Headers object. - * - * @return Iterator - */ - [Symbol.iterator]() { - return createHeadersIterator(this, 'key+value'); - } -} -Headers.prototype.entries = Headers.prototype[Symbol.iterator]; -Object.defineProperty(Headers.prototype, Symbol.toStringTag, { - value: 'Headers', - writable: false, - enumerable: false, - configurable: true -}); +/***/ }), -Object.defineProperties(Headers.prototype, { - get: { enumerable: true }, - forEach: { enumerable: true }, - set: { enumerable: true }, - append: { enumerable: true }, - has: { enumerable: true }, - delete: { enumerable: true }, - keys: { enumerable: true }, - values: { enumerable: true }, - entries: { enumerable: true } -}); +/***/ 1330: +/***/ ((module) => { -function getHeaders(headers) { - let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; +"use strict"; - const keys = Object.keys(headers[MAP]).sort(); - return keys.map(kind === 'key' ? function (k) { - return k.toLowerCase(); - } : kind === 'value' ? 
function (k) { - return headers[MAP][k].join(', '); - } : function (k) { - return [k.toLowerCase(), headers[MAP][k].join(', ')]; - }); -} +module.exports = (promise, onFinally) => { + onFinally = onFinally || (() => {}); + + return promise.then( + val => new Promise(resolve => { + resolve(onFinally()); + }).then(() => val), + err => new Promise(resolve => { + resolve(onFinally()); + }).then(() => { + throw err; + }) + ); +}; -const INTERNAL = Symbol('internal'); -function createHeadersIterator(target, kind) { - const iterator = Object.create(HeadersIteratorPrototype); - iterator[INTERNAL] = { - target, - kind, - index: 0 - }; - return iterator; -} +/***/ }), -const HeadersIteratorPrototype = Object.setPrototypeOf({ - next() { - // istanbul ignore if - if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { - throw new TypeError('Value of `this` is not a HeadersIterator'); - } +/***/ 539: +/***/ ((module) => { - var _INTERNAL = this[INTERNAL]; - const target = _INTERNAL.target, - kind = _INTERNAL.kind, - index = _INTERNAL.index; +"use strict"; - const values = getHeaders(target, kind); - const len = values.length; - if (index >= len) { - return { - value: undefined, - done: true - }; - } +module.exports = opts => { + opts = opts || {}; - this[INTERNAL].index = index + 1; + const env = opts.env || process.env; + const platform = opts.platform || process.platform; - return { - value: values[index], - done: false - }; + if (platform !== 'win32') { + return 'PATH'; } -}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); -Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { - value: 'HeadersIterator', - writable: false, - enumerable: false, - configurable: true -}); + return Object.keys(env).find(x => x.toUpperCase() === 'PATH') || 'Path'; +}; -/** - * Export the Headers object in a form that Node.js can consume. 
- * - * @param Headers headers - * @return Object - */ -function exportNodeCompatibleHeaders(headers) { - const obj = Object.assign({ __proto__: null }, headers[MAP]); - // http.request() only supports string as Host header. This hack makes - // specifying custom Host header possible. - const hostHeaderKey = find(headers[MAP], 'Host'); - if (hostHeaderKey !== undefined) { - obj[hostHeaderKey] = obj[hostHeaderKey][0]; - } +/***/ }), - return obj; +/***/ 8341: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +var once = __webpack_require__(1223) +var eos = __webpack_require__(1205) +var fs = __webpack_require__(5747) // we only need fs to get the ReadStream and WriteStream prototypes + +var noop = function () {} +var ancient = /^v?\.0/.test(process.version) + +var isFn = function (fn) { + return typeof fn === 'function' } -/** - * Create a Headers object from an object of headers, ignoring those that do - * not conform to HTTP grammar productions. - * - * @param Object obj Object of headers - * @return Headers - */ -function createHeadersLenient(obj) { - const headers = new Headers(); - for (const name of Object.keys(obj)) { - if (invalidTokenRegex.test(name)) { - continue; - } - if (Array.isArray(obj[name])) { - for (const val of obj[name]) { - if (invalidHeaderCharRegex.test(val)) { - continue; - } - if (headers[MAP][name] === undefined) { - headers[MAP][name] = [val]; - } else { - headers[MAP][name].push(val); - } - } - } else if (!invalidHeaderCharRegex.test(obj[name])) { - headers[MAP][name] = [obj[name]]; - } - } - return headers; +var isFS = function (stream) { + if (!ancient) return false // newer node version do not need to care about fs is a special way + if (!fs) return false // browser + return (stream instanceof (fs.ReadStream || noop) || stream instanceof (fs.WriteStream || noop)) && isFn(stream.close) } -const INTERNALS$1 = Symbol('Response internals'); +var isRequest = function (stream) { + return stream.setHeader && 
isFn(stream.abort) +} -// fix an issue where "STATUS_CODES" aren't a named export for node <10 -const STATUS_CODES = http.STATUS_CODES; +var destroyer = function (stream, reading, writing, callback) { + callback = once(callback) -/** - * Response class - * - * @param Stream body Readable stream - * @param Object opts Response options - * @return Void - */ -class Response { - constructor() { - let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; - let opts = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; + var closed = false + stream.on('close', function () { + closed = true + }) - Body.call(this, body, opts); + eos(stream, {readable: reading, writable: writing}, function (err) { + if (err) return callback(err) + closed = true + callback() + }) - const status = opts.status || 200; - const headers = new Headers(opts.headers); + var destroyed = false + return function (err) { + if (closed) return + if (destroyed) return + destroyed = true - if (body != null && !headers.has('Content-Type')) { - const contentType = extractContentType(body); - if (contentType) { - headers.append('Content-Type', contentType); - } - } + if (isFS(stream)) return stream.close(noop) // use close for fs streams to avoid fd leaks + if (isRequest(stream)) return stream.abort() // request.destroy just do .end - .abort is what we want - this[INTERNALS$1] = { - url: opts.url, - status, - statusText: opts.statusText || STATUS_CODES[status], - headers, - counter: opts.counter - }; - } + if (isFn(stream.destroy)) return stream.destroy() - get url() { - return this[INTERNALS$1].url || ''; - } + callback(err || new Error('stream was destroyed')) + } +} - get status() { - return this[INTERNALS$1].status; - } +var call = function (fn) { + fn() +} - /** - * Convenience property representing if the request ended normally - */ - get ok() { - return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; - } +var pipe = function (from, to) { 
+ return from.pipe(to) +} - get redirected() { - return this[INTERNALS$1].counter > 0; - } +var pump = function () { + var streams = Array.prototype.slice.call(arguments) + var callback = isFn(streams[streams.length - 1] || noop) && streams.pop() || noop - get statusText() { - return this[INTERNALS$1].statusText; - } + if (Array.isArray(streams[0])) streams = streams[0] + if (streams.length < 2) throw new Error('pump requires two streams per minimum') - get headers() { - return this[INTERNALS$1].headers; - } + var error + var destroys = streams.map(function (stream, i) { + var reading = i < streams.length - 1 + var writing = i > 0 + return destroyer(stream, reading, writing, function (err) { + if (!error) error = err + if (err) destroys.forEach(call) + if (reading) return + destroys.forEach(call) + callback(error) + }) + }) - /** - * Clone this response - * - * @return Response - */ - clone() { - return new Response(clone(this), { - url: this.url, - status: this.status, - statusText: this.statusText, - headers: this.headers, - ok: this.ok, - redirected: this.redirected - }); - } + return streams.reduce(pipe) } -Body.mixIn(Response.prototype); - -Object.defineProperties(Response.prototype, { - url: { enumerable: true }, - status: { enumerable: true }, - ok: { enumerable: true }, - redirected: { enumerable: true }, - statusText: { enumerable: true }, - headers: { enumerable: true }, - clone: { enumerable: true } -}); - -Object.defineProperty(Response.prototype, Symbol.toStringTag, { - value: 'Response', - writable: false, - enumerable: false, - configurable: true -}); +module.exports = pump -const INTERNALS$2 = Symbol('Request internals'); -// fix an issue where "format", "parse" aren't a named export for node <10 -const parse_url = Url.parse; -const format_url = Url.format; +/***/ }), -const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; +/***/ 5911: +/***/ ((module, exports) => { -/** - * Check if a value is an instance of Request. 
- * - * @param Mixed input - * @return Boolean - */ -function isRequest(input) { - return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; -} +exports = module.exports = SemVer -function isAbortSignal(signal) { - const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); - return !!(proto && proto.constructor.name === 'AbortSignal'); +var debug +/* istanbul ignore next */ +if (typeof process === 'object' && + process.env && + process.env.NODE_DEBUG && + /\bsemver\b/i.test(process.env.NODE_DEBUG)) { + debug = function () { + var args = Array.prototype.slice.call(arguments, 0) + args.unshift('SEMVER') + console.log.apply(console, args) + } +} else { + debug = function () {} } -/** - * Request class - * - * @param Mixed input Url or Request instance - * @param Object init Custom options - * @return Void - */ -class Request { - constructor(input) { - let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; +// Note: this is the semver.org version of the spec that it implements +// Not necessarily the package version of this code. +exports.SEMVER_SPEC_VERSION = '2.0.0' - let parsedURL; +var MAX_LENGTH = 256 +var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || + /* istanbul ignore next */ 9007199254740991 - // normalize input - if (!isRequest(input)) { - if (input && input.href) { - // in order to support Node.js' Url objects; though WHATWG's URL objects - // will fall into this branch also (since their `toString()` will return - // `href` property anyway) - parsedURL = parse_url(input.href); - } else { - // coerce input to a string before attempting to parse - parsedURL = parse_url(`${input}`); - } - input = {}; - } else { - parsedURL = parse_url(input.url); - } +// Max safe segment length for coercion. 
+var MAX_SAFE_COMPONENT_LENGTH = 16 - let method = init.method || input.method || 'GET'; - method = method.toUpperCase(); +// The actual regexps go on exports.re +var re = exports.re = [] +var src = exports.src = [] +var R = 0 - if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { - throw new TypeError('Request with GET/HEAD method cannot have body'); - } +// The following Regular Expressions can be used for tokenizing, +// validating, and parsing SemVer version strings. - let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null; +// ## Numeric Identifier +// A single `0`, or a non-zero digit followed by zero or more digits. - Body.call(this, inputBody, { - timeout: init.timeout || input.timeout || 0, - size: init.size || input.size || 0 - }); +var NUMERICIDENTIFIER = R++ +src[NUMERICIDENTIFIER] = '0|[1-9]\\d*' +var NUMERICIDENTIFIERLOOSE = R++ +src[NUMERICIDENTIFIERLOOSE] = '[0-9]+' - const headers = new Headers(init.headers || input.headers || {}); +// ## Non-numeric Identifier +// Zero or more digits, followed by a letter or hyphen, and then zero or +// more letters, digits, or hyphens. - if (inputBody != null && !headers.has('Content-Type')) { - const contentType = extractContentType(inputBody); - if (contentType) { - headers.append('Content-Type', contentType); - } - } +var NONNUMERICIDENTIFIER = R++ +src[NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*' - let signal = isRequest(input) ? input.signal : null; - if ('signal' in init) signal = init.signal; +// ## Main Version +// Three dot-separated numeric identifiers. - if (signal != null && !isAbortSignal(signal)) { - throw new TypeError('Expected signal to be an instanceof AbortSignal'); - } +var MAINVERSION = R++ +src[MAINVERSION] = '(' + src[NUMERICIDENTIFIER] + ')\\.' + + '(' + src[NUMERICIDENTIFIER] + ')\\.' 
+ + '(' + src[NUMERICIDENTIFIER] + ')' - this[INTERNALS$2] = { - method, - redirect: init.redirect || input.redirect || 'follow', - headers, - parsedURL, - signal - }; +var MAINVERSIONLOOSE = R++ +src[MAINVERSIONLOOSE] = '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + + '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + + '(' + src[NUMERICIDENTIFIERLOOSE] + ')' - // node-fetch-only options - this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; - this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true; - this.counter = init.counter || input.counter || 0; - this.agent = init.agent || input.agent; - } +// ## Pre-release Version Identifier +// A numeric identifier, or a non-numeric identifier. - get method() { - return this[INTERNALS$2].method; - } +var PRERELEASEIDENTIFIER = R++ +src[PRERELEASEIDENTIFIER] = '(?:' + src[NUMERICIDENTIFIER] + + '|' + src[NONNUMERICIDENTIFIER] + ')' - get url() { - return format_url(this[INTERNALS$2].parsedURL); - } +var PRERELEASEIDENTIFIERLOOSE = R++ +src[PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[NUMERICIDENTIFIERLOOSE] + + '|' + src[NONNUMERICIDENTIFIER] + ')' - get headers() { - return this[INTERNALS$2].headers; - } +// ## Pre-release Version +// Hyphen, followed by one or more dot-separated pre-release version +// identifiers. - get redirect() { - return this[INTERNALS$2].redirect; - } +var PRERELEASE = R++ +src[PRERELEASE] = '(?:-(' + src[PRERELEASEIDENTIFIER] + + '(?:\\.' + src[PRERELEASEIDENTIFIER] + ')*))' - get signal() { - return this[INTERNALS$2].signal; - } +var PRERELEASELOOSE = R++ +src[PRERELEASELOOSE] = '(?:-?(' + src[PRERELEASEIDENTIFIERLOOSE] + + '(?:\\.' + src[PRERELEASEIDENTIFIERLOOSE] + ')*))' - /** - * Clone this request - * - * @return Request - */ - clone() { - return new Request(this); - } -} +// ## Build Metadata Identifier +// Any combination of digits, letters, or hyphens. 
-Body.mixIn(Request.prototype); +var BUILDIDENTIFIER = R++ +src[BUILDIDENTIFIER] = '[0-9A-Za-z-]+' -Object.defineProperty(Request.prototype, Symbol.toStringTag, { - value: 'Request', - writable: false, - enumerable: false, - configurable: true -}); +// ## Build Metadata +// Plus sign, followed by one or more period-separated build metadata +// identifiers. -Object.defineProperties(Request.prototype, { - method: { enumerable: true }, - url: { enumerable: true }, - headers: { enumerable: true }, - redirect: { enumerable: true }, - clone: { enumerable: true }, - signal: { enumerable: true } -}); +var BUILD = R++ +src[BUILD] = '(?:\\+(' + src[BUILDIDENTIFIER] + + '(?:\\.' + src[BUILDIDENTIFIER] + ')*))' -/** - * Convert a Request to Node.js http request options. - * - * @param Request A Request instance - * @return Object The options object to be passed to http.request - */ -function getNodeRequestOptions(request) { - const parsedURL = request[INTERNALS$2].parsedURL; - const headers = new Headers(request[INTERNALS$2].headers); +// ## Full Version String +// A main version, followed optionally by a pre-release version and +// build metadata. - // fetch step 1.3 - if (!headers.has('Accept')) { - headers.set('Accept', '*/*'); - } +// Note that the only major, minor, patch, and pre-release sections of +// the version string are capturing groups. The build metadata is not a +// capturing group, because it should not ever be used in version +// comparison. - // Basic fetch - if (!parsedURL.protocol || !parsedURL.hostname) { - throw new TypeError('Only absolute URLs are supported'); - } +var FULL = R++ +var FULLPLAIN = 'v?' + src[MAINVERSION] + + src[PRERELEASE] + '?' + + src[BUILD] + '?' 
- if (!/^https?:$/.test(parsedURL.protocol)) { - throw new TypeError('Only HTTP(S) protocols are supported'); - } +src[FULL] = '^' + FULLPLAIN + '$' - if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { - throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); - } +// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. +// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty +// common in the npm registry. +var LOOSEPLAIN = '[v=\\s]*' + src[MAINVERSIONLOOSE] + + src[PRERELEASELOOSE] + '?' + + src[BUILD] + '?' + +var LOOSE = R++ +src[LOOSE] = '^' + LOOSEPLAIN + '$' + +var GTLT = R++ +src[GTLT] = '((?:<|>)?=?)' + +// Something like "2.*" or "1.2.x". +// Note that "x.x" is a valid xRange identifer, meaning "any version" +// Only the first item is strictly required. +var XRANGEIDENTIFIERLOOSE = R++ +src[XRANGEIDENTIFIERLOOSE] = src[NUMERICIDENTIFIERLOOSE] + '|x|X|\\*' +var XRANGEIDENTIFIER = R++ +src[XRANGEIDENTIFIER] = src[NUMERICIDENTIFIER] + '|x|X|\\*' + +var XRANGEPLAIN = R++ +src[XRANGEPLAIN] = '[v=\\s]*(' + src[XRANGEIDENTIFIER] + ')' + + '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + + '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + + '(?:' + src[PRERELEASE] + ')?' + + src[BUILD] + '?' + + ')?)?' - // HTTP-network-or-cache fetch steps 2.4-2.7 - let contentLengthValue = null; - if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { - contentLengthValue = '0'; - } - if (request.body != null) { - const totalBytes = getTotalBytes(request); - if (typeof totalBytes === 'number') { - contentLengthValue = String(totalBytes); - } - } - if (contentLengthValue) { - headers.set('Content-Length', contentLengthValue); - } +var XRANGEPLAINLOOSE = R++ +src[XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[XRANGEIDENTIFIERLOOSE] + ')' + + '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + + '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + + '(?:' + src[PRERELEASELOOSE] + ')?' 
+ + src[BUILD] + '?' + + ')?)?' - // HTTP-network-or-cache fetch step 2.11 - if (!headers.has('User-Agent')) { - headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); - } +var XRANGE = R++ +src[XRANGE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAIN] + '$' +var XRANGELOOSE = R++ +src[XRANGELOOSE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAINLOOSE] + '$' - // HTTP-network-or-cache fetch step 2.15 - if (request.compress && !headers.has('Accept-Encoding')) { - headers.set('Accept-Encoding', 'gzip,deflate'); - } +// Coercion. +// Extract anything that could conceivably be a part of a valid semver +var COERCE = R++ +src[COERCE] = '(?:^|[^\\d])' + + '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' + + '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + + '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + + '(?:$|[^\\d])' - let agent = request.agent; - if (typeof agent === 'function') { - agent = agent(parsedURL); - } +// Tilde ranges. +// Meaning is "reasonably at or greater than" +var LONETILDE = R++ +src[LONETILDE] = '(?:~>?)' - if (!headers.has('Connection') && !agent) { - headers.set('Connection', 'close'); - } +var TILDETRIM = R++ +src[TILDETRIM] = '(\\s*)' + src[LONETILDE] + '\\s+' +re[TILDETRIM] = new RegExp(src[TILDETRIM], 'g') +var tildeTrimReplace = '$1~' - // HTTP-network fetch step 4.2 - // chunked encoding is handled by Node.js +var TILDE = R++ +src[TILDE] = '^' + src[LONETILDE] + src[XRANGEPLAIN] + '$' +var TILDELOOSE = R++ +src[TILDELOOSE] = '^' + src[LONETILDE] + src[XRANGEPLAINLOOSE] + '$' - return Object.assign({}, parsedURL, { - method: request.method, - headers: exportNodeCompatibleHeaders(headers), - agent - }); -} +// Caret ranges. 
+// Meaning is "at least and backwards compatible with" +var LONECARET = R++ +src[LONECARET] = '(?:\\^)' -/** - * abort-error.js - * - * AbortError interface for cancelled requests - */ +var CARETTRIM = R++ +src[CARETTRIM] = '(\\s*)' + src[LONECARET] + '\\s+' +re[CARETTRIM] = new RegExp(src[CARETTRIM], 'g') +var caretTrimReplace = '$1^' -/** - * Create AbortError instance - * - * @param String message Error message for human - * @return AbortError - */ -function AbortError(message) { - Error.call(this, message); +var CARET = R++ +src[CARET] = '^' + src[LONECARET] + src[XRANGEPLAIN] + '$' +var CARETLOOSE = R++ +src[CARETLOOSE] = '^' + src[LONECARET] + src[XRANGEPLAINLOOSE] + '$' - this.type = 'aborted'; - this.message = message; +// A simple gt/lt/eq thing, or just "" to indicate "any version" +var COMPARATORLOOSE = R++ +src[COMPARATORLOOSE] = '^' + src[GTLT] + '\\s*(' + LOOSEPLAIN + ')$|^$' +var COMPARATOR = R++ +src[COMPARATOR] = '^' + src[GTLT] + '\\s*(' + FULLPLAIN + ')$|^$' - // hide custom error implementation details from end-users - Error.captureStackTrace(this, this.constructor); -} +// An expression to strip any whitespace between the gtlt and the thing +// it modifies, so that `> 1.2.3` ==> `>1.2.3` +var COMPARATORTRIM = R++ +src[COMPARATORTRIM] = '(\\s*)' + src[GTLT] + + '\\s*(' + LOOSEPLAIN + '|' + src[XRANGEPLAIN] + ')' -AbortError.prototype = Object.create(Error.prototype); -AbortError.prototype.constructor = AbortError; -AbortError.prototype.name = 'AbortError'; +// this one has to use the /g flag +re[COMPARATORTRIM] = new RegExp(src[COMPARATORTRIM], 'g') +var comparatorTrimReplace = '$1$2$3' -// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 -const PassThrough$1 = Stream.PassThrough; -const resolve_url = Url.resolve; +// Something like `1.2.3 - 1.2.4` +// Note that these all use the loose form, because they'll be +// checked against either the strict or loose comparator form +// later. 
+var HYPHENRANGE = R++ +src[HYPHENRANGE] = '^\\s*(' + src[XRANGEPLAIN] + ')' + + '\\s+-\\s+' + + '(' + src[XRANGEPLAIN] + ')' + + '\\s*$' -/** - * Fetch function - * - * @param Mixed url Absolute url or Request instance - * @param Object opts Fetch options - * @return Promise - */ -function fetch(url, opts) { +var HYPHENRANGELOOSE = R++ +src[HYPHENRANGELOOSE] = '^\\s*(' + src[XRANGEPLAINLOOSE] + ')' + + '\\s+-\\s+' + + '(' + src[XRANGEPLAINLOOSE] + ')' + + '\\s*$' - // allow custom promise - if (!fetch.Promise) { - throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); - } +// Star ranges basically just allow anything at all. +var STAR = R++ +src[STAR] = '(<|>)?=?\\s*\\*' - Body.Promise = fetch.Promise; +// Compile to actual regexp objects. +// All are flag-free, unless they were created above with a flag. +for (var i = 0; i < R; i++) { + debug(i, src[i]) + if (!re[i]) { + re[i] = new RegExp(src[i]) + } +} - // wrap http.request into fetch - return new fetch.Promise(function (resolve, reject) { - // build request object - const request = new Request(url, opts); - const options = getNodeRequestOptions(request); +exports.parse = parse +function parse (version, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } - const send = (options.protocol === 'https:' ? 
https : http).request; - const signal = request.signal; + if (version instanceof SemVer) { + return version + } - let response = null; + if (typeof version !== 'string') { + return null + } - const abort = function abort() { - let error = new AbortError('The user aborted a request.'); - reject(error); - if (request.body && request.body instanceof Stream.Readable) { - request.body.destroy(error); - } - if (!response || !response.body) return; - response.body.emit('error', error); - }; + if (version.length > MAX_LENGTH) { + return null + } - if (signal && signal.aborted) { - abort(); - return; - } + var r = options.loose ? re[LOOSE] : re[FULL] + if (!r.test(version)) { + return null + } - const abortAndFinalize = function abortAndFinalize() { - abort(); - finalize(); - }; + try { + return new SemVer(version, options) + } catch (er) { + return null + } +} - // send request - const req = send(options); - let reqTimeout; +exports.valid = valid +function valid (version, options) { + var v = parse(version, options) + return v ? v.version : null +} - if (signal) { - signal.addEventListener('abort', abortAndFinalize); - } +exports.clean = clean +function clean (version, options) { + var s = parse(version.trim().replace(/^[=v]+/, ''), options) + return s ? 
s.version : null +} - function finalize() { - req.abort(); - if (signal) signal.removeEventListener('abort', abortAndFinalize); - clearTimeout(reqTimeout); - } +exports.SemVer = SemVer - if (request.timeout) { - req.once('socket', function (socket) { - reqTimeout = setTimeout(function () { - reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); - finalize(); - }, request.timeout); - }); - } +function SemVer (version, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + if (version instanceof SemVer) { + if (version.loose === options.loose) { + return version + } else { + version = version.version + } + } else if (typeof version !== 'string') { + throw new TypeError('Invalid Version: ' + version) + } + + if (version.length > MAX_LENGTH) { + throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters') + } - req.on('error', function (err) { - reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); - finalize(); - }); + if (!(this instanceof SemVer)) { + return new SemVer(version, options) + } - req.on('response', function (res) { - clearTimeout(reqTimeout); + debug('SemVer', version, options) + this.options = options + this.loose = !!options.loose - const headers = createHeadersLenient(res.headers); + var m = version.trim().match(options.loose ? re[LOOSE] : re[FULL]) - // HTTP fetch step 5 - if (fetch.isRedirect(res.statusCode)) { - // HTTP fetch step 5.2 - const location = headers.get('Location'); + if (!m) { + throw new TypeError('Invalid Version: ' + version) + } - // HTTP fetch step 5.3 - const locationURL = location === null ? 
null : resolve_url(request.url, location); + this.raw = version - // HTTP fetch step 5.5 - switch (request.redirect) { - case 'error': - reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect')); - finalize(); - return; - case 'manual': - // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. - if (locationURL !== null) { - // handle corrupted header - try { - headers.set('Location', locationURL); - } catch (err) { - // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request - reject(err); - } - } - break; - case 'follow': - // HTTP-redirect fetch step 2 - if (locationURL === null) { - break; - } + // these are actually numbers + this.major = +m[1] + this.minor = +m[2] + this.patch = +m[3] - // HTTP-redirect fetch step 5 - if (request.counter >= request.follow) { - reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); - finalize(); - return; - } + if (this.major > MAX_SAFE_INTEGER || this.major < 0) { + throw new TypeError('Invalid major version') + } - // HTTP-redirect fetch step 6 (counter increment) - // Create a new Request object. 
- const requestOpts = { - headers: new Headers(request.headers), - follow: request.follow, - counter: request.counter + 1, - agent: request.agent, - compress: request.compress, - method: request.method, - body: request.body, - signal: request.signal, - timeout: request.timeout, - size: request.size - }; + if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { + throw new TypeError('Invalid minor version') + } - // HTTP-redirect fetch step 9 - if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { - reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); - finalize(); - return; - } + if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { + throw new TypeError('Invalid patch version') + } - // HTTP-redirect fetch step 11 - if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { - requestOpts.method = 'GET'; - requestOpts.body = undefined; - requestOpts.headers.delete('content-length'); - } + // numberify any prerelease numeric ids + if (!m[4]) { + this.prerelease = [] + } else { + this.prerelease = m[4].split('.').map(function (id) { + if (/^[0-9]+$/.test(id)) { + var num = +id + if (num >= 0 && num < MAX_SAFE_INTEGER) { + return num + } + } + return id + }) + } - // HTTP-redirect fetch step 15 - resolve(fetch(new Request(locationURL, requestOpts))); - finalize(); - return; - } - } + this.build = m[5] ? m[5].split('.') : [] + this.format() +} - // prepare response - res.once('end', function () { - if (signal) signal.removeEventListener('abort', abortAndFinalize); - }); - let body = res.pipe(new PassThrough$1()); +SemVer.prototype.format = function () { + this.version = this.major + '.' + this.minor + '.' 
+ this.patch + if (this.prerelease.length) { + this.version += '-' + this.prerelease.join('.') + } + return this.version +} - const response_options = { - url: request.url, - status: res.statusCode, - statusText: res.statusMessage, - headers: headers, - size: request.size, - timeout: request.timeout, - counter: request.counter - }; +SemVer.prototype.toString = function () { + return this.version +} - // HTTP-network fetch step 12.1.1.3 - const codings = headers.get('Content-Encoding'); +SemVer.prototype.compare = function (other) { + debug('SemVer.compare', this.version, this.options, other) + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } - // HTTP-network fetch step 12.1.1.4: handle content codings + return this.compareMain(other) || this.comparePre(other) +} - // in following scenarios we ignore compression support - // 1. compression support is disabled - // 2. HEAD request - // 3. no Content-Encoding header - // 4. no content response (204) - // 5. content not modified response (304) - if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { - response = new Response(body, response_options); - resolve(response); - return; - } +SemVer.prototype.compareMain = function (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } - // For Node v6+ - // Be less strict when decoding compressed responses, since sometimes - // servers send slightly invalid responses that are still accepted - // by common browsers. - // Always using Z_SYNC_FLUSH is what cURL does. 
- const zlibOptions = { - flush: zlib.Z_SYNC_FLUSH, - finishFlush: zlib.Z_SYNC_FLUSH - }; + return compareIdentifiers(this.major, other.major) || + compareIdentifiers(this.minor, other.minor) || + compareIdentifiers(this.patch, other.patch) +} - // for gzip - if (codings == 'gzip' || codings == 'x-gzip') { - body = body.pipe(zlib.createGunzip(zlibOptions)); - response = new Response(body, response_options); - resolve(response); - return; - } +SemVer.prototype.comparePre = function (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } - // for deflate - if (codings == 'deflate' || codings == 'x-deflate') { - // handle the infamous raw deflate response from old servers - // a hack for old IIS and Apache servers - const raw = res.pipe(new PassThrough$1()); - raw.once('data', function (chunk) { - // see http://stackoverflow.com/questions/37519828 - if ((chunk[0] & 0x0F) === 0x08) { - body = body.pipe(zlib.createInflate()); - } else { - body = body.pipe(zlib.createInflateRaw()); - } - response = new Response(body, response_options); - resolve(response); - }); - return; - } + // NOT having a prerelease is > having one + if (this.prerelease.length && !other.prerelease.length) { + return -1 + } else if (!this.prerelease.length && other.prerelease.length) { + return 1 + } else if (!this.prerelease.length && !other.prerelease.length) { + return 0 + } - // for br - if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { - body = body.pipe(zlib.createBrotliDecompress()); - response = new Response(body, response_options); - resolve(response); - return; - } + var i = 0 + do { + var a = this.prerelease[i] + var b = other.prerelease[i] + debug('prerelease compare', i, a, b) + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) + } + } while (++i) +} + +// 
preminor will bump the version up to the next minor release, and immediately +// down to pre-release. premajor and prepatch work the same way. +SemVer.prototype.inc = function (release, identifier) { + switch (release) { + case 'premajor': + this.prerelease.length = 0 + this.patch = 0 + this.minor = 0 + this.major++ + this.inc('pre', identifier) + break + case 'preminor': + this.prerelease.length = 0 + this.patch = 0 + this.minor++ + this.inc('pre', identifier) + break + case 'prepatch': + // If this is already a prerelease, it will bump to the next version + // drop any prereleases that might already exist, since they are not + // relevant at this point. + this.prerelease.length = 0 + this.inc('patch', identifier) + this.inc('pre', identifier) + break + // If the input is a non-prerelease version, this acts the same as + // prepatch. + case 'prerelease': + if (this.prerelease.length === 0) { + this.inc('patch', identifier) + } + this.inc('pre', identifier) + break - // otherwise, use response as-is - response = new Response(body, response_options); - resolve(response); - }); + case 'major': + // If this is a pre-major version, bump up to the same major version. + // Otherwise increment major. + // 1.0.0-5 bumps to 1.0.0 + // 1.1.0 bumps to 2.0.0 + if (this.minor !== 0 || + this.patch !== 0 || + this.prerelease.length === 0) { + this.major++ + } + this.minor = 0 + this.patch = 0 + this.prerelease = [] + break + case 'minor': + // If this is a pre-minor version, bump up to the same minor version. + // Otherwise increment minor. + // 1.2.0-5 bumps to 1.2.0 + // 1.2.1 bumps to 1.3.0 + if (this.patch !== 0 || this.prerelease.length === 0) { + this.minor++ + } + this.patch = 0 + this.prerelease = [] + break + case 'patch': + // If this is not a pre-release version, it will increment the patch. + // If it is a pre-release it will bump up to the same patch version. 
+ // 1.2.0-5 patches to 1.2.0 + // 1.2.0 patches to 1.2.1 + if (this.prerelease.length === 0) { + this.patch++ + } + this.prerelease = [] + break + // This probably shouldn't be used publicly. + // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction. + case 'pre': + if (this.prerelease.length === 0) { + this.prerelease = [0] + } else { + var i = this.prerelease.length + while (--i >= 0) { + if (typeof this.prerelease[i] === 'number') { + this.prerelease[i]++ + i = -2 + } + } + if (i === -1) { + // didn't increment anything + this.prerelease.push(0) + } + } + if (identifier) { + // 1.2.0-beta.1 bumps to 1.2.0-beta.2, + // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 + if (this.prerelease[0] === identifier) { + if (isNaN(this.prerelease[1])) { + this.prerelease = [identifier, 0] + } + } else { + this.prerelease = [identifier, 0] + } + } + break - writeToStream(req, request); - }); + default: + throw new Error('invalid increment argument: ' + release) + } + this.format() + this.raw = this.version + return this } -/** - * Redirect code matching - * - * @param Number code Status code - * @return Boolean - */ -fetch.isRedirect = function (code) { - return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; -}; - -// expose Promise -fetch.Promise = global.Promise; - -module.exports = exports = fetch; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.default = exports; -exports.Headers = Headers; -exports.Request = Request; -exports.Response = Response; -exports.FetchError = FetchError; +exports.inc = inc +function inc (version, release, loose, identifier) { + if (typeof (loose) === 'string') { + identifier = loose + loose = undefined + } -/***/ }), + try { + return new SemVer(version, loose).inc(release, identifier).version + } catch (er) { + return null + } +} -/***/ 502: -/***/ ((module, __unused_webpack_exports, __webpack_require__) => { +exports.diff = diff +function diff (version1, version2) { + if 
(eq(version1, version2)) { + return null + } else { + var v1 = parse(version1) + var v2 = parse(version2) + var prefix = '' + if (v1.prerelease.length || v2.prerelease.length) { + prefix = 'pre' + var defaultResult = 'prerelease' + } + for (var key in v1) { + if (key === 'major' || key === 'minor' || key === 'patch') { + if (v1[key] !== v2[key]) { + return prefix + key + } + } + } + return defaultResult // may be undefined + } +} -"use strict"; +exports.compareIdentifiers = compareIdentifiers -const path = __webpack_require__(5622); -const pathKey = __webpack_require__(539); +var numeric = /^[0-9]+$/ +function compareIdentifiers (a, b) { + var anum = numeric.test(a) + var bnum = numeric.test(b) -module.exports = opts => { - opts = Object.assign({ - cwd: process.cwd(), - path: process.env[pathKey()] - }, opts); + if (anum && bnum) { + a = +a + b = +b + } - let prev; - let pth = path.resolve(opts.cwd); - const ret = []; + return a === b ? 0 + : (anum && !bnum) ? -1 + : (bnum && !anum) ? 1 + : a < b ? 
-1 + : 1 +} - while (prev !== pth) { - ret.push(path.join(pth, 'node_modules/.bin')); - prev = pth; - pth = path.resolve(pth, '..'); - } +exports.rcompareIdentifiers = rcompareIdentifiers +function rcompareIdentifiers (a, b) { + return compareIdentifiers(b, a) +} - // ensure the running `node` binary is used - ret.push(path.dirname(process.execPath)); +exports.major = major +function major (a, loose) { + return new SemVer(a, loose).major +} - return ret.concat(opts.path).join(path.delimiter); -}; +exports.minor = minor +function minor (a, loose) { + return new SemVer(a, loose).minor +} -module.exports.env = opts => { - opts = Object.assign({ - env: process.env - }, opts); +exports.patch = patch +function patch (a, loose) { + return new SemVer(a, loose).patch +} - const env = Object.assign({}, opts.env); - const path = pathKey({env}); +exports.compare = compare +function compare (a, b, loose) { + return new SemVer(a, loose).compare(new SemVer(b, loose)) +} - opts.path = env[path]; - env[path] = module.exports(opts); +exports.compareLoose = compareLoose +function compareLoose (a, b) { + return compare(a, b, true) +} - return env; -}; +exports.rcompare = rcompare +function rcompare (a, b, loose) { + return compare(b, a, loose) +} +exports.sort = sort +function sort (list, loose) { + return list.sort(function (a, b) { + return exports.compare(a, b, loose) + }) +} -/***/ }), +exports.rsort = rsort +function rsort (list, loose) { + return list.sort(function (a, b) { + return exports.rcompare(a, b, loose) + }) +} -/***/ 1223: -/***/ ((module, __unused_webpack_exports, __webpack_require__) => { +exports.gt = gt +function gt (a, b, loose) { + return compare(a, b, loose) > 0 +} -var wrappy = __webpack_require__(2940) -module.exports = wrappy(once) -module.exports.strict = wrappy(onceStrict) +exports.lt = lt +function lt (a, b, loose) { + return compare(a, b, loose) < 0 +} -once.proto = once(function () { - Object.defineProperty(Function.prototype, 'once', { - value: function 
() { - return once(this) - }, - configurable: true - }) +exports.eq = eq +function eq (a, b, loose) { + return compare(a, b, loose) === 0 +} - Object.defineProperty(Function.prototype, 'onceStrict', { - value: function () { - return onceStrict(this) - }, - configurable: true - }) -}) +exports.neq = neq +function neq (a, b, loose) { + return compare(a, b, loose) !== 0 +} -function once (fn) { - var f = function () { - if (f.called) return f.value - f.called = true - return f.value = fn.apply(this, arguments) - } - f.called = false - return f +exports.gte = gte +function gte (a, b, loose) { + return compare(a, b, loose) >= 0 } -function onceStrict (fn) { - var f = function () { - if (f.called) - throw new Error(f.onceError) - f.called = true - return f.value = fn.apply(this, arguments) - } - var name = fn.name || 'Function wrapped with `once`' - f.onceError = name + " shouldn't be called more than once" - f.called = false - return f +exports.lte = lte +function lte (a, b, loose) { + return compare(a, b, loose) <= 0 } +exports.cmp = cmp +function cmp (a, op, b, loose) { + switch (op) { + case '===': + if (typeof a === 'object') + a = a.version + if (typeof b === 'object') + b = b.version + return a === b -/***/ }), + case '!==': + if (typeof a === 'object') + a = a.version + if (typeof b === 'object') + b = b.version + return a !== b -/***/ 4824: -/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + case '': + case '=': + case '==': + return eq(a, b, loose) -"use strict"; + case '!=': + return neq(a, b, loose) -const os = __webpack_require__(2087); -const macosRelease = __webpack_require__(7493); -const winRelease = __webpack_require__(3515); + case '>': + return gt(a, b, loose) -const osName = (platform, release) => { - if (!platform && release) { - throw new Error('You can\'t specify a `release` without specifying `platform`'); - } + case '>=': + return gte(a, b, loose) - platform = platform || os.platform(); + case '<': + return lt(a, b, loose) - 
let id; + case '<=': + return lte(a, b, loose) - if (platform === 'darwin') { - if (!release && os.platform() === 'darwin') { - release = os.release(); - } + default: + throw new TypeError('Invalid operator: ' + op) + } +} - const prefix = release ? (Number(release.split('.')[0]) > 15 ? 'macOS' : 'OS X') : 'macOS'; - id = release ? macosRelease(release).name : ''; - return prefix + (id ? ' ' + id : ''); - } +exports.Comparator = Comparator +function Comparator (comp, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } - if (platform === 'linux') { - if (!release && os.platform() === 'linux') { - release = os.release(); - } + if (comp instanceof Comparator) { + if (comp.loose === !!options.loose) { + return comp + } else { + comp = comp.value + } + } - id = release ? release.replace(/^(\d+\.\d+).*/, '$1') : ''; - return 'Linux' + (id ? ' ' + id : ''); - } + if (!(this instanceof Comparator)) { + return new Comparator(comp, options) + } - if (platform === 'win32') { - if (!release && os.platform() === 'win32') { - release = os.release(); - } + debug('comparator', comp, options) + this.options = options + this.loose = !!options.loose + this.parse(comp) - id = release ? winRelease(release) : ''; - return 'Windows' + (id ? ' ' + id : ''); - } + if (this.semver === ANY) { + this.value = '' + } else { + this.value = this.operator + this.semver.version + } - return platform; -}; + debug('comp', this) +} -module.exports = osName; +var ANY = {} +Comparator.prototype.parse = function (comp) { + var r = this.options.loose ? re[COMPARATORLOOSE] : re[COMPARATOR] + var m = comp.match(r) + if (!m) { + throw new TypeError('Invalid comparator: ' + comp) + } -/***/ }), + this.operator = m[1] + if (this.operator === '=') { + this.operator = '' + } -/***/ 1330: -/***/ ((module) => { + // if it literally is just '>' or '' then allow anything. 
+ if (!m[2]) { + this.semver = ANY + } else { + this.semver = new SemVer(m[2], this.options.loose) + } +} -"use strict"; +Comparator.prototype.toString = function () { + return this.value +} -module.exports = (promise, onFinally) => { - onFinally = onFinally || (() => {}); +Comparator.prototype.test = function (version) { + debug('Comparator.test', version, this.options.loose) - return promise.then( - val => new Promise(resolve => { - resolve(onFinally()); - }).then(() => val), - err => new Promise(resolve => { - resolve(onFinally()); - }).then(() => { - throw err; - }) - ); -}; + if (this.semver === ANY) { + return true + } + if (typeof version === 'string') { + version = new SemVer(version, this.options) + } -/***/ }), + return cmp(version, this.operator, this.semver, this.options) +} -/***/ 539: -/***/ ((module) => { +Comparator.prototype.intersects = function (comp, options) { + if (!(comp instanceof Comparator)) { + throw new TypeError('a Comparator is required') + } -"use strict"; + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } -module.exports = opts => { - opts = opts || {}; + var rangeTmp - const env = opts.env || process.env; - const platform = opts.platform || process.platform; + if (this.operator === '') { + rangeTmp = new Range(comp.value, options) + return satisfies(this.value, rangeTmp, options) + } else if (comp.operator === '') { + rangeTmp = new Range(this.value, options) + return satisfies(comp.semver, rangeTmp, options) + } - if (platform !== 'win32') { - return 'PATH'; - } + var sameDirectionIncreasing = + (this.operator === '>=' || this.operator === '>') && + (comp.operator === '>=' || comp.operator === '>') + var sameDirectionDecreasing = + (this.operator === '<=' || this.operator === '<') && + (comp.operator === '<=' || comp.operator === '<') + var sameSemVer = this.semver.version === comp.semver.version + var differentDirectionsInclusive = + (this.operator === '>=' || 
this.operator === '<=') && + (comp.operator === '>=' || comp.operator === '<=') + var oppositeDirectionsLessThan = + cmp(this.semver, '<', comp.semver, options) && + ((this.operator === '>=' || this.operator === '>') && + (comp.operator === '<=' || comp.operator === '<')) + var oppositeDirectionsGreaterThan = + cmp(this.semver, '>', comp.semver, options) && + ((this.operator === '<=' || this.operator === '<') && + (comp.operator === '>=' || comp.operator === '>')) - return Object.keys(env).find(x => x.toUpperCase() === 'PATH') || 'Path'; -}; + return sameDirectionIncreasing || sameDirectionDecreasing || + (sameSemVer && differentDirectionsInclusive) || + oppositeDirectionsLessThan || oppositeDirectionsGreaterThan +} + +exports.Range = Range +function Range (range, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + + if (range instanceof Range) { + if (range.loose === !!options.loose && + range.includePrerelease === !!options.includePrerelease) { + return range + } else { + return new Range(range.raw, options) + } + } + if (range instanceof Comparator) { + return new Range(range.value, options) + } -/***/ }), + if (!(this instanceof Range)) { + return new Range(range, options) + } -/***/ 8341: -/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + this.options = options + this.loose = !!options.loose + this.includePrerelease = !!options.includePrerelease -var once = __webpack_require__(1223) -var eos = __webpack_require__(1205) -var fs = __webpack_require__(5747) // we only need fs to get the ReadStream and WriteStream prototypes + // First, split based on boolean or || + this.raw = range + this.set = range.split(/\s*\|\|\s*/).map(function (range) { + return this.parseRange(range.trim()) + }, this).filter(function (c) { + // throw out any that are not relevant for whatever reason + return c.length + }) -var noop = function () {} -var ancient = 
/^v?\.0/.test(process.version) + if (!this.set.length) { + throw new TypeError('Invalid SemVer Range: ' + range) + } -var isFn = function (fn) { - return typeof fn === 'function' + this.format() } -var isFS = function (stream) { - if (!ancient) return false // newer node version do not need to care about fs is a special way - if (!fs) return false // browser - return (stream instanceof (fs.ReadStream || noop) || stream instanceof (fs.WriteStream || noop)) && isFn(stream.close) +Range.prototype.format = function () { + this.range = this.set.map(function (comps) { + return comps.join(' ').trim() + }).join('||').trim() + return this.range } -var isRequest = function (stream) { - return stream.setHeader && isFn(stream.abort) +Range.prototype.toString = function () { + return this.range } -var destroyer = function (stream, reading, writing, callback) { - callback = once(callback) - - var closed = false - stream.on('close', function () { - closed = true - }) +Range.prototype.parseRange = function (range) { + var loose = this.options.loose + range = range.trim() + // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` + var hr = loose ? 
re[HYPHENRANGELOOSE] : re[HYPHENRANGE] + range = range.replace(hr, hyphenReplace) + debug('hyphen replace', range) + // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` + range = range.replace(re[COMPARATORTRIM], comparatorTrimReplace) + debug('comparator trim', range, re[COMPARATORTRIM]) - eos(stream, {readable: reading, writable: writing}, function (err) { - if (err) return callback(err) - closed = true - callback() - }) + // `~ 1.2.3` => `~1.2.3` + range = range.replace(re[TILDETRIM], tildeTrimReplace) - var destroyed = false - return function (err) { - if (closed) return - if (destroyed) return - destroyed = true + // `^ 1.2.3` => `^1.2.3` + range = range.replace(re[CARETTRIM], caretTrimReplace) - if (isFS(stream)) return stream.close(noop) // use close for fs streams to avoid fd leaks - if (isRequest(stream)) return stream.abort() // request.destroy just do .end - .abort is what we want + // normalize spaces + range = range.split(/\s+/).join(' ') - if (isFn(stream.destroy)) return stream.destroy() + // At this point, the range is completely trimmed and + // ready to be split into comparators. - callback(err || new Error('stream was destroyed')) + var compRe = loose ? 
re[COMPARATORLOOSE] : re[COMPARATOR] + var set = range.split(' ').map(function (comp) { + return parseComparator(comp, this.options) + }, this).join(' ').split(/\s+/) + if (this.options.loose) { + // in loose mode, throw out any that are not valid comparators + set = set.filter(function (comp) { + return !!comp.match(compRe) + }) } -} - -var call = function (fn) { - fn() -} + set = set.map(function (comp) { + return new Comparator(comp, this.options) + }, this) -var pipe = function (from, to) { - return from.pipe(to) + return set } -var pump = function () { - var streams = Array.prototype.slice.call(arguments) - var callback = isFn(streams[streams.length - 1] || noop) && streams.pop() || noop - - if (Array.isArray(streams[0])) streams = streams[0] - if (streams.length < 2) throw new Error('pump requires two streams per minimum') +Range.prototype.intersects = function (range, options) { + if (!(range instanceof Range)) { + throw new TypeError('a Range is required') + } - var error - var destroys = streams.map(function (stream, i) { - var reading = i < streams.length - 1 - var writing = i > 0 - return destroyer(stream, reading, writing, function (err) { - if (!error) error = err - if (err) destroys.forEach(call) - if (reading) return - destroys.forEach(call) - callback(error) + return this.set.some(function (thisComparators) { + return thisComparators.every(function (thisComparator) { + return range.set.some(function (rangeComparators) { + return rangeComparators.every(function (rangeComparator) { + return thisComparator.intersects(rangeComparator, options) + }) + }) }) }) +} - return streams.reduce(pipe) +// Mostly just for testing and legacy API reasons +exports.toComparators = toComparators +function toComparators (range, options) { + return new Range(range, options).set.map(function (comp) { + return comp.map(function (c) { + return c.value + }).join(' ').trim().split(' ') + }) } -module.exports = pump +// comprised of xranges, tildes, stars, and gtlt's at this 
point. +// already replaced the hyphen ranges +// turn into a set of JUST comparators. +function parseComparator (comp, options) { + debug('comp', comp, options) + comp = replaceCarets(comp, options) + debug('caret', comp) + comp = replaceTildes(comp, options) + debug('tildes', comp) + comp = replaceXRanges(comp, options) + debug('xrange', comp) + comp = replaceStars(comp, options) + debug('stars', comp) + return comp +} +function isX (id) { + return !id || id.toLowerCase() === 'x' || id === '*' +} -/***/ }), +// ~, ~> --> * (any, kinda silly) +// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0 +// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0 +// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0 +// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0 +// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0 +function replaceTildes (comp, options) { + return comp.trim().split(/\s+/).map(function (comp) { + return replaceTilde(comp, options) + }).join(' ') +} -/***/ 5911: -/***/ ((module, exports) => { +function replaceTilde (comp, options) { + var r = options.loose ? re[TILDELOOSE] : re[TILDE] + return comp.replace(r, function (_, M, m, p, pr) { + debug('tilde', comp, _, M, m, p, pr) + var ret -exports = module.exports = SemVer + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' + } else if (isX(p)) { + // ~1.2 == >=1.2.0 <1.3.0 + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' + } else if (pr) { + debug('replaceTilde pr', pr) + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + (+m + 1) + '.0' + } else { + // ~1.2.3 == >=1.2.3 <1.3.0 + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' 
+ (+m + 1) + '.0' + } -var debug -/* istanbul ignore next */ -if (typeof process === 'object' && - process.env && - process.env.NODE_DEBUG && - /\bsemver\b/i.test(process.env.NODE_DEBUG)) { - debug = function () { - var args = Array.prototype.slice.call(arguments, 0) - args.unshift('SEMVER') - console.log.apply(console, args) - } -} else { - debug = function () {} + debug('tilde return', ret) + return ret + }) } -// Note: this is the semver.org version of the spec that it implements -// Not necessarily the package version of this code. -exports.SEMVER_SPEC_VERSION = '2.0.0' - -var MAX_LENGTH = 256 -var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || - /* istanbul ignore next */ 9007199254740991 - -// Max safe segment length for coercion. -var MAX_SAFE_COMPONENT_LENGTH = 16 +// ^ --> * (any, kinda silly) +// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0 +// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0 +// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0 +// ^1.2.3 --> >=1.2.3 <2.0.0 +// ^1.2.0 --> >=1.2.0 <2.0.0 +function replaceCarets (comp, options) { + return comp.trim().split(/\s+/).map(function (comp) { + return replaceCaret(comp, options) + }).join(' ') +} -// The actual regexps go on exports.re -var re = exports.re = [] -var src = exports.src = [] -var R = 0 +function replaceCaret (comp, options) { + debug('caret', comp, options) + var r = options.loose ? re[CARETLOOSE] : re[CARET] + return comp.replace(r, function (_, M, m, p, pr) { + debug('caret', comp, _, M, m, p, pr) + var ret -// The following Regular Expressions can be used for tokenizing, -// validating, and parsing SemVer version strings. + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' + } else if (isX(p)) { + if (M === '0') { + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' + } else { + ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0' + } + } else if (pr) { + debug('replaceCaret pr', pr) + if (M === '0') { + if (m === '0') { + ret = '>=' + M + '.' + m + '.' 
+ p + '-' + pr + + ' <' + M + '.' + m + '.' + (+p + 1) + } else { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + (+m + 1) + '.0' + } + } else { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + (+M + 1) + '.0.0' + } + } else { + debug('no pr') + if (M === '0') { + if (m === '0') { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + m + '.' + (+p + 1) + } else { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + (+m + 1) + '.0' + } + } else { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + (+M + 1) + '.0.0' + } + } -// ## Numeric Identifier -// A single `0`, or a non-zero digit followed by zero or more digits. + debug('caret return', ret) + return ret + }) +} -var NUMERICIDENTIFIER = R++ -src[NUMERICIDENTIFIER] = '0|[1-9]\\d*' -var NUMERICIDENTIFIERLOOSE = R++ -src[NUMERICIDENTIFIERLOOSE] = '[0-9]+' +function replaceXRanges (comp, options) { + debug('replaceXRanges', comp, options) + return comp.split(/\s+/).map(function (comp) { + return replaceXRange(comp, options) + }).join(' ') +} -// ## Non-numeric Identifier -// Zero or more digits, followed by a letter or hyphen, and then zero or -// more letters, digits, or hyphens. +function replaceXRange (comp, options) { + comp = comp.trim() + var r = options.loose ? re[XRANGELOOSE] : re[XRANGE] + return comp.replace(r, function (ret, gtlt, M, m, p, pr) { + debug('xRange', comp, ret, gtlt, M, m, p, pr) + var xM = isX(M) + var xm = xM || isX(m) + var xp = xm || isX(p) + var anyX = xp -var NONNUMERICIDENTIFIER = R++ -src[NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*' + if (gtlt === '=' && anyX) { + gtlt = '' + } -// ## Main Version -// Three dot-separated numeric identifiers. + if (xM) { + if (gtlt === '>' || gtlt === '<') { + // nothing is allowed + ret = '<0.0.0' + } else { + // nothing is forbidden + ret = '*' + } + } else if (gtlt && anyX) { + // we know patch is an x, because we have any x at all. 
+ // replace X with 0 + if (xm) { + m = 0 + } + p = 0 -var MAINVERSION = R++ -src[MAINVERSION] = '(' + src[NUMERICIDENTIFIER] + ')\\.' + - '(' + src[NUMERICIDENTIFIER] + ')\\.' + - '(' + src[NUMERICIDENTIFIER] + ')' + if (gtlt === '>') { + // >1 => >=2.0.0 + // >1.2 => >=1.3.0 + // >1.2.3 => >= 1.2.4 + gtlt = '>=' + if (xm) { + M = +M + 1 + m = 0 + p = 0 + } else { + m = +m + 1 + p = 0 + } + } else if (gtlt === '<=') { + // <=0.7.x is actually <0.8.0, since any 0.7.x should + // pass. Similarly, <=7.x is actually <8.0.0, etc. + gtlt = '<' + if (xm) { + M = +M + 1 + } else { + m = +m + 1 + } + } -var MAINVERSIONLOOSE = R++ -src[MAINVERSIONLOOSE] = '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + - '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + - '(' + src[NUMERICIDENTIFIERLOOSE] + ')' + ret = gtlt + M + '.' + m + '.' + p + } else if (xm) { + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' + } else if (xp) { + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' + } -// ## Pre-release Version Identifier -// A numeric identifier, or a non-numeric identifier. + debug('xRange return', ret) -var PRERELEASEIDENTIFIER = R++ -src[PRERELEASEIDENTIFIER] = '(?:' + src[NUMERICIDENTIFIER] + - '|' + src[NONNUMERICIDENTIFIER] + ')' + return ret + }) +} -var PRERELEASEIDENTIFIERLOOSE = R++ -src[PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[NUMERICIDENTIFIERLOOSE] + - '|' + src[NONNUMERICIDENTIFIER] + ')' +// Because * is AND-ed with everything else in the comparator, +// and '' means "any version", just remove the *s entirely. +function replaceStars (comp, options) { + debug('replaceStars', comp, options) + // Looseness is ignored here. star is always as loose as it gets! + return comp.trim().replace(re[STAR], '') +} -// ## Pre-release Version -// Hyphen, followed by one or more dot-separated pre-release version -// identifiers. 
+// This function is passed to string.replace(re[HYPHENRANGE]) +// M, m, patch, prerelease, build +// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 +// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do +// 1.2 - 3.4 => >=1.2.0 <3.5.0 +function hyphenReplace ($0, + from, fM, fm, fp, fpr, fb, + to, tM, tm, tp, tpr, tb) { + if (isX(fM)) { + from = '' + } else if (isX(fm)) { + from = '>=' + fM + '.0.0' + } else if (isX(fp)) { + from = '>=' + fM + '.' + fm + '.0' + } else { + from = '>=' + from + } -var PRERELEASE = R++ -src[PRERELEASE] = '(?:-(' + src[PRERELEASEIDENTIFIER] + - '(?:\\.' + src[PRERELEASEIDENTIFIER] + ')*))' + if (isX(tM)) { + to = '' + } else if (isX(tm)) { + to = '<' + (+tM + 1) + '.0.0' + } else if (isX(tp)) { + to = '<' + tM + '.' + (+tm + 1) + '.0' + } else if (tpr) { + to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr + } else { + to = '<=' + to + } -var PRERELEASELOOSE = R++ -src[PRERELEASELOOSE] = '(?:-?(' + src[PRERELEASEIDENTIFIERLOOSE] + - '(?:\\.' + src[PRERELEASEIDENTIFIERLOOSE] + ')*))' + return (from + ' ' + to).trim() +} -// ## Build Metadata Identifier -// Any combination of digits, letters, or hyphens. +// if ANY of the sets match ALL of its comparators, then pass +Range.prototype.test = function (version) { + if (!version) { + return false + } -var BUILDIDENTIFIER = R++ -src[BUILDIDENTIFIER] = '[0-9A-Za-z-]+' + if (typeof version === 'string') { + version = new SemVer(version, this.options) + } -// ## Build Metadata -// Plus sign, followed by one or more period-separated build metadata -// identifiers. + for (var i = 0; i < this.set.length; i++) { + if (testSet(this.set[i], version, this.options)) { + return true + } + } + return false +} -var BUILD = R++ -src[BUILD] = '(?:\\+(' + src[BUILDIDENTIFIER] + - '(?:\\.' 
+ src[BUILDIDENTIFIER] + ')*))' +function testSet (set, version, options) { + for (var i = 0; i < set.length; i++) { + if (!set[i].test(version)) { + return false + } + } -// ## Full Version String -// A main version, followed optionally by a pre-release version and -// build metadata. + if (version.prerelease.length && !options.includePrerelease) { + // Find the set of versions that are allowed to have prereleases + // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 + // That should allow `1.2.3-pr.2` to pass. + // However, `1.2.4-alpha.notready` should NOT be allowed, + // even though it's within the range set by the comparators. + for (i = 0; i < set.length; i++) { + debug(set[i].semver) + if (set[i].semver === ANY) { + continue + } -// Note that the only major, minor, patch, and pre-release sections of -// the version string are capturing groups. The build metadata is not a -// capturing group, because it should not ever be used in version -// comparison. + if (set[i].semver.prerelease.length > 0) { + var allowed = set[i].semver + if (allowed.major === version.major && + allowed.minor === version.minor && + allowed.patch === version.patch) { + return true + } + } + } -var FULL = R++ -var FULLPLAIN = 'v?' + src[MAINVERSION] + - src[PRERELEASE] + '?' + - src[BUILD] + '?' + // Version has a -pre, but it's not one of the ones we like. + return false + } -src[FULL] = '^' + FULLPLAIN + '$' + return true +} -// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. -// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty -// common in the npm registry. -var LOOSEPLAIN = '[v=\\s]*' + src[MAINVERSIONLOOSE] + - src[PRERELEASELOOSE] + '?' + - src[BUILD] + '?' 
+exports.satisfies = satisfies +function satisfies (version, range, options) { + try { + range = new Range(range, options) + } catch (er) { + return false + } + return range.test(version) +} -var LOOSE = R++ -src[LOOSE] = '^' + LOOSEPLAIN + '$' +exports.maxSatisfying = maxSatisfying +function maxSatisfying (versions, range, options) { + var max = null + var maxSV = null + try { + var rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach(function (v) { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!max || maxSV.compare(v) === -1) { + // compare(max, v, true) + max = v + maxSV = new SemVer(max, options) + } + } + }) + return max +} -var GTLT = R++ -src[GTLT] = '((?:<|>)?=?)' +exports.minSatisfying = minSatisfying +function minSatisfying (versions, range, options) { + var min = null + var minSV = null + try { + var rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach(function (v) { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!min || minSV.compare(v) === 1) { + // compare(min, v, true) + min = v + minSV = new SemVer(min, options) + } + } + }) + return min +} -// Something like "2.*" or "1.2.x". -// Note that "x.x" is a valid xRange identifer, meaning "any version" -// Only the first item is strictly required. -var XRANGEIDENTIFIERLOOSE = R++ -src[XRANGEIDENTIFIERLOOSE] = src[NUMERICIDENTIFIERLOOSE] + '|x|X|\\*' -var XRANGEIDENTIFIER = R++ -src[XRANGEIDENTIFIER] = src[NUMERICIDENTIFIER] + '|x|X|\\*' +exports.minVersion = minVersion +function minVersion (range, loose) { + range = new Range(range, loose) -var XRANGEPLAIN = R++ -src[XRANGEPLAIN] = '[v=\\s]*(' + src[XRANGEIDENTIFIER] + ')' + - '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + - '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + - '(?:' + src[PRERELEASE] + ')?' + - src[BUILD] + '?' + - ')?)?' 
+ var minver = new SemVer('0.0.0') + if (range.test(minver)) { + return minver + } -var XRANGEPLAINLOOSE = R++ -src[XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[XRANGEIDENTIFIERLOOSE] + ')' + - '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + - '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + - '(?:' + src[PRERELEASELOOSE] + ')?' + - src[BUILD] + '?' + - ')?)?' + minver = new SemVer('0.0.0-0') + if (range.test(minver)) { + return minver + } -var XRANGE = R++ -src[XRANGE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAIN] + '$' -var XRANGELOOSE = R++ -src[XRANGELOOSE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAINLOOSE] + '$' + minver = null + for (var i = 0; i < range.set.length; ++i) { + var comparators = range.set[i] -// Coercion. -// Extract anything that could conceivably be a part of a valid semver -var COERCE = R++ -src[COERCE] = '(?:^|[^\\d])' + - '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' + - '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + - '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + - '(?:$|[^\\d])' + comparators.forEach(function (comparator) { + // Clone to avoid manipulating the comparator's semver object. + var compver = new SemVer(comparator.semver.version) + switch (comparator.operator) { + case '>': + if (compver.prerelease.length === 0) { + compver.patch++ + } else { + compver.prerelease.push(0) + } + compver.raw = compver.format() + /* fallthrough */ + case '': + case '>=': + if (!minver || gt(minver, compver)) { + minver = compver + } + break + case '<': + case '<=': + /* Ignore maximum versions */ + break + /* istanbul ignore next */ + default: + throw new Error('Unexpected operation: ' + comparator.operator) + } + }) + } -// Tilde ranges. 
-// Meaning is "reasonably at or greater than" -var LONETILDE = R++ -src[LONETILDE] = '(?:~>?)' + if (minver && range.test(minver)) { + return minver + } -var TILDETRIM = R++ -src[TILDETRIM] = '(\\s*)' + src[LONETILDE] + '\\s+' -re[TILDETRIM] = new RegExp(src[TILDETRIM], 'g') -var tildeTrimReplace = '$1~' + return null +} -var TILDE = R++ -src[TILDE] = '^' + src[LONETILDE] + src[XRANGEPLAIN] + '$' -var TILDELOOSE = R++ -src[TILDELOOSE] = '^' + src[LONETILDE] + src[XRANGEPLAINLOOSE] + '$' +exports.validRange = validRange +function validRange (range, options) { + try { + // Return '*' instead of '' so that truthiness works. + // This will throw if it's invalid anyway + return new Range(range, options).range || '*' + } catch (er) { + return null + } +} -// Caret ranges. -// Meaning is "at least and backwards compatible with" -var LONECARET = R++ -src[LONECARET] = '(?:\\^)' +// Determine if version is less than all the versions possible in the range +exports.ltr = ltr +function ltr (version, range, options) { + return outside(version, range, '<', options) +} -var CARETTRIM = R++ -src[CARETTRIM] = '(\\s*)' + src[LONECARET] + '\\s+' -re[CARETTRIM] = new RegExp(src[CARETTRIM], 'g') -var caretTrimReplace = '$1^' +// Determine if version is greater than all the versions possible in the range. 
+exports.gtr = gtr +function gtr (version, range, options) { + return outside(version, range, '>', options) +} -var CARET = R++ -src[CARET] = '^' + src[LONECARET] + src[XRANGEPLAIN] + '$' -var CARETLOOSE = R++ -src[CARETLOOSE] = '^' + src[LONECARET] + src[XRANGEPLAINLOOSE] + '$' +exports.outside = outside +function outside (version, range, hilo, options) { + version = new SemVer(version, options) + range = new Range(range, options) -// A simple gt/lt/eq thing, or just "" to indicate "any version" -var COMPARATORLOOSE = R++ -src[COMPARATORLOOSE] = '^' + src[GTLT] + '\\s*(' + LOOSEPLAIN + ')$|^$' -var COMPARATOR = R++ -src[COMPARATOR] = '^' + src[GTLT] + '\\s*(' + FULLPLAIN + ')$|^$' + var gtfn, ltefn, ltfn, comp, ecomp + switch (hilo) { + case '>': + gtfn = gt + ltefn = lte + ltfn = lt + comp = '>' + ecomp = '>=' + break + case '<': + gtfn = lt + ltefn = gte + ltfn = gt + comp = '<' + ecomp = '<=' + break + default: + throw new TypeError('Must provide a hilo val of "<" or ">"') + } -// An expression to strip any whitespace between the gtlt and the thing -// it modifies, so that `> 1.2.3` ==> `>1.2.3` -var COMPARATORTRIM = R++ -src[COMPARATORTRIM] = '(\\s*)' + src[GTLT] + - '\\s*(' + LOOSEPLAIN + '|' + src[XRANGEPLAIN] + ')' + // If it satisifes the range it is not outside + if (satisfies(version, range, options)) { + return false + } -// this one has to use the /g flag -re[COMPARATORTRIM] = new RegExp(src[COMPARATORTRIM], 'g') -var comparatorTrimReplace = '$1$2$3' + // From now on, variable terms are as if we're in "gtr" mode. + // but note that everything is flipped for the "ltr" function. -// Something like `1.2.3 - 1.2.4` -// Note that these all use the loose form, because they'll be -// checked against either the strict or loose comparator form -// later. 
-var HYPHENRANGE = R++ -src[HYPHENRANGE] = '^\\s*(' + src[XRANGEPLAIN] + ')' + - '\\s+-\\s+' + - '(' + src[XRANGEPLAIN] + ')' + - '\\s*$' + for (var i = 0; i < range.set.length; ++i) { + var comparators = range.set[i] -var HYPHENRANGELOOSE = R++ -src[HYPHENRANGELOOSE] = '^\\s*(' + src[XRANGEPLAINLOOSE] + ')' + - '\\s+-\\s+' + - '(' + src[XRANGEPLAINLOOSE] + ')' + - '\\s*$' + var high = null + var low = null -// Star ranges basically just allow anything at all. -var STAR = R++ -src[STAR] = '(<|>)?=?\\s*\\*' + comparators.forEach(function (comparator) { + if (comparator.semver === ANY) { + comparator = new Comparator('>=0.0.0') + } + high = high || comparator + low = low || comparator + if (gtfn(comparator.semver, high.semver, options)) { + high = comparator + } else if (ltfn(comparator.semver, low.semver, options)) { + low = comparator + } + }) -// Compile to actual regexp objects. -// All are flag-free, unless they were created above with a flag. -for (var i = 0; i < R; i++) { - debug(i, src[i]) - if (!re[i]) { - re[i] = new RegExp(src[i]) - } -} + // If the edge version comparator has a operator then our version + // isn't outside it + if (high.operator === comp || high.operator === ecomp) { + return false + } -exports.parse = parse -function parse (version, options) { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false + // If the lowest version comparator has an operator and our version + // is less than it then it isn't higher than the range + if ((!low.operator || low.operator === comp) && + ltefn(version, low.semver)) { + return false + } else if (low.operator === ecomp && ltfn(version, low.semver)) { + return false } } + return true +} + +exports.prerelease = prerelease +function prerelease (version, options) { + var parsed = parse(version, options) + return (parsed && parsed.prerelease.length) ? 
parsed.prerelease : null +} +exports.intersects = intersects +function intersects (r1, r2, options) { + r1 = new Range(r1, options) + r2 = new Range(r2, options) + return r1.intersects(r2) +} + +exports.coerce = coerce +function coerce (version) { if (version instanceof SemVer) { return version } @@ -7304,1829 +9933,3142 @@ function parse (version, options) { return null } - if (version.length > MAX_LENGTH) { - return null - } - - var r = options.loose ? re[LOOSE] : re[FULL] - if (!r.test(version)) { - return null - } + var match = version.match(re[COERCE]) - try { - return new SemVer(version, options) - } catch (er) { + if (match == null) { return null } -} - -exports.valid = valid -function valid (version, options) { - var v = parse(version, options) - return v ? v.version : null -} -exports.clean = clean -function clean (version, options) { - var s = parse(version.trim().replace(/^[=v]+/, ''), options) - return s ? s.version : null + return parse(match[1] + + '.' + (match[2] || '0') + + '.' + (match[3] || '0')) } -exports.SemVer = SemVer - -function SemVer (version, options) { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false - } - } - if (version instanceof SemVer) { - if (version.loose === options.loose) { - return version - } else { - version = version.version - } - } else if (typeof version !== 'string') { - throw new TypeError('Invalid Version: ' + version) - } - if (version.length > MAX_LENGTH) { - throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters') - } +/***/ }), - if (!(this instanceof SemVer)) { - return new SemVer(version, options) - } +/***/ 7032: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { - debug('SemVer', version, options) - this.options = options - this.loose = !!options.loose +"use strict"; - var m = version.trim().match(options.loose ? 
re[LOOSE] : re[FULL]) +var shebangRegex = __webpack_require__(2638); - if (!m) { - throw new TypeError('Invalid Version: ' + version) - } +module.exports = function (str) { + var match = str.match(shebangRegex); - this.raw = version + if (!match) { + return null; + } - // these are actually numbers - this.major = +m[1] - this.minor = +m[2] - this.patch = +m[3] + var arr = match[0].replace(/#! ?/, '').split(' '); + var bin = arr[0].split('/').pop(); + var arg = arr[1]; - if (this.major > MAX_SAFE_INTEGER || this.major < 0) { - throw new TypeError('Invalid major version') - } + return (bin === 'env' ? + arg : + bin + (arg ? ' ' + arg : '') + ); +}; - if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { - throw new TypeError('Invalid minor version') - } - if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { - throw new TypeError('Invalid patch version') - } +/***/ }), - // numberify any prerelease numeric ids - if (!m[4]) { - this.prerelease = [] - } else { - this.prerelease = m[4].split('.').map(function (id) { - if (/^[0-9]+$/.test(id)) { - var num = +id - if (num >= 0 && num < MAX_SAFE_INTEGER) { - return num - } - } - return id - }) - } +/***/ 2638: +/***/ ((module) => { - this.build = m[5] ? m[5].split('.') : [] - this.format() -} +"use strict"; -SemVer.prototype.format = function () { - this.version = this.major + '.' + this.minor + '.' 
+ this.patch - if (this.prerelease.length) { - this.version += '-' + this.prerelease.join('.') - } - return this.version -} +module.exports = /^#!.*/; -SemVer.prototype.toString = function () { - return this.version -} -SemVer.prototype.compare = function (other) { - debug('SemVer.compare', this.version, this.options, other) - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) - } +/***/ }), - return this.compareMain(other) || this.comparePre(other) -} +/***/ 4931: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { -SemVer.prototype.compareMain = function (other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) - } +// Note: since nyc uses this module to output coverage, any lines +// that are in the direct sync flow of nyc's outputCoverage are +// ignored, since we can never get coverage for them. +var assert = __webpack_require__(2357) +var signals = __webpack_require__(3710) +var isWin = /^win/i.test(process.platform) - return compareIdentifiers(this.major, other.major) || - compareIdentifiers(this.minor, other.minor) || - compareIdentifiers(this.patch, other.patch) +var EE = __webpack_require__(8614) +/* istanbul ignore if */ +if (typeof EE !== 'function') { + EE = EE.EventEmitter } -SemVer.prototype.comparePre = function (other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) - } - - // NOT having a prerelease is > having one - if (this.prerelease.length && !other.prerelease.length) { - return -1 - } else if (!this.prerelease.length && other.prerelease.length) { - return 1 - } else if (!this.prerelease.length && !other.prerelease.length) { - return 0 - } - - var i = 0 - do { - var a = this.prerelease[i] - var b = other.prerelease[i] - debug('prerelease compare', i, a, b) - if (a === undefined && b === undefined) { - return 0 - } else if (b === undefined) { - return 1 - } else if (a === undefined) { - return -1 - } else if (a === b) { - continue - } else { 
- return compareIdentifiers(a, b) - } - } while (++i) +var emitter +if (process.__signal_exit_emitter__) { + emitter = process.__signal_exit_emitter__ +} else { + emitter = process.__signal_exit_emitter__ = new EE() + emitter.count = 0 + emitter.emitted = {} } -// preminor will bump the version up to the next minor release, and immediately -// down to pre-release. premajor and prepatch work the same way. -SemVer.prototype.inc = function (release, identifier) { - switch (release) { - case 'premajor': - this.prerelease.length = 0 - this.patch = 0 - this.minor = 0 - this.major++ - this.inc('pre', identifier) - break - case 'preminor': - this.prerelease.length = 0 - this.patch = 0 - this.minor++ - this.inc('pre', identifier) - break - case 'prepatch': - // If this is already a prerelease, it will bump to the next version - // drop any prereleases that might already exist, since they are not - // relevant at this point. - this.prerelease.length = 0 - this.inc('patch', identifier) - this.inc('pre', identifier) - break - // If the input is a non-prerelease version, this acts the same as - // prepatch. - case 'prerelease': - if (this.prerelease.length === 0) { - this.inc('patch', identifier) - } - this.inc('pre', identifier) - break +// Because this emitter is a global, we have to check to see if a +// previous version of this library failed to enable infinite listeners. +// I know what you're about to say. But literally everything about +// signal-exit is a compromise with evil. Get used to it. +if (!emitter.infinite) { + emitter.setMaxListeners(Infinity) + emitter.infinite = true +} - case 'major': - // If this is a pre-major version, bump up to the same major version. - // Otherwise increment major. 
- // 1.0.0-5 bumps to 1.0.0 - // 1.1.0 bumps to 2.0.0 - if (this.minor !== 0 || - this.patch !== 0 || - this.prerelease.length === 0) { - this.major++ - } - this.minor = 0 - this.patch = 0 - this.prerelease = [] - break - case 'minor': - // If this is a pre-minor version, bump up to the same minor version. - // Otherwise increment minor. - // 1.2.0-5 bumps to 1.2.0 - // 1.2.1 bumps to 1.3.0 - if (this.patch !== 0 || this.prerelease.length === 0) { - this.minor++ - } - this.patch = 0 - this.prerelease = [] - break - case 'patch': - // If this is not a pre-release version, it will increment the patch. - // If it is a pre-release it will bump up to the same patch version. - // 1.2.0-5 patches to 1.2.0 - // 1.2.0 patches to 1.2.1 - if (this.prerelease.length === 0) { - this.patch++ - } - this.prerelease = [] - break - // This probably shouldn't be used publicly. - // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction. - case 'pre': - if (this.prerelease.length === 0) { - this.prerelease = [0] - } else { - var i = this.prerelease.length - while (--i >= 0) { - if (typeof this.prerelease[i] === 'number') { - this.prerelease[i]++ - i = -2 - } - } - if (i === -1) { - // didn't increment anything - this.prerelease.push(0) - } - } - if (identifier) { - // 1.2.0-beta.1 bumps to 1.2.0-beta.2, - // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 - if (this.prerelease[0] === identifier) { - if (isNaN(this.prerelease[1])) { - this.prerelease = [identifier, 0] - } - } else { - this.prerelease = [identifier, 0] - } - } - break +module.exports = function (cb, opts) { + assert.equal(typeof cb, 'function', 'a callback must be provided for exit handler') - default: - throw new Error('invalid increment argument: ' + release) + if (loaded === false) { + load() } - this.format() - this.raw = this.version - return this + + var ev = 'exit' + if (opts && opts.alwaysLast) { + ev = 'afterexit' + } + + var remove = function () { + emitter.removeListener(ev, cb) + if 
(emitter.listeners('exit').length === 0 && + emitter.listeners('afterexit').length === 0) { + unload() + } + } + emitter.on(ev, cb) + + return remove } -exports.inc = inc -function inc (version, release, loose, identifier) { - if (typeof (loose) === 'string') { - identifier = loose - loose = undefined +module.exports.unload = unload +function unload () { + if (!loaded) { + return } + loaded = false - try { - return new SemVer(version, loose).inc(release, identifier).version - } catch (er) { - return null + signals.forEach(function (sig) { + try { + process.removeListener(sig, sigListeners[sig]) + } catch (er) {} + }) + process.emit = originalProcessEmit + process.reallyExit = originalProcessReallyExit + emitter.count -= 1 +} + +function emit (event, code, signal) { + if (emitter.emitted[event]) { + return } + emitter.emitted[event] = true + emitter.emit(event, code, signal) } -exports.diff = diff -function diff (version1, version2) { - if (eq(version1, version2)) { - return null - } else { - var v1 = parse(version1) - var v2 = parse(version2) - var prefix = '' - if (v1.prerelease.length || v2.prerelease.length) { - prefix = 'pre' - var defaultResult = 'prerelease' - } - for (var key in v1) { - if (key === 'major' || key === 'minor' || key === 'patch') { - if (v1[key] !== v2[key]) { - return prefix + key - } +// { : , ... } +var sigListeners = {} +signals.forEach(function (sig) { + sigListeners[sig] = function listener () { + // If there are no other listeners, an exit is coming! + // Simplest way: remove us and then re-send the signal. + // We know that this will kill the process, so we can + // safely emit now. 
+ var listeners = process.listeners(sig) + if (listeners.length === emitter.count) { + unload() + emit('exit', null, sig) + /* istanbul ignore next */ + emit('afterexit', null, sig) + /* istanbul ignore next */ + if (isWin && sig === 'SIGHUP') { + // "SIGHUP" throws an `ENOSYS` error on Windows, + // so use a supported signal instead + sig = 'SIGINT' } + process.kill(process.pid, sig) } - return defaultResult // may be undefined } +}) + +module.exports.signals = function () { + return signals } -exports.compareIdentifiers = compareIdentifiers +module.exports.load = load -var numeric = /^[0-9]+$/ -function compareIdentifiers (a, b) { - var anum = numeric.test(a) - var bnum = numeric.test(b) +var loaded = false - if (anum && bnum) { - a = +a - b = +b +function load () { + if (loaded) { + return } + loaded = true - return a === b ? 0 - : (anum && !bnum) ? -1 - : (bnum && !anum) ? 1 - : a < b ? -1 - : 1 -} + // This is the number of onSignalExit's that are in play. + // It's important so that we can count the correct number of + // listeners on signals, and don't wait for the other one to + // handle it instead of us. 
+ emitter.count += 1 -exports.rcompareIdentifiers = rcompareIdentifiers -function rcompareIdentifiers (a, b) { - return compareIdentifiers(b, a) -} + signals = signals.filter(function (sig) { + try { + process.on(sig, sigListeners[sig]) + return true + } catch (er) { + return false + } + }) -exports.major = major -function major (a, loose) { - return new SemVer(a, loose).major + process.emit = processEmit + process.reallyExit = processReallyExit } -exports.minor = minor -function minor (a, loose) { - return new SemVer(a, loose).minor +var originalProcessReallyExit = process.reallyExit +function processReallyExit (code) { + process.exitCode = code || 0 + emit('exit', process.exitCode, null) + /* istanbul ignore next */ + emit('afterexit', process.exitCode, null) + /* istanbul ignore next */ + originalProcessReallyExit.call(process, process.exitCode) } -exports.patch = patch -function patch (a, loose) { - return new SemVer(a, loose).patch +var originalProcessEmit = process.emit +function processEmit (ev, arg) { + if (ev === 'exit') { + if (arg !== undefined) { + process.exitCode = arg + } + var ret = originalProcessEmit.apply(this, arguments) + emit('exit', process.exitCode, null) + /* istanbul ignore next */ + emit('afterexit', process.exitCode, null) + return ret + } else { + return originalProcessEmit.apply(this, arguments) + } } -exports.compare = compare -function compare (a, b, loose) { - return new SemVer(a, loose).compare(new SemVer(b, loose)) -} -exports.compareLoose = compareLoose -function compareLoose (a, b) { - return compare(a, b, true) -} +/***/ }), + +/***/ 3710: +/***/ ((module) => { + +// This is not the set of all possible signals. +// +// It IS, however, the set of all signals that trigger +// an exit on either Linux or BSD systems. Linux is a +// superset of the signal names supported on BSD, and +// the unknown signals just fail to register, so we can +// catch that easily enough. +// +// Don't bother with SIGKILL. 
It's uncatchable, which +// means that we can't fire any callbacks anyway. +// +// If a user does happen to register a handler on a non- +// fatal signal like SIGWINCH or something, and then +// exit, it'll end up firing `process.emit('exit')`, so +// the handler will be fired anyway. +// +// SIGBUS, SIGFPE, SIGSEGV and SIGILL, when not raised +// artificially, inherently leave the process in a +// state from which it is not safe to try and enter JS +// listeners. +module.exports = [ + 'SIGABRT', + 'SIGALRM', + 'SIGHUP', + 'SIGINT', + 'SIGTERM' +] -exports.rcompare = rcompare -function rcompare (a, b, loose) { - return compare(b, a, loose) +if (process.platform !== 'win32') { + module.exports.push( + 'SIGVTALRM', + 'SIGXCPU', + 'SIGXFSZ', + 'SIGUSR2', + 'SIGTRAP', + 'SIGSYS', + 'SIGQUIT', + 'SIGIOT' + // should detect profiler and enable/disable accordingly. + // see #21 + // 'SIGPROF' + ) } -exports.sort = sort -function sort (list, loose) { - return list.sort(function (a, b) { - return exports.compare(a, b, loose) - }) +if (process.platform === 'linux') { + module.exports.push( + 'SIGIO', + 'SIGPOLL', + 'SIGPWR', + 'SIGSTKFLT', + 'SIGUNUSED' + ) } -exports.rsort = rsort -function rsort (list, loose) { - return list.sort(function (a, b) { - return exports.rcompare(a, b, loose) - }) -} -exports.gt = gt -function gt (a, b, loose) { - return compare(a, b, loose) > 0 -} +/***/ }), -exports.lt = lt -function lt (a, b, loose) { - return compare(a, b, loose) < 0 -} +/***/ 5515: +/***/ ((module) => { -exports.eq = eq -function eq (a, b, loose) { - return compare(a, b, loose) === 0 -} +"use strict"; -exports.neq = neq -function neq (a, b, loose) { - return compare(a, b, loose) !== 0 -} +module.exports = function (x) { + var lf = typeof x === 'string' ? '\n' : '\n'.charCodeAt(); + var cr = typeof x === 'string' ? 
'\r' : '\r'.charCodeAt(); -exports.gte = gte -function gte (a, b, loose) { - return compare(a, b, loose) >= 0 -} + if (x[x.length - 1] === lf) { + x = x.slice(0, x.length - 1); + } -exports.lte = lte -function lte (a, b, loose) { - return compare(a, b, loose) <= 0 -} + if (x[x.length - 1] === cr) { + x = x.slice(0, x.length - 1); + } -exports.cmp = cmp -function cmp (a, op, b, loose) { - switch (op) { - case '===': - if (typeof a === 'object') - a = a.version - if (typeof b === 'object') - b = b.version - return a === b + return x; +}; - case '!==': - if (typeof a === 'object') - a = a.version - if (typeof b === 'object') - b = b.version - return a !== b - case '': - case '=': - case '==': - return eq(a, b, loose) +/***/ }), - case '!=': - return neq(a, b, loose) +/***/ 4256: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { - case '>': - return gt(a, b, loose) +"use strict"; - case '>=': - return gte(a, b, loose) - case '<': - return lt(a, b, loose) +var punycode = __webpack_require__(4213); +var mappingTable = __webpack_require__(68); - case '<=': - return lte(a, b, loose) +var PROCESSING_OPTIONS = { + TRANSITIONAL: 0, + NONTRANSITIONAL: 1 +}; - default: - throw new TypeError('Invalid operator: ' + op) - } +function normalize(str) { // fix bug in v8 + return str.split('\u0000').map(function (s) { return s.normalize('NFC'); }).join('\u0000'); } -exports.Comparator = Comparator -function Comparator (comp, options) { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false - } - } +function findStatus(val) { + var start = 0; + var end = mappingTable.length - 1; - if (comp instanceof Comparator) { - if (comp.loose === !!options.loose) { - return comp + while (start <= end) { + var mid = Math.floor((start + end) / 2); + + var target = mappingTable[mid]; + if (target[0][0] <= val && target[0][1] >= val) { + return target; + } else if (target[0][0] > val) { + end = mid - 1; } else { - comp = 
comp.value + start = mid + 1; } } - if (!(this instanceof Comparator)) { - return new Comparator(comp, options) - } - - debug('comparator', comp, options) - this.options = options - this.loose = !!options.loose - this.parse(comp) + return null; +} - if (this.semver === ANY) { - this.value = '' - } else { - this.value = this.operator + this.semver.version - } +var regexAstralSymbols = /[\uD800-\uDBFF][\uDC00-\uDFFF]/g; - debug('comp', this) +function countSymbols(string) { + return string + // replace every surrogate pair with a BMP symbol + .replace(regexAstralSymbols, '_') + // then get the length + .length; } -var ANY = {} -Comparator.prototype.parse = function (comp) { - var r = this.options.loose ? re[COMPARATORLOOSE] : re[COMPARATOR] - var m = comp.match(r) +function mapChars(domain_name, useSTD3, processing_option) { + var hasError = false; + var processed = ""; - if (!m) { - throw new TypeError('Invalid comparator: ' + comp) - } + var len = countSymbols(domain_name); + for (var i = 0; i < len; ++i) { + var codePoint = domain_name.codePointAt(i); + var status = findStatus(codePoint); - this.operator = m[1] - if (this.operator === '=') { - this.operator = '' - } + switch (status[1]) { + case "disallowed": + hasError = true; + processed += String.fromCodePoint(codePoint); + break; + case "ignored": + break; + case "mapped": + processed += String.fromCodePoint.apply(String, status[2]); + break; + case "deviation": + if (processing_option === PROCESSING_OPTIONS.TRANSITIONAL) { + processed += String.fromCodePoint.apply(String, status[2]); + } else { + processed += String.fromCodePoint(codePoint); + } + break; + case "valid": + processed += String.fromCodePoint(codePoint); + break; + case "disallowed_STD3_mapped": + if (useSTD3) { + hasError = true; + processed += String.fromCodePoint(codePoint); + } else { + processed += String.fromCodePoint.apply(String, status[2]); + } + break; + case "disallowed_STD3_valid": + if (useSTD3) { + hasError = true; + } - // if it 
literally is just '>' or '' then allow anything. - if (!m[2]) { - this.semver = ANY - } else { - this.semver = new SemVer(m[2], this.options.loose) + processed += String.fromCodePoint(codePoint); + break; + } } -} -Comparator.prototype.toString = function () { - return this.value + return { + string: processed, + error: hasError + }; } -Comparator.prototype.test = function (version) { - debug('Comparator.test', version, this.options.loose) - - if (this.semver === ANY) { - return true - } +var combiningMarksRegex = /[\u0300-\u036F\u0483-\u0489\u0591-\u05BD\u05BF\u05C1\u05C2\u05C4\u05C5\u05C7\u0610-\u061A\u064B-\u065F\u0670\u06D6-\u06DC\u06DF-\u06E4\u06E7\u06E8\u06EA-\u06ED\u0711\u0730-\u074A\u07A6-\u07B0\u07EB-\u07F3\u0816-\u0819\u081B-\u0823\u0825-\u0827\u0829-\u082D\u0859-\u085B\u08E4-\u0903\u093A-\u093C\u093E-\u094F\u0951-\u0957\u0962\u0963\u0981-\u0983\u09BC\u09BE-\u09C4\u09C7\u09C8\u09CB-\u09CD\u09D7\u09E2\u09E3\u0A01-\u0A03\u0A3C\u0A3E-\u0A42\u0A47\u0A48\u0A4B-\u0A4D\u0A51\u0A70\u0A71\u0A75\u0A81-\u0A83\u0ABC\u0ABE-\u0AC5\u0AC7-\u0AC9\u0ACB-\u0ACD\u0AE2\u0AE3\u0B01-\u0B03\u0B3C\u0B3E-\u0B44\u0B47\u0B48\u0B4B-\u0B4D\u0B56\u0B57\u0B62\u0B63\u0B82\u0BBE-\u0BC2\u0BC6-\u0BC8\u0BCA-\u0BCD\u0BD7\u0C00-\u0C03\u0C3E-\u0C44\u0C46-\u0C48\u0C4A-\u0C4D\u0C55\u0C56\u0C62\u0C63\u0C81-\u0C83\u0CBC\u0CBE-\u0CC4\u0CC6-\u0CC8\u0CCA-\u0CCD\u0CD5\u0CD6\u0CE2\u0CE3\u0D01-\u0D03\u0D3E-\u0D44\u0D46-\u0D48\u0D4A-\u0D4D\u0D57\u0D62\u0D63\u0D82\u0D83\u0DCA\u0DCF-\u0DD4\u0DD6\u0DD8-\u0DDF\u0DF2\u0DF3\u0E31\u0E34-\u0E3A\u0E47-\u0E4E\u0EB1\u0EB4-\u0EB9\u0EBB\u0EBC\u0EC8-\u0ECD\u0F18\u0F19\u0F35\u0F37\u0F39\u0F3E\u0F3F\u0F71-\u0F84\u0F86\u0F87\u0F8D-\u0F97\u0F99-\u0FBC\u0FC6\u102B-\u103E\u1056-\u1059\u105E-\u1060\u1062-\u1064\u1067-\u106D\u1071-\u1074\u1082-\u108D\u108F\u109A-\u109D\u135D-\u135F\u1712-\u1714\u1732-\u1734\u1752\u1753\u1772\u1773\u17B4-\u17D3\u17DD\u180B-\u180D\u18A9\u1920-\u192B\u1930-\u193B\u19B0-\u19C0\u19C8\u19C9\u1A17-\u1A1B\u1A55-\u1A5E\u1A60-\u1A7C\u1A7F\u1AB0-\u1ABE\u1
B00-\u1B04\u1B34-\u1B44\u1B6B-\u1B73\u1B80-\u1B82\u1BA1-\u1BAD\u1BE6-\u1BF3\u1C24-\u1C37\u1CD0-\u1CD2\u1CD4-\u1CE8\u1CED\u1CF2-\u1CF4\u1CF8\u1CF9\u1DC0-\u1DF5\u1DFC-\u1DFF\u20D0-\u20F0\u2CEF-\u2CF1\u2D7F\u2DE0-\u2DFF\u302A-\u302F\u3099\u309A\uA66F-\uA672\uA674-\uA67D\uA69F\uA6F0\uA6F1\uA802\uA806\uA80B\uA823-\uA827\uA880\uA881\uA8B4-\uA8C4\uA8E0-\uA8F1\uA926-\uA92D\uA947-\uA953\uA980-\uA983\uA9B3-\uA9C0\uA9E5\uAA29-\uAA36\uAA43\uAA4C\uAA4D\uAA7B-\uAA7D\uAAB0\uAAB2-\uAAB4\uAAB7\uAAB8\uAABE\uAABF\uAAC1\uAAEB-\uAAEF\uAAF5\uAAF6\uABE3-\uABEA\uABEC\uABED\uFB1E\uFE00-\uFE0F\uFE20-\uFE2D]|\uD800[\uDDFD\uDEE0\uDF76-\uDF7A]|\uD802[\uDE01-\uDE03\uDE05\uDE06\uDE0C-\uDE0F\uDE38-\uDE3A\uDE3F\uDEE5\uDEE6]|\uD804[\uDC00-\uDC02\uDC38-\uDC46\uDC7F-\uDC82\uDCB0-\uDCBA\uDD00-\uDD02\uDD27-\uDD34\uDD73\uDD80-\uDD82\uDDB3-\uDDC0\uDE2C-\uDE37\uDEDF-\uDEEA\uDF01-\uDF03\uDF3C\uDF3E-\uDF44\uDF47\uDF48\uDF4B-\uDF4D\uDF57\uDF62\uDF63\uDF66-\uDF6C\uDF70-\uDF74]|\uD805[\uDCB0-\uDCC3\uDDAF-\uDDB5\uDDB8-\uDDC0\uDE30-\uDE40\uDEAB-\uDEB7]|\uD81A[\uDEF0-\uDEF4\uDF30-\uDF36]|\uD81B[\uDF51-\uDF7E\uDF8F-\uDF92]|\uD82F[\uDC9D\uDC9E]|\uD834[\uDD65-\uDD69\uDD6D-\uDD72\uDD7B-\uDD82\uDD85-\uDD8B\uDDAA-\uDDAD\uDE42-\uDE44]|\uD83A[\uDCD0-\uDCD6]|\uDB40[\uDD00-\uDDEF]/; - if (typeof version === 'string') { - version = new SemVer(version, this.options) +function validateLabel(label, processing_option) { + if (label.substr(0, 4) === "xn--") { + label = punycode.toUnicode(label); + processing_option = PROCESSING_OPTIONS.NONTRANSITIONAL; } - return cmp(version, this.operator, this.semver, this.options) -} + var error = false; -Comparator.prototype.intersects = function (comp, options) { - if (!(comp instanceof Comparator)) { - throw new TypeError('a Comparator is required') + if (normalize(label) !== label || + (label[3] === "-" && label[4] === "-") || + label[0] === "-" || label[label.length - 1] === "-" || + label.indexOf(".") !== -1 || + label.search(combiningMarksRegex) === 0) { + error = true; } - if 
(!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false + var len = countSymbols(label); + for (var i = 0; i < len; ++i) { + var status = findStatus(label.codePointAt(i)); + if ((processing === PROCESSING_OPTIONS.TRANSITIONAL && status[1] !== "valid") || + (processing === PROCESSING_OPTIONS.NONTRANSITIONAL && + status[1] !== "valid" && status[1] !== "deviation")) { + error = true; + break; } } - var rangeTmp + return { + label: label, + error: error + }; +} - if (this.operator === '') { - rangeTmp = new Range(comp.value, options) - return satisfies(this.value, rangeTmp, options) - } else if (comp.operator === '') { - rangeTmp = new Range(this.value, options) - return satisfies(comp.semver, rangeTmp, options) - } +function processing(domain_name, useSTD3, processing_option) { + var result = mapChars(domain_name, useSTD3, processing_option); + result.string = normalize(result.string); - var sameDirectionIncreasing = - (this.operator === '>=' || this.operator === '>') && - (comp.operator === '>=' || comp.operator === '>') - var sameDirectionDecreasing = - (this.operator === '<=' || this.operator === '<') && - (comp.operator === '<=' || comp.operator === '<') - var sameSemVer = this.semver.version === comp.semver.version - var differentDirectionsInclusive = - (this.operator === '>=' || this.operator === '<=') && - (comp.operator === '>=' || comp.operator === '<=') - var oppositeDirectionsLessThan = - cmp(this.semver, '<', comp.semver, options) && - ((this.operator === '>=' || this.operator === '>') && - (comp.operator === '<=' || comp.operator === '<')) - var oppositeDirectionsGreaterThan = - cmp(this.semver, '>', comp.semver, options) && - ((this.operator === '<=' || this.operator === '<') && - (comp.operator === '>=' || comp.operator === '>')) + var labels = result.string.split("."); + for (var i = 0; i < labels.length; ++i) { + try { + var validation = validateLabel(labels[i]); + labels[i] = validation.label; + 
result.error = result.error || validation.error; + } catch(e) { + result.error = true; + } + } - return sameDirectionIncreasing || sameDirectionDecreasing || - (sameSemVer && differentDirectionsInclusive) || - oppositeDirectionsLessThan || oppositeDirectionsGreaterThan + return { + string: labels.join("."), + error: result.error + }; } - -exports.Range = Range -function Range (range, options) { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false + +module.exports.toASCII = function(domain_name, useSTD3, processing_option, verifyDnsLength) { + var result = processing(domain_name, useSTD3, processing_option); + var labels = result.string.split("."); + labels = labels.map(function(l) { + try { + return punycode.toASCII(l); + } catch(e) { + result.error = true; + return l; } - } + }); - if (range instanceof Range) { - if (range.loose === !!options.loose && - range.includePrerelease === !!options.includePrerelease) { - return range - } else { - return new Range(range.raw, options) + if (verifyDnsLength) { + var total = labels.slice(0, labels.length - 1).join(".").length; + if (total.length > 253 || total.length === 0) { + result.error = true; } - } - if (range instanceof Comparator) { - return new Range(range.value, options) + for (var i=0; i < labels.length; ++i) { + if (labels.length > 63 || labels.length === 0) { + result.error = true; + break; + } + } } - if (!(this instanceof Range)) { - return new Range(range, options) - } + if (result.error) return null; + return labels.join("."); +}; - this.options = options - this.loose = !!options.loose - this.includePrerelease = !!options.includePrerelease +module.exports.toUnicode = function(domain_name, useSTD3) { + var result = processing(domain_name, useSTD3, PROCESSING_OPTIONS.NONTRANSITIONAL); - // First, split based on boolean or || - this.raw = range - this.set = range.split(/\s*\|\|\s*/).map(function (range) { - return this.parseRange(range.trim()) - }, 
this).filter(function (c) { - // throw out any that are not relevant for whatever reason - return c.length - }) + return { + domain: result.string, + error: result.error + }; +}; - if (!this.set.length) { - throw new TypeError('Invalid SemVer Range: ' + range) - } +module.exports.PROCESSING_OPTIONS = PROCESSING_OPTIONS; - this.format() -} -Range.prototype.format = function () { - this.range = this.set.map(function (comps) { - return comps.join(' ').trim() - }).join('||').trim() - return this.range -} +/***/ }), -Range.prototype.toString = function () { - return this.range -} +/***/ 4294: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { -Range.prototype.parseRange = function (range) { - var loose = this.options.loose - range = range.trim() - // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` - var hr = loose ? re[HYPHENRANGELOOSE] : re[HYPHENRANGE] - range = range.replace(hr, hyphenReplace) - debug('hyphen replace', range) - // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` - range = range.replace(re[COMPARATORTRIM], comparatorTrimReplace) - debug('comparator trim', range, re[COMPARATORTRIM]) +module.exports = __webpack_require__(4219); - // `~ 1.2.3` => `~1.2.3` - range = range.replace(re[TILDETRIM], tildeTrimReplace) - // `^ 1.2.3` => `^1.2.3` - range = range.replace(re[CARETTRIM], caretTrimReplace) +/***/ }), - // normalize spaces - range = range.split(/\s+/).join(' ') +/***/ 4219: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { - // At this point, the range is completely trimmed and - // ready to be split into comparators. +"use strict"; - var compRe = loose ? 
re[COMPARATORLOOSE] : re[COMPARATOR] - var set = range.split(' ').map(function (comp) { - return parseComparator(comp, this.options) - }, this).join(' ').split(/\s+/) - if (this.options.loose) { - // in loose mode, throw out any that are not valid comparators - set = set.filter(function (comp) { - return !!comp.match(compRe) - }) - } - set = set.map(function (comp) { - return new Comparator(comp, this.options) - }, this) - return set -} +var net = __webpack_require__(1631); +var tls = __webpack_require__(4016); +var http = __webpack_require__(8605); +var https = __webpack_require__(7211); +var events = __webpack_require__(8614); +var assert = __webpack_require__(2357); +var util = __webpack_require__(1669); -Range.prototype.intersects = function (range, options) { - if (!(range instanceof Range)) { - throw new TypeError('a Range is required') - } - return this.set.some(function (thisComparators) { - return thisComparators.every(function (thisComparator) { - return range.set.some(function (rangeComparators) { - return rangeComparators.every(function (rangeComparator) { - return thisComparator.intersects(rangeComparator, options) - }) - }) - }) - }) -} +exports.httpOverHttp = httpOverHttp; +exports.httpsOverHttp = httpsOverHttp; +exports.httpOverHttps = httpOverHttps; +exports.httpsOverHttps = httpsOverHttps; -// Mostly just for testing and legacy API reasons -exports.toComparators = toComparators -function toComparators (range, options) { - return new Range(range, options).set.map(function (comp) { - return comp.map(function (c) { - return c.value - }).join(' ').trim().split(' ') - }) -} -// comprised of xranges, tildes, stars, and gtlt's at this point. -// already replaced the hyphen ranges -// turn into a set of JUST comparators. 
-function parseComparator (comp, options) { - debug('comp', comp, options) - comp = replaceCarets(comp, options) - debug('caret', comp) - comp = replaceTildes(comp, options) - debug('tildes', comp) - comp = replaceXRanges(comp, options) - debug('xrange', comp) - comp = replaceStars(comp, options) - debug('stars', comp) - return comp +function httpOverHttp(options) { + var agent = new TunnelingAgent(options); + agent.request = http.request; + return agent; } -function isX (id) { - return !id || id.toLowerCase() === 'x' || id === '*' +function httpsOverHttp(options) { + var agent = new TunnelingAgent(options); + agent.request = http.request; + agent.createSocket = createSecureSocket; + agent.defaultPort = 443; + return agent; } -// ~, ~> --> * (any, kinda silly) -// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0 -// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0 -// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0 -// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0 -// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0 -function replaceTildes (comp, options) { - return comp.trim().split(/\s+/).map(function (comp) { - return replaceTilde(comp, options) - }).join(' ') +function httpOverHttps(options) { + var agent = new TunnelingAgent(options); + agent.request = https.request; + return agent; } -function replaceTilde (comp, options) { - var r = options.loose ? re[TILDELOOSE] : re[TILDE] - return comp.replace(r, function (_, M, m, p, pr) { - debug('tilde', comp, _, M, m, p, pr) - var ret - - if (isX(M)) { - ret = '' - } else if (isX(m)) { - ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' - } else if (isX(p)) { - // ~1.2 == >=1.2.0 <1.3.0 - ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' - } else if (pr) { - debug('replaceTilde pr', pr) - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + M + '.' + (+m + 1) + '.0' - } else { - // ~1.2.3 == >=1.2.3 <1.3.0 - ret = '>=' + M + '.' + m + '.' + p + - ' <' + M + '.' 
+ (+m + 1) + '.0' - } - - debug('tilde return', ret) - return ret - }) +function httpsOverHttps(options) { + var agent = new TunnelingAgent(options); + agent.request = https.request; + agent.createSocket = createSecureSocket; + agent.defaultPort = 443; + return agent; } -// ^ --> * (any, kinda silly) -// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0 -// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0 -// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0 -// ^1.2.3 --> >=1.2.3 <2.0.0 -// ^1.2.0 --> >=1.2.0 <2.0.0 -function replaceCarets (comp, options) { - return comp.trim().split(/\s+/).map(function (comp) { - return replaceCaret(comp, options) - }).join(' ') -} -function replaceCaret (comp, options) { - debug('caret', comp, options) - var r = options.loose ? re[CARETLOOSE] : re[CARET] - return comp.replace(r, function (_, M, m, p, pr) { - debug('caret', comp, _, M, m, p, pr) - var ret +function TunnelingAgent(options) { + var self = this; + self.options = options || {}; + self.proxyOptions = self.options.proxy || {}; + self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets; + self.requests = []; + self.sockets = []; - if (isX(M)) { - ret = '' - } else if (isX(m)) { - ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' - } else if (isX(p)) { - if (M === '0') { - ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' - } else { - ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0' - } - } else if (pr) { - debug('replaceCaret pr', pr) - if (M === '0') { - if (m === '0') { - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + M + '.' + m + '.' + (+p + 1) - } else { - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + M + '.' + (+m + 1) + '.0' - } - } else { - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + (+M + 1) + '.0.0' - } - } else { - debug('no pr') - if (M === '0') { - if (m === '0') { - ret = '>=' + M + '.' + m + '.' + p + - ' <' + M + '.' + m + '.' + (+p + 1) - } else { - ret = '>=' + M + '.' + m + '.' + p + - ' <' + M + '.' 
+ (+m + 1) + '.0' - } - } else { - ret = '>=' + M + '.' + m + '.' + p + - ' <' + (+M + 1) + '.0.0' + self.on('free', function onFree(socket, host, port, localAddress) { + var options = toOptions(host, port, localAddress); + for (var i = 0, len = self.requests.length; i < len; ++i) { + var pending = self.requests[i]; + if (pending.host === options.host && pending.port === options.port) { + // Detect the request to connect same origin server, + // reuse the connection. + self.requests.splice(i, 1); + pending.request.onSocket(socket); + return; } } - - debug('caret return', ret) - return ret - }) + socket.destroy(); + self.removeSocket(socket); + }); } +util.inherits(TunnelingAgent, events.EventEmitter); -function replaceXRanges (comp, options) { - debug('replaceXRanges', comp, options) - return comp.split(/\s+/).map(function (comp) { - return replaceXRange(comp, options) - }).join(' ') -} +TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) { + var self = this; + var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress)); -function replaceXRange (comp, options) { - comp = comp.trim() - var r = options.loose ? re[XRANGELOOSE] : re[XRANGE] - return comp.replace(r, function (ret, gtlt, M, m, p, pr) { - debug('xRange', comp, ret, gtlt, M, m, p, pr) - var xM = isX(M) - var xm = xM || isX(m) - var xp = xm || isX(p) - var anyX = xp + if (self.sockets.length >= this.maxSockets) { + // We are over limit so we'll add it to the queue. + self.requests.push(options); + return; + } - if (gtlt === '=' && anyX) { - gtlt = '' + // If we are under maxSockets create a new one. 
+ self.createSocket(options, function(socket) { + socket.on('free', onFree); + socket.on('close', onCloseOrRemove); + socket.on('agentRemove', onCloseOrRemove); + req.onSocket(socket); + + function onFree() { + self.emit('free', socket, options); } - if (xM) { - if (gtlt === '>' || gtlt === '<') { - // nothing is allowed - ret = '<0.0.0' - } else { - // nothing is forbidden - ret = '*' - } - } else if (gtlt && anyX) { - // we know patch is an x, because we have any x at all. - // replace X with 0 - if (xm) { - m = 0 - } - p = 0 + function onCloseOrRemove(err) { + self.removeSocket(socket); + socket.removeListener('free', onFree); + socket.removeListener('close', onCloseOrRemove); + socket.removeListener('agentRemove', onCloseOrRemove); + } + }); +}; - if (gtlt === '>') { - // >1 => >=2.0.0 - // >1.2 => >=1.3.0 - // >1.2.3 => >= 1.2.4 - gtlt = '>=' - if (xm) { - M = +M + 1 - m = 0 - p = 0 - } else { - m = +m + 1 - p = 0 - } - } else if (gtlt === '<=') { - // <=0.7.x is actually <0.8.0, since any 0.7.x should - // pass. Similarly, <=7.x is actually <8.0.0, etc. - gtlt = '<' - if (xm) { - M = +M + 1 - } else { - m = +m + 1 - } - } +TunnelingAgent.prototype.createSocket = function createSocket(options, cb) { + var self = this; + var placeholder = {}; + self.sockets.push(placeholder); - ret = gtlt + M + '.' + m + '.' + p - } else if (xm) { - ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' - } else if (xp) { - ret = '>=' + M + '.' + m + '.0 <' + M + '.' 
+ (+m + 1) + '.0' + var connectOptions = mergeOptions({}, self.proxyOptions, { + method: 'CONNECT', + path: options.host + ':' + options.port, + agent: false, + headers: { + host: options.host + ':' + options.port } + }); + if (options.localAddress) { + connectOptions.localAddress = options.localAddress; + } + if (connectOptions.proxyAuth) { + connectOptions.headers = connectOptions.headers || {}; + connectOptions.headers['Proxy-Authorization'] = 'Basic ' + + new Buffer(connectOptions.proxyAuth).toString('base64'); + } - debug('xRange return', ret) + debug('making CONNECT request'); + var connectReq = self.request(connectOptions); + connectReq.useChunkedEncodingByDefault = false; // for v0.6 + connectReq.once('response', onResponse); // for v0.6 + connectReq.once('upgrade', onUpgrade); // for v0.6 + connectReq.once('connect', onConnect); // for v0.7 or later + connectReq.once('error', onError); + connectReq.end(); - return ret - }) -} + function onResponse(res) { + // Very hacky. This is necessary to avoid http-parser leaks. + res.upgrade = true; + } -// Because * is AND-ed with everything else in the comparator, -// and '' means "any version", just remove the *s entirely. -function replaceStars (comp, options) { - debug('replaceStars', comp, options) - // Looseness is ignored here. star is always as loose as it gets! - return comp.trim().replace(re[STAR], '') -} + function onUpgrade(res, socket, head) { + // Hacky. + process.nextTick(function() { + onConnect(res, socket, head); + }); + } -// This function is passed to string.replace(re[HYPHENRANGE]) -// M, m, patch, prerelease, build -// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 -// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do -// 1.2 - 3.4 => >=1.2.0 <3.5.0 -function hyphenReplace ($0, - from, fM, fm, fp, fpr, fb, - to, tM, tm, tp, tpr, tb) { - if (isX(fM)) { - from = '' - } else if (isX(fm)) { - from = '>=' + fM + '.0.0' - } else if (isX(fp)) { - from = '>=' + fM + '.' 
+ fm + '.0' - } else { - from = '>=' + from + function onConnect(res, socket, head) { + connectReq.removeAllListeners(); + socket.removeAllListeners(); + + if (res.statusCode !== 200) { + debug('tunneling socket could not be established, statusCode=%d', + res.statusCode); + socket.destroy(); + var error = new Error('tunneling socket could not be established, ' + + 'statusCode=' + res.statusCode); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + return; + } + if (head.length > 0) { + debug('got illegal response body from proxy'); + socket.destroy(); + var error = new Error('got illegal response body from proxy'); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + return; + } + debug('tunneling connection has established'); + self.sockets[self.sockets.indexOf(placeholder)] = socket; + return cb(socket); } - if (isX(tM)) { - to = '' - } else if (isX(tm)) { - to = '<' + (+tM + 1) + '.0.0' - } else if (isX(tp)) { - to = '<' + tM + '.' + (+tm + 1) + '.0' - } else if (tpr) { - to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr - } else { - to = '<=' + to + function onError(cause) { + connectReq.removeAllListeners(); + + debug('tunneling socket could not be established, cause=%s\n', + cause.message, cause.stack); + var error = new Error('tunneling socket could not be established, ' + + 'cause=' + cause.message); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + } +}; + +TunnelingAgent.prototype.removeSocket = function removeSocket(socket) { + var pos = this.sockets.indexOf(socket) + if (pos === -1) { + return; + } + this.sockets.splice(pos, 1); + + var pending = this.requests.shift(); + if (pending) { + // If we have pending requests and a socket gets closed a new one + // needs to be created to take over in the pool for the one that closed. 
+ this.createSocket(pending, function(socket) { + pending.request.onSocket(socket); + }); + } +}; + +function createSecureSocket(options, cb) { + var self = this; + TunnelingAgent.prototype.createSocket.call(self, options, function(socket) { + var hostHeader = options.request.getHeader('host'); + var tlsOptions = mergeOptions({}, self.options, { + socket: socket, + servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host + }); + + // 0 is dummy port for v0.6 + var secureSocket = tls.connect(0, tlsOptions); + self.sockets[self.sockets.indexOf(socket)] = secureSocket; + cb(secureSocket); + }); +} + + +function toOptions(host, port, localAddress) { + if (typeof host === 'string') { // since v0.10 + return { + host: host, + port: port, + localAddress: localAddress + }; } - - return (from + ' ' + to).trim() + return host; // for v0.11 or later } -// if ANY of the sets match ALL of its comparators, then pass -Range.prototype.test = function (version) { - if (!version) { - return false +function mergeOptions(target) { + for (var i = 1, len = arguments.length; i < len; ++i) { + var overrides = arguments[i]; + if (typeof overrides === 'object') { + var keys = Object.keys(overrides); + for (var j = 0, keyLen = keys.length; j < keyLen; ++j) { + var k = keys[j]; + if (overrides[k] !== undefined) { + target[k] = overrides[k]; + } + } + } } + return target; +} - if (typeof version === 'string') { - version = new SemVer(version, this.options) - } - for (var i = 0; i < this.set.length; i++) { - if (testSet(this.set[i], version, this.options)) { - return true +var debug; +if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) { + debug = function() { + var args = Array.prototype.slice.call(arguments); + if (typeof args[0] === 'string') { + args[0] = 'TUNNEL: ' + args[0]; + } else { + args.unshift('TUNNEL:'); } + console.error.apply(console, args); } - return false +} else { + debug = function() {}; } +exports.debug = debug; // for test -function 
testSet (set, version, options) { - for (var i = 0; i < set.length; i++) { - if (!set[i].test(version)) { - return false - } - } - if (version.prerelease.length && !options.includePrerelease) { - // Find the set of versions that are allowed to have prereleases - // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 - // That should allow `1.2.3-pr.2` to pass. - // However, `1.2.4-alpha.notready` should NOT be allowed, - // even though it's within the range set by the comparators. - for (i = 0; i < set.length; i++) { - debug(set[i].semver) - if (set[i].semver === ANY) { - continue - } +/***/ }), - if (set[i].semver.prerelease.length > 0) { - var allowed = set[i].semver - if (allowed.major === version.major && - allowed.minor === version.minor && - allowed.patch === version.patch) { - return true - } - } - } +/***/ 5030: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { - // Version has a -pre, but it's not one of the ones we like. - return false - } +"use strict"; - return true -} -exports.satisfies = satisfies -function satisfies (version, range, options) { - try { - range = new Range(range, options) - } catch (er) { - return false - } - return range.test(version) -} +Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.maxSatisfying = maxSatisfying -function maxSatisfying (versions, range, options) { - var max = null - var maxSV = null +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } + +var osName = _interopDefault(__webpack_require__(4824)); + +function getUserAgent() { try { - var rangeObj = new Range(range, options) - } catch (er) { - return null - } - versions.forEach(function (v) { - if (rangeObj.test(v)) { - // satisfies(v, range, options) - if (!max || maxSV.compare(v) === -1) { - // compare(max, v, true) - max = v - maxSV = new SemVer(max, options) - } + return `Node.js/${process.version.substr(1)} (${osName()}; ${process.arch})`; + } catch (error) { + if (/wmic os get Caption/.test(error.message)) { + return "Windows "; } - }) - return max -} -exports.minSatisfying = minSatisfying -function minSatisfying (versions, range, options) { - var min = null - var minSV = null - try { - var rangeObj = new Range(range, options) - } catch (er) { - return null + return ""; } - versions.forEach(function (v) { - if (rangeObj.test(v)) { - // satisfies(v, range, options) - if (!min || minSV.compare(v) === 1) { - // compare(min, v, true) - min = v - minSV = new SemVer(min, options) - } - } - }) - return min } -exports.minVersion = minVersion -function minVersion (range, loose) { - range = new Range(range, loose) +exports.getUserAgent = getUserAgent; +//# sourceMappingURL=index.js.map - var minver = new SemVer('0.0.0') - if (range.test(minver)) { - return minver - } - minver = new SemVer('0.0.0-0') - if (range.test(minver)) { - return minver - } +/***/ }), - minver = null - for (var i = 0; i < range.set.length; ++i) { - var comparators = range.set[i] +/***/ 4552: +/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => { - comparators.forEach(function (comparator) { - // Clone to avoid manipulating the comparator's semver object. 
- var compver = new SemVer(comparator.semver.version) - switch (comparator.operator) { - case '>': - if (compver.prerelease.length === 0) { - compver.patch++ - } else { - compver.prerelease.push(0) - } - compver.raw = compver.format() - /* fallthrough */ - case '': - case '>=': - if (!minver || gt(minver, compver)) { - minver = compver - } - break - case '<': - case '<=': - /* Ignore maximum versions */ - break - /* istanbul ignore next */ - default: - throw new Error('Unexpected operation: ' + comparator.operator) - } - }) - } +"use strict"; +// ESM COMPAT FLAG +__webpack_require__.r(__webpack_exports__); + +// EXPORTS +__webpack_require__.d(__webpack_exports__, { + "v1": () => /* reexport */ esm_node_v1, + "v3": () => /* reexport */ esm_node_v3, + "v4": () => /* reexport */ esm_node_v4, + "v5": () => /* reexport */ esm_node_v5, + "NIL": () => /* reexport */ nil, + "version": () => /* reexport */ esm_node_version, + "validate": () => /* reexport */ esm_node_validate, + "stringify": () => /* reexport */ esm_node_stringify, + "parse": () => /* reexport */ esm_node_parse +}); - if (minver && range.test(minver)) { - return minver - } +// EXTERNAL MODULE: external "crypto" +var external_crypto_ = __webpack_require__(6417); +var external_crypto_default = /*#__PURE__*/__webpack_require__.n(external_crypto_); - return null -} +// CONCATENATED MODULE: ./node_modules/uuid/dist/esm-node/rng.js -exports.validRange = validRange -function validRange (range, options) { - try { - // Return '*' instead of '' so that truthiness works. 
- // This will throw if it's invalid anyway - return new Range(range, options).range || '*' - } catch (er) { - return null +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + +let poolPtr = rnds8Pool.length; +function rng() { + if (poolPtr > rnds8Pool.length - 16) { + external_crypto_default().randomFillSync(rnds8Pool); + poolPtr = 0; } + + return rnds8Pool.slice(poolPtr, poolPtr += 16); } +// CONCATENATED MODULE: ./node_modules/uuid/dist/esm-node/regex.js +/* harmony default export */ const regex = (/^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i); +// CONCATENATED MODULE: ./node_modules/uuid/dist/esm-node/validate.js -// Determine if version is less than all the versions possible in the range -exports.ltr = ltr -function ltr (version, range, options) { - return outside(version, range, '<', options) + +function validate(uuid) { + return typeof uuid === 'string' && regex.test(uuid); } -// Determine if version is greater than all the versions possible in the range. -exports.gtr = gtr -function gtr (version, range, options) { - return outside(version, range, '>', options) +/* harmony default export */ const esm_node_validate = (validate); +// CONCATENATED MODULE: ./node_modules/uuid/dist/esm-node/stringify.js + +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ + +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).substr(1)); } -exports.outside = outside -function outside (version, range, hilo, options) { - version = new SemVer(version, options) - range = new Range(range, options) +function stringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. 
See https://github.com/uuidjs/uuid/pull/434 + const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields - var gtfn, ltefn, ltfn, comp, ecomp - switch (hilo) { - case '>': - gtfn = gt - ltefn = lte - ltfn = lt - comp = '>' - ecomp = '>=' - break - case '<': - gtfn = lt - ltefn = gte - ltfn = gt - comp = '<' - ecomp = '<=' - break - default: - throw new TypeError('Must provide a hilo val of "<" or ">"') + if (!esm_node_validate(uuid)) { + throw TypeError('Stringified UUID is invalid'); } - // If it satisfies the range it is not outside - if (satisfies(version, range, options)) { - return false + return uuid; +} + +/* harmony default export */ const esm_node_stringify = (stringify); +// CONCATENATED MODULE: ./node_modules/uuid/dist/esm-node/v1.js + + // **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html + +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = 
options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || rng)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); } - // From now on, variable terms are as if we're in "gtr" mode. 
- // but note that everything is flipped for the "ltr" function. + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch - for (var i = 0; i < range.set.length; ++i) { - var comparators = range.set[i] + msecs += 12219292800000; // `time_low` - var high = null - var low = null + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` - comparators.forEach(function (comparator) { - if (comparator.semver === ANY) { - comparator = new Comparator('>=0.0.0') - } - high = high || comparator - low = low || comparator - if (gtfn(comparator.semver, high.semver, options)) { - high = comparator - } else if (ltfn(comparator.semver, low.semver, options)) { - low = comparator - } - }) + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` - // If the edge version comparator has a operator then our version - // isn't outside it - if (high.operator === comp || high.operator === ecomp) { - return false - } + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version - // If the lowest version comparator has an operator and our version - // is less than it then it isn't higher than the range - if ((!low.operator || low.operator === comp) && - ltefn(version, low.semver)) { - return false - } else if (low.operator === ecomp && ltfn(version, low.semver)) { - return false - } + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; } - return true -} -exports.prerelease = prerelease -function prerelease (version, options) { - var parsed = parse(version, options) - return (parsed && parsed.prerelease.length) ? 
parsed.prerelease : null + return buf || esm_node_stringify(b); } -exports.intersects = intersects -function intersects (r1, r2, options) { - r1 = new Range(r1, options) - r2 = new Range(r2, options) - return r1.intersects(r2) -} +/* harmony default export */ const esm_node_v1 = (v1); +// CONCATENATED MODULE: ./node_modules/uuid/dist/esm-node/parse.js -exports.coerce = coerce -function coerce (version) { - if (version instanceof SemVer) { - return version - } - if (typeof version !== 'string') { - return null +function parse(uuid) { + if (!esm_node_validate(uuid)) { + throw TypeError('Invalid UUID'); } - var match = version.match(re[COERCE]) + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ - if (match == null) { - return null - } + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ - return parse(match[1] + - '.' + (match[2] || '0') + - '.' + (match[3] || '0')) + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ 
+ + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; } +/* harmony default export */ const esm_node_parse = (parse); +// CONCATENATED MODULE: ./node_modules/uuid/dist/esm-node/v35.js -/***/ }), -/***/ 7032: -/***/ ((module, __unused_webpack_exports, __webpack_require__) => { -"use strict"; +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape -var shebangRegex = __webpack_require__(2638); + const bytes = []; -module.exports = function (str) { - var match = str.match(shebangRegex); + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } - if (!match) { - return null; - } + return bytes; +} - var arr = match[0].replace(/#! ?/, '').split(' '); - var bin = arr[0].split('/').pop(); - var arg = arr[1]; +const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +/* harmony default export */ function v35(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + if (typeof value === 'string') { + value = stringToBytes(value); + } - return (bin === 'env' ? - arg : - bin + (arg ? ' ' + arg : '') - ); -}; + if (typeof namespace === 'string') { + namespace = esm_node_parse(namespace); + } + if (namespace.length !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... 
value])` -/***/ }), -/***/ 2638: -/***/ ((module) => { + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; -"use strict"; + if (buf) { + offset = offset || 0; -module.exports = /^#!.*/; + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + return buf; + } -/***/ }), + return esm_node_stringify(bytes); + } // Function#name is not settable on some platforms (#270) -/***/ 4931: -/***/ ((module, __unused_webpack_exports, __webpack_require__) => { -// Note: since nyc uses this module to output coverage, any lines -// that are in the direct sync flow of nyc's outputCoverage are -// ignored, since we can never get coverage for them. -var assert = __webpack_require__(2357) -var signals = __webpack_require__(3710) -var isWin = /^win/i.test(process.platform) + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support -var EE = __webpack_require__(8614) -/* istanbul ignore if */ -if (typeof EE !== 'function') { - EE = EE.EventEmitter -} -var emitter -if (process.__signal_exit_emitter__) { - emitter = process.__signal_exit_emitter__ -} else { - emitter = process.__signal_exit_emitter__ = new EE() - emitter.count = 0 - emitter.emitted = {} + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; } +// CONCATENATED MODULE: ./node_modules/uuid/dist/esm-node/md5.js -// Because this emitter is a global, we have to check to see if a -// previous version of this library failed to enable infinite listeners. -// I know what you're about to say. But literally everything about -// signal-exit is a compromise with evil. Get used to it. 
-if (!emitter.infinite) { - emitter.setMaxListeners(Infinity) - emitter.infinite = true + +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return external_crypto_default().createHash('md5').update(bytes).digest(); } -module.exports = function (cb, opts) { - assert.equal(typeof cb, 'function', 'a callback must be provided for exit handler') +/* harmony default export */ const esm_node_md5 = (md5); +// CONCATENATED MODULE: ./node_modules/uuid/dist/esm-node/v3.js - if (loaded === false) { - load() - } - var ev = 'exit' - if (opts && opts.alwaysLast) { - ev = 'afterexit' - } +const v3 = v35('v3', 0x30, esm_node_md5); +/* harmony default export */ const esm_node_v3 = (v3); +// CONCATENATED MODULE: ./node_modules/uuid/dist/esm-node/v4.js - var remove = function () { - emitter.removeListener(ev, cb) - if (emitter.listeners('exit').length === 0 && - emitter.listeners('afterexit').length === 0) { - unload() + + +function v4(options, buf, offset) { + options = options || {}; + const rnds = options.random || (options.rng || rng)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; } + + return buf; } - emitter.on(ev, cb) - return remove + return esm_node_stringify(rnds); } -module.exports.unload = unload -function unload () { - if (!loaded) { - return - } - loaded = false +/* harmony default export */ const esm_node_v4 = (v4); +// CONCATENATED MODULE: ./node_modules/uuid/dist/esm-node/sha1.js - signals.forEach(function (sig) { - try { - process.removeListener(sig, sigListeners[sig]) - } catch (er) {} - }) - process.emit = originalProcessEmit - process.reallyExit = originalProcessReallyExit - emitter.count -= 1 -} -function emit 
(event, code, signal) { - if (emitter.emitted[event]) { - return +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); } - emitter.emitted[event] = true - emitter.emit(event, code, signal) + + return external_crypto_default().createHash('sha1').update(bytes).digest(); } -// { : , ... } -var sigListeners = {} -signals.forEach(function (sig) { - sigListeners[sig] = function listener () { - // If there are no other listeners, an exit is coming! - // Simplest way: remove us and then re-send the signal. - // We know that this will kill the process, so we can - // safely emit now. - var listeners = process.listeners(sig) - if (listeners.length === emitter.count) { - unload() - emit('exit', null, sig) - /* istanbul ignore next */ - emit('afterexit', null, sig) - /* istanbul ignore next */ - if (isWin && sig === 'SIGHUP') { - // "SIGHUP" throws an `ENOSYS` error on Windows, - // so use a supported signal instead - sig = 'SIGINT' - } - process.kill(process.pid, sig) - } +/* harmony default export */ const esm_node_sha1 = (sha1); +// CONCATENATED MODULE: ./node_modules/uuid/dist/esm-node/v5.js + + +const v5 = v35('v5', 0x50, esm_node_sha1); +/* harmony default export */ const esm_node_v5 = (v5); +// CONCATENATED MODULE: ./node_modules/uuid/dist/esm-node/nil.js +/* harmony default export */ const nil = ('00000000-0000-0000-0000-000000000000'); +// CONCATENATED MODULE: ./node_modules/uuid/dist/esm-node/version.js + + +function version(uuid) { + if (!esm_node_validate(uuid)) { + throw TypeError('Invalid UUID'); } -}) -module.exports.signals = function () { - return signals + return parseInt(uuid.substr(14, 1), 16); } -module.exports.load = load +/* harmony default export */ const esm_node_version = (version); +// CONCATENATED MODULE: ./node_modules/uuid/dist/esm-node/index.js -var loaded = false -function load () { - if (loaded) { - return - } - loaded = true - // This 
is the number of onSignalExit's that are in play. - // It's important so that we can count the correct number of - // listeners on signals, and don't wait for the other one to - // handle it instead of us. - emitter.count += 1 - signals = signals.filter(function (sig) { - try { - process.on(sig, sigListeners[sig]) - return true - } catch (er) { - return false - } - }) - process.emit = processEmit - process.reallyExit = processReallyExit + + + + + +/***/ }), + +/***/ 4886: +/***/ ((module) => { + +"use strict"; + + +var conversions = {}; +module.exports = conversions; + +function sign(x) { + return x < 0 ? -1 : 1; } -var originalProcessReallyExit = process.reallyExit -function processReallyExit (code) { - process.exitCode = code || 0 - emit('exit', process.exitCode, null) - /* istanbul ignore next */ - emit('afterexit', process.exitCode, null) - /* istanbul ignore next */ - originalProcessReallyExit.call(process, process.exitCode) +function evenRound(x) { + // Round x to the nearest integer, choosing the even integer if it lies halfway between two. + if ((x % 1) === 0.5 && (x & 1) === 0) { // [even number].5; round down (i.e. floor) + return Math.floor(x); + } else { + return Math.round(x); + } } -var originalProcessEmit = process.emit -function processEmit (ev, arg) { - if (ev === 'exit') { - if (arg !== undefined) { - process.exitCode = arg +function createNumberConversion(bitLength, typeOpts) { + if (!typeOpts.unsigned) { + --bitLength; + } + const lowerBound = typeOpts.unsigned ? 0 : -Math.pow(2, bitLength); + const upperBound = Math.pow(2, bitLength) - 1; + + const moduloVal = typeOpts.moduloBitLength ? Math.pow(2, typeOpts.moduloBitLength) : Math.pow(2, bitLength); + const moduloBound = typeOpts.moduloBitLength ? 
Math.pow(2, typeOpts.moduloBitLength - 1) : Math.pow(2, bitLength - 1); + + return function(V, opts) { + if (!opts) opts = {}; + + let x = +V; + + if (opts.enforceRange) { + if (!Number.isFinite(x)) { + throw new TypeError("Argument is not a finite number"); + } + + x = sign(x) * Math.floor(Math.abs(x)); + if (x < lowerBound || x > upperBound) { + throw new TypeError("Argument is not in byte range"); + } + + return x; + } + + if (!isNaN(x) && opts.clamp) { + x = evenRound(x); + + if (x < lowerBound) x = lowerBound; + if (x > upperBound) x = upperBound; + return x; + } + + if (!Number.isFinite(x) || x === 0) { + return 0; + } + + x = sign(x) * Math.floor(Math.abs(x)); + x = x % moduloVal; + + if (!typeOpts.unsigned && x >= moduloBound) { + return x - moduloVal; + } else if (typeOpts.unsigned) { + if (x < 0) { + x += moduloVal; + } else if (x === -0) { // don't return negative zero + return 0; + } + } + + return x; } - var ret = originalProcessEmit.apply(this, arguments) - emit('exit', process.exitCode, null) - /* istanbul ignore next */ - emit('afterexit', process.exitCode, null) - return ret - } else { - return originalProcessEmit.apply(this, arguments) - } } +conversions["void"] = function () { + return undefined; +}; -/***/ }), +conversions["boolean"] = function (val) { + return !!val; +}; -/***/ 3710: -/***/ ((module) => { +conversions["byte"] = createNumberConversion(8, { unsigned: false }); +conversions["octet"] = createNumberConversion(8, { unsigned: true }); -// This is not the set of all possible signals. -// -// It IS, however, the set of all signals that trigger -// an exit on either Linux or BSD systems. Linux is a -// superset of the signal names supported on BSD, and -// the unknown signals just fail to register, so we can -// catch that easily enough. -// -// Don't bother with SIGKILL. It's uncatchable, which -// means that we can't fire any callbacks anyway. 
-// -// If a user does happen to register a handler on a non- -// fatal signal like SIGWINCH or something, and then -// exit, it'll end up firing `process.emit('exit')`, so -// the handler will be fired anyway. -// -// SIGBUS, SIGFPE, SIGSEGV and SIGILL, when not raised -// artificially, inherently leave the process in a -// state from which it is not safe to try and enter JS -// listeners. -module.exports = [ - 'SIGABRT', - 'SIGALRM', - 'SIGHUP', - 'SIGINT', - 'SIGTERM' -] +conversions["short"] = createNumberConversion(16, { unsigned: false }); +conversions["unsigned short"] = createNumberConversion(16, { unsigned: true }); -if (process.platform !== 'win32') { - module.exports.push( - 'SIGVTALRM', - 'SIGXCPU', - 'SIGXFSZ', - 'SIGUSR2', - 'SIGTRAP', - 'SIGSYS', - 'SIGQUIT', - 'SIGIOT' - // should detect profiler and enable/disable accordingly. - // see #21 - // 'SIGPROF' - ) -} +conversions["long"] = createNumberConversion(32, { unsigned: false }); +conversions["unsigned long"] = createNumberConversion(32, { unsigned: true }); -if (process.platform === 'linux') { - module.exports.push( - 'SIGIO', - 'SIGPOLL', - 'SIGPWR', - 'SIGSTKFLT', - 'SIGUNUSED' - ) -} +conversions["long long"] = createNumberConversion(32, { unsigned: false, moduloBitLength: 64 }); +conversions["unsigned long long"] = createNumberConversion(32, { unsigned: true, moduloBitLength: 64 }); +conversions["double"] = function (V) { + const x = +V; -/***/ }), + if (!Number.isFinite(x)) { + throw new TypeError("Argument is not a finite floating-point value"); + } -/***/ 5515: -/***/ ((module) => { + return x; +}; -"use strict"; +conversions["unrestricted double"] = function (V) { + const x = +V; -module.exports = function (x) { - var lf = typeof x === 'string' ? '\n' : '\n'.charCodeAt(); - var cr = typeof x === 'string' ? 
'\r' : '\r'.charCodeAt(); + if (isNaN(x)) { + throw new TypeError("Argument is NaN"); + } - if (x[x.length - 1] === lf) { - x = x.slice(0, x.length - 1); - } + return x; +}; - if (x[x.length - 1] === cr) { - x = x.slice(0, x.length - 1); - } +// not quite valid, but good enough for JS +conversions["float"] = conversions["double"]; +conversions["unrestricted float"] = conversions["unrestricted double"]; - return x; +conversions["DOMString"] = function (V, opts) { + if (!opts) opts = {}; + + if (opts.treatNullAsEmptyString && V === null) { + return ""; + } + + return String(V); }; +conversions["ByteString"] = function (V, opts) { + const x = String(V); + let c = undefined; + for (let i = 0; (c = x.codePointAt(i)) !== undefined; ++i) { + if (c > 255) { + throw new TypeError("Argument is not a valid bytestring"); + } + } -/***/ }), + return x; +}; -/***/ 4294: -/***/ ((module, __unused_webpack_exports, __webpack_require__) => { +conversions["USVString"] = function (V) { + const S = String(V); + const n = S.length; + const U = []; + for (let i = 0; i < n; ++i) { + const c = S.charCodeAt(i); + if (c < 0xD800 || c > 0xDFFF) { + U.push(String.fromCodePoint(c)); + } else if (0xDC00 <= c && c <= 0xDFFF) { + U.push(String.fromCodePoint(0xFFFD)); + } else { + if (i === n - 1) { + U.push(String.fromCodePoint(0xFFFD)); + } else { + const d = S.charCodeAt(i + 1); + if (0xDC00 <= d && d <= 0xDFFF) { + const a = c & 0x3FF; + const b = d & 0x3FF; + U.push(String.fromCodePoint((2 << 15) + (2 << 9) * a + b)); + ++i; + } else { + U.push(String.fromCodePoint(0xFFFD)); + } + } + } + } -module.exports = __webpack_require__(4219); + return U.join(''); +}; + +conversions["Date"] = function (V, opts) { + if (!(V instanceof Date)) { + throw new TypeError("Argument is not a Date object"); + } + if (isNaN(V)) { + return undefined; + } + + return V; +}; + +conversions["RegExp"] = function (V, opts) { + if (!(V instanceof RegExp)) { + V = new RegExp(V); + } + + return V; +}; /***/ }), -/***/ 
4219: +/***/ 7537: /***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; +const usm = __webpack_require__(2158); -var net = __webpack_require__(1631); -var tls = __webpack_require__(4016); -var http = __webpack_require__(8605); -var https = __webpack_require__(7211); -var events = __webpack_require__(8614); -var assert = __webpack_require__(2357); -var util = __webpack_require__(1669); +exports.implementation = class URLImpl { + constructor(constructorArgs) { + const url = constructorArgs[0]; + const base = constructorArgs[1]; + + let parsedBase = null; + if (base !== undefined) { + parsedBase = usm.basicURLParse(base); + if (parsedBase === "failure") { + throw new TypeError("Invalid base URL"); + } + } + const parsedURL = usm.basicURLParse(url, { baseURL: parsedBase }); + if (parsedURL === "failure") { + throw new TypeError("Invalid URL"); + } -exports.httpOverHttp = httpOverHttp; -exports.httpsOverHttp = httpsOverHttp; -exports.httpOverHttps = httpOverHttps; -exports.httpsOverHttps = httpsOverHttps; + this._url = parsedURL; + // TODO: query stuff + } -function httpOverHttp(options) { - var agent = new TunnelingAgent(options); - agent.request = http.request; - return agent; -} + get href() { + return usm.serializeURL(this._url); + } -function httpsOverHttp(options) { - var agent = new TunnelingAgent(options); - agent.request = http.request; - agent.createSocket = createSecureSocket; - agent.defaultPort = 443; - return agent; -} + set href(v) { + const parsedURL = usm.basicURLParse(v); + if (parsedURL === "failure") { + throw new TypeError("Invalid URL"); + } -function httpOverHttps(options) { - var agent = new TunnelingAgent(options); - agent.request = https.request; - return agent; -} + this._url = parsedURL; + } -function httpsOverHttps(options) { - var agent = new TunnelingAgent(options); - agent.request = https.request; - agent.createSocket = createSecureSocket; - agent.defaultPort = 443; - return agent; -} + get origin() { + return 
usm.serializeURLOrigin(this._url); + } + get protocol() { + return this._url.scheme + ":"; + } -function TunnelingAgent(options) { - var self = this; - self.options = options || {}; - self.proxyOptions = self.options.proxy || {}; - self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets; - self.requests = []; - self.sockets = []; + set protocol(v) { + usm.basicURLParse(v + ":", { url: this._url, stateOverride: "scheme start" }); + } - self.on('free', function onFree(socket, host, port, localAddress) { - var options = toOptions(host, port, localAddress); - for (var i = 0, len = self.requests.length; i < len; ++i) { - var pending = self.requests[i]; - if (pending.host === options.host && pending.port === options.port) { - // Detect the request to connect same origin server, - // reuse the connection. - self.requests.splice(i, 1); - pending.request.onSocket(socket); - return; - } + get username() { + return this._url.username; + } + + set username(v) { + if (usm.cannotHaveAUsernamePasswordPort(this._url)) { + return; } - socket.destroy(); - self.removeSocket(socket); - }); -} -util.inherits(TunnelingAgent, events.EventEmitter); -TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) { - var self = this; - var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress)); + usm.setTheUsername(this._url, v); + } - if (self.sockets.length >= this.maxSockets) { - // We are over limit so we'll add it to the queue. - self.requests.push(options); - return; + get password() { + return this._url.password; } - // If we are under maxSockets create a new one. 
- self.createSocket(options, function(socket) { - socket.on('free', onFree); - socket.on('close', onCloseOrRemove); - socket.on('agentRemove', onCloseOrRemove); - req.onSocket(socket); + set password(v) { + if (usm.cannotHaveAUsernamePasswordPort(this._url)) { + return; + } - function onFree() { - self.emit('free', socket, options); + usm.setThePassword(this._url, v); + } + + get host() { + const url = this._url; + + if (url.host === null) { + return ""; } - function onCloseOrRemove(err) { - self.removeSocket(socket); - socket.removeListener('free', onFree); - socket.removeListener('close', onCloseOrRemove); - socket.removeListener('agentRemove', onCloseOrRemove); + if (url.port === null) { + return usm.serializeHost(url.host); } - }); -}; -TunnelingAgent.prototype.createSocket = function createSocket(options, cb) { - var self = this; - var placeholder = {}; - self.sockets.push(placeholder); + return usm.serializeHost(url.host) + ":" + usm.serializeInteger(url.port); + } - var connectOptions = mergeOptions({}, self.proxyOptions, { - method: 'CONNECT', - path: options.host + ':' + options.port, - agent: false, - headers: { - host: options.host + ':' + options.port + set host(v) { + if (this._url.cannotBeABaseURL) { + return; + } + + usm.basicURLParse(v, { url: this._url, stateOverride: "host" }); + } + + get hostname() { + if (this._url.host === null) { + return ""; + } + + return usm.serializeHost(this._url.host); + } + + set hostname(v) { + if (this._url.cannotBeABaseURL) { + return; + } + + usm.basicURLParse(v, { url: this._url, stateOverride: "hostname" }); + } + + get port() { + if (this._url.port === null) { + return ""; + } + + return usm.serializeInteger(this._url.port); + } + + set port(v) { + if (usm.cannotHaveAUsernamePasswordPort(this._url)) { + return; + } + + if (v === "") { + this._url.port = null; + } else { + usm.basicURLParse(v, { url: this._url, stateOverride: "port" }); } - }); - if (options.localAddress) { - connectOptions.localAddress = 
options.localAddress; } - if (connectOptions.proxyAuth) { - connectOptions.headers = connectOptions.headers || {}; - connectOptions.headers['Proxy-Authorization'] = 'Basic ' + - new Buffer(connectOptions.proxyAuth).toString('base64'); + + get pathname() { + if (this._url.cannotBeABaseURL) { + return this._url.path[0]; + } + + if (this._url.path.length === 0) { + return ""; + } + + return "/" + this._url.path.join("/"); } - debug('making CONNECT request'); - var connectReq = self.request(connectOptions); - connectReq.useChunkedEncodingByDefault = false; // for v0.6 - connectReq.once('response', onResponse); // for v0.6 - connectReq.once('upgrade', onUpgrade); // for v0.6 - connectReq.once('connect', onConnect); // for v0.7 or later - connectReq.once('error', onError); - connectReq.end(); + set pathname(v) { + if (this._url.cannotBeABaseURL) { + return; + } - function onResponse(res) { - // Very hacky. This is necessary to avoid http-parser leaks. - res.upgrade = true; + this._url.path = []; + usm.basicURLParse(v, { url: this._url, stateOverride: "path start" }); } - function onUpgrade(res, socket, head) { - // Hacky. - process.nextTick(function() { - onConnect(res, socket, head); - }); + get search() { + if (this._url.query === null || this._url.query === "") { + return ""; + } + + return "?" 
+ this._url.query; } - function onConnect(res, socket, head) { - connectReq.removeAllListeners(); - socket.removeAllListeners(); + set search(v) { + // TODO: query stuff - if (res.statusCode !== 200) { - debug('tunneling socket could not be established, statusCode=%d', - res.statusCode); - socket.destroy(); - var error = new Error('tunneling socket could not be established, ' + - 'statusCode=' + res.statusCode); - error.code = 'ECONNRESET'; - options.request.emit('error', error); - self.removeSocket(placeholder); - return; - } - if (head.length > 0) { - debug('got illegal response body from proxy'); - socket.destroy(); - var error = new Error('got illegal response body from proxy'); - error.code = 'ECONNRESET'; - options.request.emit('error', error); - self.removeSocket(placeholder); + const url = this._url; + + if (v === "") { + url.query = null; return; } - debug('tunneling connection has established'); - self.sockets[self.sockets.indexOf(placeholder)] = socket; - return cb(socket); + + const input = v[0] === "?" ? v.substring(1) : v; + url.query = ""; + usm.basicURLParse(input, { url, stateOverride: "query" }); } - function onError(cause) { - connectReq.removeAllListeners(); + get hash() { + if (this._url.fragment === null || this._url.fragment === "") { + return ""; + } - debug('tunneling socket could not be established, cause=%s\n', - cause.message, cause.stack); - var error = new Error('tunneling socket could not be established, ' + - 'cause=' + cause.message); - error.code = 'ECONNRESET'; - options.request.emit('error', error); - self.removeSocket(placeholder); + return "#" + this._url.fragment; } -}; -TunnelingAgent.prototype.removeSocket = function removeSocket(socket) { - var pos = this.sockets.indexOf(socket) - if (pos === -1) { - return; + set hash(v) { + if (v === "") { + this._url.fragment = null; + return; + } + + const input = v[0] === "#" ? 
v.substring(1) : v; + this._url.fragment = ""; + usm.basicURLParse(input, { url: this._url, stateOverride: "fragment" }); } - this.sockets.splice(pos, 1); - var pending = this.requests.shift(); - if (pending) { - // If we have pending requests and a socket gets closed a new one - // needs to be created to take over in the pool for the one that closed. - this.createSocket(pending, function(socket) { - pending.request.onSocket(socket); - }); + toJSON() { + return this.href; } }; -function createSecureSocket(options, cb) { - var self = this; - TunnelingAgent.prototype.createSocket.call(self, options, function(socket) { - var hostHeader = options.request.getHeader('host'); - var tlsOptions = mergeOptions({}, self.options, { - socket: socket, - servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host - }); - // 0 is dummy port for v0.6 - var secureSocket = tls.connect(0, tlsOptions); - self.sockets[self.sockets.indexOf(socket)] = secureSocket; - cb(secureSocket); - }); -} +/***/ }), +/***/ 3394: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { -function toOptions(host, port, localAddress) { - if (typeof host === 'string') { // since v0.10 - return { - host: host, - port: port, - localAddress: localAddress - }; +"use strict"; + + +const conversions = __webpack_require__(4886); +const utils = __webpack_require__(3185); +const Impl = __webpack_require__(7537); + +const impl = utils.implSymbol; + +function URL(url) { + if (!this || this[impl] || !(this instanceof URL)) { + throw new TypeError("Failed to construct 'URL': Please use the 'new' operator, this DOM object constructor cannot be called as a function."); } - return host; // for v0.11 or later + if (arguments.length < 1) { + throw new TypeError("Failed to construct 'URL': 1 argument required, but only " + arguments.length + " present."); + } + const args = []; + for (let i = 0; i < arguments.length && i < 2; ++i) { + args[i] = arguments[i]; + } + args[0] = 
conversions["USVString"](args[0]); + if (args[1] !== undefined) { + args[1] = conversions["USVString"](args[1]); + } + + module.exports.setup(this, args); } -function mergeOptions(target) { - for (var i = 1, len = arguments.length; i < len; ++i) { - var overrides = arguments[i]; - if (typeof overrides === 'object') { - var keys = Object.keys(overrides); - for (var j = 0, keyLen = keys.length; j < keyLen; ++j) { - var k = keys[j]; - if (overrides[k] !== undefined) { - target[k] = overrides[k]; - } - } - } +URL.prototype.toJSON = function toJSON() { + if (!this || !module.exports.is(this)) { + throw new TypeError("Illegal invocation"); } - return target; -} + const args = []; + for (let i = 0; i < arguments.length && i < 0; ++i) { + args[i] = arguments[i]; + } + return this[impl].toJSON.apply(this[impl], args); +}; +Object.defineProperty(URL.prototype, "href", { + get() { + return this[impl].href; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].href = V; + }, + enumerable: true, + configurable: true +}); +URL.prototype.toString = function () { + if (!this || !module.exports.is(this)) { + throw new TypeError("Illegal invocation"); + } + return this.href; +}; -var debug; -if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) { - debug = function() { - var args = Array.prototype.slice.call(arguments); - if (typeof args[0] === 'string') { - args[0] = 'TUNNEL: ' + args[0]; - } else { - args.unshift('TUNNEL:'); - } - console.error.apply(console, args); +Object.defineProperty(URL.prototype, "origin", { + get() { + return this[impl].origin; + }, + enumerable: true, + configurable: true +}); + +Object.defineProperty(URL.prototype, "protocol", { + get() { + return this[impl].protocol; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].protocol = V; + }, + enumerable: true, + configurable: true +}); + +Object.defineProperty(URL.prototype, "username", { + get() { + return this[impl].username; + }, + set(V) { + V = 
conversions["USVString"](V); + this[impl].username = V; + }, + enumerable: true, + configurable: true +}); + +Object.defineProperty(URL.prototype, "password", { + get() { + return this[impl].password; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].password = V; + }, + enumerable: true, + configurable: true +}); + +Object.defineProperty(URL.prototype, "host", { + get() { + return this[impl].host; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].host = V; + }, + enumerable: true, + configurable: true +}); + +Object.defineProperty(URL.prototype, "hostname", { + get() { + return this[impl].hostname; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].hostname = V; + }, + enumerable: true, + configurable: true +}); + +Object.defineProperty(URL.prototype, "port", { + get() { + return this[impl].port; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].port = V; + }, + enumerable: true, + configurable: true +}); + +Object.defineProperty(URL.prototype, "pathname", { + get() { + return this[impl].pathname; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].pathname = V; + }, + enumerable: true, + configurable: true +}); + +Object.defineProperty(URL.prototype, "search", { + get() { + return this[impl].search; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].search = V; + }, + enumerable: true, + configurable: true +}); + +Object.defineProperty(URL.prototype, "hash", { + get() { + return this[impl].hash; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].hash = V; + }, + enumerable: true, + configurable: true +}); + + +module.exports = { + is(obj) { + return !!obj && obj[impl] instanceof Impl.implementation; + }, + create(constructorArgs, privateData) { + let obj = Object.create(URL.prototype); + this.setup(obj, constructorArgs, privateData); + return obj; + }, + setup(obj, constructorArgs, privateData) { + if (!privateData) privateData = {}; + privateData.wrapper = obj; + + 
obj[impl] = new Impl.implementation(constructorArgs, privateData); + obj[impl][utils.wrapperSymbol] = obj; + }, + interface: URL, + expose: { + Window: { URL: URL }, + Worker: { URL: URL } } -} else { - debug = function() {}; -} -exports.debug = debug; // for test +}; + /***/ }), -/***/ 5030: +/***/ 8665: /***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; -Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.URL = __webpack_require__(3394).interface; +exports.serializeURL = __webpack_require__(2158).serializeURL; +exports.serializeURLOrigin = __webpack_require__(2158).serializeURLOrigin; +exports.basicURLParse = __webpack_require__(2158).basicURLParse; +exports.setTheUsername = __webpack_require__(2158).setTheUsername; +exports.setThePassword = __webpack_require__(2158).setThePassword; +exports.serializeHost = __webpack_require__(2158).serializeHost; +exports.serializeInteger = __webpack_require__(2158).serializeInteger; +exports.parseURL = __webpack_require__(2158).parseURL; -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } -var osName = _interopDefault(__webpack_require__(4824)); +/***/ }), -function getUserAgent() { - try { - return `Node.js/${process.version.substr(1)} (${osName()}; ${process.arch})`; - } catch (error) { - if (/wmic os get Caption/.test(error.message)) { - return "Windows "; - } +/***/ 2158: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { - return ""; +"use strict"; + +const punycode = __webpack_require__(4213); +const tr46 = __webpack_require__(4256); + +const specialSchemes = { + ftp: 21, + file: null, + gopher: 70, + http: 80, + https: 443, + ws: 80, + wss: 443 +}; + +const failure = Symbol("failure"); + +function countSymbols(str) { + return punycode.ucs2.decode(str).length; +} + +function at(input, idx) { + const c = input[idx]; + return isNaN(c) ? 
undefined : String.fromCodePoint(c); +} + +function isASCIIDigit(c) { + return c >= 0x30 && c <= 0x39; +} + +function isASCIIAlpha(c) { + return (c >= 0x41 && c <= 0x5A) || (c >= 0x61 && c <= 0x7A); +} + +function isASCIIAlphanumeric(c) { + return isASCIIAlpha(c) || isASCIIDigit(c); +} + +function isASCIIHex(c) { + return isASCIIDigit(c) || (c >= 0x41 && c <= 0x46) || (c >= 0x61 && c <= 0x66); +} + +function isSingleDot(buffer) { + return buffer === "." || buffer.toLowerCase() === "%2e"; +} + +function isDoubleDot(buffer) { + buffer = buffer.toLowerCase(); + return buffer === ".." || buffer === "%2e." || buffer === ".%2e" || buffer === "%2e%2e"; +} + +function isWindowsDriveLetterCodePoints(cp1, cp2) { + return isASCIIAlpha(cp1) && (cp2 === 58 || cp2 === 124); +} + +function isWindowsDriveLetterString(string) { + return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && (string[1] === ":" || string[1] === "|"); +} + +function isNormalizedWindowsDriveLetterString(string) { + return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && string[1] === ":"; +} + +function containsForbiddenHostCodePoint(string) { + return string.search(/\u0000|\u0009|\u000A|\u000D|\u0020|#|%|\/|:|\?|@|\[|\\|\]/) !== -1; +} + +function containsForbiddenHostCodePointExcludingPercent(string) { + return string.search(/\u0000|\u0009|\u000A|\u000D|\u0020|#|\/|:|\?|@|\[|\\|\]/) !== -1; +} + +function isSpecialScheme(scheme) { + return specialSchemes[scheme] !== undefined; +} + +function isSpecial(url) { + return isSpecialScheme(url.scheme); +} + +function defaultPort(scheme) { + return specialSchemes[scheme]; +} + +function percentEncode(c) { + let hex = c.toString(16).toUpperCase(); + if (hex.length === 1) { + hex = "0" + hex; + } + + return "%" + hex; +} + +function utf8PercentEncode(c) { + const buf = new Buffer(c); + + let str = ""; + + for (let i = 0; i < buf.length; ++i) { + str += percentEncode(buf[i]); + } + + return str; +} + +function utf8PercentDecode(str) { + 
const input = new Buffer(str); + const output = []; + for (let i = 0; i < input.length; ++i) { + if (input[i] !== 37) { + output.push(input[i]); + } else if (input[i] === 37 && isASCIIHex(input[i + 1]) && isASCIIHex(input[i + 2])) { + output.push(parseInt(input.slice(i + 1, i + 3).toString(), 16)); + i += 2; + } else { + output.push(input[i]); + } + } + return new Buffer(output).toString(); +} + +function isC0ControlPercentEncode(c) { + return c <= 0x1F || c > 0x7E; +} + +const extraPathPercentEncodeSet = new Set([32, 34, 35, 60, 62, 63, 96, 123, 125]); +function isPathPercentEncode(c) { + return isC0ControlPercentEncode(c) || extraPathPercentEncodeSet.has(c); +} + +const extraUserinfoPercentEncodeSet = + new Set([47, 58, 59, 61, 64, 91, 92, 93, 94, 124]); +function isUserinfoPercentEncode(c) { + return isPathPercentEncode(c) || extraUserinfoPercentEncodeSet.has(c); +} + +function percentEncodeChar(c, encodeSetPredicate) { + const cStr = String.fromCodePoint(c); + + if (encodeSetPredicate(c)) { + return utf8PercentEncode(cStr); + } + + return cStr; +} + +function parseIPv4Number(input) { + let R = 10; + + if (input.length >= 2 && input.charAt(0) === "0" && input.charAt(1).toLowerCase() === "x") { + input = input.substring(2); + R = 16; + } else if (input.length >= 2 && input.charAt(0) === "0") { + input = input.substring(1); + R = 8; + } + + if (input === "") { + return 0; + } + + const regex = R === 10 ? /[^0-9]/ : (R === 16 ? 
/[^0-9A-Fa-f]/ : /[^0-7]/); + if (regex.test(input)) { + return failure; + } + + return parseInt(input, R); +} + +function parseIPv4(input) { + const parts = input.split("."); + if (parts[parts.length - 1] === "") { + if (parts.length > 1) { + parts.pop(); + } + } + + if (parts.length > 4) { + return input; + } + + const numbers = []; + for (const part of parts) { + if (part === "") { + return input; + } + const n = parseIPv4Number(part); + if (n === failure) { + return input; + } + + numbers.push(n); + } + + for (let i = 0; i < numbers.length - 1; ++i) { + if (numbers[i] > 255) { + return failure; + } + } + if (numbers[numbers.length - 1] >= Math.pow(256, 5 - numbers.length)) { + return failure; + } + + let ipv4 = numbers.pop(); + let counter = 0; + + for (const n of numbers) { + ipv4 += n * Math.pow(256, 3 - counter); + ++counter; + } + + return ipv4; +} + +function serializeIPv4(address) { + let output = ""; + let n = address; + + for (let i = 1; i <= 4; ++i) { + output = String(n % 256) + output; + if (i !== 4) { + output = "." 
+ output; + } + n = Math.floor(n / 256); + } + + return output; +} + +function parseIPv6(input) { + const address = [0, 0, 0, 0, 0, 0, 0, 0]; + let pieceIndex = 0; + let compress = null; + let pointer = 0; + + input = punycode.ucs2.decode(input); + + if (input[pointer] === 58) { + if (input[pointer + 1] !== 58) { + return failure; + } + + pointer += 2; + ++pieceIndex; + compress = pieceIndex; + } + + while (pointer < input.length) { + if (pieceIndex === 8) { + return failure; + } + + if (input[pointer] === 58) { + if (compress !== null) { + return failure; + } + ++pointer; + ++pieceIndex; + compress = pieceIndex; + continue; + } + + let value = 0; + let length = 0; + + while (length < 4 && isASCIIHex(input[pointer])) { + value = value * 0x10 + parseInt(at(input, pointer), 16); + ++pointer; + ++length; + } + + if (input[pointer] === 46) { + if (length === 0) { + return failure; + } + + pointer -= length; + + if (pieceIndex > 6) { + return failure; + } + + let numbersSeen = 0; + + while (input[pointer] !== undefined) { + let ipv4Piece = null; + + if (numbersSeen > 0) { + if (input[pointer] === 46 && numbersSeen < 4) { + ++pointer; + } else { + return failure; + } + } + + if (!isASCIIDigit(input[pointer])) { + return failure; + } + + while (isASCIIDigit(input[pointer])) { + const number = parseInt(at(input, pointer)); + if (ipv4Piece === null) { + ipv4Piece = number; + } else if (ipv4Piece === 0) { + return failure; + } else { + ipv4Piece = ipv4Piece * 10 + number; + } + if (ipv4Piece > 255) { + return failure; + } + ++pointer; + } + + address[pieceIndex] = address[pieceIndex] * 0x100 + ipv4Piece; + + ++numbersSeen; + + if (numbersSeen === 2 || numbersSeen === 4) { + ++pieceIndex; + } + } + + if (numbersSeen !== 4) { + return failure; + } + + break; + } else if (input[pointer] === 58) { + ++pointer; + if (input[pointer] === undefined) { + return failure; + } + } else if (input[pointer] !== undefined) { + return failure; + } + + address[pieceIndex] = value; + 
++pieceIndex; + } + + if (compress !== null) { + let swaps = pieceIndex - compress; + pieceIndex = 7; + while (pieceIndex !== 0 && swaps > 0) { + const temp = address[compress + swaps - 1]; + address[compress + swaps - 1] = address[pieceIndex]; + address[pieceIndex] = temp; + --pieceIndex; + --swaps; + } + } else if (compress === null && pieceIndex !== 8) { + return failure; + } + + return address; +} + +function serializeIPv6(address) { + let output = ""; + const seqResult = findLongestZeroSequence(address); + const compress = seqResult.idx; + let ignore0 = false; + + for (let pieceIndex = 0; pieceIndex <= 7; ++pieceIndex) { + if (ignore0 && address[pieceIndex] === 0) { + continue; + } else if (ignore0) { + ignore0 = false; + } + + if (compress === pieceIndex) { + const separator = pieceIndex === 0 ? "::" : ":"; + output += separator; + ignore0 = true; + continue; + } + + output += address[pieceIndex].toString(16); + + if (pieceIndex !== 7) { + output += ":"; + } + } + + return output; +} + +function parseHost(input, isSpecialArg) { + if (input[0] === "[") { + if (input[input.length - 1] !== "]") { + return failure; + } + + return parseIPv6(input.substring(1, input.length - 1)); + } + + if (!isSpecialArg) { + return parseOpaqueHost(input); + } + + const domain = utf8PercentDecode(input); + const asciiDomain = tr46.toASCII(domain, false, tr46.PROCESSING_OPTIONS.NONTRANSITIONAL, false); + if (asciiDomain === null) { + return failure; + } + + if (containsForbiddenHostCodePoint(asciiDomain)) { + return failure; + } + + const ipv4Host = parseIPv4(asciiDomain); + if (typeof ipv4Host === "number" || ipv4Host === failure) { + return ipv4Host; + } + + return asciiDomain; +} + +function parseOpaqueHost(input) { + if (containsForbiddenHostCodePointExcludingPercent(input)) { + return failure; + } + + let output = ""; + const decoded = punycode.ucs2.decode(input); + for (let i = 0; i < decoded.length; ++i) { + output += percentEncodeChar(decoded[i], isC0ControlPercentEncode); 
+ } + return output; +} + +function findLongestZeroSequence(arr) { + let maxIdx = null; + let maxLen = 1; // only find elements > 1 + let currStart = null; + let currLen = 0; + + for (let i = 0; i < arr.length; ++i) { + if (arr[i] !== 0) { + if (currLen > maxLen) { + maxIdx = currStart; + maxLen = currLen; + } + + currStart = null; + currLen = 0; + } else { + if (currStart === null) { + currStart = i; + } + ++currLen; + } + } + + // if trailing zeros + if (currLen > maxLen) { + maxIdx = currStart; + maxLen = currLen; + } + + return { + idx: maxIdx, + len: maxLen + }; +} + +function serializeHost(host) { + if (typeof host === "number") { + return serializeIPv4(host); + } + + // IPv6 serializer + if (host instanceof Array) { + return "[" + serializeIPv6(host) + "]"; + } + + return host; +} + +function trimControlChars(url) { + return url.replace(/^[\u0000-\u001F\u0020]+|[\u0000-\u001F\u0020]+$/g, ""); +} + +function trimTabAndNewline(url) { + return url.replace(/\u0009|\u000A|\u000D/g, ""); +} + +function shortenPath(url) { + const path = url.path; + if (path.length === 0) { + return; + } + if (url.scheme === "file" && path.length === 1 && isNormalizedWindowsDriveLetter(path[0])) { + return; + } + + path.pop(); +} + +function includesCredentials(url) { + return url.username !== "" || url.password !== ""; +} + +function cannotHaveAUsernamePasswordPort(url) { + return url.host === null || url.host === "" || url.cannotBeABaseURL || url.scheme === "file"; +} + +function isNormalizedWindowsDriveLetter(string) { + return /^[A-Za-z]:$/.test(string); +} + +function URLStateMachine(input, base, encodingOverride, url, stateOverride) { + this.pointer = 0; + this.input = input; + this.base = base || null; + this.encodingOverride = encodingOverride || "utf-8"; + this.stateOverride = stateOverride; + this.url = url; + this.failure = false; + this.parseError = false; + + if (!this.url) { + this.url = { + scheme: "", + username: "", + password: "", + host: null, + port: null, + 
path: [], + query: null, + fragment: null, + + cannotBeABaseURL: false + }; + + const res = trimControlChars(this.input); + if (res !== this.input) { + this.parseError = true; + } + this.input = res; + } + + const res = trimTabAndNewline(this.input); + if (res !== this.input) { + this.parseError = true; + } + this.input = res; + + this.state = stateOverride || "scheme start"; + + this.buffer = ""; + this.atFlag = false; + this.arrFlag = false; + this.passwordTokenSeenFlag = false; + + this.input = punycode.ucs2.decode(this.input); + + for (; this.pointer <= this.input.length; ++this.pointer) { + const c = this.input[this.pointer]; + const cStr = isNaN(c) ? undefined : String.fromCodePoint(c); + + // exec state machine + const ret = this["parse " + this.state](c, cStr); + if (!ret) { + break; // terminate algorithm + } else if (ret === failure) { + this.failure = true; + break; + } + } +} + +URLStateMachine.prototype["parse scheme start"] = function parseSchemeStart(c, cStr) { + if (isASCIIAlpha(c)) { + this.buffer += cStr.toLowerCase(); + this.state = "scheme"; + } else if (!this.stateOverride) { + this.state = "no scheme"; + --this.pointer; + } else { + this.parseError = true; + return failure; + } + + return true; +}; + +URLStateMachine.prototype["parse scheme"] = function parseScheme(c, cStr) { + if (isASCIIAlphanumeric(c) || c === 43 || c === 45 || c === 46) { + this.buffer += cStr.toLowerCase(); + } else if (c === 58) { + if (this.stateOverride) { + if (isSpecial(this.url) && !isSpecialScheme(this.buffer)) { + return false; + } + + if (!isSpecial(this.url) && isSpecialScheme(this.buffer)) { + return false; + } + + if ((includesCredentials(this.url) || this.url.port !== null) && this.buffer === "file") { + return false; + } + + if (this.url.scheme === "file" && (this.url.host === "" || this.url.host === null)) { + return false; + } + } + this.url.scheme = this.buffer; + this.buffer = ""; + if (this.stateOverride) { + return false; + } + if (this.url.scheme === 
"file") { + if (this.input[this.pointer + 1] !== 47 || this.input[this.pointer + 2] !== 47) { + this.parseError = true; + } + this.state = "file"; + } else if (isSpecial(this.url) && this.base !== null && this.base.scheme === this.url.scheme) { + this.state = "special relative or authority"; + } else if (isSpecial(this.url)) { + this.state = "special authority slashes"; + } else if (this.input[this.pointer + 1] === 47) { + this.state = "path or authority"; + ++this.pointer; + } else { + this.url.cannotBeABaseURL = true; + this.url.path.push(""); + this.state = "cannot-be-a-base-URL path"; + } + } else if (!this.stateOverride) { + this.buffer = ""; + this.state = "no scheme"; + this.pointer = -1; + } else { + this.parseError = true; + return failure; + } + + return true; +}; + +URLStateMachine.prototype["parse no scheme"] = function parseNoScheme(c) { + if (this.base === null || (this.base.cannotBeABaseURL && c !== 35)) { + return failure; + } else if (this.base.cannotBeABaseURL && c === 35) { + this.url.scheme = this.base.scheme; + this.url.path = this.base.path.slice(); + this.url.query = this.base.query; + this.url.fragment = ""; + this.url.cannotBeABaseURL = true; + this.state = "fragment"; + } else if (this.base.scheme === "file") { + this.state = "file"; + --this.pointer; + } else { + this.state = "relative"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse special relative or authority"] = function parseSpecialRelativeOrAuthority(c) { + if (c === 47 && this.input[this.pointer + 1] === 47) { + this.state = "special authority ignore slashes"; + ++this.pointer; + } else { + this.parseError = true; + this.state = "relative"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse path or authority"] = function parsePathOrAuthority(c) { + if (c === 47) { + this.state = "authority"; + } else { + this.state = "path"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse relative"] = 
function parseRelative(c) { + this.url.scheme = this.base.scheme; + if (isNaN(c)) { + this.url.username = this.base.username; + this.url.password = this.base.password; + this.url.host = this.base.host; + this.url.port = this.base.port; + this.url.path = this.base.path.slice(); + this.url.query = this.base.query; + } else if (c === 47) { + this.state = "relative slash"; + } else if (c === 63) { + this.url.username = this.base.username; + this.url.password = this.base.password; + this.url.host = this.base.host; + this.url.port = this.base.port; + this.url.path = this.base.path.slice(); + this.url.query = ""; + this.state = "query"; + } else if (c === 35) { + this.url.username = this.base.username; + this.url.password = this.base.password; + this.url.host = this.base.host; + this.url.port = this.base.port; + this.url.path = this.base.path.slice(); + this.url.query = this.base.query; + this.url.fragment = ""; + this.state = "fragment"; + } else if (isSpecial(this.url) && c === 92) { + this.parseError = true; + this.state = "relative slash"; + } else { + this.url.username = this.base.username; + this.url.password = this.base.password; + this.url.host = this.base.host; + this.url.port = this.base.port; + this.url.path = this.base.path.slice(0, this.base.path.length - 1); + + this.state = "path"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse relative slash"] = function parseRelativeSlash(c) { + if (isSpecial(this.url) && (c === 47 || c === 92)) { + if (c === 92) { + this.parseError = true; + } + this.state = "special authority ignore slashes"; + } else if (c === 47) { + this.state = "authority"; + } else { + this.url.username = this.base.username; + this.url.password = this.base.password; + this.url.host = this.base.host; + this.url.port = this.base.port; + this.state = "path"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse special authority slashes"] = function parseSpecialAuthoritySlashes(c) { + if (c === 
47 && this.input[this.pointer + 1] === 47) { + this.state = "special authority ignore slashes"; + ++this.pointer; + } else { + this.parseError = true; + this.state = "special authority ignore slashes"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse special authority ignore slashes"] = function parseSpecialAuthorityIgnoreSlashes(c) { + if (c !== 47 && c !== 92) { + this.state = "authority"; + --this.pointer; + } else { + this.parseError = true; + } + + return true; +}; + +URLStateMachine.prototype["parse authority"] = function parseAuthority(c, cStr) { + if (c === 64) { + this.parseError = true; + if (this.atFlag) { + this.buffer = "%40" + this.buffer; + } + this.atFlag = true; + + // careful, this is based on buffer and has its own pointer (this.pointer != pointer) and inner chars + const len = countSymbols(this.buffer); + for (let pointer = 0; pointer < len; ++pointer) { + const codePoint = this.buffer.codePointAt(pointer); + + if (codePoint === 58 && !this.passwordTokenSeenFlag) { + this.passwordTokenSeenFlag = true; + continue; + } + const encodedCodePoints = percentEncodeChar(codePoint, isUserinfoPercentEncode); + if (this.passwordTokenSeenFlag) { + this.url.password += encodedCodePoints; + } else { + this.url.username += encodedCodePoints; + } + } + this.buffer = ""; + } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || + (isSpecial(this.url) && c === 92)) { + if (this.atFlag && this.buffer === "") { + this.parseError = true; + return failure; + } + this.pointer -= countSymbols(this.buffer) + 1; + this.buffer = ""; + this.state = "host"; + } else { + this.buffer += cStr; + } + + return true; +}; + +URLStateMachine.prototype["parse hostname"] = +URLStateMachine.prototype["parse host"] = function parseHostName(c, cStr) { + if (this.stateOverride && this.url.scheme === "file") { + --this.pointer; + this.state = "file host"; + } else if (c === 58 && !this.arrFlag) { + if (this.buffer === "") { + this.parseError = true; + 
return failure; + } + + const host = parseHost(this.buffer, isSpecial(this.url)); + if (host === failure) { + return failure; + } + + this.url.host = host; + this.buffer = ""; + this.state = "port"; + if (this.stateOverride === "hostname") { + return false; + } + } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || + (isSpecial(this.url) && c === 92)) { + --this.pointer; + if (isSpecial(this.url) && this.buffer === "") { + this.parseError = true; + return failure; + } else if (this.stateOverride && this.buffer === "" && + (includesCredentials(this.url) || this.url.port !== null)) { + this.parseError = true; + return false; + } + + const host = parseHost(this.buffer, isSpecial(this.url)); + if (host === failure) { + return failure; + } + + this.url.host = host; + this.buffer = ""; + this.state = "path start"; + if (this.stateOverride) { + return false; + } + } else { + if (c === 91) { + this.arrFlag = true; + } else if (c === 93) { + this.arrFlag = false; + } + this.buffer += cStr; + } + + return true; +}; + +URLStateMachine.prototype["parse port"] = function parsePort(c, cStr) { + if (isASCIIDigit(c)) { + this.buffer += cStr; + } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || + (isSpecial(this.url) && c === 92) || + this.stateOverride) { + if (this.buffer !== "") { + const port = parseInt(this.buffer); + if (port > Math.pow(2, 16) - 1) { + this.parseError = true; + return failure; + } + this.url.port = port === defaultPort(this.url.scheme) ? 
null : port; + this.buffer = ""; + } + if (this.stateOverride) { + return false; + } + this.state = "path start"; + --this.pointer; + } else { + this.parseError = true; + return failure; + } + + return true; +}; + +const fileOtherwiseCodePoints = new Set([47, 92, 63, 35]); + +URLStateMachine.prototype["parse file"] = function parseFile(c) { + this.url.scheme = "file"; + + if (c === 47 || c === 92) { + if (c === 92) { + this.parseError = true; + } + this.state = "file slash"; + } else if (this.base !== null && this.base.scheme === "file") { + if (isNaN(c)) { + this.url.host = this.base.host; + this.url.path = this.base.path.slice(); + this.url.query = this.base.query; + } else if (c === 63) { + this.url.host = this.base.host; + this.url.path = this.base.path.slice(); + this.url.query = ""; + this.state = "query"; + } else if (c === 35) { + this.url.host = this.base.host; + this.url.path = this.base.path.slice(); + this.url.query = this.base.query; + this.url.fragment = ""; + this.state = "fragment"; + } else { + if (this.input.length - this.pointer - 1 === 0 || // remaining consists of 0 code points + !isWindowsDriveLetterCodePoints(c, this.input[this.pointer + 1]) || + (this.input.length - this.pointer - 1 >= 2 && // remaining has at least 2 code points + !fileOtherwiseCodePoints.has(this.input[this.pointer + 2]))) { + this.url.host = this.base.host; + this.url.path = this.base.path.slice(); + shortenPath(this.url); + } else { + this.parseError = true; + } + + this.state = "path"; + --this.pointer; + } + } else { + this.state = "path"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse file slash"] = function parseFileSlash(c) { + if (c === 47 || c === 92) { + if (c === 92) { + this.parseError = true; + } + this.state = "file host"; + } else { + if (this.base !== null && this.base.scheme === "file") { + if (isNormalizedWindowsDriveLetterString(this.base.path[0])) { + this.url.path.push(this.base.path[0]); + } else { + this.url.host = 
this.base.host; + } + } + this.state = "path"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse file host"] = function parseFileHost(c, cStr) { + if (isNaN(c) || c === 47 || c === 92 || c === 63 || c === 35) { + --this.pointer; + if (!this.stateOverride && isWindowsDriveLetterString(this.buffer)) { + this.parseError = true; + this.state = "path"; + } else if (this.buffer === "") { + this.url.host = ""; + if (this.stateOverride) { + return false; + } + this.state = "path start"; + } else { + let host = parseHost(this.buffer, isSpecial(this.url)); + if (host === failure) { + return failure; + } + if (host === "localhost") { + host = ""; + } + this.url.host = host; + + if (this.stateOverride) { + return false; + } + + this.buffer = ""; + this.state = "path start"; + } + } else { + this.buffer += cStr; + } + + return true; +}; + +URLStateMachine.prototype["parse path start"] = function parsePathStart(c) { + if (isSpecial(this.url)) { + if (c === 92) { + this.parseError = true; + } + this.state = "path"; + + if (c !== 47 && c !== 92) { + --this.pointer; + } + } else if (!this.stateOverride && c === 63) { + this.url.query = ""; + this.state = "query"; + } else if (!this.stateOverride && c === 35) { + this.url.fragment = ""; + this.state = "fragment"; + } else if (c !== undefined) { + this.state = "path"; + if (c !== 47) { + --this.pointer; + } + } + + return true; +}; + +URLStateMachine.prototype["parse path"] = function parsePath(c) { + if (isNaN(c) || c === 47 || (isSpecial(this.url) && c === 92) || + (!this.stateOverride && (c === 63 || c === 35))) { + if (isSpecial(this.url) && c === 92) { + this.parseError = true; + } + + if (isDoubleDot(this.buffer)) { + shortenPath(this.url); + if (c !== 47 && !(isSpecial(this.url) && c === 92)) { + this.url.path.push(""); + } + } else if (isSingleDot(this.buffer) && c !== 47 && + !(isSpecial(this.url) && c === 92)) { + this.url.path.push(""); + } else if (!isSingleDot(this.buffer)) { + if 
(this.url.scheme === "file" && this.url.path.length === 0 && isWindowsDriveLetterString(this.buffer)) { + if (this.url.host !== "" && this.url.host !== null) { + this.parseError = true; + this.url.host = ""; + } + this.buffer = this.buffer[0] + ":"; + } + this.url.path.push(this.buffer); + } + this.buffer = ""; + if (this.url.scheme === "file" && (c === undefined || c === 63 || c === 35)) { + while (this.url.path.length > 1 && this.url.path[0] === "") { + this.parseError = true; + this.url.path.shift(); + } + } + if (c === 63) { + this.url.query = ""; + this.state = "query"; + } + if (c === 35) { + this.url.fragment = ""; + this.state = "fragment"; + } + } else { + // TODO: If c is not a URL code point and not "%", parse error. + + if (c === 37 && + (!isASCIIHex(this.input[this.pointer + 1]) || + !isASCIIHex(this.input[this.pointer + 2]))) { + this.parseError = true; + } + + this.buffer += percentEncodeChar(c, isPathPercentEncode); + } + + return true; +}; + +URLStateMachine.prototype["parse cannot-be-a-base-URL path"] = function parseCannotBeABaseURLPath(c) { + if (c === 63) { + this.url.query = ""; + this.state = "query"; + } else if (c === 35) { + this.url.fragment = ""; + this.state = "fragment"; + } else { + // TODO: Add: not a URL code point + if (!isNaN(c) && c !== 37) { + this.parseError = true; + } + + if (c === 37 && + (!isASCIIHex(this.input[this.pointer + 1]) || + !isASCIIHex(this.input[this.pointer + 2]))) { + this.parseError = true; + } + + if (!isNaN(c)) { + this.url.path[0] = this.url.path[0] + percentEncodeChar(c, isC0ControlPercentEncode); + } + } + + return true; +}; + +URLStateMachine.prototype["parse query"] = function parseQuery(c, cStr) { + if (isNaN(c) || (!this.stateOverride && c === 35)) { + if (!isSpecial(this.url) || this.url.scheme === "ws" || this.url.scheme === "wss") { + this.encodingOverride = "utf-8"; + } + + const buffer = new Buffer(this.buffer); // TODO: Use encoding override instead + for (let i = 0; i < buffer.length; ++i) { + 
if (buffer[i] < 0x21 || buffer[i] > 0x7E || buffer[i] === 0x22 || buffer[i] === 0x23 || + buffer[i] === 0x3C || buffer[i] === 0x3E) { + this.url.query += percentEncode(buffer[i]); + } else { + this.url.query += String.fromCodePoint(buffer[i]); + } + } + + this.buffer = ""; + if (c === 35) { + this.url.fragment = ""; + this.state = "fragment"; + } + } else { + // TODO: If c is not a URL code point and not "%", parse error. + if (c === 37 && + (!isASCIIHex(this.input[this.pointer + 1]) || + !isASCIIHex(this.input[this.pointer + 2]))) { + this.parseError = true; + } + + this.buffer += cStr; + } + + return true; +}; + +URLStateMachine.prototype["parse fragment"] = function parseFragment(c) { + if (isNaN(c)) { // do nothing + } else if (c === 0x0) { + this.parseError = true; + } else { + // TODO: If c is not a URL code point and not "%", parse error. + if (c === 37 && + (!isASCIIHex(this.input[this.pointer + 1]) || + !isASCIIHex(this.input[this.pointer + 2]))) { + this.parseError = true; + } + + this.url.fragment += percentEncodeChar(c, isC0ControlPercentEncode); + } + + return true; +}; + +function serializeURL(url, excludeFragment) { + let output = url.scheme + ":"; + if (url.host !== null) { + output += "//"; + + if (url.username !== "" || url.password !== "") { + output += url.username; + if (url.password !== "") { + output += ":" + url.password; + } + output += "@"; + } + + output += serializeHost(url.host); + + if (url.port !== null) { + output += ":" + url.port; + } + } else if (url.host === null && url.scheme === "file") { + output += "//"; + } + + if (url.cannotBeABaseURL) { + output += url.path[0]; + } else { + for (const string of url.path) { + output += "/" + string; + } + } + + if (url.query !== null) { + output += "?" 
+ url.query; + } + + if (!excludeFragment && url.fragment !== null) { + output += "#" + url.fragment; + } + + return output; +} + +function serializeOrigin(tuple) { + let result = tuple.scheme + "://"; + result += serializeHost(tuple.host); + + if (tuple.port !== null) { + result += ":" + tuple.port; + } + + return result; +} + +module.exports.serializeURL = serializeURL; + +module.exports.serializeURLOrigin = function (url) { + // https://url.spec.whatwg.org/#concept-url-origin + switch (url.scheme) { + case "blob": + try { + return module.exports.serializeURLOrigin(module.exports.parseURL(url.path[0])); + } catch (e) { + // serializing an opaque origin returns "null" + return "null"; + } + case "ftp": + case "gopher": + case "http": + case "https": + case "ws": + case "wss": + return serializeOrigin({ + scheme: url.scheme, + host: url.host, + port: url.port + }); + case "file": + // spec says "exercise to the reader", chrome says "file://" + return "file://"; + default: + // serializing an opaque origin returns "null" + return "null"; + } +}; + +module.exports.basicURLParse = function (input, options) { + if (options === undefined) { + options = {}; + } + + const usm = new URLStateMachine(input, options.baseURL, options.encodingOverride, options.url, options.stateOverride); + if (usm.failure) { + return "failure"; + } + + return usm.url; +}; + +module.exports.setTheUsername = function (url, username) { + url.username = ""; + const decoded = punycode.ucs2.decode(username); + for (let i = 0; i < decoded.length; ++i) { + url.username += percentEncodeChar(decoded[i], isUserinfoPercentEncode); + } +}; + +module.exports.setThePassword = function (url, password) { + url.password = ""; + const decoded = punycode.ucs2.decode(password); + for (let i = 0; i < decoded.length; ++i) { + url.password += percentEncodeChar(decoded[i], isUserinfoPercentEncode); + } +}; + +module.exports.serializeHost = serializeHost; + +module.exports.cannotHaveAUsernamePasswordPort = 
cannotHaveAUsernamePasswordPort; + +module.exports.serializeInteger = function (integer) { + return String(integer); +}; + +module.exports.parseURL = function (input, options) { + if (options === undefined) { + options = {}; + } + + // We don't handle blobs, so this just delegates: + return module.exports.basicURLParse(input, { baseURL: options.baseURL, encodingOverride: options.encodingOverride }); +}; + + +/***/ }), + +/***/ 3185: +/***/ ((module) => { + +"use strict"; + + +module.exports.mixin = function mixin(target, source) { + const keys = Object.getOwnPropertyNames(source); + for (let i = 0; i < keys.length; ++i) { + Object.defineProperty(target, keys[i], Object.getOwnPropertyDescriptor(source, keys[i])); } -} +}; + +module.exports.wrapperSymbol = Symbol("wrapper"); +module.exports.implSymbol = Symbol("impl"); + +module.exports.wrapperForImpl = function (impl) { + return impl[module.exports.wrapperSymbol]; +}; + +module.exports.implForWrapper = function (wrapper) { + return wrapper[module.exports.implSymbol]; +}; -exports.getUserAgent = getUserAgent; -//# sourceMappingURL=index.js.map /***/ }), @@ -16050,6 +19992,14 @@ function groupIssuesByLabels(issues, labels) { module.exports = eval("require")("encoding"); +/***/ }), + +/***/ 68: +/***/ ((module) => { + +"use strict"; +module.exports = 
JSON.parse("[[[0,44],\"disallowed_STD3_valid\"],[[45,46],\"valid\"],[[47,47],\"disallowed_STD3_valid\"],[[48,57],\"valid\"],[[58,64],\"disallowed_STD3_valid\"],[[65,65],\"mapped\",[97]],[[66,66],\"mapped\",[98]],[[67,67],\"mapped\",[99]],[[68,68],\"mapped\",[100]],[[69,69],\"mapped\",[101]],[[70,70],\"mapped\",[102]],[[71,71],\"mapped\",[103]],[[72,72],\"mapped\",[104]],[[73,73],\"mapped\",[105]],[[74,74],\"mapped\",[106]],[[75,75],\"mapped\",[107]],[[76,76],\"mapped\",[108]],[[77,77],\"mapped\",[109]],[[78,78],\"mapped\",[110]],[[79,79],\"mapped\",[111]],[[80,80],\"mapped\",[112]],[[81,81],\"mapped\",[113]],[[82,82],\"mapped\",[114]],[[83,83],\"mapped\",[115]],[[84,84],\"mapped\",[116]],[[85,85],\"mapped\",[117]],[[86,86],\"mapped\",[118]],[[87,87],\"mapped\",[119]],[[88,88],\"mapped\",[120]],[[89,89],\"mapped\",[121]],[[90,90],\"mapped\",[122]],[[91,96],\"disallowed_STD3_valid\"],[[97,122],\"valid\"],[[123,127],\"disallowed_STD3_valid\"],[[128,159],\"disallowed\"],[[160,160],\"disallowed_STD3_mapped\",[32]],[[161,167],\"valid\",[],\"NV8\"],[[168,168],\"disallowed_STD3_mapped\",[32,776]],[[169,169],\"valid\",[],\"NV8\"],[[170,170],\"mapped\",[97]],[[171,172],\"valid\",[],\"NV8\"],[[173,173],\"ignored\"],[[174,174],\"valid\",[],\"NV8\"],[[175,175],\"disallowed_STD3_mapped\",[32,772]],[[176,177],\"valid\",[],\"NV8\"],[[178,178],\"mapped\",[50]],[[179,179],\"mapped\",[51]],[[180,180],\"disallowed_STD3_mapped\",[32,769]],[[181,181],\"mapped\",[956]],[[182,182],\"valid\",[],\"NV8\"],[[183,183],\"valid\"],[[184,184],\"disallowed_STD3_mapped\",[32,807]],[[185,185],\"mapped\",[49]],[[186,186],\"mapped\",[111]],[[187,187],\"valid\",[],\"NV8\"],[[188,188],\"mapped\",[49,8260,52]],[[189,189],\"mapped\",[49,8260,50]],[[190,190],\"mapped\",[51,8260,52]],[[191,191],\"valid\",[],\"NV8\"],[[192,192],\"mapped\",[224]],[[193,193],\"mapped\",[225]],[[194,194],\"mapped\",[226]],[[195,195],\"mapped\",[227]],[[196,196],\"mapped\",[228]],[[197,197],\"mapped\",[229]],[[198,198],\"mapped\"
,[230]],[[199,199],\"mapped\",[231]],[[200,200],\"mapped\",[232]],[[201,201],\"mapped\",[233]],[[202,202],\"mapped\",[234]],[[203,203],\"mapped\",[235]],[[204,204],\"mapped\",[236]],[[205,205],\"mapped\",[237]],[[206,206],\"mapped\",[238]],[[207,207],\"mapped\",[239]],[[208,208],\"mapped\",[240]],[[209,209],\"mapped\",[241]],[[210,210],\"mapped\",[242]],[[211,211],\"mapped\",[243]],[[212,212],\"mapped\",[244]],[[213,213],\"mapped\",[245]],[[214,214],\"mapped\",[246]],[[215,215],\"valid\",[],\"NV8\"],[[216,216],\"mapped\",[248]],[[217,217],\"mapped\",[249]],[[218,218],\"mapped\",[250]],[[219,219],\"mapped\",[251]],[[220,220],\"mapped\",[252]],[[221,221],\"mapped\",[253]],[[222,222],\"mapped\",[254]],[[223,223],\"deviation\",[115,115]],[[224,246],\"valid\"],[[247,247],\"valid\",[],\"NV8\"],[[248,255],\"valid\"],[[256,256],\"mapped\",[257]],[[257,257],\"valid\"],[[258,258],\"mapped\",[259]],[[259,259],\"valid\"],[[260,260],\"mapped\",[261]],[[261,261],\"valid\"],[[262,262],\"mapped\",[263]],[[263,263],\"valid\"],[[264,264],\"mapped\",[265]],[[265,265],\"valid\"],[[266,266],\"mapped\",[267]],[[267,267],\"valid\"],[[268,268],\"mapped\",[269]],[[269,269],\"valid\"],[[270,270],\"mapped\",[271]],[[271,271],\"valid\"],[[272,272],\"mapped\",[273]],[[273,273],\"valid\"],[[274,274],\"mapped\",[275]],[[275,275],\"valid\"],[[276,276],\"mapped\",[277]],[[277,277],\"valid\"],[[278,278],\"mapped\",[279]],[[279,279],\"valid\"],[[280,280],\"mapped\",[281]],[[281,281],\"valid\"],[[282,282],\"mapped\",[283]],[[283,283],\"valid\"],[[284,284],\"mapped\",[285]],[[285,285],\"valid\"],[[286,286],\"mapped\",[287]],[[287,287],\"valid\"],[[288,288],\"mapped\",[289]],[[289,289],\"valid\"],[[290,290],\"mapped\",[291]],[[291,291],\"valid\"],[[292,292],\"mapped\",[293]],[[293,293],\"valid\"],[[294,294],\"mapped\",[295]],[[295,295],\"valid\"],[[296,296],\"mapped\",[297]],[[297,297],\"valid\"],[[298,298],\"mapped\",[299]],[[299,299],\"valid\"],[[300,300],\"mapped\",[301]],[[301,301],\"valid\"],[[302,
302],\"mapped\",[303]],[[303,303],\"valid\"],[[304,304],\"mapped\",[105,775]],[[305,305],\"valid\"],[[306,307],\"mapped\",[105,106]],[[308,308],\"mapped\",[309]],[[309,309],\"valid\"],[[310,310],\"mapped\",[311]],[[311,312],\"valid\"],[[313,313],\"mapped\",[314]],[[314,314],\"valid\"],[[315,315],\"mapped\",[316]],[[316,316],\"valid\"],[[317,317],\"mapped\",[318]],[[318,318],\"valid\"],[[319,320],\"mapped\",[108,183]],[[321,321],\"mapped\",[322]],[[322,322],\"valid\"],[[323,323],\"mapped\",[324]],[[324,324],\"valid\"],[[325,325],\"mapped\",[326]],[[326,326],\"valid\"],[[327,327],\"mapped\",[328]],[[328,328],\"valid\"],[[329,329],\"mapped\",[700,110]],[[330,330],\"mapped\",[331]],[[331,331],\"valid\"],[[332,332],\"mapped\",[333]],[[333,333],\"valid\"],[[334,334],\"mapped\",[335]],[[335,335],\"valid\"],[[336,336],\"mapped\",[337]],[[337,337],\"valid\"],[[338,338],\"mapped\",[339]],[[339,339],\"valid\"],[[340,340],\"mapped\",[341]],[[341,341],\"valid\"],[[342,342],\"mapped\",[343]],[[343,343],\"valid\"],[[344,344],\"mapped\",[345]],[[345,345],\"valid\"],[[346,346],\"mapped\",[347]],[[347,347],\"valid\"],[[348,348],\"mapped\",[349]],[[349,349],\"valid\"],[[350,350],\"mapped\",[351]],[[351,351],\"valid\"],[[352,352],\"mapped\",[353]],[[353,353],\"valid\"],[[354,354],\"mapped\",[355]],[[355,355],\"valid\"],[[356,356],\"mapped\",[357]],[[357,357],\"valid\"],[[358,358],\"mapped\",[359]],[[359,359],\"valid\"],[[360,360],\"mapped\",[361]],[[361,361],\"valid\"],[[362,362],\"mapped\",[363]],[[363,363],\"valid\"],[[364,364],\"mapped\",[365]],[[365,365],\"valid\"],[[366,366],\"mapped\",[367]],[[367,367],\"valid\"],[[368,368],\"mapped\",[369]],[[369,369],\"valid\"],[[370,370],\"mapped\",[371]],[[371,371],\"valid\"],[[372,372],\"mapped\",[373]],[[373,373],\"valid\"],[[374,374],\"mapped\",[375]],[[375,375],\"valid\"],[[376,376],\"mapped\",[255]],[[377,377],\"mapped\",[378]],[[378,378],\"valid\"],[[379,379],\"mapped\",[380]],[[380,380],\"valid\"],[[381,381],\"mapped\",[382]],[[382,382
],\"valid\"],[[383,383],\"mapped\",[115]],[[384,384],\"valid\"],[[385,385],\"mapped\",[595]],[[386,386],\"mapped\",[387]],[[387,387],\"valid\"],[[388,388],\"mapped\",[389]],[[389,389],\"valid\"],[[390,390],\"mapped\",[596]],[[391,391],\"mapped\",[392]],[[392,392],\"valid\"],[[393,393],\"mapped\",[598]],[[394,394],\"mapped\",[599]],[[395,395],\"mapped\",[396]],[[396,397],\"valid\"],[[398,398],\"mapped\",[477]],[[399,399],\"mapped\",[601]],[[400,400],\"mapped\",[603]],[[401,401],\"mapped\",[402]],[[402,402],\"valid\"],[[403,403],\"mapped\",[608]],[[404,404],\"mapped\",[611]],[[405,405],\"valid\"],[[406,406],\"mapped\",[617]],[[407,407],\"mapped\",[616]],[[408,408],\"mapped\",[409]],[[409,411],\"valid\"],[[412,412],\"mapped\",[623]],[[413,413],\"mapped\",[626]],[[414,414],\"valid\"],[[415,415],\"mapped\",[629]],[[416,416],\"mapped\",[417]],[[417,417],\"valid\"],[[418,418],\"mapped\",[419]],[[419,419],\"valid\"],[[420,420],\"mapped\",[421]],[[421,421],\"valid\"],[[422,422],\"mapped\",[640]],[[423,423],\"mapped\",[424]],[[424,424],\"valid\"],[[425,425],\"mapped\",[643]],[[426,427],\"valid\"],[[428,428],\"mapped\",[429]],[[429,429],\"valid\"],[[430,430],\"mapped\",[648]],[[431,431],\"mapped\",[432]],[[432,432],\"valid\"],[[433,433],\"mapped\",[650]],[[434,434],\"mapped\",[651]],[[435,435],\"mapped\",[436]],[[436,436],\"valid\"],[[437,437],\"mapped\",[438]],[[438,438],\"valid\"],[[439,439],\"mapped\",[658]],[[440,440],\"mapped\",[441]],[[441,443],\"valid\"],[[444,444],\"mapped\",[445]],[[445,451],\"valid\"],[[452,454],\"mapped\",[100,382]],[[455,457],\"mapped\",[108,106]],[[458,460],\"mapped\",[110,106]],[[461,461],\"mapped\",[462]],[[462,462],\"valid\"],[[463,463],\"mapped\",[464]],[[464,464],\"valid\"],[[465,465],\"mapped\",[466]],[[466,466],\"valid\"],[[467,467],\"mapped\",[468]],[[468,468],\"valid\"],[[469,469],\"mapped\",[470]],[[470,470],\"valid\"],[[471,471],\"mapped\",[472]],[[472,472],\"valid\"],[[473,473],\"mapped\",[474]],[[474,474],\"valid\"],[[475,475],\"mappe
d\",[476]],[[476,477],\"valid\"],[[478,478],\"mapped\",[479]],[[479,479],\"valid\"],[[480,480],\"mapped\",[481]],[[481,481],\"valid\"],[[482,482],\"mapped\",[483]],[[483,483],\"valid\"],[[484,484],\"mapped\",[485]],[[485,485],\"valid\"],[[486,486],\"mapped\",[487]],[[487,487],\"valid\"],[[488,488],\"mapped\",[489]],[[489,489],\"valid\"],[[490,490],\"mapped\",[491]],[[491,491],\"valid\"],[[492,492],\"mapped\",[493]],[[493,493],\"valid\"],[[494,494],\"mapped\",[495]],[[495,496],\"valid\"],[[497,499],\"mapped\",[100,122]],[[500,500],\"mapped\",[501]],[[501,501],\"valid\"],[[502,502],\"mapped\",[405]],[[503,503],\"mapped\",[447]],[[504,504],\"mapped\",[505]],[[505,505],\"valid\"],[[506,506],\"mapped\",[507]],[[507,507],\"valid\"],[[508,508],\"mapped\",[509]],[[509,509],\"valid\"],[[510,510],\"mapped\",[511]],[[511,511],\"valid\"],[[512,512],\"mapped\",[513]],[[513,513],\"valid\"],[[514,514],\"mapped\",[515]],[[515,515],\"valid\"],[[516,516],\"mapped\",[517]],[[517,517],\"valid\"],[[518,518],\"mapped\",[519]],[[519,519],\"valid\"],[[520,520],\"mapped\",[521]],[[521,521],\"valid\"],[[522,522],\"mapped\",[523]],[[523,523],\"valid\"],[[524,524],\"mapped\",[525]],[[525,525],\"valid\"],[[526,526],\"mapped\",[527]],[[527,527],\"valid\"],[[528,528],\"mapped\",[529]],[[529,529],\"valid\"],[[530,530],\"mapped\",[531]],[[531,531],\"valid\"],[[532,532],\"mapped\",[533]],[[533,533],\"valid\"],[[534,534],\"mapped\",[535]],[[535,535],\"valid\"],[[536,536],\"mapped\",[537]],[[537,537],\"valid\"],[[538,538],\"mapped\",[539]],[[539,539],\"valid\"],[[540,540],\"mapped\",[541]],[[541,541],\"valid\"],[[542,542],\"mapped\",[543]],[[543,543],\"valid\"],[[544,544],\"mapped\",[414]],[[545,545],\"valid\"],[[546,546],\"mapped\",[547]],[[547,547],\"valid\"],[[548,548],\"mapped\",[549]],[[549,549],\"valid\"],[[550,550],\"mapped\",[551]],[[551,551],\"valid\"],[[552,552],\"mapped\",[553]],[[553,553],\"valid\"],[[554,554],\"mapped\",[555]],[[555,555],\"valid\"],[[556,556],\"mapped\",[557]],[[557,557],
\"valid\"],[[558,558],\"mapped\",[559]],[[559,559],\"valid\"],[[560,560],\"mapped\",[561]],[[561,561],\"valid\"],[[562,562],\"mapped\",[563]],[[563,563],\"valid\"],[[564,566],\"valid\"],[[567,569],\"valid\"],[[570,570],\"mapped\",[11365]],[[571,571],\"mapped\",[572]],[[572,572],\"valid\"],[[573,573],\"mapped\",[410]],[[574,574],\"mapped\",[11366]],[[575,576],\"valid\"],[[577,577],\"mapped\",[578]],[[578,578],\"valid\"],[[579,579],\"mapped\",[384]],[[580,580],\"mapped\",[649]],[[581,581],\"mapped\",[652]],[[582,582],\"mapped\",[583]],[[583,583],\"valid\"],[[584,584],\"mapped\",[585]],[[585,585],\"valid\"],[[586,586],\"mapped\",[587]],[[587,587],\"valid\"],[[588,588],\"mapped\",[589]],[[589,589],\"valid\"],[[590,590],\"mapped\",[591]],[[591,591],\"valid\"],[[592,680],\"valid\"],[[681,685],\"valid\"],[[686,687],\"valid\"],[[688,688],\"mapped\",[104]],[[689,689],\"mapped\",[614]],[[690,690],\"mapped\",[106]],[[691,691],\"mapped\",[114]],[[692,692],\"mapped\",[633]],[[693,693],\"mapped\",[635]],[[694,694],\"mapped\",[641]],[[695,695],\"mapped\",[119]],[[696,696],\"mapped\",[121]],[[697,705],\"valid\"],[[706,709],\"valid\",[],\"NV8\"],[[710,721],\"valid\"],[[722,727],\"valid\",[],\"NV8\"],[[728,728],\"disallowed_STD3_mapped\",[32,774]],[[729,729],\"disallowed_STD3_mapped\",[32,775]],[[730,730],\"disallowed_STD3_mapped\",[32,778]],[[731,731],\"disallowed_STD3_mapped\",[32,808]],[[732,732],\"disallowed_STD3_mapped\",[32,771]],[[733,733],\"disallowed_STD3_mapped\",[32,779]],[[734,734],\"valid\",[],\"NV8\"],[[735,735],\"valid\",[],\"NV8\"],[[736,736],\"mapped\",[611]],[[737,737],\"mapped\",[108]],[[738,738],\"mapped\",[115]],[[739,739],\"mapped\",[120]],[[740,740],\"mapped\",[661]],[[741,745],\"valid\",[],\"NV8\"],[[746,747],\"valid\",[],\"NV8\"],[[748,748],\"valid\"],[[749,749],\"valid\",[],\"NV8\"],[[750,750],\"valid\"],[[751,767],\"valid\",[],\"NV8\"],[[768,831],\"valid\"],[[832,832],\"mapped\",[768]],[[833,833],\"mapped\",[769]],[[834,834],\"valid\"],[[835,835],\"mapped\"
,[787]],[[836,836],\"mapped\",[776,769]],[[837,837],\"mapped\",[953]],[[838,846],\"valid\"],[[847,847],\"ignored\"],[[848,855],\"valid\"],[[856,860],\"valid\"],[[861,863],\"valid\"],[[864,865],\"valid\"],[[866,866],\"valid\"],[[867,879],\"valid\"],[[880,880],\"mapped\",[881]],[[881,881],\"valid\"],[[882,882],\"mapped\",[883]],[[883,883],\"valid\"],[[884,884],\"mapped\",[697]],[[885,885],\"valid\"],[[886,886],\"mapped\",[887]],[[887,887],\"valid\"],[[888,889],\"disallowed\"],[[890,890],\"disallowed_STD3_mapped\",[32,953]],[[891,893],\"valid\"],[[894,894],\"disallowed_STD3_mapped\",[59]],[[895,895],\"mapped\",[1011]],[[896,899],\"disallowed\"],[[900,900],\"disallowed_STD3_mapped\",[32,769]],[[901,901],\"disallowed_STD3_mapped\",[32,776,769]],[[902,902],\"mapped\",[940]],[[903,903],\"mapped\",[183]],[[904,904],\"mapped\",[941]],[[905,905],\"mapped\",[942]],[[906,906],\"mapped\",[943]],[[907,907],\"disallowed\"],[[908,908],\"mapped\",[972]],[[909,909],\"disallowed\"],[[910,910],\"mapped\",[973]],[[911,911],\"mapped\",[974]],[[912,912],\"valid\"],[[913,913],\"mapped\",[945]],[[914,914],\"mapped\",[946]],[[915,915],\"mapped\",[947]],[[916,916],\"mapped\",[948]],[[917,917],\"mapped\",[949]],[[918,918],\"mapped\",[950]],[[919,919],\"mapped\",[951]],[[920,920],\"mapped\",[952]],[[921,921],\"mapped\",[953]],[[922,922],\"mapped\",[954]],[[923,923],\"mapped\",[955]],[[924,924],\"mapped\",[956]],[[925,925],\"mapped\",[957]],[[926,926],\"mapped\",[958]],[[927,927],\"mapped\",[959]],[[928,928],\"mapped\",[960]],[[929,929],\"mapped\",[961]],[[930,930],\"disallowed\"],[[931,931],\"mapped\",[963]],[[932,932],\"mapped\",[964]],[[933,933],\"mapped\",[965]],[[934,934],\"mapped\",[966]],[[935,935],\"mapped\",[967]],[[936,936],\"mapped\",[968]],[[937,937],\"mapped\",[969]],[[938,938],\"mapped\",[970]],[[939,939],\"mapped\",[971]],[[940,961],\"valid\"],[[962,962],\"deviation\",[963]],[[963,974],\"valid\"],[[975,975],\"mapped\",[983]],[[976,976],\"mapped\",[946]],[[977,977],\"mapped\",[952]
],[[978,978],\"mapped\",[965]],[[979,979],\"mapped\",[973]],[[980,980],\"mapped\",[971]],[[981,981],\"mapped\",[966]],[[982,982],\"mapped\",[960]],[[983,983],\"valid\"],[[984,984],\"mapped\",[985]],[[985,985],\"valid\"],[[986,986],\"mapped\",[987]],[[987,987],\"valid\"],[[988,988],\"mapped\",[989]],[[989,989],\"valid\"],[[990,990],\"mapped\",[991]],[[991,991],\"valid\"],[[992,992],\"mapped\",[993]],[[993,993],\"valid\"],[[994,994],\"mapped\",[995]],[[995,995],\"valid\"],[[996,996],\"mapped\",[997]],[[997,997],\"valid\"],[[998,998],\"mapped\",[999]],[[999,999],\"valid\"],[[1000,1000],\"mapped\",[1001]],[[1001,1001],\"valid\"],[[1002,1002],\"mapped\",[1003]],[[1003,1003],\"valid\"],[[1004,1004],\"mapped\",[1005]],[[1005,1005],\"valid\"],[[1006,1006],\"mapped\",[1007]],[[1007,1007],\"valid\"],[[1008,1008],\"mapped\",[954]],[[1009,1009],\"mapped\",[961]],[[1010,1010],\"mapped\",[963]],[[1011,1011],\"valid\"],[[1012,1012],\"mapped\",[952]],[[1013,1013],\"mapped\",[949]],[[1014,1014],\"valid\",[],\"NV8\"],[[1015,1015],\"mapped\",[1016]],[[1016,1016],\"valid\"],[[1017,1017],\"mapped\",[963]],[[1018,1018],\"mapped\",[1019]],[[1019,1019],\"valid\"],[[1020,1020],\"valid\"],[[1021,1021],\"mapped\",[891]],[[1022,1022],\"mapped\",[892]],[[1023,1023],\"mapped\",[893]],[[1024,1024],\"mapped\",[1104]],[[1025,1025],\"mapped\",[1105]],[[1026,1026],\"mapped\",[1106]],[[1027,1027],\"mapped\",[1107]],[[1028,1028],\"mapped\",[1108]],[[1029,1029],\"mapped\",[1109]],[[1030,1030],\"mapped\",[1110]],[[1031,1031],\"mapped\",[1111]],[[1032,1032],\"mapped\",[1112]],[[1033,1033],\"mapped\",[1113]],[[1034,1034],\"mapped\",[1114]],[[1035,1035],\"mapped\",[1115]],[[1036,1036],\"mapped\",[1116]],[[1037,1037],\"mapped\",[1117]],[[1038,1038],\"mapped\",[1118]],[[1039,1039],\"mapped\",[1119]],[[1040,1040],\"mapped\",[1072]],[[1041,1041],\"mapped\",[1073]],[[1042,1042],\"mapped\",[1074]],[[1043,1043],\"mapped\",[1075]],[[1044,1044],\"mapped\",[1076]],[[1045,1045],\"mapped\",[1077]],[[1046,1046],\"mapped
\",[1078]],[[1047,1047],\"mapped\",[1079]],[[1048,1048],\"mapped\",[1080]],[[1049,1049],\"mapped\",[1081]],[[1050,1050],\"mapped\",[1082]],[[1051,1051],\"mapped\",[1083]],[[1052,1052],\"mapped\",[1084]],[[1053,1053],\"mapped\",[1085]],[[1054,1054],\"mapped\",[1086]],[[1055,1055],\"mapped\",[1087]],[[1056,1056],\"mapped\",[1088]],[[1057,1057],\"mapped\",[1089]],[[1058,1058],\"mapped\",[1090]],[[1059,1059],\"mapped\",[1091]],[[1060,1060],\"mapped\",[1092]],[[1061,1061],\"mapped\",[1093]],[[1062,1062],\"mapped\",[1094]],[[1063,1063],\"mapped\",[1095]],[[1064,1064],\"mapped\",[1096]],[[1065,1065],\"mapped\",[1097]],[[1066,1066],\"mapped\",[1098]],[[1067,1067],\"mapped\",[1099]],[[1068,1068],\"mapped\",[1100]],[[1069,1069],\"mapped\",[1101]],[[1070,1070],\"mapped\",[1102]],[[1071,1071],\"mapped\",[1103]],[[1072,1103],\"valid\"],[[1104,1104],\"valid\"],[[1105,1116],\"valid\"],[[1117,1117],\"valid\"],[[1118,1119],\"valid\"],[[1120,1120],\"mapped\",[1121]],[[1121,1121],\"valid\"],[[1122,1122],\"mapped\",[1123]],[[1123,1123],\"valid\"],[[1124,1124],\"mapped\",[1125]],[[1125,1125],\"valid\"],[[1126,1126],\"mapped\",[1127]],[[1127,1127],\"valid\"],[[1128,1128],\"mapped\",[1129]],[[1129,1129],\"valid\"],[[1130,1130],\"mapped\",[1131]],[[1131,1131],\"valid\"],[[1132,1132],\"mapped\",[1133]],[[1133,1133],\"valid\"],[[1134,1134],\"mapped\",[1135]],[[1135,1135],\"valid\"],[[1136,1136],\"mapped\",[1137]],[[1137,1137],\"valid\"],[[1138,1138],\"mapped\",[1139]],[[1139,1139],\"valid\"],[[1140,1140],\"mapped\",[1141]],[[1141,1141],\"valid\"],[[1142,1142],\"mapped\",[1143]],[[1143,1143],\"valid\"],[[1144,1144],\"mapped\",[1145]],[[1145,1145],\"valid\"],[[1146,1146],\"mapped\",[1147]],[[1147,1147],\"valid\"],[[1148,1148],\"mapped\",[1149]],[[1149,1149],\"valid\"],[[1150,1150],\"mapped\",[1151]],[[1151,1151],\"valid\"],[[1152,1152],\"mapped\",[1153]],[[1153,1153],\"valid\"],[[1154,1154],\"valid\",[],\"NV8\"],[[1155,1158],\"valid\"],[[1159,1159],\"valid\"],[[1160,1161],\"valid\",[],\"NV8\"]
,[[1162,1162],\"mapped\",[1163]],[[1163,1163],\"valid\"],[[1164,1164],\"mapped\",[1165]],[[1165,1165],\"valid\"],[[1166,1166],\"mapped\",[1167]],[[1167,1167],\"valid\"],[[1168,1168],\"mapped\",[1169]],[[1169,1169],\"valid\"],[[1170,1170],\"mapped\",[1171]],[[1171,1171],\"valid\"],[[1172,1172],\"mapped\",[1173]],[[1173,1173],\"valid\"],[[1174,1174],\"mapped\",[1175]],[[1175,1175],\"valid\"],[[1176,1176],\"mapped\",[1177]],[[1177,1177],\"valid\"],[[1178,1178],\"mapped\",[1179]],[[1179,1179],\"valid\"],[[1180,1180],\"mapped\",[1181]],[[1181,1181],\"valid\"],[[1182,1182],\"mapped\",[1183]],[[1183,1183],\"valid\"],[[1184,1184],\"mapped\",[1185]],[[1185,1185],\"valid\"],[[1186,1186],\"mapped\",[1187]],[[1187,1187],\"valid\"],[[1188,1188],\"mapped\",[1189]],[[1189,1189],\"valid\"],[[1190,1190],\"mapped\",[1191]],[[1191,1191],\"valid\"],[[1192,1192],\"mapped\",[1193]],[[1193,1193],\"valid\"],[[1194,1194],\"mapped\",[1195]],[[1195,1195],\"valid\"],[[1196,1196],\"mapped\",[1197]],[[1197,1197],\"valid\"],[[1198,1198],\"mapped\",[1199]],[[1199,1199],\"valid\"],[[1200,1200],\"mapped\",[1201]],[[1201,1201],\"valid\"],[[1202,1202],\"mapped\",[1203]],[[1203,1203],\"valid\"],[[1204,1204],\"mapped\",[1205]],[[1205,1205],\"valid\"],[[1206,1206],\"mapped\",[1207]],[[1207,1207],\"valid\"],[[1208,1208],\"mapped\",[1209]],[[1209,1209],\"valid\"],[[1210,1210],\"mapped\",[1211]],[[1211,1211],\"valid\"],[[1212,1212],\"mapped\",[1213]],[[1213,1213],\"valid\"],[[1214,1214],\"mapped\",[1215]],[[1215,1215],\"valid\"],[[1216,1216],\"disallowed\"],[[1217,1217],\"mapped\",[1218]],[[1218,1218],\"valid\"],[[1219,1219],\"mapped\",[1220]],[[1220,1220],\"valid\"],[[1221,1221],\"mapped\",[1222]],[[1222,1222],\"valid\"],[[1223,1223],\"mapped\",[1224]],[[1224,1224],\"valid\"],[[1225,1225],\"mapped\",[1226]],[[1226,1226],\"valid\"],[[1227,1227],\"mapped\",[1228]],[[1228,1228],\"valid\"],[[1229,1229],\"mapped\",[1230]],[[1230,1230],\"valid\"],[[1231,1231],\"valid\"],[[1232,1232],\"mapped\",[1233]],[[1233,123
3],\"valid\"],[[1234,1234],\"mapped\",[1235]],[[1235,1235],\"valid\"],[[1236,1236],\"mapped\",[1237]],[[1237,1237],\"valid\"],[[1238,1238],\"mapped\",[1239]],[[1239,1239],\"valid\"],[[1240,1240],\"mapped\",[1241]],[[1241,1241],\"valid\"],[[1242,1242],\"mapped\",[1243]],[[1243,1243],\"valid\"],[[1244,1244],\"mapped\",[1245]],[[1245,1245],\"valid\"],[[1246,1246],\"mapped\",[1247]],[[1247,1247],\"valid\"],[[1248,1248],\"mapped\",[1249]],[[1249,1249],\"valid\"],[[1250,1250],\"mapped\",[1251]],[[1251,1251],\"valid\"],[[1252,1252],\"mapped\",[1253]],[[1253,1253],\"valid\"],[[1254,1254],\"mapped\",[1255]],[[1255,1255],\"valid\"],[[1256,1256],\"mapped\",[1257]],[[1257,1257],\"valid\"],[[1258,1258],\"mapped\",[1259]],[[1259,1259],\"valid\"],[[1260,1260],\"mapped\",[1261]],[[1261,1261],\"valid\"],[[1262,1262],\"mapped\",[1263]],[[1263,1263],\"valid\"],[[1264,1264],\"mapped\",[1265]],[[1265,1265],\"valid\"],[[1266,1266],\"mapped\",[1267]],[[1267,1267],\"valid\"],[[1268,1268],\"mapped\",[1269]],[[1269,1269],\"valid\"],[[1270,1270],\"mapped\",[1271]],[[1271,1271],\"valid\"],[[1272,1272],\"mapped\",[1273]],[[1273,1273],\"valid\"],[[1274,1274],\"mapped\",[1275]],[[1275,1275],\"valid\"],[[1276,1276],\"mapped\",[1277]],[[1277,1277],\"valid\"],[[1278,1278],\"mapped\",[1279]],[[1279,1279],\"valid\"],[[1280,1280],\"mapped\",[1281]],[[1281,1281],\"valid\"],[[1282,1282],\"mapped\",[1283]],[[1283,1283],\"valid\"],[[1284,1284],\"mapped\",[1285]],[[1285,1285],\"valid\"],[[1286,1286],\"mapped\",[1287]],[[1287,1287],\"valid\"],[[1288,1288],\"mapped\",[1289]],[[1289,1289],\"valid\"],[[1290,1290],\"mapped\",[1291]],[[1291,1291],\"valid\"],[[1292,1292],\"mapped\",[1293]],[[1293,1293],\"valid\"],[[1294,1294],\"mapped\",[1295]],[[1295,1295],\"valid\"],[[1296,1296],\"mapped\",[1297]],[[1297,1297],\"valid\"],[[1298,1298],\"mapped\",[1299]],[[1299,1299],\"valid\"],[[1300,1300],\"mapped\",[1301]],[[1301,1301],\"valid\"],[[1302,1302],\"mapped\",[1303]],[[1303,1303],\"valid\"],[[1304,1304],\"mapped\",[1
305]],[[1305,1305],\"valid\"],[[1306,1306],\"mapped\",[1307]],[[1307,1307],\"valid\"],[[1308,1308],\"mapped\",[1309]],[[1309,1309],\"valid\"],[[1310,1310],\"mapped\",[1311]],[[1311,1311],\"valid\"],[[1312,1312],\"mapped\",[1313]],[[1313,1313],\"valid\"],[[1314,1314],\"mapped\",[1315]],[[1315,1315],\"valid\"],[[1316,1316],\"mapped\",[1317]],[[1317,1317],\"valid\"],[[1318,1318],\"mapped\",[1319]],[[1319,1319],\"valid\"],[[1320,1320],\"mapped\",[1321]],[[1321,1321],\"valid\"],[[1322,1322],\"mapped\",[1323]],[[1323,1323],\"valid\"],[[1324,1324],\"mapped\",[1325]],[[1325,1325],\"valid\"],[[1326,1326],\"mapped\",[1327]],[[1327,1327],\"valid\"],[[1328,1328],\"disallowed\"],[[1329,1329],\"mapped\",[1377]],[[1330,1330],\"mapped\",[1378]],[[1331,1331],\"mapped\",[1379]],[[1332,1332],\"mapped\",[1380]],[[1333,1333],\"mapped\",[1381]],[[1334,1334],\"mapped\",[1382]],[[1335,1335],\"mapped\",[1383]],[[1336,1336],\"mapped\",[1384]],[[1337,1337],\"mapped\",[1385]],[[1338,1338],\"mapped\",[1386]],[[1339,1339],\"mapped\",[1387]],[[1340,1340],\"mapped\",[1388]],[[1341,1341],\"mapped\",[1389]],[[1342,1342],\"mapped\",[1390]],[[1343,1343],\"mapped\",[1391]],[[1344,1344],\"mapped\",[1392]],[[1345,1345],\"mapped\",[1393]],[[1346,1346],\"mapped\",[1394]],[[1347,1347],\"mapped\",[1395]],[[1348,1348],\"mapped\",[1396]],[[1349,1349],\"mapped\",[1397]],[[1350,1350],\"mapped\",[1398]],[[1351,1351],\"mapped\",[1399]],[[1352,1352],\"mapped\",[1400]],[[1353,1353],\"mapped\",[1401]],[[1354,1354],\"mapped\",[1402]],[[1355,1355],\"mapped\",[1403]],[[1356,1356],\"mapped\",[1404]],[[1357,1357],\"mapped\",[1405]],[[1358,1358],\"mapped\",[1406]],[[1359,1359],\"mapped\",[1407]],[[1360,1360],\"mapped\",[1408]],[[1361,1361],\"mapped\",[1409]],[[1362,1362],\"mapped\",[1410]],[[1363,1363],\"mapped\",[1411]],[[1364,1364],\"mapped\",[1412]],[[1365,1365],\"mapped\",[1413]],[[1366,1366],\"mapped\",[1414]],[[1367,1368],\"disallowed\"],[[1369,1369],\"valid\"],[[1370,1375],\"valid\",[],\"NV8\"],[[1376,1376],\"disall
owed\"],[[1377,1414],\"valid\"],[[1415,1415],\"mapped\",[1381,1410]],[[1416,1416],\"disallowed\"],[[1417,1417],\"valid\",[],\"NV8\"],[[1418,1418],\"valid\",[],\"NV8\"],[[1419,1420],\"disallowed\"],[[1421,1422],\"valid\",[],\"NV8\"],[[1423,1423],\"valid\",[],\"NV8\"],[[1424,1424],\"disallowed\"],[[1425,1441],\"valid\"],[[1442,1442],\"valid\"],[[1443,1455],\"valid\"],[[1456,1465],\"valid\"],[[1466,1466],\"valid\"],[[1467,1469],\"valid\"],[[1470,1470],\"valid\",[],\"NV8\"],[[1471,1471],\"valid\"],[[1472,1472],\"valid\",[],\"NV8\"],[[1473,1474],\"valid\"],[[1475,1475],\"valid\",[],\"NV8\"],[[1476,1476],\"valid\"],[[1477,1477],\"valid\"],[[1478,1478],\"valid\",[],\"NV8\"],[[1479,1479],\"valid\"],[[1480,1487],\"disallowed\"],[[1488,1514],\"valid\"],[[1515,1519],\"disallowed\"],[[1520,1524],\"valid\"],[[1525,1535],\"disallowed\"],[[1536,1539],\"disallowed\"],[[1540,1540],\"disallowed\"],[[1541,1541],\"disallowed\"],[[1542,1546],\"valid\",[],\"NV8\"],[[1547,1547],\"valid\",[],\"NV8\"],[[1548,1548],\"valid\",[],\"NV8\"],[[1549,1551],\"valid\",[],\"NV8\"],[[1552,1557],\"valid\"],[[1558,1562],\"valid\"],[[1563,1563],\"valid\",[],\"NV8\"],[[1564,1564],\"disallowed\"],[[1565,1565],\"disallowed\"],[[1566,1566],\"valid\",[],\"NV8\"],[[1567,1567],\"valid\",[],\"NV8\"],[[1568,1568],\"valid\"],[[1569,1594],\"valid\"],[[1595,1599],\"valid\"],[[1600,1600],\"valid\",[],\"NV8\"],[[1601,1618],\"valid\"],[[1619,1621],\"valid\"],[[1622,1624],\"valid\"],[[1625,1630],\"valid\"],[[1631,1631],\"valid\"],[[1632,1641],\"valid\"],[[1642,1645],\"valid\",[],\"NV8\"],[[1646,1647],\"valid\"],[[1648,1652],\"valid\"],[[1653,1653],\"mapped\",[1575,1652]],[[1654,1654],\"mapped\",[1608,1652]],[[1655,1655],\"mapped\",[1735,1652]],[[1656,1656],\"mapped\",[1610,1652]],[[1657,1719],\"valid\"],[[1720,1721],\"valid\"],[[1722,1726],\"valid\"],[[1727,1727],\"valid\"],[[1728,1742],\"valid\"],[[1743,1743],\"valid\"],[[1744,1747],\"valid\"],[[1748,1748],\"valid\",[],\"NV8\"],[[1749,1756],\"valid\"],[[1757,1757],\"dis
allowed\"],[[1758,1758],\"valid\",[],\"NV8\"],[[1759,1768],\"valid\"],[[1769,1769],\"valid\",[],\"NV8\"],[[1770,1773],\"valid\"],[[1774,1775],\"valid\"],[[1776,1785],\"valid\"],[[1786,1790],\"valid\"],[[1791,1791],\"valid\"],[[1792,1805],\"valid\",[],\"NV8\"],[[1806,1806],\"disallowed\"],[[1807,1807],\"disallowed\"],[[1808,1836],\"valid\"],[[1837,1839],\"valid\"],[[1840,1866],\"valid\"],[[1867,1868],\"disallowed\"],[[1869,1871],\"valid\"],[[1872,1901],\"valid\"],[[1902,1919],\"valid\"],[[1920,1968],\"valid\"],[[1969,1969],\"valid\"],[[1970,1983],\"disallowed\"],[[1984,2037],\"valid\"],[[2038,2042],\"valid\",[],\"NV8\"],[[2043,2047],\"disallowed\"],[[2048,2093],\"valid\"],[[2094,2095],\"disallowed\"],[[2096,2110],\"valid\",[],\"NV8\"],[[2111,2111],\"disallowed\"],[[2112,2139],\"valid\"],[[2140,2141],\"disallowed\"],[[2142,2142],\"valid\",[],\"NV8\"],[[2143,2207],\"disallowed\"],[[2208,2208],\"valid\"],[[2209,2209],\"valid\"],[[2210,2220],\"valid\"],[[2221,2226],\"valid\"],[[2227,2228],\"valid\"],[[2229,2274],\"disallowed\"],[[2275,2275],\"valid\"],[[2276,2302],\"valid\"],[[2303,2303],\"valid\"],[[2304,2304],\"valid\"],[[2305,2307],\"valid\"],[[2308,2308],\"valid\"],[[2309,2361],\"valid\"],[[2362,2363],\"valid\"],[[2364,2381],\"valid\"],[[2382,2382],\"valid\"],[[2383,2383],\"valid\"],[[2384,2388],\"valid\"],[[2389,2389],\"valid\"],[[2390,2391],\"valid\"],[[2392,2392],\"mapped\",[2325,2364]],[[2393,2393],\"mapped\",[2326,2364]],[[2394,2394],\"mapped\",[2327,2364]],[[2395,2395],\"mapped\",[2332,2364]],[[2396,2396],\"mapped\",[2337,2364]],[[2397,2397],\"mapped\",[2338,2364]],[[2398,2398],\"mapped\",[2347,2364]],[[2399,2399],\"mapped\",[2351,2364]],[[2400,2403],\"valid\"],[[2404,2405],\"valid\",[],\"NV8\"],[[2406,2415],\"valid\"],[[2416,2416],\"valid\",[],\"NV8\"],[[2417,2418],\"valid\"],[[2419,2423],\"valid\"],[[2424,2424],\"valid\"],[[2425,2426],\"valid\"],[[2427,2428],\"valid\"],[[2429,2429],\"valid\"],[[2430,2431],\"valid\"],[[2432,2432],\"valid\"],[[2433,2435],\"vali
d\"],[[2436,2436],\"disallowed\"],[[2437,2444],\"valid\"],[[2445,2446],\"disallowed\"],[[2447,2448],\"valid\"],[[2449,2450],\"disallowed\"],[[2451,2472],\"valid\"],[[2473,2473],\"disallowed\"],[[2474,2480],\"valid\"],[[2481,2481],\"disallowed\"],[[2482,2482],\"valid\"],[[2483,2485],\"disallowed\"],[[2486,2489],\"valid\"],[[2490,2491],\"disallowed\"],[[2492,2492],\"valid\"],[[2493,2493],\"valid\"],[[2494,2500],\"valid\"],[[2501,2502],\"disallowed\"],[[2503,2504],\"valid\"],[[2505,2506],\"disallowed\"],[[2507,2509],\"valid\"],[[2510,2510],\"valid\"],[[2511,2518],\"disallowed\"],[[2519,2519],\"valid\"],[[2520,2523],\"disallowed\"],[[2524,2524],\"mapped\",[2465,2492]],[[2525,2525],\"mapped\",[2466,2492]],[[2526,2526],\"disallowed\"],[[2527,2527],\"mapped\",[2479,2492]],[[2528,2531],\"valid\"],[[2532,2533],\"disallowed\"],[[2534,2545],\"valid\"],[[2546,2554],\"valid\",[],\"NV8\"],[[2555,2555],\"valid\",[],\"NV8\"],[[2556,2560],\"disallowed\"],[[2561,2561],\"valid\"],[[2562,2562],\"valid\"],[[2563,2563],\"valid\"],[[2564,2564],\"disallowed\"],[[2565,2570],\"valid\"],[[2571,2574],\"disallowed\"],[[2575,2576],\"valid\"],[[2577,2578],\"disallowed\"],[[2579,2600],\"valid\"],[[2601,2601],\"disallowed\"],[[2602,2608],\"valid\"],[[2609,2609],\"disallowed\"],[[2610,2610],\"valid\"],[[2611,2611],\"mapped\",[2610,2620]],[[2612,2612],\"disallowed\"],[[2613,2613],\"valid\"],[[2614,2614],\"mapped\",[2616,2620]],[[2615,2615],\"disallowed\"],[[2616,2617],\"valid\"],[[2618,2619],\"disallowed\"],[[2620,2620],\"valid\"],[[2621,2621],\"disallowed\"],[[2622,2626],\"valid\"],[[2627,2630],\"disallowed\"],[[2631,2632],\"valid\"],[[2633,2634],\"disallowed\"],[[2635,2637],\"valid\"],[[2638,2640],\"disallowed\"],[[2641,2641],\"valid\"],[[2642,2648],\"disallowed\"],[[2649,2649],\"mapped\",[2582,2620]],[[2650,2650],\"mapped\",[2583,2620]],[[2651,2651],\"mapped\",[2588,2620]],[[2652,2652],\"valid\"],[[2653,2653],\"disallowed\"],[[2654,2654],\"mapped\",[2603,2620]],[[2655,2661],\"disallowed\"],[[2662,
2676],\"valid\"],[[2677,2677],\"valid\"],[[2678,2688],\"disallowed\"],[[2689,2691],\"valid\"],[[2692,2692],\"disallowed\"],[[2693,2699],\"valid\"],[[2700,2700],\"valid\"],[[2701,2701],\"valid\"],[[2702,2702],\"disallowed\"],[[2703,2705],\"valid\"],[[2706,2706],\"disallowed\"],[[2707,2728],\"valid\"],[[2729,2729],\"disallowed\"],[[2730,2736],\"valid\"],[[2737,2737],\"disallowed\"],[[2738,2739],\"valid\"],[[2740,2740],\"disallowed\"],[[2741,2745],\"valid\"],[[2746,2747],\"disallowed\"],[[2748,2757],\"valid\"],[[2758,2758],\"disallowed\"],[[2759,2761],\"valid\"],[[2762,2762],\"disallowed\"],[[2763,2765],\"valid\"],[[2766,2767],\"disallowed\"],[[2768,2768],\"valid\"],[[2769,2783],\"disallowed\"],[[2784,2784],\"valid\"],[[2785,2787],\"valid\"],[[2788,2789],\"disallowed\"],[[2790,2799],\"valid\"],[[2800,2800],\"valid\",[],\"NV8\"],[[2801,2801],\"valid\",[],\"NV8\"],[[2802,2808],\"disallowed\"],[[2809,2809],\"valid\"],[[2810,2816],\"disallowed\"],[[2817,2819],\"valid\"],[[2820,2820],\"disallowed\"],[[2821,2828],\"valid\"],[[2829,2830],\"disallowed\"],[[2831,2832],\"valid\"],[[2833,2834],\"disallowed\"],[[2835,2856],\"valid\"],[[2857,2857],\"disallowed\"],[[2858,2864],\"valid\"],[[2865,2865],\"disallowed\"],[[2866,2867],\"valid\"],[[2868,2868],\"disallowed\"],[[2869,2869],\"valid\"],[[2870,2873],\"valid\"],[[2874,2875],\"disallowed\"],[[2876,2883],\"valid\"],[[2884,2884],\"valid\"],[[2885,2886],\"disallowed\"],[[2887,2888],\"valid\"],[[2889,2890],\"disallowed\"],[[2891,2893],\"valid\"],[[2894,2901],\"disallowed\"],[[2902,2903],\"valid\"],[[2904,2907],\"disallowed\"],[[2908,2908],\"mapped\",[2849,2876]],[[2909,2909],\"mapped\",[2850,2876]],[[2910,2910],\"disallowed\"],[[2911,2913],\"valid\"],[[2914,2915],\"valid\"],[[2916,2917],\"disallowed\"],[[2918,2927],\"valid\"],[[2928,2928],\"valid\",[],\"NV8\"],[[2929,2929],\"valid\"],[[2930,2935],\"valid\",[],\"NV8\"],[[2936,2945],\"disallowed\"],[[2946,2947],\"valid\"],[[2948,2948],\"disallowed\"],[[2949,2954],\"valid\"],[[2955,2957
],\"disallowed\"],[[2958,2960],\"valid\"],[[2961,2961],\"disallowed\"],[[2962,2965],\"valid\"],[[2966,2968],\"disallowed\"],[[2969,2970],\"valid\"],[[2971,2971],\"disallowed\"],[[2972,2972],\"valid\"],[[2973,2973],\"disallowed\"],[[2974,2975],\"valid\"],[[2976,2978],\"disallowed\"],[[2979,2980],\"valid\"],[[2981,2983],\"disallowed\"],[[2984,2986],\"valid\"],[[2987,2989],\"disallowed\"],[[2990,2997],\"valid\"],[[2998,2998],\"valid\"],[[2999,3001],\"valid\"],[[3002,3005],\"disallowed\"],[[3006,3010],\"valid\"],[[3011,3013],\"disallowed\"],[[3014,3016],\"valid\"],[[3017,3017],\"disallowed\"],[[3018,3021],\"valid\"],[[3022,3023],\"disallowed\"],[[3024,3024],\"valid\"],[[3025,3030],\"disallowed\"],[[3031,3031],\"valid\"],[[3032,3045],\"disallowed\"],[[3046,3046],\"valid\"],[[3047,3055],\"valid\"],[[3056,3058],\"valid\",[],\"NV8\"],[[3059,3066],\"valid\",[],\"NV8\"],[[3067,3071],\"disallowed\"],[[3072,3072],\"valid\"],[[3073,3075],\"valid\"],[[3076,3076],\"disallowed\"],[[3077,3084],\"valid\"],[[3085,3085],\"disallowed\"],[[3086,3088],\"valid\"],[[3089,3089],\"disallowed\"],[[3090,3112],\"valid\"],[[3113,3113],\"disallowed\"],[[3114,3123],\"valid\"],[[3124,3124],\"valid\"],[[3125,3129],\"valid\"],[[3130,3132],\"disallowed\"],[[3133,3133],\"valid\"],[[3134,3140],\"valid\"],[[3141,3141],\"disallowed\"],[[3142,3144],\"valid\"],[[3145,3145],\"disallowed\"],[[3146,3149],\"valid\"],[[3150,3156],\"disallowed\"],[[3157,3158],\"valid\"],[[3159,3159],\"disallowed\"],[[3160,3161],\"valid\"],[[3162,3162],\"valid\"],[[3163,3167],\"disallowed\"],[[3168,3169],\"valid\"],[[3170,3171],\"valid\"],[[3172,3173],\"disallowed\"],[[3174,3183],\"valid\"],[[3184,3191],\"disallowed\"],[[3192,3199],\"valid\",[],\"NV8\"],[[3200,3200],\"disallowed\"],[[3201,3201],\"valid\"],[[3202,3203],\"valid\"],[[3204,3204],\"disallowed\"],[[3205,3212],\"valid\"],[[3213,3213],\"disallowed\"],[[3214,3216],\"valid\"],[[3217,3217],\"disallowed\"],[[3218,3240],\"valid\"],[[3241,3241],\"disallowed\"],[[3242,3251],\"val
id\"],[[3252,3252],\"disallowed\"],[[3253,3257],\"valid\"],[[3258,3259],\"disallowed\"],[[3260,3261],\"valid\"],[[3262,3268],\"valid\"],[[3269,3269],\"disallowed\"],[[3270,3272],\"valid\"],[[3273,3273],\"disallowed\"],[[3274,3277],\"valid\"],[[3278,3284],\"disallowed\"],[[3285,3286],\"valid\"],[[3287,3293],\"disallowed\"],[[3294,3294],\"valid\"],[[3295,3295],\"disallowed\"],[[3296,3297],\"valid\"],[[3298,3299],\"valid\"],[[3300,3301],\"disallowed\"],[[3302,3311],\"valid\"],[[3312,3312],\"disallowed\"],[[3313,3314],\"valid\"],[[3315,3328],\"disallowed\"],[[3329,3329],\"valid\"],[[3330,3331],\"valid\"],[[3332,3332],\"disallowed\"],[[3333,3340],\"valid\"],[[3341,3341],\"disallowed\"],[[3342,3344],\"valid\"],[[3345,3345],\"disallowed\"],[[3346,3368],\"valid\"],[[3369,3369],\"valid\"],[[3370,3385],\"valid\"],[[3386,3386],\"valid\"],[[3387,3388],\"disallowed\"],[[3389,3389],\"valid\"],[[3390,3395],\"valid\"],[[3396,3396],\"valid\"],[[3397,3397],\"disallowed\"],[[3398,3400],\"valid\"],[[3401,3401],\"disallowed\"],[[3402,3405],\"valid\"],[[3406,3406],\"valid\"],[[3407,3414],\"disallowed\"],[[3415,3415],\"valid\"],[[3416,3422],\"disallowed\"],[[3423,3423],\"valid\"],[[3424,3425],\"valid\"],[[3426,3427],\"valid\"],[[3428,3429],\"disallowed\"],[[3430,3439],\"valid\"],[[3440,3445],\"valid\",[],\"NV8\"],[[3446,3448],\"disallowed\"],[[3449,3449],\"valid\",[],\"NV8\"],[[3450,3455],\"valid\"],[[3456,3457],\"disallowed\"],[[3458,3459],\"valid\"],[[3460,3460],\"disallowed\"],[[3461,3478],\"valid\"],[[3479,3481],\"disallowed\"],[[3482,3505],\"valid\"],[[3506,3506],\"disallowed\"],[[3507,3515],\"valid\"],[[3516,3516],\"disallowed\"],[[3517,3517],\"valid\"],[[3518,3519],\"disallowed\"],[[3520,3526],\"valid\"],[[3527,3529],\"disallowed\"],[[3530,3530],\"valid\"],[[3531,3534],\"disallowed\"],[[3535,3540],\"valid\"],[[3541,3541],\"disallowed\"],[[3542,3542],\"valid\"],[[3543,3543],\"disallowed\"],[[3544,3551],\"valid\"],[[3552,3557],\"disallowed\"],[[3558,3567],\"valid\"],[[3568,3569],\"di
sallowed\"],[[3570,3571],\"valid\"],[[3572,3572],\"valid\",[],\"NV8\"],[[3573,3584],\"disallowed\"],[[3585,3634],\"valid\"],[[3635,3635],\"mapped\",[3661,3634]],[[3636,3642],\"valid\"],[[3643,3646],\"disallowed\"],[[3647,3647],\"valid\",[],\"NV8\"],[[3648,3662],\"valid\"],[[3663,3663],\"valid\",[],\"NV8\"],[[3664,3673],\"valid\"],[[3674,3675],\"valid\",[],\"NV8\"],[[3676,3712],\"disallowed\"],[[3713,3714],\"valid\"],[[3715,3715],\"disallowed\"],[[3716,3716],\"valid\"],[[3717,3718],\"disallowed\"],[[3719,3720],\"valid\"],[[3721,3721],\"disallowed\"],[[3722,3722],\"valid\"],[[3723,3724],\"disallowed\"],[[3725,3725],\"valid\"],[[3726,3731],\"disallowed\"],[[3732,3735],\"valid\"],[[3736,3736],\"disallowed\"],[[3737,3743],\"valid\"],[[3744,3744],\"disallowed\"],[[3745,3747],\"valid\"],[[3748,3748],\"disallowed\"],[[3749,3749],\"valid\"],[[3750,3750],\"disallowed\"],[[3751,3751],\"valid\"],[[3752,3753],\"disallowed\"],[[3754,3755],\"valid\"],[[3756,3756],\"disallowed\"],[[3757,3762],\"valid\"],[[3763,3763],\"mapped\",[3789,3762]],[[3764,3769],\"valid\"],[[3770,3770],\"disallowed\"],[[3771,3773],\"valid\"],[[3774,3775],\"disallowed\"],[[3776,3780],\"valid\"],[[3781,3781],\"disallowed\"],[[3782,3782],\"valid\"],[[3783,3783],\"disallowed\"],[[3784,3789],\"valid\"],[[3790,3791],\"disallowed\"],[[3792,3801],\"valid\"],[[3802,3803],\"disallowed\"],[[3804,3804],\"mapped\",[3755,3737]],[[3805,3805],\"mapped\",[3755,3745]],[[3806,3807],\"valid\"],[[3808,3839],\"disallowed\"],[[3840,3840],\"valid\"],[[3841,3850],\"valid\",[],\"NV8\"],[[3851,3851],\"valid\"],[[3852,3852],\"mapped\",[3851]],[[3853,3863],\"valid\",[],\"NV8\"],[[3864,3865],\"valid\"],[[3866,3871],\"valid\",[],\"NV8\"],[[3872,3881],\"valid\"],[[3882,3892],\"valid\",[],\"NV8\"],[[3893,3893],\"valid\"],[[3894,3894],\"valid\",[],\"NV8\"],[[3895,3895],\"valid\"],[[3896,3896],\"valid\",[],\"NV8\"],[[3897,3897],\"valid\"],[[3898,3901],\"valid\",[],\"NV8\"],[[3902,3906],\"valid\"],[[3907,3907],\"mapped\",[3906,4023]],[[3908,39
11],\"valid\"],[[3912,3912],\"disallowed\"],[[3913,3916],\"valid\"],[[3917,3917],\"mapped\",[3916,4023]],[[3918,3921],\"valid\"],[[3922,3922],\"mapped\",[3921,4023]],[[3923,3926],\"valid\"],[[3927,3927],\"mapped\",[3926,4023]],[[3928,3931],\"valid\"],[[3932,3932],\"mapped\",[3931,4023]],[[3933,3944],\"valid\"],[[3945,3945],\"mapped\",[3904,4021]],[[3946,3946],\"valid\"],[[3947,3948],\"valid\"],[[3949,3952],\"disallowed\"],[[3953,3954],\"valid\"],[[3955,3955],\"mapped\",[3953,3954]],[[3956,3956],\"valid\"],[[3957,3957],\"mapped\",[3953,3956]],[[3958,3958],\"mapped\",[4018,3968]],[[3959,3959],\"mapped\",[4018,3953,3968]],[[3960,3960],\"mapped\",[4019,3968]],[[3961,3961],\"mapped\",[4019,3953,3968]],[[3962,3968],\"valid\"],[[3969,3969],\"mapped\",[3953,3968]],[[3970,3972],\"valid\"],[[3973,3973],\"valid\",[],\"NV8\"],[[3974,3979],\"valid\"],[[3980,3983],\"valid\"],[[3984,3986],\"valid\"],[[3987,3987],\"mapped\",[3986,4023]],[[3988,3989],\"valid\"],[[3990,3990],\"valid\"],[[3991,3991],\"valid\"],[[3992,3992],\"disallowed\"],[[3993,3996],\"valid\"],[[3997,3997],\"mapped\",[3996,4023]],[[3998,4001],\"valid\"],[[4002,4002],\"mapped\",[4001,4023]],[[4003,4006],\"valid\"],[[4007,4007],\"mapped\",[4006,4023]],[[4008,4011],\"valid\"],[[4012,4012],\"mapped\",[4011,4023]],[[4013,4013],\"valid\"],[[4014,4016],\"valid\"],[[4017,4023],\"valid\"],[[4024,4024],\"valid\"],[[4025,4025],\"mapped\",[3984,4021]],[[4026,4028],\"valid\"],[[4029,4029],\"disallowed\"],[[4030,4037],\"valid\",[],\"NV8\"],[[4038,4038],\"valid\"],[[4039,4044],\"valid\",[],\"NV8\"],[[4045,4045],\"disallowed\"],[[4046,4046],\"valid\",[],\"NV8\"],[[4047,4047],\"valid\",[],\"NV8\"],[[4048,4049],\"valid\",[],\"NV8\"],[[4050,4052],\"valid\",[],\"NV8\"],[[4053,4056],\"valid\",[],\"NV8\"],[[4057,4058],\"valid\",[],\"NV8\"],[[4059,4095],\"disallowed\"],[[4096,4129],\"valid\"],[[4130,4130],\"valid\"],[[4131,4135],\"valid\"],[[4136,4136],\"valid\"],[[4137,4138],\"valid\"],[[4139,4139],\"valid\"],[[4140,4146],\"valid\"],[[41
47,4149],\"valid\"],[[4150,4153],\"valid\"],[[4154,4159],\"valid\"],[[4160,4169],\"valid\"],[[4170,4175],\"valid\",[],\"NV8\"],[[4176,4185],\"valid\"],[[4186,4249],\"valid\"],[[4250,4253],\"valid\"],[[4254,4255],\"valid\",[],\"NV8\"],[[4256,4293],\"disallowed\"],[[4294,4294],\"disallowed\"],[[4295,4295],\"mapped\",[11559]],[[4296,4300],\"disallowed\"],[[4301,4301],\"mapped\",[11565]],[[4302,4303],\"disallowed\"],[[4304,4342],\"valid\"],[[4343,4344],\"valid\"],[[4345,4346],\"valid\"],[[4347,4347],\"valid\",[],\"NV8\"],[[4348,4348],\"mapped\",[4316]],[[4349,4351],\"valid\"],[[4352,4441],\"valid\",[],\"NV8\"],[[4442,4446],\"valid\",[],\"NV8\"],[[4447,4448],\"disallowed\"],[[4449,4514],\"valid\",[],\"NV8\"],[[4515,4519],\"valid\",[],\"NV8\"],[[4520,4601],\"valid\",[],\"NV8\"],[[4602,4607],\"valid\",[],\"NV8\"],[[4608,4614],\"valid\"],[[4615,4615],\"valid\"],[[4616,4678],\"valid\"],[[4679,4679],\"valid\"],[[4680,4680],\"valid\"],[[4681,4681],\"disallowed\"],[[4682,4685],\"valid\"],[[4686,4687],\"disallowed\"],[[4688,4694],\"valid\"],[[4695,4695],\"disallowed\"],[[4696,4696],\"valid\"],[[4697,4697],\"disallowed\"],[[4698,4701],\"valid\"],[[4702,4703],\"disallowed\"],[[4704,4742],\"valid\"],[[4743,4743],\"valid\"],[[4744,4744],\"valid\"],[[4745,4745],\"disallowed\"],[[4746,4749],\"valid\"],[[4750,4751],\"disallowed\"],[[4752,4782],\"valid\"],[[4783,4783],\"valid\"],[[4784,4784],\"valid\"],[[4785,4785],\"disallowed\"],[[4786,4789],\"valid\"],[[4790,4791],\"disallowed\"],[[4792,4798],\"valid\"],[[4799,4799],\"disallowed\"],[[4800,4800],\"valid\"],[[4801,4801],\"disallowed\"],[[4802,4805],\"valid\"],[[4806,4807],\"disallowed\"],[[4808,4814],\"valid\"],[[4815,4815],\"valid\"],[[4816,4822],\"valid\"],[[4823,4823],\"disallowed\"],[[4824,4846],\"valid\"],[[4847,4847],\"valid\"],[[4848,4878],\"valid\"],[[4879,4879],\"valid\"],[[4880,4880],\"valid\"],[[4881,4881],\"disallowed\"],[[4882,4885],\"valid\"],[[4886,4887],\"disallowed\"],[[4888,4894],\"valid\"],[[4895,4895],\"valid\"],[[4
896,4934],\"valid\"],[[4935,4935],\"valid\"],[[4936,4954],\"valid\"],[[4955,4956],\"disallowed\"],[[4957,4958],\"valid\"],[[4959,4959],\"valid\"],[[4960,4960],\"valid\",[],\"NV8\"],[[4961,4988],\"valid\",[],\"NV8\"],[[4989,4991],\"disallowed\"],[[4992,5007],\"valid\"],[[5008,5017],\"valid\",[],\"NV8\"],[[5018,5023],\"disallowed\"],[[5024,5108],\"valid\"],[[5109,5109],\"valid\"],[[5110,5111],\"disallowed\"],[[5112,5112],\"mapped\",[5104]],[[5113,5113],\"mapped\",[5105]],[[5114,5114],\"mapped\",[5106]],[[5115,5115],\"mapped\",[5107]],[[5116,5116],\"mapped\",[5108]],[[5117,5117],\"mapped\",[5109]],[[5118,5119],\"disallowed\"],[[5120,5120],\"valid\",[],\"NV8\"],[[5121,5740],\"valid\"],[[5741,5742],\"valid\",[],\"NV8\"],[[5743,5750],\"valid\"],[[5751,5759],\"valid\"],[[5760,5760],\"disallowed\"],[[5761,5786],\"valid\"],[[5787,5788],\"valid\",[],\"NV8\"],[[5789,5791],\"disallowed\"],[[5792,5866],\"valid\"],[[5867,5872],\"valid\",[],\"NV8\"],[[5873,5880],\"valid\"],[[5881,5887],\"disallowed\"],[[5888,5900],\"valid\"],[[5901,5901],\"disallowed\"],[[5902,5908],\"valid\"],[[5909,5919],\"disallowed\"],[[5920,5940],\"valid\"],[[5941,5942],\"valid\",[],\"NV8\"],[[5943,5951],\"disallowed\"],[[5952,5971],\"valid\"],[[5972,5983],\"disallowed\"],[[5984,5996],\"valid\"],[[5997,5997],\"disallowed\"],[[5998,6000],\"valid\"],[[6001,6001],\"disallowed\"],[[6002,6003],\"valid\"],[[6004,6015],\"disallowed\"],[[6016,6067],\"valid\"],[[6068,6069],\"disallowed\"],[[6070,6099],\"valid\"],[[6100,6102],\"valid\",[],\"NV8\"],[[6103,6103],\"valid\"],[[6104,6107],\"valid\",[],\"NV8\"],[[6108,6108],\"valid\"],[[6109,6109],\"valid\"],[[6110,6111],\"disallowed\"],[[6112,6121],\"valid\"],[[6122,6127],\"disallowed\"],[[6128,6137],\"valid\",[],\"NV8\"],[[6138,6143],\"disallowed\"],[[6144,6149],\"valid\",[],\"NV8\"],[[6150,6150],\"disallowed\"],[[6151,6154],\"valid\",[],\"NV8\"],[[6155,6157],\"ignored\"],[[6158,6158],\"disallowed\"],[[6159,6159],\"disallowed\"],[[6160,6169],\"valid\"],[[6170,6175],\"disal
lowed\"],[[6176,6263],\"valid\"],[[6264,6271],\"disallowed\"],[[6272,6313],\"valid\"],[[6314,6314],\"valid\"],[[6315,6319],\"disallowed\"],[[6320,6389],\"valid\"],[[6390,6399],\"disallowed\"],[[6400,6428],\"valid\"],[[6429,6430],\"valid\"],[[6431,6431],\"disallowed\"],[[6432,6443],\"valid\"],[[6444,6447],\"disallowed\"],[[6448,6459],\"valid\"],[[6460,6463],\"disallowed\"],[[6464,6464],\"valid\",[],\"NV8\"],[[6465,6467],\"disallowed\"],[[6468,6469],\"valid\",[],\"NV8\"],[[6470,6509],\"valid\"],[[6510,6511],\"disallowed\"],[[6512,6516],\"valid\"],[[6517,6527],\"disallowed\"],[[6528,6569],\"valid\"],[[6570,6571],\"valid\"],[[6572,6575],\"disallowed\"],[[6576,6601],\"valid\"],[[6602,6607],\"disallowed\"],[[6608,6617],\"valid\"],[[6618,6618],\"valid\",[],\"XV8\"],[[6619,6621],\"disallowed\"],[[6622,6623],\"valid\",[],\"NV8\"],[[6624,6655],\"valid\",[],\"NV8\"],[[6656,6683],\"valid\"],[[6684,6685],\"disallowed\"],[[6686,6687],\"valid\",[],\"NV8\"],[[6688,6750],\"valid\"],[[6751,6751],\"disallowed\"],[[6752,6780],\"valid\"],[[6781,6782],\"disallowed\"],[[6783,6793],\"valid\"],[[6794,6799],\"disallowed\"],[[6800,6809],\"valid\"],[[6810,6815],\"disallowed\"],[[6816,6822],\"valid\",[],\"NV8\"],[[6823,6823],\"valid\"],[[6824,6829],\"valid\",[],\"NV8\"],[[6830,6831],\"disallowed\"],[[6832,6845],\"valid\"],[[6846,6846],\"valid\",[],\"NV8\"],[[6847,6911],\"disallowed\"],[[6912,6987],\"valid\"],[[6988,6991],\"disallowed\"],[[6992,7001],\"valid\"],[[7002,7018],\"valid\",[],\"NV8\"],[[7019,7027],\"valid\"],[[7028,7036],\"valid\",[],\"NV8\"],[[7037,7039],\"disallowed\"],[[7040,7082],\"valid\"],[[7083,7085],\"valid\"],[[7086,7097],\"valid\"],[[7098,7103],\"valid\"],[[7104,7155],\"valid\"],[[7156,7163],\"disallowed\"],[[7164,7167],\"valid\",[],\"NV8\"],[[7168,7223],\"valid\"],[[7224,7226],\"disallowed\"],[[7227,7231],\"valid\",[],\"NV8\"],[[7232,7241],\"valid\"],[[7242,7244],\"disallowed\"],[[7245,7293],\"valid\"],[[7294,7295],\"valid\",[],\"NV8\"],[[7296,7359],\"disallowed\"],[[7360,7
367],\"valid\",[],\"NV8\"],[[7368,7375],\"disallowed\"],[[7376,7378],\"valid\"],[[7379,7379],\"valid\",[],\"NV8\"],[[7380,7410],\"valid\"],[[7411,7414],\"valid\"],[[7415,7415],\"disallowed\"],[[7416,7417],\"valid\"],[[7418,7423],\"disallowed\"],[[7424,7467],\"valid\"],[[7468,7468],\"mapped\",[97]],[[7469,7469],\"mapped\",[230]],[[7470,7470],\"mapped\",[98]],[[7471,7471],\"valid\"],[[7472,7472],\"mapped\",[100]],[[7473,7473],\"mapped\",[101]],[[7474,7474],\"mapped\",[477]],[[7475,7475],\"mapped\",[103]],[[7476,7476],\"mapped\",[104]],[[7477,7477],\"mapped\",[105]],[[7478,7478],\"mapped\",[106]],[[7479,7479],\"mapped\",[107]],[[7480,7480],\"mapped\",[108]],[[7481,7481],\"mapped\",[109]],[[7482,7482],\"mapped\",[110]],[[7483,7483],\"valid\"],[[7484,7484],\"mapped\",[111]],[[7485,7485],\"mapped\",[547]],[[7486,7486],\"mapped\",[112]],[[7487,7487],\"mapped\",[114]],[[7488,7488],\"mapped\",[116]],[[7489,7489],\"mapped\",[117]],[[7490,7490],\"mapped\",[119]],[[7491,7491],\"mapped\",[97]],[[7492,7492],\"mapped\",[592]],[[7493,7493],\"mapped\",[593]],[[7494,7494],\"mapped\",[7426]],[[7495,7495],\"mapped\",[98]],[[7496,7496],\"mapped\",[100]],[[7497,7497],\"mapped\",[101]],[[7498,7498],\"mapped\",[601]],[[7499,7499],\"mapped\",[603]],[[7500,7500],\"mapped\",[604]],[[7501,7501],\"mapped\",[103]],[[7502,7502],\"valid\"],[[7503,7503],\"mapped\",[107]],[[7504,7504],\"mapped\",[109]],[[7505,7505],\"mapped\",[331]],[[7506,7506],\"mapped\",[111]],[[7507,7507],\"mapped\",[596]],[[7508,7508],\"mapped\",[7446]],[[7509,7509],\"mapped\",[7447]],[[7510,7510],\"mapped\",[112]],[[7511,7511],\"mapped\",[116]],[[7512,7512],\"mapped\",[117]],[[7513,7513],\"mapped\",[7453]],[[7514,7514],\"mapped\",[623]],[[7515,7515],\"mapped\",[118]],[[7516,7516],\"mapped\",[7461]],[[7517,7517],\"mapped\",[946]],[[7518,7518],\"mapped\",[947]],[[7519,7519],\"mapped\",[948]],[[7520,7520],\"mapped\",[966]],[[7521,7521],\"mapped\",[967]],[[7522,7522],\"mapped\",[105]],[[7523,7523],\"mapped\",[114]],[[7524,7524],\"
mapped\",[117]],[[7525,7525],\"mapped\",[118]],[[7526,7526],\"mapped\",[946]],[[7527,7527],\"mapped\",[947]],[[7528,7528],\"mapped\",[961]],[[7529,7529],\"mapped\",[966]],[[7530,7530],\"mapped\",[967]],[[7531,7531],\"valid\"],[[7532,7543],\"valid\"],[[7544,7544],\"mapped\",[1085]],[[7545,7578],\"valid\"],[[7579,7579],\"mapped\",[594]],[[7580,7580],\"mapped\",[99]],[[7581,7581],\"mapped\",[597]],[[7582,7582],\"mapped\",[240]],[[7583,7583],\"mapped\",[604]],[[7584,7584],\"mapped\",[102]],[[7585,7585],\"mapped\",[607]],[[7586,7586],\"mapped\",[609]],[[7587,7587],\"mapped\",[613]],[[7588,7588],\"mapped\",[616]],[[7589,7589],\"mapped\",[617]],[[7590,7590],\"mapped\",[618]],[[7591,7591],\"mapped\",[7547]],[[7592,7592],\"mapped\",[669]],[[7593,7593],\"mapped\",[621]],[[7594,7594],\"mapped\",[7557]],[[7595,7595],\"mapped\",[671]],[[7596,7596],\"mapped\",[625]],[[7597,7597],\"mapped\",[624]],[[7598,7598],\"mapped\",[626]],[[7599,7599],\"mapped\",[627]],[[7600,7600],\"mapped\",[628]],[[7601,7601],\"mapped\",[629]],[[7602,7602],\"mapped\",[632]],[[7603,7603],\"mapped\",[642]],[[7604,7604],\"mapped\",[643]],[[7605,7605],\"mapped\",[427]],[[7606,7606],\"mapped\",[649]],[[7607,7607],\"mapped\",[650]],[[7608,7608],\"mapped\",[7452]],[[7609,7609],\"mapped\",[651]],[[7610,7610],\"mapped\",[652]],[[7611,7611],\"mapped\",[122]],[[7612,7612],\"mapped\",[656]],[[7613,7613],\"mapped\",[657]],[[7614,7614],\"mapped\",[658]],[[7615,7615],\"mapped\",[952]],[[7616,7619],\"valid\"],[[7620,7626],\"valid\"],[[7627,7654],\"valid\"],[[7655,7669],\"valid\"],[[7670,7675],\"disallowed\"],[[7676,7676],\"valid\"],[[7677,7677],\"valid\"],[[7678,7679],\"valid\"],[[7680,7680],\"mapped\",[7681]],[[7681,7681],\"valid\"],[[7682,7682],\"mapped\",[7683]],[[7683,7683],\"valid\"],[[7684,7684],\"mapped\",[7685]],[[7685,7685],\"valid\"],[[7686,7686],\"mapped\",[7687]],[[7687,7687],\"valid\"],[[7688,7688],\"mapped\",[7689]],[[7689,7689],\"valid\"],[[7690,7690],\"mapped\",[7691]],[[7691,7691],\"valid\"],[[7692,7692]
,\"mapped\",[7693]],[[7693,7693],\"valid\"],[[7694,7694],\"mapped\",[7695]],[[7695,7695],\"valid\"],[[7696,7696],\"mapped\",[7697]],[[7697,7697],\"valid\"],[[7698,7698],\"mapped\",[7699]],[[7699,7699],\"valid\"],[[7700,7700],\"mapped\",[7701]],[[7701,7701],\"valid\"],[[7702,7702],\"mapped\",[7703]],[[7703,7703],\"valid\"],[[7704,7704],\"mapped\",[7705]],[[7705,7705],\"valid\"],[[7706,7706],\"mapped\",[7707]],[[7707,7707],\"valid\"],[[7708,7708],\"mapped\",[7709]],[[7709,7709],\"valid\"],[[7710,7710],\"mapped\",[7711]],[[7711,7711],\"valid\"],[[7712,7712],\"mapped\",[7713]],[[7713,7713],\"valid\"],[[7714,7714],\"mapped\",[7715]],[[7715,7715],\"valid\"],[[7716,7716],\"mapped\",[7717]],[[7717,7717],\"valid\"],[[7718,7718],\"mapped\",[7719]],[[7719,7719],\"valid\"],[[7720,7720],\"mapped\",[7721]],[[7721,7721],\"valid\"],[[7722,7722],\"mapped\",[7723]],[[7723,7723],\"valid\"],[[7724,7724],\"mapped\",[7725]],[[7725,7725],\"valid\"],[[7726,7726],\"mapped\",[7727]],[[7727,7727],\"valid\"],[[7728,7728],\"mapped\",[7729]],[[7729,7729],\"valid\"],[[7730,7730],\"mapped\",[7731]],[[7731,7731],\"valid\"],[[7732,7732],\"mapped\",[7733]],[[7733,7733],\"valid\"],[[7734,7734],\"mapped\",[7735]],[[7735,7735],\"valid\"],[[7736,7736],\"mapped\",[7737]],[[7737,7737],\"valid\"],[[7738,7738],\"mapped\",[7739]],[[7739,7739],\"valid\"],[[7740,7740],\"mapped\",[7741]],[[7741,7741],\"valid\"],[[7742,7742],\"mapped\",[7743]],[[7743,7743],\"valid\"],[[7744,7744],\"mapped\",[7745]],[[7745,7745],\"valid\"],[[7746,7746],\"mapped\",[7747]],[[7747,7747],\"valid\"],[[7748,7748],\"mapped\",[7749]],[[7749,7749],\"valid\"],[[7750,7750],\"mapped\",[7751]],[[7751,7751],\"valid\"],[[7752,7752],\"mapped\",[7753]],[[7753,7753],\"valid\"],[[7754,7754],\"mapped\",[7755]],[[7755,7755],\"valid\"],[[7756,7756],\"mapped\",[7757]],[[7757,7757],\"valid\"],[[7758,7758],\"mapped\",[7759]],[[7759,7759],\"valid\"],[[7760,7760],\"mapped\",[7761]],[[7761,7761],\"valid\"],[[7762,7762],\"mapped\",[7763]],[[7763,7763],\"valid
\"],[[7764,7764],\"mapped\",[7765]],[[7765,7765],\"valid\"],[[7766,7766],\"mapped\",[7767]],[[7767,7767],\"valid\"],[[7768,7768],\"mapped\",[7769]],[[7769,7769],\"valid\"],[[7770,7770],\"mapped\",[7771]],[[7771,7771],\"valid\"],[[7772,7772],\"mapped\",[7773]],[[7773,7773],\"valid\"],[[7774,7774],\"mapped\",[7775]],[[7775,7775],\"valid\"],[[7776,7776],\"mapped\",[7777]],[[7777,7777],\"valid\"],[[7778,7778],\"mapped\",[7779]],[[7779,7779],\"valid\"],[[7780,7780],\"mapped\",[7781]],[[7781,7781],\"valid\"],[[7782,7782],\"mapped\",[7783]],[[7783,7783],\"valid\"],[[7784,7784],\"mapped\",[7785]],[[7785,7785],\"valid\"],[[7786,7786],\"mapped\",[7787]],[[7787,7787],\"valid\"],[[7788,7788],\"mapped\",[7789]],[[7789,7789],\"valid\"],[[7790,7790],\"mapped\",[7791]],[[7791,7791],\"valid\"],[[7792,7792],\"mapped\",[7793]],[[7793,7793],\"valid\"],[[7794,7794],\"mapped\",[7795]],[[7795,7795],\"valid\"],[[7796,7796],\"mapped\",[7797]],[[7797,7797],\"valid\"],[[7798,7798],\"mapped\",[7799]],[[7799,7799],\"valid\"],[[7800,7800],\"mapped\",[7801]],[[7801,7801],\"valid\"],[[7802,7802],\"mapped\",[7803]],[[7803,7803],\"valid\"],[[7804,7804],\"mapped\",[7805]],[[7805,7805],\"valid\"],[[7806,7806],\"mapped\",[7807]],[[7807,7807],\"valid\"],[[7808,7808],\"mapped\",[7809]],[[7809,7809],\"valid\"],[[7810,7810],\"mapped\",[7811]],[[7811,7811],\"valid\"],[[7812,7812],\"mapped\",[7813]],[[7813,7813],\"valid\"],[[7814,7814],\"mapped\",[7815]],[[7815,7815],\"valid\"],[[7816,7816],\"mapped\",[7817]],[[7817,7817],\"valid\"],[[7818,7818],\"mapped\",[7819]],[[7819,7819],\"valid\"],[[7820,7820],\"mapped\",[7821]],[[7821,7821],\"valid\"],[[7822,7822],\"mapped\",[7823]],[[7823,7823],\"valid\"],[[7824,7824],\"mapped\",[7825]],[[7825,7825],\"valid\"],[[7826,7826],\"mapped\",[7827]],[[7827,7827],\"valid\"],[[7828,7828],\"mapped\",[7829]],[[7829,7833],\"valid\"],[[7834,7834],\"mapped\",[97,702]],[[7835,7835],\"mapped\",[7777]],[[7836,7837],\"valid\"],[[7838,7838],\"mapped\",[115,115]],[[7839,7839],\"valid\"]
,[[7840,7840],\"mapped\",[7841]],[[7841,7841],\"valid\"],[[7842,7842],\"mapped\",[7843]],[[7843,7843],\"valid\"],[[7844,7844],\"mapped\",[7845]],[[7845,7845],\"valid\"],[[7846,7846],\"mapped\",[7847]],[[7847,7847],\"valid\"],[[7848,7848],\"mapped\",[7849]],[[7849,7849],\"valid\"],[[7850,7850],\"mapped\",[7851]],[[7851,7851],\"valid\"],[[7852,7852],\"mapped\",[7853]],[[7853,7853],\"valid\"],[[7854,7854],\"mapped\",[7855]],[[7855,7855],\"valid\"],[[7856,7856],\"mapped\",[7857]],[[7857,7857],\"valid\"],[[7858,7858],\"mapped\",[7859]],[[7859,7859],\"valid\"],[[7860,7860],\"mapped\",[7861]],[[7861,7861],\"valid\"],[[7862,7862],\"mapped\",[7863]],[[7863,7863],\"valid\"],[[7864,7864],\"mapped\",[7865]],[[7865,7865],\"valid\"],[[7866,7866],\"mapped\",[7867]],[[7867,7867],\"valid\"],[[7868,7868],\"mapped\",[7869]],[[7869,7869],\"valid\"],[[7870,7870],\"mapped\",[7871]],[[7871,7871],\"valid\"],[[7872,7872],\"mapped\",[7873]],[[7873,7873],\"valid\"],[[7874,7874],\"mapped\",[7875]],[[7875,7875],\"valid\"],[[7876,7876],\"mapped\",[7877]],[[7877,7877],\"valid\"],[[7878,7878],\"mapped\",[7879]],[[7879,7879],\"valid\"],[[7880,7880],\"mapped\",[7881]],[[7881,7881],\"valid\"],[[7882,7882],\"mapped\",[7883]],[[7883,7883],\"valid\"],[[7884,7884],\"mapped\",[7885]],[[7885,7885],\"valid\"],[[7886,7886],\"mapped\",[7887]],[[7887,7887],\"valid\"],[[7888,7888],\"mapped\",[7889]],[[7889,7889],\"valid\"],[[7890,7890],\"mapped\",[7891]],[[7891,7891],\"valid\"],[[7892,7892],\"mapped\",[7893]],[[7893,7893],\"valid\"],[[7894,7894],\"mapped\",[7895]],[[7895,7895],\"valid\"],[[7896,7896],\"mapped\",[7897]],[[7897,7897],\"valid\"],[[7898,7898],\"mapped\",[7899]],[[7899,7899],\"valid\"],[[7900,7900],\"mapped\",[7901]],[[7901,7901],\"valid\"],[[7902,7902],\"mapped\",[7903]],[[7903,7903],\"valid\"],[[7904,7904],\"mapped\",[7905]],[[7905,7905],\"valid\"],[[7906,7906],\"mapped\",[7907]],[[7907,7907],\"valid\"],[[7908,7908],\"mapped\",[7909]],[[7909,7909],\"valid\"],[[7910,7910],\"mapped\",[7911]],[[7911,
7911],\"valid\"],[[7912,7912],\"mapped\",[7913]],[[7913,7913],\"valid\"],[[7914,7914],\"mapped\",[7915]],[[7915,7915],\"valid\"],[[7916,7916],\"mapped\",[7917]],[[7917,7917],\"valid\"],[[7918,7918],\"mapped\",[7919]],[[7919,7919],\"valid\"],[[7920,7920],\"mapped\",[7921]],[[7921,7921],\"valid\"],[[7922,7922],\"mapped\",[7923]],[[7923,7923],\"valid\"],[[7924,7924],\"mapped\",[7925]],[[7925,7925],\"valid\"],[[7926,7926],\"mapped\",[7927]],[[7927,7927],\"valid\"],[[7928,7928],\"mapped\",[7929]],[[7929,7929],\"valid\"],[[7930,7930],\"mapped\",[7931]],[[7931,7931],\"valid\"],[[7932,7932],\"mapped\",[7933]],[[7933,7933],\"valid\"],[[7934,7934],\"mapped\",[7935]],[[7935,7935],\"valid\"],[[7936,7943],\"valid\"],[[7944,7944],\"mapped\",[7936]],[[7945,7945],\"mapped\",[7937]],[[7946,7946],\"mapped\",[7938]],[[7947,7947],\"mapped\",[7939]],[[7948,7948],\"mapped\",[7940]],[[7949,7949],\"mapped\",[7941]],[[7950,7950],\"mapped\",[7942]],[[7951,7951],\"mapped\",[7943]],[[7952,7957],\"valid\"],[[7958,7959],\"disallowed\"],[[7960,7960],\"mapped\",[7952]],[[7961,7961],\"mapped\",[7953]],[[7962,7962],\"mapped\",[7954]],[[7963,7963],\"mapped\",[7955]],[[7964,7964],\"mapped\",[7956]],[[7965,7965],\"mapped\",[7957]],[[7966,7967],\"disallowed\"],[[7968,7975],\"valid\"],[[7976,7976],\"mapped\",[7968]],[[7977,7977],\"mapped\",[7969]],[[7978,7978],\"mapped\",[7970]],[[7979,7979],\"mapped\",[7971]],[[7980,7980],\"mapped\",[7972]],[[7981,7981],\"mapped\",[7973]],[[7982,7982],\"mapped\",[7974]],[[7983,7983],\"mapped\",[7975]],[[7984,7991],\"valid\"],[[7992,7992],\"mapped\",[7984]],[[7993,7993],\"mapped\",[7985]],[[7994,7994],\"mapped\",[7986]],[[7995,7995],\"mapped\",[7987]],[[7996,7996],\"mapped\",[7988]],[[7997,7997],\"mapped\",[7989]],[[7998,7998],\"mapped\",[7990]],[[7999,7999],\"mapped\",[7991]],[[8000,8005],\"valid\"],[[8006,8007],\"disallowed\"],[[8008,8008],\"mapped\",[8000]],[[8009,8009],\"mapped\",[8001]],[[8010,8010],\"mapped\",[8002]],[[8011,8011],\"mapped\",[8003]],[[8012,8012],\"m
apped\",[8004]],[[8013,8013],\"mapped\",[8005]],[[8014,8015],\"disallowed\"],[[8016,8023],\"valid\"],[[8024,8024],\"disallowed\"],[[8025,8025],\"mapped\",[8017]],[[8026,8026],\"disallowed\"],[[8027,8027],\"mapped\",[8019]],[[8028,8028],\"disallowed\"],[[8029,8029],\"mapped\",[8021]],[[8030,8030],\"disallowed\"],[[8031,8031],\"mapped\",[8023]],[[8032,8039],\"valid\"],[[8040,8040],\"mapped\",[8032]],[[8041,8041],\"mapped\",[8033]],[[8042,8042],\"mapped\",[8034]],[[8043,8043],\"mapped\",[8035]],[[8044,8044],\"mapped\",[8036]],[[8045,8045],\"mapped\",[8037]],[[8046,8046],\"mapped\",[8038]],[[8047,8047],\"mapped\",[8039]],[[8048,8048],\"valid\"],[[8049,8049],\"mapped\",[940]],[[8050,8050],\"valid\"],[[8051,8051],\"mapped\",[941]],[[8052,8052],\"valid\"],[[8053,8053],\"mapped\",[942]],[[8054,8054],\"valid\"],[[8055,8055],\"mapped\",[943]],[[8056,8056],\"valid\"],[[8057,8057],\"mapped\",[972]],[[8058,8058],\"valid\"],[[8059,8059],\"mapped\",[973]],[[8060,8060],\"valid\"],[[8061,8061],\"mapped\",[974]],[[8062,8063],\"disallowed\"],[[8064,8064],\"mapped\",[7936,953]],[[8065,8065],\"mapped\",[7937,953]],[[8066,8066],\"mapped\",[7938,953]],[[8067,8067],\"mapped\",[7939,953]],[[8068,8068],\"mapped\",[7940,953]],[[8069,8069],\"mapped\",[7941,953]],[[8070,8070],\"mapped\",[7942,953]],[[8071,8071],\"mapped\",[7943,953]],[[8072,8072],\"mapped\",[7936,953]],[[8073,8073],\"mapped\",[7937,953]],[[8074,8074],\"mapped\",[7938,953]],[[8075,8075],\"mapped\",[7939,953]],[[8076,8076],\"mapped\",[7940,953]],[[8077,8077],\"mapped\",[7941,953]],[[8078,8078],\"mapped\",[7942,953]],[[8079,8079],\"mapped\",[7943,953]],[[8080,8080],\"mapped\",[7968,953]],[[8081,8081],\"mapped\",[7969,953]],[[8082,8082],\"mapped\",[7970,953]],[[8083,8083],\"mapped\",[7971,953]],[[8084,8084],\"mapped\",[7972,953]],[[8085,8085],\"mapped\",[7973,953]],[[8086,8086],\"mapped\",[7974,953]],[[8087,8087],\"mapped\",[7975,953]],[[8088,8088],\"mapped\",[7968,953]],[[8089,8089],\"mapped\",[7969,953]],[[8090,8090],\"mapped\",[
7970,953]],[[8091,8091],\"mapped\",[7971,953]],[[8092,8092],\"mapped\",[7972,953]],[[8093,8093],\"mapped\",[7973,953]],[[8094,8094],\"mapped\",[7974,953]],[[8095,8095],\"mapped\",[7975,953]],[[8096,8096],\"mapped\",[8032,953]],[[8097,8097],\"mapped\",[8033,953]],[[8098,8098],\"mapped\",[8034,953]],[[8099,8099],\"mapped\",[8035,953]],[[8100,8100],\"mapped\",[8036,953]],[[8101,8101],\"mapped\",[8037,953]],[[8102,8102],\"mapped\",[8038,953]],[[8103,8103],\"mapped\",[8039,953]],[[8104,8104],\"mapped\",[8032,953]],[[8105,8105],\"mapped\",[8033,953]],[[8106,8106],\"mapped\",[8034,953]],[[8107,8107],\"mapped\",[8035,953]],[[8108,8108],\"mapped\",[8036,953]],[[8109,8109],\"mapped\",[8037,953]],[[8110,8110],\"mapped\",[8038,953]],[[8111,8111],\"mapped\",[8039,953]],[[8112,8113],\"valid\"],[[8114,8114],\"mapped\",[8048,953]],[[8115,8115],\"mapped\",[945,953]],[[8116,8116],\"mapped\",[940,953]],[[8117,8117],\"disallowed\"],[[8118,8118],\"valid\"],[[8119,8119],\"mapped\",[8118,953]],[[8120,8120],\"mapped\",[8112]],[[8121,8121],\"mapped\",[8113]],[[8122,8122],\"mapped\",[8048]],[[8123,8123],\"mapped\",[940]],[[8124,8124],\"mapped\",[945,953]],[[8125,8125],\"disallowed_STD3_mapped\",[32,787]],[[8126,8126],\"mapped\",[953]],[[8127,8127],\"disallowed_STD3_mapped\",[32,787]],[[8128,8128],\"disallowed_STD3_mapped\",[32,834]],[[8129,8129],\"disallowed_STD3_mapped\",[32,776,834]],[[8130,8130],\"mapped\",[8052,953]],[[8131,8131],\"mapped\",[951,953]],[[8132,8132],\"mapped\",[942,953]],[[8133,8133],\"disallowed\"],[[8134,8134],\"valid\"],[[8135,8135],\"mapped\",[8134,953]],[[8136,8136],\"mapped\",[8050]],[[8137,8137],\"mapped\",[941]],[[8138,8138],\"mapped\",[8052]],[[8139,8139],\"mapped\",[942]],[[8140,8140],\"mapped\",[951,953]],[[8141,8141],\"disallowed_STD3_mapped\",[32,787,768]],[[8142,8142],\"disallowed_STD3_mapped\",[32,787,769]],[[8143,8143],\"disallowed_STD3_mapped\",[32,787,834]],[[8144,8146],\"valid\"],[[8147,8147],\"mapped\",[912]],[[8148,8149],\"disallowed\"],[[8150,8151],\"
valid\"],[[8152,8152],\"mapped\",[8144]],[[8153,8153],\"mapped\",[8145]],[[8154,8154],\"mapped\",[8054]],[[8155,8155],\"mapped\",[943]],[[8156,8156],\"disallowed\"],[[8157,8157],\"disallowed_STD3_mapped\",[32,788,768]],[[8158,8158],\"disallowed_STD3_mapped\",[32,788,769]],[[8159,8159],\"disallowed_STD3_mapped\",[32,788,834]],[[8160,8162],\"valid\"],[[8163,8163],\"mapped\",[944]],[[8164,8167],\"valid\"],[[8168,8168],\"mapped\",[8160]],[[8169,8169],\"mapped\",[8161]],[[8170,8170],\"mapped\",[8058]],[[8171,8171],\"mapped\",[973]],[[8172,8172],\"mapped\",[8165]],[[8173,8173],\"disallowed_STD3_mapped\",[32,776,768]],[[8174,8174],\"disallowed_STD3_mapped\",[32,776,769]],[[8175,8175],\"disallowed_STD3_mapped\",[96]],[[8176,8177],\"disallowed\"],[[8178,8178],\"mapped\",[8060,953]],[[8179,8179],\"mapped\",[969,953]],[[8180,8180],\"mapped\",[974,953]],[[8181,8181],\"disallowed\"],[[8182,8182],\"valid\"],[[8183,8183],\"mapped\",[8182,953]],[[8184,8184],\"mapped\",[8056]],[[8185,8185],\"mapped\",[972]],[[8186,8186],\"mapped\",[8060]],[[8187,8187],\"mapped\",[974]],[[8188,8188],\"mapped\",[969,953]],[[8189,8189],\"disallowed_STD3_mapped\",[32,769]],[[8190,8190],\"disallowed_STD3_mapped\",[32,788]],[[8191,8191],\"disallowed\"],[[8192,8202],\"disallowed_STD3_mapped\",[32]],[[8203,8203],\"ignored\"],[[8204,8205],\"deviation\",[]],[[8206,8207],\"disallowed\"],[[8208,8208],\"valid\",[],\"NV8\"],[[8209,8209],\"mapped\",[8208]],[[8210,8214],\"valid\",[],\"NV8\"],[[8215,8215],\"disallowed_STD3_mapped\",[32,819]],[[8216,8227],\"valid\",[],\"NV8\"],[[8228,8230],\"disallowed\"],[[8231,8231],\"valid\",[],\"NV8\"],[[8232,8238],\"disallowed\"],[[8239,8239],\"disallowed_STD3_mapped\",[32]],[[8240,8242],\"valid\",[],\"NV8\"],[[8243,8243],\"mapped\",[8242,8242]],[[8244,8244],\"mapped\",[8242,8242,8242]],[[8245,8245],\"valid\",[],\"NV8\"],[[8246,8246],\"mapped\",[8245,8245]],[[8247,8247],\"mapped\",[8245,8245,8245]],[[8248,8251],\"valid\",[],\"NV8\"],[[8252,8252],\"disallowed_STD3_mapped\",[33,33
]],[[8253,8253],\"valid\",[],\"NV8\"],[[8254,8254],\"disallowed_STD3_mapped\",[32,773]],[[8255,8262],\"valid\",[],\"NV8\"],[[8263,8263],\"disallowed_STD3_mapped\",[63,63]],[[8264,8264],\"disallowed_STD3_mapped\",[63,33]],[[8265,8265],\"disallowed_STD3_mapped\",[33,63]],[[8266,8269],\"valid\",[],\"NV8\"],[[8270,8274],\"valid\",[],\"NV8\"],[[8275,8276],\"valid\",[],\"NV8\"],[[8277,8278],\"valid\",[],\"NV8\"],[[8279,8279],\"mapped\",[8242,8242,8242,8242]],[[8280,8286],\"valid\",[],\"NV8\"],[[8287,8287],\"disallowed_STD3_mapped\",[32]],[[8288,8288],\"ignored\"],[[8289,8291],\"disallowed\"],[[8292,8292],\"ignored\"],[[8293,8293],\"disallowed\"],[[8294,8297],\"disallowed\"],[[8298,8303],\"disallowed\"],[[8304,8304],\"mapped\",[48]],[[8305,8305],\"mapped\",[105]],[[8306,8307],\"disallowed\"],[[8308,8308],\"mapped\",[52]],[[8309,8309],\"mapped\",[53]],[[8310,8310],\"mapped\",[54]],[[8311,8311],\"mapped\",[55]],[[8312,8312],\"mapped\",[56]],[[8313,8313],\"mapped\",[57]],[[8314,8314],\"disallowed_STD3_mapped\",[43]],[[8315,8315],\"mapped\",[8722]],[[8316,8316],\"disallowed_STD3_mapped\",[61]],[[8317,8317],\"disallowed_STD3_mapped\",[40]],[[8318,8318],\"disallowed_STD3_mapped\",[41]],[[8319,8319],\"mapped\",[110]],[[8320,8320],\"mapped\",[48]],[[8321,8321],\"mapped\",[49]],[[8322,8322],\"mapped\",[50]],[[8323,8323],\"mapped\",[51]],[[8324,8324],\"mapped\",[52]],[[8325,8325],\"mapped\",[53]],[[8326,8326],\"mapped\",[54]],[[8327,8327],\"mapped\",[55]],[[8328,8328],\"mapped\",[56]],[[8329,8329],\"mapped\",[57]],[[8330,8330],\"disallowed_STD3_mapped\",[43]],[[8331,8331],\"mapped\",[8722]],[[8332,8332],\"disallowed_STD3_mapped\",[61]],[[8333,8333],\"disallowed_STD3_mapped\",[40]],[[8334,8334],\"disallowed_STD3_mapped\",[41]],[[8335,8335],\"disallowed\"],[[8336,8336],\"mapped\",[97]],[[8337,8337],\"mapped\",[101]],[[8338,8338],\"mapped\",[111]],[[8339,8339],\"mapped\",[120]],[[8340,8340],\"mapped\",[601]],[[8341,8341],\"mapped\",[104]],[[8342,8342],\"mapped\",[107]],[[8343,8343],\"m
apped\",[108]],[[8344,8344],\"mapped\",[109]],[[8345,8345],\"mapped\",[110]],[[8346,8346],\"mapped\",[112]],[[8347,8347],\"mapped\",[115]],[[8348,8348],\"mapped\",[116]],[[8349,8351],\"disallowed\"],[[8352,8359],\"valid\",[],\"NV8\"],[[8360,8360],\"mapped\",[114,115]],[[8361,8362],\"valid\",[],\"NV8\"],[[8363,8363],\"valid\",[],\"NV8\"],[[8364,8364],\"valid\",[],\"NV8\"],[[8365,8367],\"valid\",[],\"NV8\"],[[8368,8369],\"valid\",[],\"NV8\"],[[8370,8373],\"valid\",[],\"NV8\"],[[8374,8376],\"valid\",[],\"NV8\"],[[8377,8377],\"valid\",[],\"NV8\"],[[8378,8378],\"valid\",[],\"NV8\"],[[8379,8381],\"valid\",[],\"NV8\"],[[8382,8382],\"valid\",[],\"NV8\"],[[8383,8399],\"disallowed\"],[[8400,8417],\"valid\",[],\"NV8\"],[[8418,8419],\"valid\",[],\"NV8\"],[[8420,8426],\"valid\",[],\"NV8\"],[[8427,8427],\"valid\",[],\"NV8\"],[[8428,8431],\"valid\",[],\"NV8\"],[[8432,8432],\"valid\",[],\"NV8\"],[[8433,8447],\"disallowed\"],[[8448,8448],\"disallowed_STD3_mapped\",[97,47,99]],[[8449,8449],\"disallowed_STD3_mapped\",[97,47,115]],[[8450,8450],\"mapped\",[99]],[[8451,8451],\"mapped\",[176,99]],[[8452,8452],\"valid\",[],\"NV8\"],[[8453,8453],\"disallowed_STD3_mapped\",[99,47,111]],[[8454,8454],\"disallowed_STD3_mapped\",[99,47,117]],[[8455,8455],\"mapped\",[603]],[[8456,8456],\"valid\",[],\"NV8\"],[[8457,8457],\"mapped\",[176,102]],[[8458,8458],\"mapped\",[103]],[[8459,8462],\"mapped\",[104]],[[8463,8463],\"mapped\",[295]],[[8464,8465],\"mapped\",[105]],[[8466,8467],\"mapped\",[108]],[[8468,8468],\"valid\",[],\"NV8\"],[[8469,8469],\"mapped\",[110]],[[8470,8470],\"mapped\",[110,111]],[[8471,8472],\"valid\",[],\"NV8\"],[[8473,8473],\"mapped\",[112]],[[8474,8474],\"mapped\",[113]],[[8475,8477],\"mapped\",[114]],[[8478,8479],\"valid\",[],\"NV8\"],[[8480,8480],\"mapped\",[115,109]],[[8481,8481],\"mapped\",[116,101,108]],[[8482,8482],\"mapped\",[116,109]],[[8483,8483],\"valid\",[],\"NV8\"],[[8484,8484],\"mapped\",[122]],[[8485,8485],\"valid\",[],\"NV8\"],[[8486,8486],\"mapped\",[969]],[[8487,
8487],\"valid\",[],\"NV8\"],[[8488,8488],\"mapped\",[122]],[[8489,8489],\"valid\",[],\"NV8\"],[[8490,8490],\"mapped\",[107]],[[8491,8491],\"mapped\",[229]],[[8492,8492],\"mapped\",[98]],[[8493,8493],\"mapped\",[99]],[[8494,8494],\"valid\",[],\"NV8\"],[[8495,8496],\"mapped\",[101]],[[8497,8497],\"mapped\",[102]],[[8498,8498],\"disallowed\"],[[8499,8499],\"mapped\",[109]],[[8500,8500],\"mapped\",[111]],[[8501,8501],\"mapped\",[1488]],[[8502,8502],\"mapped\",[1489]],[[8503,8503],\"mapped\",[1490]],[[8504,8504],\"mapped\",[1491]],[[8505,8505],\"mapped\",[105]],[[8506,8506],\"valid\",[],\"NV8\"],[[8507,8507],\"mapped\",[102,97,120]],[[8508,8508],\"mapped\",[960]],[[8509,8510],\"mapped\",[947]],[[8511,8511],\"mapped\",[960]],[[8512,8512],\"mapped\",[8721]],[[8513,8516],\"valid\",[],\"NV8\"],[[8517,8518],\"mapped\",[100]],[[8519,8519],\"mapped\",[101]],[[8520,8520],\"mapped\",[105]],[[8521,8521],\"mapped\",[106]],[[8522,8523],\"valid\",[],\"NV8\"],[[8524,8524],\"valid\",[],\"NV8\"],[[8525,8525],\"valid\",[],\"NV8\"],[[8526,8526],\"valid\"],[[8527,8527],\"valid\",[],\"NV8\"],[[8528,8528],\"mapped\",[49,8260,55]],[[8529,8529],\"mapped\",[49,8260,57]],[[8530,8530],\"mapped\",[49,8260,49,48]],[[8531,8531],\"mapped\",[49,8260,51]],[[8532,8532],\"mapped\",[50,8260,51]],[[8533,8533],\"mapped\",[49,8260,53]],[[8534,8534],\"mapped\",[50,8260,53]],[[8535,8535],\"mapped\",[51,8260,53]],[[8536,8536],\"mapped\",[52,8260,53]],[[8537,8537],\"mapped\",[49,8260,54]],[[8538,8538],\"mapped\",[53,8260,54]],[[8539,8539],\"mapped\",[49,8260,56]],[[8540,8540],\"mapped\",[51,8260,56]],[[8541,8541],\"mapped\",[53,8260,56]],[[8542,8542],\"mapped\",[55,8260,56]],[[8543,8543],\"mapped\",[49,8260]],[[8544,8544],\"mapped\",[105]],[[8545,8545],\"mapped\",[105,105]],[[8546,8546],\"mapped\",[105,105,105]],[[8547,8547],\"mapped\",[105,118]],[[8548,8548],\"mapped\",[118]],[[8549,8549],\"mapped\",[118,105]],[[8550,8550],\"mapped\",[118,105,105]],[[8551,8551],\"mapped\",[118,105,105,105]],[[8552,8552],\"mappe
d\",[105,120]],[[8553,8553],\"mapped\",[120]],[[8554,8554],\"mapped\",[120,105]],[[8555,8555],\"mapped\",[120,105,105]],[[8556,8556],\"mapped\",[108]],[[8557,8557],\"mapped\",[99]],[[8558,8558],\"mapped\",[100]],[[8559,8559],\"mapped\",[109]],[[8560,8560],\"mapped\",[105]],[[8561,8561],\"mapped\",[105,105]],[[8562,8562],\"mapped\",[105,105,105]],[[8563,8563],\"mapped\",[105,118]],[[8564,8564],\"mapped\",[118]],[[8565,8565],\"mapped\",[118,105]],[[8566,8566],\"mapped\",[118,105,105]],[[8567,8567],\"mapped\",[118,105,105,105]],[[8568,8568],\"mapped\",[105,120]],[[8569,8569],\"mapped\",[120]],[[8570,8570],\"mapped\",[120,105]],[[8571,8571],\"mapped\",[120,105,105]],[[8572,8572],\"mapped\",[108]],[[8573,8573],\"mapped\",[99]],[[8574,8574],\"mapped\",[100]],[[8575,8575],\"mapped\",[109]],[[8576,8578],\"valid\",[],\"NV8\"],[[8579,8579],\"disallowed\"],[[8580,8580],\"valid\"],[[8581,8584],\"valid\",[],\"NV8\"],[[8585,8585],\"mapped\",[48,8260,51]],[[8586,8587],\"valid\",[],\"NV8\"],[[8588,8591],\"disallowed\"],[[8592,8682],\"valid\",[],\"NV8\"],[[8683,8691],\"valid\",[],\"NV8\"],[[8692,8703],\"valid\",[],\"NV8\"],[[8704,8747],\"valid\",[],\"NV8\"],[[8748,8748],\"mapped\",[8747,8747]],[[8749,8749],\"mapped\",[8747,8747,8747]],[[8750,8750],\"valid\",[],\"NV8\"],[[8751,8751],\"mapped\",[8750,8750]],[[8752,8752],\"mapped\",[8750,8750,8750]],[[8753,8799],\"valid\",[],\"NV8\"],[[8800,8800],\"disallowed_STD3_valid\"],[[8801,8813],\"valid\",[],\"NV8\"],[[8814,8815],\"disallowed_STD3_valid\"],[[8816,8945],\"valid\",[],\"NV8\"],[[8946,8959],\"valid\",[],\"NV8\"],[[8960,8960],\"valid\",[],\"NV8\"],[[8961,8961],\"valid\",[],\"NV8\"],[[8962,9000],\"valid\",[],\"NV8\"],[[9001,9001],\"mapped\",[12296]],[[9002,9002],\"mapped\",[12297]],[[9003,9082],\"valid\",[],\"NV8\"],[[9083,9083],\"valid\",[],\"NV8\"],[[9084,9084],\"valid\",[],\"NV8\"],[[9085,9114],\"valid\",[],\"NV8\"],[[9115,9166],\"valid\",[],\"NV8\"],[[9167,9168],\"valid\",[],\"NV8\"],[[9169,9179],\"valid\",[],\"NV8\"],[[9180,9191]
,\"valid\",[],\"NV8\"],[[9192,9192],\"valid\",[],\"NV8\"],[[9193,9203],\"valid\",[],\"NV8\"],[[9204,9210],\"valid\",[],\"NV8\"],[[9211,9215],\"disallowed\"],[[9216,9252],\"valid\",[],\"NV8\"],[[9253,9254],\"valid\",[],\"NV8\"],[[9255,9279],\"disallowed\"],[[9280,9290],\"valid\",[],\"NV8\"],[[9291,9311],\"disallowed\"],[[9312,9312],\"mapped\",[49]],[[9313,9313],\"mapped\",[50]],[[9314,9314],\"mapped\",[51]],[[9315,9315],\"mapped\",[52]],[[9316,9316],\"mapped\",[53]],[[9317,9317],\"mapped\",[54]],[[9318,9318],\"mapped\",[55]],[[9319,9319],\"mapped\",[56]],[[9320,9320],\"mapped\",[57]],[[9321,9321],\"mapped\",[49,48]],[[9322,9322],\"mapped\",[49,49]],[[9323,9323],\"mapped\",[49,50]],[[9324,9324],\"mapped\",[49,51]],[[9325,9325],\"mapped\",[49,52]],[[9326,9326],\"mapped\",[49,53]],[[9327,9327],\"mapped\",[49,54]],[[9328,9328],\"mapped\",[49,55]],[[9329,9329],\"mapped\",[49,56]],[[9330,9330],\"mapped\",[49,57]],[[9331,9331],\"mapped\",[50,48]],[[9332,9332],\"disallowed_STD3_mapped\",[40,49,41]],[[9333,9333],\"disallowed_STD3_mapped\",[40,50,41]],[[9334,9334],\"disallowed_STD3_mapped\",[40,51,41]],[[9335,9335],\"disallowed_STD3_mapped\",[40,52,41]],[[9336,9336],\"disallowed_STD3_mapped\",[40,53,41]],[[9337,9337],\"disallowed_STD3_mapped\",[40,54,41]],[[9338,9338],\"disallowed_STD3_mapped\",[40,55,41]],[[9339,9339],\"disallowed_STD3_mapped\",[40,56,41]],[[9340,9340],\"disallowed_STD3_mapped\",[40,57,41]],[[9341,9341],\"disallowed_STD3_mapped\",[40,49,48,41]],[[9342,9342],\"disallowed_STD3_mapped\",[40,49,49,41]],[[9343,9343],\"disallowed_STD3_mapped\",[40,49,50,41]],[[9344,9344],\"disallowed_STD3_mapped\",[40,49,51,41]],[[9345,9345],\"disallowed_STD3_mapped\",[40,49,52,41]],[[9346,9346],\"disallowed_STD3_mapped\",[40,49,53,41]],[[9347,9347],\"disallowed_STD3_mapped\",[40,49,54,41]],[[9348,9348],\"disallowed_STD3_mapped\",[40,49,55,41]],[[9349,9349],\"disallowed_STD3_mapped\",[40,49,56,41]],[[9350,9350],\"disallowed_STD3_mapped\",[40,49,57,41]],[[9351,9351],\"disallowed_STD
3_mapped\",[40,50,48,41]],[[9352,9371],\"disallowed\"],[[9372,9372],\"disallowed_STD3_mapped\",[40,97,41]],[[9373,9373],\"disallowed_STD3_mapped\",[40,98,41]],[[9374,9374],\"disallowed_STD3_mapped\",[40,99,41]],[[9375,9375],\"disallowed_STD3_mapped\",[40,100,41]],[[9376,9376],\"disallowed_STD3_mapped\",[40,101,41]],[[9377,9377],\"disallowed_STD3_mapped\",[40,102,41]],[[9378,9378],\"disallowed_STD3_mapped\",[40,103,41]],[[9379,9379],\"disallowed_STD3_mapped\",[40,104,41]],[[9380,9380],\"disallowed_STD3_mapped\",[40,105,41]],[[9381,9381],\"disallowed_STD3_mapped\",[40,106,41]],[[9382,9382],\"disallowed_STD3_mapped\",[40,107,41]],[[9383,9383],\"disallowed_STD3_mapped\",[40,108,41]],[[9384,9384],\"disallowed_STD3_mapped\",[40,109,41]],[[9385,9385],\"disallowed_STD3_mapped\",[40,110,41]],[[9386,9386],\"disallowed_STD3_mapped\",[40,111,41]],[[9387,9387],\"disallowed_STD3_mapped\",[40,112,41]],[[9388,9388],\"disallowed_STD3_mapped\",[40,113,41]],[[9389,9389],\"disallowed_STD3_mapped\",[40,114,41]],[[9390,9390],\"disallowed_STD3_mapped\",[40,115,41]],[[9391,9391],\"disallowed_STD3_mapped\",[40,116,41]],[[9392,9392],\"disallowed_STD3_mapped\",[40,117,41]],[[9393,9393],\"disallowed_STD3_mapped\",[40,118,41]],[[9394,9394],\"disallowed_STD3_mapped\",[40,119,41]],[[9395,9395],\"disallowed_STD3_mapped\",[40,120,41]],[[9396,9396],\"disallowed_STD3_mapped\",[40,121,41]],[[9397,9397],\"disallowed_STD3_mapped\",[40,122,41]],[[9398,9398],\"mapped\",[97]],[[9399,9399],\"mapped\",[98]],[[9400,9400],\"mapped\",[99]],[[9401,9401],\"mapped\",[100]],[[9402,9402],\"mapped\",[101]],[[9403,9403],\"mapped\",[102]],[[9404,9404],\"mapped\",[103]],[[9405,9405],\"mapped\",[104]],[[9406,9406],\"mapped\",[105]],[[9407,9407],\"mapped\",[106]],[[9408,9408],\"mapped\",[107]],[[9409,9409],\"mapped\",[108]],[[9410,9410],\"mapped\",[109]],[[9411,9411],\"mapped\",[110]],[[9412,9412],\"mapped\",[111]],[[9413,9413],\"mapped\",[112]],[[9414,9414],\"mapped\",[113]],[[9415,9415],\"mapped\",[114]],[[9416,9416],\"
mapped\",[115]],[[9417,9417],\"mapped\",[116]],[[9418,9418],\"mapped\",[117]],[[9419,9419],\"mapped\",[118]],[[9420,9420],\"mapped\",[119]],[[9421,9421],\"mapped\",[120]],[[9422,9422],\"mapped\",[121]],[[9423,9423],\"mapped\",[122]],[[9424,9424],\"mapped\",[97]],[[9425,9425],\"mapped\",[98]],[[9426,9426],\"mapped\",[99]],[[9427,9427],\"mapped\",[100]],[[9428,9428],\"mapped\",[101]],[[9429,9429],\"mapped\",[102]],[[9430,9430],\"mapped\",[103]],[[9431,9431],\"mapped\",[104]],[[9432,9432],\"mapped\",[105]],[[9433,9433],\"mapped\",[106]],[[9434,9434],\"mapped\",[107]],[[9435,9435],\"mapped\",[108]],[[9436,9436],\"mapped\",[109]],[[9437,9437],\"mapped\",[110]],[[9438,9438],\"mapped\",[111]],[[9439,9439],\"mapped\",[112]],[[9440,9440],\"mapped\",[113]],[[9441,9441],\"mapped\",[114]],[[9442,9442],\"mapped\",[115]],[[9443,9443],\"mapped\",[116]],[[9444,9444],\"mapped\",[117]],[[9445,9445],\"mapped\",[118]],[[9446,9446],\"mapped\",[119]],[[9447,9447],\"mapped\",[120]],[[9448,9448],\"mapped\",[121]],[[9449,9449],\"mapped\",[122]],[[9450,9450],\"mapped\",[48]],[[9451,9470],\"valid\",[],\"NV8\"],[[9471,9471],\"valid\",[],\"NV8\"],[[9472,9621],\"valid\",[],\"NV8\"],[[9622,9631],\"valid\",[],\"NV8\"],[[9632,9711],\"valid\",[],\"NV8\"],[[9712,9719],\"valid\",[],\"NV8\"],[[9720,9727],\"valid\",[],\"NV8\"],[[9728,9747],\"valid\",[],\"NV8\"],[[9748,9749],\"valid\",[],\"NV8\"],[[9750,9751],\"valid\",[],\"NV8\"],[[9752,9752],\"valid\",[],\"NV8\"],[[9753,9753],\"valid\",[],\"NV8\"],[[9754,9839],\"valid\",[],\"NV8\"],[[9840,9841],\"valid\",[],\"NV8\"],[[9842,9853],\"valid\",[],\"NV8\"],[[9854,9855],\"valid\",[],\"NV8\"],[[9856,9865],\"valid\",[],\"NV8\"],[[9866,9873],\"valid\",[],\"NV8\"],[[9874,9884],\"valid\",[],\"NV8\"],[[9885,9885],\"valid\",[],\"NV8\"],[[9886,9887],\"valid\",[],\"NV8\"],[[9888,9889],\"valid\",[],\"NV8\"],[[9890,9905],\"valid\",[],\"NV8\"],[[9906,9906],\"valid\",[],\"NV8\"],[[9907,9916],\"valid\",[],\"NV8\"],[[9917,9919],\"valid\",[],\"NV8\"],[[9920,9923],\"valid\",[
],\"NV8\"],[[9924,9933],\"valid\",[],\"NV8\"],[[9934,9934],\"valid\",[],\"NV8\"],[[9935,9953],\"valid\",[],\"NV8\"],[[9954,9954],\"valid\",[],\"NV8\"],[[9955,9955],\"valid\",[],\"NV8\"],[[9956,9959],\"valid\",[],\"NV8\"],[[9960,9983],\"valid\",[],\"NV8\"],[[9984,9984],\"valid\",[],\"NV8\"],[[9985,9988],\"valid\",[],\"NV8\"],[[9989,9989],\"valid\",[],\"NV8\"],[[9990,9993],\"valid\",[],\"NV8\"],[[9994,9995],\"valid\",[],\"NV8\"],[[9996,10023],\"valid\",[],\"NV8\"],[[10024,10024],\"valid\",[],\"NV8\"],[[10025,10059],\"valid\",[],\"NV8\"],[[10060,10060],\"valid\",[],\"NV8\"],[[10061,10061],\"valid\",[],\"NV8\"],[[10062,10062],\"valid\",[],\"NV8\"],[[10063,10066],\"valid\",[],\"NV8\"],[[10067,10069],\"valid\",[],\"NV8\"],[[10070,10070],\"valid\",[],\"NV8\"],[[10071,10071],\"valid\",[],\"NV8\"],[[10072,10078],\"valid\",[],\"NV8\"],[[10079,10080],\"valid\",[],\"NV8\"],[[10081,10087],\"valid\",[],\"NV8\"],[[10088,10101],\"valid\",[],\"NV8\"],[[10102,10132],\"valid\",[],\"NV8\"],[[10133,10135],\"valid\",[],\"NV8\"],[[10136,10159],\"valid\",[],\"NV8\"],[[10160,10160],\"valid\",[],\"NV8\"],[[10161,10174],\"valid\",[],\"NV8\"],[[10175,10175],\"valid\",[],\"NV8\"],[[10176,10182],\"valid\",[],\"NV8\"],[[10183,10186],\"valid\",[],\"NV8\"],[[10187,10187],\"valid\",[],\"NV8\"],[[10188,10188],\"valid\",[],\"NV8\"],[[10189,10189],\"valid\",[],\"NV8\"],[[10190,10191],\"valid\",[],\"NV8\"],[[10192,10219],\"valid\",[],\"NV8\"],[[10220,10223],\"valid\",[],\"NV8\"],[[10224,10239],\"valid\",[],\"NV8\"],[[10240,10495],\"valid\",[],\"NV8\"],[[10496,10763],\"valid\",[],\"NV8\"],[[10764,10764],\"mapped\",[8747,8747,8747,8747]],[[10765,10867],\"valid\",[],\"NV8\"],[[10868,10868],\"disallowed_STD3_mapped\",[58,58,61]],[[10869,10869],\"disallowed_STD3_mapped\",[61,61]],[[10870,10870],\"disallowed_STD3_mapped\",[61,61,61]],[[10871,10971],\"valid\",[],\"NV8\"],[[10972,10972],\"mapped\",[10973,824]],[[10973,11007],\"valid\",[],\"NV8\"],[[11008,11021],\"valid\",[],\"NV8\"],[[11022,11027],\"valid\",[],
\"NV8\"],[[11028,11034],\"valid\",[],\"NV8\"],[[11035,11039],\"valid\",[],\"NV8\"],[[11040,11043],\"valid\",[],\"NV8\"],[[11044,11084],\"valid\",[],\"NV8\"],[[11085,11087],\"valid\",[],\"NV8\"],[[11088,11092],\"valid\",[],\"NV8\"],[[11093,11097],\"valid\",[],\"NV8\"],[[11098,11123],\"valid\",[],\"NV8\"],[[11124,11125],\"disallowed\"],[[11126,11157],\"valid\",[],\"NV8\"],[[11158,11159],\"disallowed\"],[[11160,11193],\"valid\",[],\"NV8\"],[[11194,11196],\"disallowed\"],[[11197,11208],\"valid\",[],\"NV8\"],[[11209,11209],\"disallowed\"],[[11210,11217],\"valid\",[],\"NV8\"],[[11218,11243],\"disallowed\"],[[11244,11247],\"valid\",[],\"NV8\"],[[11248,11263],\"disallowed\"],[[11264,11264],\"mapped\",[11312]],[[11265,11265],\"mapped\",[11313]],[[11266,11266],\"mapped\",[11314]],[[11267,11267],\"mapped\",[11315]],[[11268,11268],\"mapped\",[11316]],[[11269,11269],\"mapped\",[11317]],[[11270,11270],\"mapped\",[11318]],[[11271,11271],\"mapped\",[11319]],[[11272,11272],\"mapped\",[11320]],[[11273,11273],\"mapped\",[11321]],[[11274,11274],\"mapped\",[11322]],[[11275,11275],\"mapped\",[11323]],[[11276,11276],\"mapped\",[11324]],[[11277,11277],\"mapped\",[11325]],[[11278,11278],\"mapped\",[11326]],[[11279,11279],\"mapped\",[11327]],[[11280,11280],\"mapped\",[11328]],[[11281,11281],\"mapped\",[11329]],[[11282,11282],\"mapped\",[11330]],[[11283,11283],\"mapped\",[11331]],[[11284,11284],\"mapped\",[11332]],[[11285,11285],\"mapped\",[11333]],[[11286,11286],\"mapped\",[11334]],[[11287,11287],\"mapped\",[11335]],[[11288,11288],\"mapped\",[11336]],[[11289,11289],\"mapped\",[11337]],[[11290,11290],\"mapped\",[11338]],[[11291,11291],\"mapped\",[11339]],[[11292,11292],\"mapped\",[11340]],[[11293,11293],\"mapped\",[11341]],[[11294,11294],\"mapped\",[11342]],[[11295,11295],\"mapped\",[11343]],[[11296,11296],\"mapped\",[11344]],[[11297,11297],\"mapped\",[11345]],[[11298,11298],\"mapped\",[11346]],[[11299,11299],\"mapped\",[11347]],[[11300,11300],\"mapped\",[11348]],[[11301,11301],\"mapped\",[11
349]],[[11302,11302],\"mapped\",[11350]],[[11303,11303],\"mapped\",[11351]],[[11304,11304],\"mapped\",[11352]],[[11305,11305],\"mapped\",[11353]],[[11306,11306],\"mapped\",[11354]],[[11307,11307],\"mapped\",[11355]],[[11308,11308],\"mapped\",[11356]],[[11309,11309],\"mapped\",[11357]],[[11310,11310],\"mapped\",[11358]],[[11311,11311],\"disallowed\"],[[11312,11358],\"valid\"],[[11359,11359],\"disallowed\"],[[11360,11360],\"mapped\",[11361]],[[11361,11361],\"valid\"],[[11362,11362],\"mapped\",[619]],[[11363,11363],\"mapped\",[7549]],[[11364,11364],\"mapped\",[637]],[[11365,11366],\"valid\"],[[11367,11367],\"mapped\",[11368]],[[11368,11368],\"valid\"],[[11369,11369],\"mapped\",[11370]],[[11370,11370],\"valid\"],[[11371,11371],\"mapped\",[11372]],[[11372,11372],\"valid\"],[[11373,11373],\"mapped\",[593]],[[11374,11374],\"mapped\",[625]],[[11375,11375],\"mapped\",[592]],[[11376,11376],\"mapped\",[594]],[[11377,11377],\"valid\"],[[11378,11378],\"mapped\",[11379]],[[11379,11379],\"valid\"],[[11380,11380],\"valid\"],[[11381,11381],\"mapped\",[11382]],[[11382,11383],\"valid\"],[[11384,11387],\"valid\"],[[11388,11388],\"mapped\",[106]],[[11389,11389],\"mapped\",[118]],[[11390,11390],\"mapped\",[575]],[[11391,11391],\"mapped\",[576]],[[11392,11392],\"mapped\",[11393]],[[11393,11393],\"valid\"],[[11394,11394],\"mapped\",[11395]],[[11395,11395],\"valid\"],[[11396,11396],\"mapped\",[11397]],[[11397,11397],\"valid\"],[[11398,11398],\"mapped\",[11399]],[[11399,11399],\"valid\"],[[11400,11400],\"mapped\",[11401]],[[11401,11401],\"valid\"],[[11402,11402],\"mapped\",[11403]],[[11403,11403],\"valid\"],[[11404,11404],\"mapped\",[11405]],[[11405,11405],\"valid\"],[[11406,11406],\"mapped\",[11407]],[[11407,11407],\"valid\"],[[11408,11408],\"mapped\",[11409]],[[11409,11409],\"valid\"],[[11410,11410],\"mapped\",[11411]],[[11411,11411],\"valid\"],[[11412,11412],\"mapped\",[11413]],[[11413,11413],\"valid\"],[[11414,11414],\"mapped\",[11415]],[[11415,11415],\"valid\"],[[11416,11416],\"mapped\"
,[11417]],[[11417,11417],\"valid\"],[[11418,11418],\"mapped\",[11419]],[[11419,11419],\"valid\"],[[11420,11420],\"mapped\",[11421]],[[11421,11421],\"valid\"],[[11422,11422],\"mapped\",[11423]],[[11423,11423],\"valid\"],[[11424,11424],\"mapped\",[11425]],[[11425,11425],\"valid\"],[[11426,11426],\"mapped\",[11427]],[[11427,11427],\"valid\"],[[11428,11428],\"mapped\",[11429]],[[11429,11429],\"valid\"],[[11430,11430],\"mapped\",[11431]],[[11431,11431],\"valid\"],[[11432,11432],\"mapped\",[11433]],[[11433,11433],\"valid\"],[[11434,11434],\"mapped\",[11435]],[[11435,11435],\"valid\"],[[11436,11436],\"mapped\",[11437]],[[11437,11437],\"valid\"],[[11438,11438],\"mapped\",[11439]],[[11439,11439],\"valid\"],[[11440,11440],\"mapped\",[11441]],[[11441,11441],\"valid\"],[[11442,11442],\"mapped\",[11443]],[[11443,11443],\"valid\"],[[11444,11444],\"mapped\",[11445]],[[11445,11445],\"valid\"],[[11446,11446],\"mapped\",[11447]],[[11447,11447],\"valid\"],[[11448,11448],\"mapped\",[11449]],[[11449,11449],\"valid\"],[[11450,11450],\"mapped\",[11451]],[[11451,11451],\"valid\"],[[11452,11452],\"mapped\",[11453]],[[11453,11453],\"valid\"],[[11454,11454],\"mapped\",[11455]],[[11455,11455],\"valid\"],[[11456,11456],\"mapped\",[11457]],[[11457,11457],\"valid\"],[[11458,11458],\"mapped\",[11459]],[[11459,11459],\"valid\"],[[11460,11460],\"mapped\",[11461]],[[11461,11461],\"valid\"],[[11462,11462],\"mapped\",[11463]],[[11463,11463],\"valid\"],[[11464,11464],\"mapped\",[11465]],[[11465,11465],\"valid\"],[[11466,11466],\"mapped\",[11467]],[[11467,11467],\"valid\"],[[11468,11468],\"mapped\",[11469]],[[11469,11469],\"valid\"],[[11470,11470],\"mapped\",[11471]],[[11471,11471],\"valid\"],[[11472,11472],\"mapped\",[11473]],[[11473,11473],\"valid\"],[[11474,11474],\"mapped\",[11475]],[[11475,11475],\"valid\"],[[11476,11476],\"mapped\",[11477]],[[11477,11477],\"valid\"],[[11478,11478],\"mapped\",[11479]],[[11479,11479],\"valid\"],[[11480,11480],\"mapped\",[11481]],[[11481,11481],\"valid\"],[[11482,1148
2],\"mapped\",[11483]],[[11483,11483],\"valid\"],[[11484,11484],\"mapped\",[11485]],[[11485,11485],\"valid\"],[[11486,11486],\"mapped\",[11487]],[[11487,11487],\"valid\"],[[11488,11488],\"mapped\",[11489]],[[11489,11489],\"valid\"],[[11490,11490],\"mapped\",[11491]],[[11491,11492],\"valid\"],[[11493,11498],\"valid\",[],\"NV8\"],[[11499,11499],\"mapped\",[11500]],[[11500,11500],\"valid\"],[[11501,11501],\"mapped\",[11502]],[[11502,11505],\"valid\"],[[11506,11506],\"mapped\",[11507]],[[11507,11507],\"valid\"],[[11508,11512],\"disallowed\"],[[11513,11519],\"valid\",[],\"NV8\"],[[11520,11557],\"valid\"],[[11558,11558],\"disallowed\"],[[11559,11559],\"valid\"],[[11560,11564],\"disallowed\"],[[11565,11565],\"valid\"],[[11566,11567],\"disallowed\"],[[11568,11621],\"valid\"],[[11622,11623],\"valid\"],[[11624,11630],\"disallowed\"],[[11631,11631],\"mapped\",[11617]],[[11632,11632],\"valid\",[],\"NV8\"],[[11633,11646],\"disallowed\"],[[11647,11647],\"valid\"],[[11648,11670],\"valid\"],[[11671,11679],\"disallowed\"],[[11680,11686],\"valid\"],[[11687,11687],\"disallowed\"],[[11688,11694],\"valid\"],[[11695,11695],\"disallowed\"],[[11696,11702],\"valid\"],[[11703,11703],\"disallowed\"],[[11704,11710],\"valid\"],[[11711,11711],\"disallowed\"],[[11712,11718],\"valid\"],[[11719,11719],\"disallowed\"],[[11720,11726],\"valid\"],[[11727,11727],\"disallowed\"],[[11728,11734],\"valid\"],[[11735,11735],\"disallowed\"],[[11736,11742],\"valid\"],[[11743,11743],\"disallowed\"],[[11744,11775],\"valid\"],[[11776,11799],\"valid\",[],\"NV8\"],[[11800,11803],\"valid\",[],\"NV8\"],[[11804,11805],\"valid\",[],\"NV8\"],[[11806,11822],\"valid\",[],\"NV8\"],[[11823,11823],\"valid\"],[[11824,11824],\"valid\",[],\"NV8\"],[[11825,11825],\"valid\",[],\"NV8\"],[[11826,11835],\"valid\",[],\"NV8\"],[[11836,11842],\"valid\",[],\"NV8\"],[[11843,11903],\"disallowed\"],[[11904,11929],\"valid\",[],\"NV8\"],[[11930,11930],\"disallowed\"],[[11931,11934],\"valid\",[],\"NV8\"],[[11935,11935],\"mapped\",[27597]],[[11
936,12018],\"valid\",[],\"NV8\"],[[12019,12019],\"mapped\",[40863]],[[12020,12031],\"disallowed\"],[[12032,12032],\"mapped\",[19968]],[[12033,12033],\"mapped\",[20008]],[[12034,12034],\"mapped\",[20022]],[[12035,12035],\"mapped\",[20031]],[[12036,12036],\"mapped\",[20057]],[[12037,12037],\"mapped\",[20101]],[[12038,12038],\"mapped\",[20108]],[[12039,12039],\"mapped\",[20128]],[[12040,12040],\"mapped\",[20154]],[[12041,12041],\"mapped\",[20799]],[[12042,12042],\"mapped\",[20837]],[[12043,12043],\"mapped\",[20843]],[[12044,12044],\"mapped\",[20866]],[[12045,12045],\"mapped\",[20886]],[[12046,12046],\"mapped\",[20907]],[[12047,12047],\"mapped\",[20960]],[[12048,12048],\"mapped\",[20981]],[[12049,12049],\"mapped\",[20992]],[[12050,12050],\"mapped\",[21147]],[[12051,12051],\"mapped\",[21241]],[[12052,12052],\"mapped\",[21269]],[[12053,12053],\"mapped\",[21274]],[[12054,12054],\"mapped\",[21304]],[[12055,12055],\"mapped\",[21313]],[[12056,12056],\"mapped\",[21340]],[[12057,12057],\"mapped\",[21353]],[[12058,12058],\"mapped\",[21378]],[[12059,12059],\"mapped\",[21430]],[[12060,12060],\"mapped\",[21448]],[[12061,12061],\"mapped\",[21475]],[[12062,12062],\"mapped\",[22231]],[[12063,12063],\"mapped\",[22303]],[[12064,12064],\"mapped\",[22763]],[[12065,12065],\"mapped\",[22786]],[[12066,12066],\"mapped\",[22794]],[[12067,12067],\"mapped\",[22805]],[[12068,12068],\"mapped\",[22823]],[[12069,12069],\"mapped\",[22899]],[[12070,12070],\"mapped\",[23376]],[[12071,12071],\"mapped\",[23424]],[[12072,12072],\"mapped\",[23544]],[[12073,12073],\"mapped\",[23567]],[[12074,12074],\"mapped\",[23586]],[[12075,12075],\"mapped\",[23608]],[[12076,12076],\"mapped\",[23662]],[[12077,12077],\"mapped\",[23665]],[[12078,12078],\"mapped\",[24027]],[[12079,12079],\"mapped\",[24037]],[[12080,12080],\"mapped\",[24049]],[[12081,12081],\"mapped\",[24062]],[[12082,12082],\"mapped\",[24178]],[[12083,12083],\"mapped\",[24186]],[[12084,12084],\"mapped\",[24191]],[[12085,12085],\"mapped\",[24308]],[[12086,120
86],\"mapped\",[24318]],[[12087,12087],\"mapped\",[24331]],[[12088,12088],\"mapped\",[24339]],[[12089,12089],\"mapped\",[24400]],[[12090,12090],\"mapped\",[24417]],[[12091,12091],\"mapped\",[24435]],[[12092,12092],\"mapped\",[24515]],[[12093,12093],\"mapped\",[25096]],[[12094,12094],\"mapped\",[25142]],[[12095,12095],\"mapped\",[25163]],[[12096,12096],\"mapped\",[25903]],[[12097,12097],\"mapped\",[25908]],[[12098,12098],\"mapped\",[25991]],[[12099,12099],\"mapped\",[26007]],[[12100,12100],\"mapped\",[26020]],[[12101,12101],\"mapped\",[26041]],[[12102,12102],\"mapped\",[26080]],[[12103,12103],\"mapped\",[26085]],[[12104,12104],\"mapped\",[26352]],[[12105,12105],\"mapped\",[26376]],[[12106,12106],\"mapped\",[26408]],[[12107,12107],\"mapped\",[27424]],[[12108,12108],\"mapped\",[27490]],[[12109,12109],\"mapped\",[27513]],[[12110,12110],\"mapped\",[27571]],[[12111,12111],\"mapped\",[27595]],[[12112,12112],\"mapped\",[27604]],[[12113,12113],\"mapped\",[27611]],[[12114,12114],\"mapped\",[27663]],[[12115,12115],\"mapped\",[27668]],[[12116,12116],\"mapped\",[27700]],[[12117,12117],\"mapped\",[28779]],[[12118,12118],\"mapped\",[29226]],[[12119,12119],\"mapped\",[29238]],[[12120,12120],\"mapped\",[29243]],[[12121,12121],\"mapped\",[29247]],[[12122,12122],\"mapped\",[29255]],[[12123,12123],\"mapped\",[29273]],[[12124,12124],\"mapped\",[29275]],[[12125,12125],\"mapped\",[29356]],[[12126,12126],\"mapped\",[29572]],[[12127,12127],\"mapped\",[29577]],[[12128,12128],\"mapped\",[29916]],[[12129,12129],\"mapped\",[29926]],[[12130,12130],\"mapped\",[29976]],[[12131,12131],\"mapped\",[29983]],[[12132,12132],\"mapped\",[29992]],[[12133,12133],\"mapped\",[30000]],[[12134,12134],\"mapped\",[30091]],[[12135,12135],\"mapped\",[30098]],[[12136,12136],\"mapped\",[30326]],[[12137,12137],\"mapped\",[30333]],[[12138,12138],\"mapped\",[30382]],[[12139,12139],\"mapped\",[30399]],[[12140,12140],\"mapped\",[30446]],[[12141,12141],\"mapped\",[30683]],[[12142,12142],\"mapped\",[30690]],[[12143,12143],\
"mapped\",[30707]],[[12144,12144],\"mapped\",[31034]],[[12145,12145],\"mapped\",[31160]],[[12146,12146],\"mapped\",[31166]],[[12147,12147],\"mapped\",[31348]],[[12148,12148],\"mapped\",[31435]],[[12149,12149],\"mapped\",[31481]],[[12150,12150],\"mapped\",[31859]],[[12151,12151],\"mapped\",[31992]],[[12152,12152],\"mapped\",[32566]],[[12153,12153],\"mapped\",[32593]],[[12154,12154],\"mapped\",[32650]],[[12155,12155],\"mapped\",[32701]],[[12156,12156],\"mapped\",[32769]],[[12157,12157],\"mapped\",[32780]],[[12158,12158],\"mapped\",[32786]],[[12159,12159],\"mapped\",[32819]],[[12160,12160],\"mapped\",[32895]],[[12161,12161],\"mapped\",[32905]],[[12162,12162],\"mapped\",[33251]],[[12163,12163],\"mapped\",[33258]],[[12164,12164],\"mapped\",[33267]],[[12165,12165],\"mapped\",[33276]],[[12166,12166],\"mapped\",[33292]],[[12167,12167],\"mapped\",[33307]],[[12168,12168],\"mapped\",[33311]],[[12169,12169],\"mapped\",[33390]],[[12170,12170],\"mapped\",[33394]],[[12171,12171],\"mapped\",[33400]],[[12172,12172],\"mapped\",[34381]],[[12173,12173],\"mapped\",[34411]],[[12174,12174],\"mapped\",[34880]],[[12175,12175],\"mapped\",[34892]],[[12176,12176],\"mapped\",[34915]],[[12177,12177],\"mapped\",[35198]],[[12178,12178],\"mapped\",[35211]],[[12179,12179],\"mapped\",[35282]],[[12180,12180],\"mapped\",[35328]],[[12181,12181],\"mapped\",[35895]],[[12182,12182],\"mapped\",[35910]],[[12183,12183],\"mapped\",[35925]],[[12184,12184],\"mapped\",[35960]],[[12185,12185],\"mapped\",[35997]],[[12186,12186],\"mapped\",[36196]],[[12187,12187],\"mapped\",[36208]],[[12188,12188],\"mapped\",[36275]],[[12189,12189],\"mapped\",[36523]],[[12190,12190],\"mapped\",[36554]],[[12191,12191],\"mapped\",[36763]],[[12192,12192],\"mapped\",[36784]],[[12193,12193],\"mapped\",[36789]],[[12194,12194],\"mapped\",[37009]],[[12195,12195],\"mapped\",[37193]],[[12196,12196],\"mapped\",[37318]],[[12197,12197],\"mapped\",[37324]],[[12198,12198],\"mapped\",[37329]],[[12199,12199],\"mapped\",[38263]],[[12200,12200],\"mapp
ed\",[38272]],[[12201,12201],\"mapped\",[38428]],[[12202,12202],\"mapped\",[38582]],[[12203,12203],\"mapped\",[38585]],[[12204,12204],\"mapped\",[38632]],[[12205,12205],\"mapped\",[38737]],[[12206,12206],\"mapped\",[38750]],[[12207,12207],\"mapped\",[38754]],[[12208,12208],\"mapped\",[38761]],[[12209,12209],\"mapped\",[38859]],[[12210,12210],\"mapped\",[38893]],[[12211,12211],\"mapped\",[38899]],[[12212,12212],\"mapped\",[38913]],[[12213,12213],\"mapped\",[39080]],[[12214,12214],\"mapped\",[39131]],[[12215,12215],\"mapped\",[39135]],[[12216,12216],\"mapped\",[39318]],[[12217,12217],\"mapped\",[39321]],[[12218,12218],\"mapped\",[39340]],[[12219,12219],\"mapped\",[39592]],[[12220,12220],\"mapped\",[39640]],[[12221,12221],\"mapped\",[39647]],[[12222,12222],\"mapped\",[39717]],[[12223,12223],\"mapped\",[39727]],[[12224,12224],\"mapped\",[39730]],[[12225,12225],\"mapped\",[39740]],[[12226,12226],\"mapped\",[39770]],[[12227,12227],\"mapped\",[40165]],[[12228,12228],\"mapped\",[40565]],[[12229,12229],\"mapped\",[40575]],[[12230,12230],\"mapped\",[40613]],[[12231,12231],\"mapped\",[40635]],[[12232,12232],\"mapped\",[40643]],[[12233,12233],\"mapped\",[40653]],[[12234,12234],\"mapped\",[40657]],[[12235,12235],\"mapped\",[40697]],[[12236,12236],\"mapped\",[40701]],[[12237,12237],\"mapped\",[40718]],[[12238,12238],\"mapped\",[40723]],[[12239,12239],\"mapped\",[40736]],[[12240,12240],\"mapped\",[40763]],[[12241,12241],\"mapped\",[40778]],[[12242,12242],\"mapped\",[40786]],[[12243,12243],\"mapped\",[40845]],[[12244,12244],\"mapped\",[40860]],[[12245,12245],\"mapped\",[40864]],[[12246,12271],\"disallowed\"],[[12272,12283],\"disallowed\"],[[12284,12287],\"disallowed\"],[[12288,12288],\"disallowed_STD3_mapped\",[32]],[[12289,12289],\"valid\",[],\"NV8\"],[[12290,12290],\"mapped\",[46]],[[12291,12292],\"valid\",[],\"NV8\"],[[12293,12295],\"valid\"],[[12296,12329],\"valid\",[],\"NV8\"],[[12330,12333],\"valid\"],[[12334,12341],\"valid\",[],\"NV8\"],[[12342,12342],\"mapped\",[12306]],[[1
2343,12343],\"valid\",[],\"NV8\"],[[12344,12344],\"mapped\",[21313]],[[12345,12345],\"mapped\",[21316]],[[12346,12346],\"mapped\",[21317]],[[12347,12347],\"valid\",[],\"NV8\"],[[12348,12348],\"valid\"],[[12349,12349],\"valid\",[],\"NV8\"],[[12350,12350],\"valid\",[],\"NV8\"],[[12351,12351],\"valid\",[],\"NV8\"],[[12352,12352],\"disallowed\"],[[12353,12436],\"valid\"],[[12437,12438],\"valid\"],[[12439,12440],\"disallowed\"],[[12441,12442],\"valid\"],[[12443,12443],\"disallowed_STD3_mapped\",[32,12441]],[[12444,12444],\"disallowed_STD3_mapped\",[32,12442]],[[12445,12446],\"valid\"],[[12447,12447],\"mapped\",[12424,12426]],[[12448,12448],\"valid\",[],\"NV8\"],[[12449,12542],\"valid\"],[[12543,12543],\"mapped\",[12467,12488]],[[12544,12548],\"disallowed\"],[[12549,12588],\"valid\"],[[12589,12589],\"valid\"],[[12590,12592],\"disallowed\"],[[12593,12593],\"mapped\",[4352]],[[12594,12594],\"mapped\",[4353]],[[12595,12595],\"mapped\",[4522]],[[12596,12596],\"mapped\",[4354]],[[12597,12597],\"mapped\",[4524]],[[12598,12598],\"mapped\",[4525]],[[12599,12599],\"mapped\",[4355]],[[12600,12600],\"mapped\",[4356]],[[12601,12601],\"mapped\",[4357]],[[12602,12602],\"mapped\",[4528]],[[12603,12603],\"mapped\",[4529]],[[12604,12604],\"mapped\",[4530]],[[12605,12605],\"mapped\",[4531]],[[12606,12606],\"mapped\",[4532]],[[12607,12607],\"mapped\",[4533]],[[12608,12608],\"mapped\",[4378]],[[12609,12609],\"mapped\",[4358]],[[12610,12610],\"mapped\",[4359]],[[12611,12611],\"mapped\",[4360]],[[12612,12612],\"mapped\",[4385]],[[12613,12613],\"mapped\",[4361]],[[12614,12614],\"mapped\",[4362]],[[12615,12615],\"mapped\",[4363]],[[12616,12616],\"mapped\",[4364]],[[12617,12617],\"mapped\",[4365]],[[12618,12618],\"mapped\",[4366]],[[12619,12619],\"mapped\",[4367]],[[12620,12620],\"mapped\",[4368]],[[12621,12621],\"mapped\",[4369]],[[12622,12622],\"mapped\",[4370]],[[12623,12623],\"mapped\",[4449]],[[12624,12624],\"mapped\",[4450]],[[12625,12625],\"mapped\",[4451]],[[12626,12626],\"mapped\",[4452]
],[[12627,12627],\"mapped\",[4453]],[[12628,12628],\"mapped\",[4454]],[[12629,12629],\"mapped\",[4455]],[[12630,12630],\"mapped\",[4456]],[[12631,12631],\"mapped\",[4457]],[[12632,12632],\"mapped\",[4458]],[[12633,12633],\"mapped\",[4459]],[[12634,12634],\"mapped\",[4460]],[[12635,12635],\"mapped\",[4461]],[[12636,12636],\"mapped\",[4462]],[[12637,12637],\"mapped\",[4463]],[[12638,12638],\"mapped\",[4464]],[[12639,12639],\"mapped\",[4465]],[[12640,12640],\"mapped\",[4466]],[[12641,12641],\"mapped\",[4467]],[[12642,12642],\"mapped\",[4468]],[[12643,12643],\"mapped\",[4469]],[[12644,12644],\"disallowed\"],[[12645,12645],\"mapped\",[4372]],[[12646,12646],\"mapped\",[4373]],[[12647,12647],\"mapped\",[4551]],[[12648,12648],\"mapped\",[4552]],[[12649,12649],\"mapped\",[4556]],[[12650,12650],\"mapped\",[4558]],[[12651,12651],\"mapped\",[4563]],[[12652,12652],\"mapped\",[4567]],[[12653,12653],\"mapped\",[4569]],[[12654,12654],\"mapped\",[4380]],[[12655,12655],\"mapped\",[4573]],[[12656,12656],\"mapped\",[4575]],[[12657,12657],\"mapped\",[4381]],[[12658,12658],\"mapped\",[4382]],[[12659,12659],\"mapped\",[4384]],[[12660,12660],\"mapped\",[4386]],[[12661,12661],\"mapped\",[4387]],[[12662,12662],\"mapped\",[4391]],[[12663,12663],\"mapped\",[4393]],[[12664,12664],\"mapped\",[4395]],[[12665,12665],\"mapped\",[4396]],[[12666,12666],\"mapped\",[4397]],[[12667,12667],\"mapped\",[4398]],[[12668,12668],\"mapped\",[4399]],[[12669,12669],\"mapped\",[4402]],[[12670,12670],\"mapped\",[4406]],[[12671,12671],\"mapped\",[4416]],[[12672,12672],\"mapped\",[4423]],[[12673,12673],\"mapped\",[4428]],[[12674,12674],\"mapped\",[4593]],[[12675,12675],\"mapped\",[4594]],[[12676,12676],\"mapped\",[4439]],[[12677,12677],\"mapped\",[4440]],[[12678,12678],\"mapped\",[4441]],[[12679,12679],\"mapped\",[4484]],[[12680,12680],\"mapped\",[4485]],[[12681,12681],\"mapped\",[4488]],[[12682,12682],\"mapped\",[4497]],[[12683,12683],\"mapped\",[4498]],[[12684,12684],\"mapped\",[4500]],[[12685,12685],\"mapped\",[45
10]],[[12686,12686],\"mapped\",[4513]],[[12687,12687],\"disallowed\"],[[12688,12689],\"valid\",[],\"NV8\"],[[12690,12690],\"mapped\",[19968]],[[12691,12691],\"mapped\",[20108]],[[12692,12692],\"mapped\",[19977]],[[12693,12693],\"mapped\",[22235]],[[12694,12694],\"mapped\",[19978]],[[12695,12695],\"mapped\",[20013]],[[12696,12696],\"mapped\",[19979]],[[12697,12697],\"mapped\",[30002]],[[12698,12698],\"mapped\",[20057]],[[12699,12699],\"mapped\",[19993]],[[12700,12700],\"mapped\",[19969]],[[12701,12701],\"mapped\",[22825]],[[12702,12702],\"mapped\",[22320]],[[12703,12703],\"mapped\",[20154]],[[12704,12727],\"valid\"],[[12728,12730],\"valid\"],[[12731,12735],\"disallowed\"],[[12736,12751],\"valid\",[],\"NV8\"],[[12752,12771],\"valid\",[],\"NV8\"],[[12772,12783],\"disallowed\"],[[12784,12799],\"valid\"],[[12800,12800],\"disallowed_STD3_mapped\",[40,4352,41]],[[12801,12801],\"disallowed_STD3_mapped\",[40,4354,41]],[[12802,12802],\"disallowed_STD3_mapped\",[40,4355,41]],[[12803,12803],\"disallowed_STD3_mapped\",[40,4357,41]],[[12804,12804],\"disallowed_STD3_mapped\",[40,4358,41]],[[12805,12805],\"disallowed_STD3_mapped\",[40,4359,41]],[[12806,12806],\"disallowed_STD3_mapped\",[40,4361,41]],[[12807,12807],\"disallowed_STD3_mapped\",[40,4363,41]],[[12808,12808],\"disallowed_STD3_mapped\",[40,4364,41]],[[12809,12809],\"disallowed_STD3_mapped\",[40,4366,41]],[[12810,12810],\"disallowed_STD3_mapped\",[40,4367,41]],[[12811,12811],\"disallowed_STD3_mapped\",[40,4368,41]],[[12812,12812],\"disallowed_STD3_mapped\",[40,4369,41]],[[12813,12813],\"disallowed_STD3_mapped\",[40,4370,41]],[[12814,12814],\"disallowed_STD3_mapped\",[40,44032,41]],[[12815,12815],\"disallowed_STD3_mapped\",[40,45208,41]],[[12816,12816],\"disallowed_STD3_mapped\",[40,45796,41]],[[12817,12817],\"disallowed_STD3_mapped\",[40,46972,41]],[[12818,12818],\"disallowed_STD3_mapped\",[40,47560,41]],[[12819,12819],\"disallowed_STD3_mapped\",[40,48148,41]],[[12820,12820],\"disallowed_STD3_mapped\",[40,49324,41]],[[1282
1,12821],\"disallowed_STD3_mapped\",[40,50500,41]],[[12822,12822],\"disallowed_STD3_mapped\",[40,51088,41]],[[12823,12823],\"disallowed_STD3_mapped\",[40,52264,41]],[[12824,12824],\"disallowed_STD3_mapped\",[40,52852,41]],[[12825,12825],\"disallowed_STD3_mapped\",[40,53440,41]],[[12826,12826],\"disallowed_STD3_mapped\",[40,54028,41]],[[12827,12827],\"disallowed_STD3_mapped\",[40,54616,41]],[[12828,12828],\"disallowed_STD3_mapped\",[40,51452,41]],[[12829,12829],\"disallowed_STD3_mapped\",[40,50724,51204,41]],[[12830,12830],\"disallowed_STD3_mapped\",[40,50724,54980,41]],[[12831,12831],\"disallowed\"],[[12832,12832],\"disallowed_STD3_mapped\",[40,19968,41]],[[12833,12833],\"disallowed_STD3_mapped\",[40,20108,41]],[[12834,12834],\"disallowed_STD3_mapped\",[40,19977,41]],[[12835,12835],\"disallowed_STD3_mapped\",[40,22235,41]],[[12836,12836],\"disallowed_STD3_mapped\",[40,20116,41]],[[12837,12837],\"disallowed_STD3_mapped\",[40,20845,41]],[[12838,12838],\"disallowed_STD3_mapped\",[40,19971,41]],[[12839,12839],\"disallowed_STD3_mapped\",[40,20843,41]],[[12840,12840],\"disallowed_STD3_mapped\",[40,20061,41]],[[12841,12841],\"disallowed_STD3_mapped\",[40,21313,41]],[[12842,12842],\"disallowed_STD3_mapped\",[40,26376,41]],[[12843,12843],\"disallowed_STD3_mapped\",[40,28779,41]],[[12844,12844],\"disallowed_STD3_mapped\",[40,27700,41]],[[12845,12845],\"disallowed_STD3_mapped\",[40,26408,41]],[[12846,12846],\"disallowed_STD3_mapped\",[40,37329,41]],[[12847,12847],\"disallowed_STD3_mapped\",[40,22303,41]],[[12848,12848],\"disallowed_STD3_mapped\",[40,26085,41]],[[12849,12849],\"disallowed_STD3_mapped\",[40,26666,41]],[[12850,12850],\"disallowed_STD3_mapped\",[40,26377,41]],[[12851,12851],\"disallowed_STD3_mapped\",[40,31038,41]],[[12852,12852],\"disallowed_STD3_mapped\",[40,21517,41]],[[12853,12853],\"disallowed_STD3_mapped\",[40,29305,41]],[[12854,12854],\"disallowed_STD3_mapped\",[40,36001,41]],[[12855,12855],\"disallowed_STD3_mapped\",[40,31069,41]],[[12856,12856],\"disallow
ed_STD3_mapped\",[40,21172,41]],[[12857,12857],\"disallowed_STD3_mapped\",[40,20195,41]],[[12858,12858],\"disallowed_STD3_mapped\",[40,21628,41]],[[12859,12859],\"disallowed_STD3_mapped\",[40,23398,41]],[[12860,12860],\"disallowed_STD3_mapped\",[40,30435,41]],[[12861,12861],\"disallowed_STD3_mapped\",[40,20225,41]],[[12862,12862],\"disallowed_STD3_mapped\",[40,36039,41]],[[12863,12863],\"disallowed_STD3_mapped\",[40,21332,41]],[[12864,12864],\"disallowed_STD3_mapped\",[40,31085,41]],[[12865,12865],\"disallowed_STD3_mapped\",[40,20241,41]],[[12866,12866],\"disallowed_STD3_mapped\",[40,33258,41]],[[12867,12867],\"disallowed_STD3_mapped\",[40,33267,41]],[[12868,12868],\"mapped\",[21839]],[[12869,12869],\"mapped\",[24188]],[[12870,12870],\"mapped\",[25991]],[[12871,12871],\"mapped\",[31631]],[[12872,12879],\"valid\",[],\"NV8\"],[[12880,12880],\"mapped\",[112,116,101]],[[12881,12881],\"mapped\",[50,49]],[[12882,12882],\"mapped\",[50,50]],[[12883,12883],\"mapped\",[50,51]],[[12884,12884],\"mapped\",[50,52]],[[12885,12885],\"mapped\",[50,53]],[[12886,12886],\"mapped\",[50,54]],[[12887,12887],\"mapped\",[50,55]],[[12888,12888],\"mapped\",[50,56]],[[12889,12889],\"mapped\",[50,57]],[[12890,12890],\"mapped\",[51,48]],[[12891,12891],\"mapped\",[51,49]],[[12892,12892],\"mapped\",[51,50]],[[12893,12893],\"mapped\",[51,51]],[[12894,12894],\"mapped\",[51,52]],[[12895,12895],\"mapped\",[51,53]],[[12896,12896],\"mapped\",[4352]],[[12897,12897],\"mapped\",[4354]],[[12898,12898],\"mapped\",[4355]],[[12899,12899],\"mapped\",[4357]],[[12900,12900],\"mapped\",[4358]],[[12901,12901],\"mapped\",[4359]],[[12902,12902],\"mapped\",[4361]],[[12903,12903],\"mapped\",[4363]],[[12904,12904],\"mapped\",[4364]],[[12905,12905],\"mapped\",[4366]],[[12906,12906],\"mapped\",[4367]],[[12907,12907],\"mapped\",[4368]],[[12908,12908],\"mapped\",[4369]],[[12909,12909],\"mapped\",[4370]],[[12910,12910],\"mapped\",[44032]],[[12911,12911],\"mapped\",[45208]],[[12912,12912],\"mapped\",[45796]],[[12913,12913],\"
mapped\",[46972]],[[12914,12914],\"mapped\",[47560]],[[12915,12915],\"mapped\",[48148]],[[12916,12916],\"mapped\",[49324]],[[12917,12917],\"mapped\",[50500]],[[12918,12918],\"mapped\",[51088]],[[12919,12919],\"mapped\",[52264]],[[12920,12920],\"mapped\",[52852]],[[12921,12921],\"mapped\",[53440]],[[12922,12922],\"mapped\",[54028]],[[12923,12923],\"mapped\",[54616]],[[12924,12924],\"mapped\",[52280,44256]],[[12925,12925],\"mapped\",[51452,51032]],[[12926,12926],\"mapped\",[50864]],[[12927,12927],\"valid\",[],\"NV8\"],[[12928,12928],\"mapped\",[19968]],[[12929,12929],\"mapped\",[20108]],[[12930,12930],\"mapped\",[19977]],[[12931,12931],\"mapped\",[22235]],[[12932,12932],\"mapped\",[20116]],[[12933,12933],\"mapped\",[20845]],[[12934,12934],\"mapped\",[19971]],[[12935,12935],\"mapped\",[20843]],[[12936,12936],\"mapped\",[20061]],[[12937,12937],\"mapped\",[21313]],[[12938,12938],\"mapped\",[26376]],[[12939,12939],\"mapped\",[28779]],[[12940,12940],\"mapped\",[27700]],[[12941,12941],\"mapped\",[26408]],[[12942,12942],\"mapped\",[37329]],[[12943,12943],\"mapped\",[22303]],[[12944,12944],\"mapped\",[26085]],[[12945,12945],\"mapped\",[26666]],[[12946,12946],\"mapped\",[26377]],[[12947,12947],\"mapped\",[31038]],[[12948,12948],\"mapped\",[21517]],[[12949,12949],\"mapped\",[29305]],[[12950,12950],\"mapped\",[36001]],[[12951,12951],\"mapped\",[31069]],[[12952,12952],\"mapped\",[21172]],[[12953,12953],\"mapped\",[31192]],[[12954,12954],\"mapped\",[30007]],[[12955,12955],\"mapped\",[22899]],[[12956,12956],\"mapped\",[36969]],[[12957,12957],\"mapped\",[20778]],[[12958,12958],\"mapped\",[21360]],[[12959,12959],\"mapped\",[27880]],[[12960,12960],\"mapped\",[38917]],[[12961,12961],\"mapped\",[20241]],[[12962,12962],\"mapped\",[20889]],[[12963,12963],\"mapped\",[27491]],[[12964,12964],\"mapped\",[19978]],[[12965,12965],\"mapped\",[20013]],[[12966,12966],\"mapped\",[19979]],[[12967,12967],\"mapped\",[24038]],[[12968,12968],\"mapped\",[21491]],[[12969,12969],\"mapped\",[21307]],[[12970,
12970],\"mapped\",[23447]],[[12971,12971],\"mapped\",[23398]],[[12972,12972],\"mapped\",[30435]],[[12973,12973],\"mapped\",[20225]],[[12974,12974],\"mapped\",[36039]],[[12975,12975],\"mapped\",[21332]],[[12976,12976],\"mapped\",[22812]],[[12977,12977],\"mapped\",[51,54]],[[12978,12978],\"mapped\",[51,55]],[[12979,12979],\"mapped\",[51,56]],[[12980,12980],\"mapped\",[51,57]],[[12981,12981],\"mapped\",[52,48]],[[12982,12982],\"mapped\",[52,49]],[[12983,12983],\"mapped\",[52,50]],[[12984,12984],\"mapped\",[52,51]],[[12985,12985],\"mapped\",[52,52]],[[12986,12986],\"mapped\",[52,53]],[[12987,12987],\"mapped\",[52,54]],[[12988,12988],\"mapped\",[52,55]],[[12989,12989],\"mapped\",[52,56]],[[12990,12990],\"mapped\",[52,57]],[[12991,12991],\"mapped\",[53,48]],[[12992,12992],\"mapped\",[49,26376]],[[12993,12993],\"mapped\",[50,26376]],[[12994,12994],\"mapped\",[51,26376]],[[12995,12995],\"mapped\",[52,26376]],[[12996,12996],\"mapped\",[53,26376]],[[12997,12997],\"mapped\",[54,26376]],[[12998,12998],\"mapped\",[55,26376]],[[12999,12999],\"mapped\",[56,26376]],[[13000,13000],\"mapped\",[57,26376]],[[13001,13001],\"mapped\",[49,48,26376]],[[13002,13002],\"mapped\",[49,49,26376]],[[13003,13003],\"mapped\",[49,50,26376]],[[13004,13004],\"mapped\",[104,103]],[[13005,13005],\"mapped\",[101,114,103]],[[13006,13006],\"mapped\",[101,118]],[[13007,13007],\"mapped\",[108,116,100]],[[13008,13008],\"mapped\",[12450]],[[13009,13009],\"mapped\",[12452]],[[13010,13010],\"mapped\",[12454]],[[13011,13011],\"mapped\",[12456]],[[13012,13012],\"mapped\",[12458]],[[13013,13013],\"mapped\",[12459]],[[13014,13014],\"mapped\",[12461]],[[13015,13015],\"mapped\",[12463]],[[13016,13016],\"mapped\",[12465]],[[13017,13017],\"mapped\",[12467]],[[13018,13018],\"mapped\",[12469]],[[13019,13019],\"mapped\",[12471]],[[13020,13020],\"mapped\",[12473]],[[13021,13021],\"mapped\",[12475]],[[13022,13022],\"mapped\",[12477]],[[13023,13023],\"mapped\",[12479]],[[13024,13024],\"mapped\",[12481]],[[13025,13025],\"mappe
d\",[12484]],[[13026,13026],\"mapped\",[12486]],[[13027,13027],\"mapped\",[12488]],[[13028,13028],\"mapped\",[12490]],[[13029,13029],\"mapped\",[12491]],[[13030,13030],\"mapped\",[12492]],[[13031,13031],\"mapped\",[12493]],[[13032,13032],\"mapped\",[12494]],[[13033,13033],\"mapped\",[12495]],[[13034,13034],\"mapped\",[12498]],[[13035,13035],\"mapped\",[12501]],[[13036,13036],\"mapped\",[12504]],[[13037,13037],\"mapped\",[12507]],[[13038,13038],\"mapped\",[12510]],[[13039,13039],\"mapped\",[12511]],[[13040,13040],\"mapped\",[12512]],[[13041,13041],\"mapped\",[12513]],[[13042,13042],\"mapped\",[12514]],[[13043,13043],\"mapped\",[12516]],[[13044,13044],\"mapped\",[12518]],[[13045,13045],\"mapped\",[12520]],[[13046,13046],\"mapped\",[12521]],[[13047,13047],\"mapped\",[12522]],[[13048,13048],\"mapped\",[12523]],[[13049,13049],\"mapped\",[12524]],[[13050,13050],\"mapped\",[12525]],[[13051,13051],\"mapped\",[12527]],[[13052,13052],\"mapped\",[12528]],[[13053,13053],\"mapped\",[12529]],[[13054,13054],\"mapped\",[12530]],[[13055,13055],\"disallowed\"],[[13056,13056],\"mapped\",[12450,12497,12540,12488]],[[13057,13057],\"mapped\",[12450,12523,12501,12449]],[[13058,13058],\"mapped\",[12450,12531,12506,12450]],[[13059,13059],\"mapped\",[12450,12540,12523]],[[13060,13060],\"mapped\",[12452,12491,12531,12464]],[[13061,13061],\"mapped\",[12452,12531,12481]],[[13062,13062],\"mapped\",[12454,12457,12531]],[[13063,13063],\"mapped\",[12456,12473,12463,12540,12489]],[[13064,13064],\"mapped\",[12456,12540,12459,12540]],[[13065,13065],\"mapped\",[12458,12531,12473]],[[13066,13066],\"mapped\",[12458,12540,12512]],[[13067,13067],\"mapped\",[12459,12452,12522]],[[13068,13068],\"mapped\",[12459,12521,12483,12488]],[[13069,13069],\"mapped\",[12459,12525,12522,12540]],[[13070,13070],\"mapped\",[12460,12525,12531]],[[13071,13071],\"mapped\",[12460,12531,12510]],[[13072,13072],\"mapped\",[12462,12460]],[[13073,13073],\"mapped\",[12462,12491,12540]],[[13074,13074],\"mapped\",[12461,12517,12522,12
540]],[[13075,13075],\"mapped\",[12462,12523,12480,12540]],[[13076,13076],\"mapped\",[12461,12525]],[[13077,13077],\"mapped\",[12461,12525,12464,12521,12512]],[[13078,13078],\"mapped\",[12461,12525,12513,12540,12488,12523]],[[13079,13079],\"mapped\",[12461,12525,12527,12483,12488]],[[13080,13080],\"mapped\",[12464,12521,12512]],[[13081,13081],\"mapped\",[12464,12521,12512,12488,12531]],[[13082,13082],\"mapped\",[12463,12523,12476,12452,12525]],[[13083,13083],\"mapped\",[12463,12525,12540,12493]],[[13084,13084],\"mapped\",[12465,12540,12473]],[[13085,13085],\"mapped\",[12467,12523,12490]],[[13086,13086],\"mapped\",[12467,12540,12509]],[[13087,13087],\"mapped\",[12469,12452,12463,12523]],[[13088,13088],\"mapped\",[12469,12531,12481,12540,12512]],[[13089,13089],\"mapped\",[12471,12522,12531,12464]],[[13090,13090],\"mapped\",[12475,12531,12481]],[[13091,13091],\"mapped\",[12475,12531,12488]],[[13092,13092],\"mapped\",[12480,12540,12473]],[[13093,13093],\"mapped\",[12487,12471]],[[13094,13094],\"mapped\",[12489,12523]],[[13095,13095],\"mapped\",[12488,12531]],[[13096,13096],\"mapped\",[12490,12494]],[[13097,13097],\"mapped\",[12494,12483,12488]],[[13098,13098],\"mapped\",[12495,12452,12484]],[[13099,13099],\"mapped\",[12497,12540,12475,12531,12488]],[[13100,13100],\"mapped\",[12497,12540,12484]],[[13101,13101],\"mapped\",[12496,12540,12524,12523]],[[13102,13102],\"mapped\",[12500,12450,12473,12488,12523]],[[13103,13103],\"mapped\",[12500,12463,12523]],[[13104,13104],\"mapped\",[12500,12467]],[[13105,13105],\"mapped\",[12499,12523]],[[13106,13106],\"mapped\",[12501,12449,12521,12483,12489]],[[13107,13107],\"mapped\",[12501,12451,12540,12488]],[[13108,13108],\"mapped\",[12502,12483,12471,12455,12523]],[[13109,13109],\"mapped\",[12501,12521,12531]],[[13110,13110],\"mapped\",[12504,12463,12479,12540,12523]],[[13111,13111],\"mapped\",[12506,12477]],[[13112,13112],\"mapped\",[12506,12491,12498]],[[13113,13113],\"mapped\",[12504,12523,12484]],[[13114,13114],\"mapped\",[12506,12
531,12473]],[[13115,13115],\"mapped\",[12506,12540,12472]],[[13116,13116],\"mapped\",[12505,12540,12479]],[[13117,13117],\"mapped\",[12509,12452,12531,12488]],[[13118,13118],\"mapped\",[12508,12523,12488]],[[13119,13119],\"mapped\",[12507,12531]],[[13120,13120],\"mapped\",[12509,12531,12489]],[[13121,13121],\"mapped\",[12507,12540,12523]],[[13122,13122],\"mapped\",[12507,12540,12531]],[[13123,13123],\"mapped\",[12510,12452,12463,12525]],[[13124,13124],\"mapped\",[12510,12452,12523]],[[13125,13125],\"mapped\",[12510,12483,12495]],[[13126,13126],\"mapped\",[12510,12523,12463]],[[13127,13127],\"mapped\",[12510,12531,12471,12519,12531]],[[13128,13128],\"mapped\",[12511,12463,12525,12531]],[[13129,13129],\"mapped\",[12511,12522]],[[13130,13130],\"mapped\",[12511,12522,12496,12540,12523]],[[13131,13131],\"mapped\",[12513,12460]],[[13132,13132],\"mapped\",[12513,12460,12488,12531]],[[13133,13133],\"mapped\",[12513,12540,12488,12523]],[[13134,13134],\"mapped\",[12516,12540,12489]],[[13135,13135],\"mapped\",[12516,12540,12523]],[[13136,13136],\"mapped\",[12518,12450,12531]],[[13137,13137],\"mapped\",[12522,12483,12488,12523]],[[13138,13138],\"mapped\",[12522,12521]],[[13139,13139],\"mapped\",[12523,12500,12540]],[[13140,13140],\"mapped\",[12523,12540,12502,12523]],[[13141,13141],\"mapped\",[12524,12512]],[[13142,13142],\"mapped\",[12524,12531,12488,12466,12531]],[[13143,13143],\"mapped\",[12527,12483,12488]],[[13144,13144],\"mapped\",[48,28857]],[[13145,13145],\"mapped\",[49,28857]],[[13146,13146],\"mapped\",[50,28857]],[[13147,13147],\"mapped\",[51,28857]],[[13148,13148],\"mapped\",[52,28857]],[[13149,13149],\"mapped\",[53,28857]],[[13150,13150],\"mapped\",[54,28857]],[[13151,13151],\"mapped\",[55,28857]],[[13152,13152],\"mapped\",[56,28857]],[[13153,13153],\"mapped\",[57,28857]],[[13154,13154],\"mapped\",[49,48,28857]],[[13155,13155],\"mapped\",[49,49,28857]],[[13156,13156],\"mapped\",[49,50,28857]],[[13157,13157],\"mapped\",[49,51,28857]],[[13158,13158],\"mapped\",[49,52,
28857]],[[13159,13159],\"mapped\",[49,53,28857]],[[13160,13160],\"mapped\",[49,54,28857]],[[13161,13161],\"mapped\",[49,55,28857]],[[13162,13162],\"mapped\",[49,56,28857]],[[13163,13163],\"mapped\",[49,57,28857]],[[13164,13164],\"mapped\",[50,48,28857]],[[13165,13165],\"mapped\",[50,49,28857]],[[13166,13166],\"mapped\",[50,50,28857]],[[13167,13167],\"mapped\",[50,51,28857]],[[13168,13168],\"mapped\",[50,52,28857]],[[13169,13169],\"mapped\",[104,112,97]],[[13170,13170],\"mapped\",[100,97]],[[13171,13171],\"mapped\",[97,117]],[[13172,13172],\"mapped\",[98,97,114]],[[13173,13173],\"mapped\",[111,118]],[[13174,13174],\"mapped\",[112,99]],[[13175,13175],\"mapped\",[100,109]],[[13176,13176],\"mapped\",[100,109,50]],[[13177,13177],\"mapped\",[100,109,51]],[[13178,13178],\"mapped\",[105,117]],[[13179,13179],\"mapped\",[24179,25104]],[[13180,13180],\"mapped\",[26157,21644]],[[13181,13181],\"mapped\",[22823,27491]],[[13182,13182],\"mapped\",[26126,27835]],[[13183,13183],\"mapped\",[26666,24335,20250,31038]],[[13184,13184],\"mapped\",[112,97]],[[13185,13185],\"mapped\",[110,97]],[[13186,13186],\"mapped\",[956,97]],[[13187,13187],\"mapped\",[109,97]],[[13188,13188],\"mapped\",[107,97]],[[13189,13189],\"mapped\",[107,98]],[[13190,13190],\"mapped\",[109,98]],[[13191,13191],\"mapped\",[103,98]],[[13192,13192],\"mapped\",[99,97,108]],[[13193,13193],\"mapped\",[107,99,97,108]],[[13194,13194],\"mapped\",[112,102]],[[13195,13195],\"mapped\",[110,102]],[[13196,13196],\"mapped\",[956,102]],[[13197,13197],\"mapped\",[956,103]],[[13198,13198],\"mapped\",[109,103]],[[13199,13199],\"mapped\",[107,103]],[[13200,13200],\"mapped\",[104,122]],[[13201,13201],\"mapped\",[107,104,122]],[[13202,13202],\"mapped\",[109,104,122]],[[13203,13203],\"mapped\",[103,104,122]],[[13204,13204],\"mapped\",[116,104,122]],[[13205,13205],\"mapped\",[956,108]],[[13206,13206],\"mapped\",[109,108]],[[13207,13207],\"mapped\",[100,108]],[[13208,13208],\"mapped\",[107,108]],[[13209,13209],\"mapped\",[102,109]],[[13210,1
3210],\"mapped\",[110,109]],[[13211,13211],\"mapped\",[956,109]],[[13212,13212],\"mapped\",[109,109]],[[13213,13213],\"mapped\",[99,109]],[[13214,13214],\"mapped\",[107,109]],[[13215,13215],\"mapped\",[109,109,50]],[[13216,13216],\"mapped\",[99,109,50]],[[13217,13217],\"mapped\",[109,50]],[[13218,13218],\"mapped\",[107,109,50]],[[13219,13219],\"mapped\",[109,109,51]],[[13220,13220],\"mapped\",[99,109,51]],[[13221,13221],\"mapped\",[109,51]],[[13222,13222],\"mapped\",[107,109,51]],[[13223,13223],\"mapped\",[109,8725,115]],[[13224,13224],\"mapped\",[109,8725,115,50]],[[13225,13225],\"mapped\",[112,97]],[[13226,13226],\"mapped\",[107,112,97]],[[13227,13227],\"mapped\",[109,112,97]],[[13228,13228],\"mapped\",[103,112,97]],[[13229,13229],\"mapped\",[114,97,100]],[[13230,13230],\"mapped\",[114,97,100,8725,115]],[[13231,13231],\"mapped\",[114,97,100,8725,115,50]],[[13232,13232],\"mapped\",[112,115]],[[13233,13233],\"mapped\",[110,115]],[[13234,13234],\"mapped\",[956,115]],[[13235,13235],\"mapped\",[109,115]],[[13236,13236],\"mapped\",[112,118]],[[13237,13237],\"mapped\",[110,118]],[[13238,13238],\"mapped\",[956,118]],[[13239,13239],\"mapped\",[109,118]],[[13240,13240],\"mapped\",[107,118]],[[13241,13241],\"mapped\",[109,118]],[[13242,13242],\"mapped\",[112,119]],[[13243,13243],\"mapped\",[110,119]],[[13244,13244],\"mapped\",[956,119]],[[13245,13245],\"mapped\",[109,119]],[[13246,13246],\"mapped\",[107,119]],[[13247,13247],\"mapped\",[109,119]],[[13248,13248],\"mapped\",[107,969]],[[13249,13249],\"mapped\",[109,969]],[[13250,13250],\"disallowed\"],[[13251,13251],\"mapped\",[98,113]],[[13252,13252],\"mapped\",[99,99]],[[13253,13253],\"mapped\",[99,100]],[[13254,13254],\"mapped\",[99,8725,107,103]],[[13255,13255],\"disallowed\"],[[13256,13256],\"mapped\",[100,98]],[[13257,13257],\"mapped\",[103,121]],[[13258,13258],\"mapped\",[104,97]],[[13259,13259],\"mapped\",[104,112]],[[13260,13260],\"mapped\",[105,110]],[[13261,13261],\"mapped\",[107,107]],[[13262,13262],\"mapped\",[107,
109]],[[13263,13263],\"mapped\",[107,116]],[[13264,13264],\"mapped\",[108,109]],[[13265,13265],\"mapped\",[108,110]],[[13266,13266],\"mapped\",[108,111,103]],[[13267,13267],\"mapped\",[108,120]],[[13268,13268],\"mapped\",[109,98]],[[13269,13269],\"mapped\",[109,105,108]],[[13270,13270],\"mapped\",[109,111,108]],[[13271,13271],\"mapped\",[112,104]],[[13272,13272],\"disallowed\"],[[13273,13273],\"mapped\",[112,112,109]],[[13274,13274],\"mapped\",[112,114]],[[13275,13275],\"mapped\",[115,114]],[[13276,13276],\"mapped\",[115,118]],[[13277,13277],\"mapped\",[119,98]],[[13278,13278],\"mapped\",[118,8725,109]],[[13279,13279],\"mapped\",[97,8725,109]],[[13280,13280],\"mapped\",[49,26085]],[[13281,13281],\"mapped\",[50,26085]],[[13282,13282],\"mapped\",[51,26085]],[[13283,13283],\"mapped\",[52,26085]],[[13284,13284],\"mapped\",[53,26085]],[[13285,13285],\"mapped\",[54,26085]],[[13286,13286],\"mapped\",[55,26085]],[[13287,13287],\"mapped\",[56,26085]],[[13288,13288],\"mapped\",[57,26085]],[[13289,13289],\"mapped\",[49,48,26085]],[[13290,13290],\"mapped\",[49,49,26085]],[[13291,13291],\"mapped\",[49,50,26085]],[[13292,13292],\"mapped\",[49,51,26085]],[[13293,13293],\"mapped\",[49,52,26085]],[[13294,13294],\"mapped\",[49,53,26085]],[[13295,13295],\"mapped\",[49,54,26085]],[[13296,13296],\"mapped\",[49,55,26085]],[[13297,13297],\"mapped\",[49,56,26085]],[[13298,13298],\"mapped\",[49,57,26085]],[[13299,13299],\"mapped\",[50,48,26085]],[[13300,13300],\"mapped\",[50,49,26085]],[[13301,13301],\"mapped\",[50,50,26085]],[[13302,13302],\"mapped\",[50,51,26085]],[[13303,13303],\"mapped\",[50,52,26085]],[[13304,13304],\"mapped\",[50,53,26085]],[[13305,13305],\"mapped\",[50,54,26085]],[[13306,13306],\"mapped\",[50,55,26085]],[[13307,13307],\"mapped\",[50,56,26085]],[[13308,13308],\"mapped\",[50,57,26085]],[[13309,13309],\"mapped\",[51,48,26085]],[[13310,13310],\"mapped\",[51,49,26085]],[[13311,13311],\"mapped\",[103,97,108]],[[13312,19893],\"valid\"],[[19894,19903],\"disallowed\"],[[19904
,19967],\"valid\",[],\"NV8\"],[[19968,40869],\"valid\"],[[40870,40891],\"valid\"],[[40892,40899],\"valid\"],[[40900,40907],\"valid\"],[[40908,40908],\"valid\"],[[40909,40917],\"valid\"],[[40918,40959],\"disallowed\"],[[40960,42124],\"valid\"],[[42125,42127],\"disallowed\"],[[42128,42145],\"valid\",[],\"NV8\"],[[42146,42147],\"valid\",[],\"NV8\"],[[42148,42163],\"valid\",[],\"NV8\"],[[42164,42164],\"valid\",[],\"NV8\"],[[42165,42176],\"valid\",[],\"NV8\"],[[42177,42177],\"valid\",[],\"NV8\"],[[42178,42180],\"valid\",[],\"NV8\"],[[42181,42181],\"valid\",[],\"NV8\"],[[42182,42182],\"valid\",[],\"NV8\"],[[42183,42191],\"disallowed\"],[[42192,42237],\"valid\"],[[42238,42239],\"valid\",[],\"NV8\"],[[42240,42508],\"valid\"],[[42509,42511],\"valid\",[],\"NV8\"],[[42512,42539],\"valid\"],[[42540,42559],\"disallowed\"],[[42560,42560],\"mapped\",[42561]],[[42561,42561],\"valid\"],[[42562,42562],\"mapped\",[42563]],[[42563,42563],\"valid\"],[[42564,42564],\"mapped\",[42565]],[[42565,42565],\"valid\"],[[42566,42566],\"mapped\",[42567]],[[42567,42567],\"valid\"],[[42568,42568],\"mapped\",[42569]],[[42569,42569],\"valid\"],[[42570,42570],\"mapped\",[42571]],[[42571,42571],\"valid\"],[[42572,42572],\"mapped\",[42573]],[[42573,42573],\"valid\"],[[42574,42574],\"mapped\",[42575]],[[42575,42575],\"valid\"],[[42576,42576],\"mapped\",[42577]],[[42577,42577],\"valid\"],[[42578,42578],\"mapped\",[42579]],[[42579,42579],\"valid\"],[[42580,42580],\"mapped\",[42581]],[[42581,42581],\"valid\"],[[42582,42582],\"mapped\",[42583]],[[42583,42583],\"valid\"],[[42584,42584],\"mapped\",[42585]],[[42585,42585],\"valid\"],[[42586,42586],\"mapped\",[42587]],[[42587,42587],\"valid\"],[[42588,42588],\"mapped\",[42589]],[[42589,42589],\"valid\"],[[42590,42590],\"mapped\",[42591]],[[42591,42591],\"valid\"],[[42592,42592],\"mapped\",[42593]],[[42593,42593],\"valid\"],[[42594,42594],\"mapped\",[42595]],[[42595,42595],\"valid\"],[[42596,42596],\"mapped\",[42597]],[[42597,42597],\"valid\"],[[42598,42598],\"map
ped\",[42599]],[[42599,42599],\"valid\"],[[42600,42600],\"mapped\",[42601]],[[42601,42601],\"valid\"],[[42602,42602],\"mapped\",[42603]],[[42603,42603],\"valid\"],[[42604,42604],\"mapped\",[42605]],[[42605,42607],\"valid\"],[[42608,42611],\"valid\",[],\"NV8\"],[[42612,42619],\"valid\"],[[42620,42621],\"valid\"],[[42622,42622],\"valid\",[],\"NV8\"],[[42623,42623],\"valid\"],[[42624,42624],\"mapped\",[42625]],[[42625,42625],\"valid\"],[[42626,42626],\"mapped\",[42627]],[[42627,42627],\"valid\"],[[42628,42628],\"mapped\",[42629]],[[42629,42629],\"valid\"],[[42630,42630],\"mapped\",[42631]],[[42631,42631],\"valid\"],[[42632,42632],\"mapped\",[42633]],[[42633,42633],\"valid\"],[[42634,42634],\"mapped\",[42635]],[[42635,42635],\"valid\"],[[42636,42636],\"mapped\",[42637]],[[42637,42637],\"valid\"],[[42638,42638],\"mapped\",[42639]],[[42639,42639],\"valid\"],[[42640,42640],\"mapped\",[42641]],[[42641,42641],\"valid\"],[[42642,42642],\"mapped\",[42643]],[[42643,42643],\"valid\"],[[42644,42644],\"mapped\",[42645]],[[42645,42645],\"valid\"],[[42646,42646],\"mapped\",[42647]],[[42647,42647],\"valid\"],[[42648,42648],\"mapped\",[42649]],[[42649,42649],\"valid\"],[[42650,42650],\"mapped\",[42651]],[[42651,42651],\"valid\"],[[42652,42652],\"mapped\",[1098]],[[42653,42653],\"mapped\",[1100]],[[42654,42654],\"valid\"],[[42655,42655],\"valid\"],[[42656,42725],\"valid\"],[[42726,42735],\"valid\",[],\"NV8\"],[[42736,42737],\"valid\"],[[42738,42743],\"valid\",[],\"NV8\"],[[42744,42751],\"disallowed\"],[[42752,42774],\"valid\",[],\"NV8\"],[[42775,42778],\"valid\"],[[42779,42783],\"valid\"],[[42784,42785],\"valid\",[],\"NV8\"],[[42786,42786],\"mapped\",[42787]],[[42787,42787],\"valid\"],[[42788,42788],\"mapped\",[42789]],[[42789,42789],\"valid\"],[[42790,42790],\"mapped\",[42791]],[[42791,42791],\"valid\"],[[42792,42792],\"mapped\",[42793]],[[42793,42793],\"valid\"],[[42794,42794],\"mapped\",[42795]],[[42795,42795],\"valid\"],[[42796,42796],\"mapped\",[42797]],[[42797,42797],\"valid\"],[
[42798,42798],\"mapped\",[42799]],[[42799,42801],\"valid\"],[[42802,42802],\"mapped\",[42803]],[[42803,42803],\"valid\"],[[42804,42804],\"mapped\",[42805]],[[42805,42805],\"valid\"],[[42806,42806],\"mapped\",[42807]],[[42807,42807],\"valid\"],[[42808,42808],\"mapped\",[42809]],[[42809,42809],\"valid\"],[[42810,42810],\"mapped\",[42811]],[[42811,42811],\"valid\"],[[42812,42812],\"mapped\",[42813]],[[42813,42813],\"valid\"],[[42814,42814],\"mapped\",[42815]],[[42815,42815],\"valid\"],[[42816,42816],\"mapped\",[42817]],[[42817,42817],\"valid\"],[[42818,42818],\"mapped\",[42819]],[[42819,42819],\"valid\"],[[42820,42820],\"mapped\",[42821]],[[42821,42821],\"valid\"],[[42822,42822],\"mapped\",[42823]],[[42823,42823],\"valid\"],[[42824,42824],\"mapped\",[42825]],[[42825,42825],\"valid\"],[[42826,42826],\"mapped\",[42827]],[[42827,42827],\"valid\"],[[42828,42828],\"mapped\",[42829]],[[42829,42829],\"valid\"],[[42830,42830],\"mapped\",[42831]],[[42831,42831],\"valid\"],[[42832,42832],\"mapped\",[42833]],[[42833,42833],\"valid\"],[[42834,42834],\"mapped\",[42835]],[[42835,42835],\"valid\"],[[42836,42836],\"mapped\",[42837]],[[42837,42837],\"valid\"],[[42838,42838],\"mapped\",[42839]],[[42839,42839],\"valid\"],[[42840,42840],\"mapped\",[42841]],[[42841,42841],\"valid\"],[[42842,42842],\"mapped\",[42843]],[[42843,42843],\"valid\"],[[42844,42844],\"mapped\",[42845]],[[42845,42845],\"valid\"],[[42846,42846],\"mapped\",[42847]],[[42847,42847],\"valid\"],[[42848,42848],\"mapped\",[42849]],[[42849,42849],\"valid\"],[[42850,42850],\"mapped\",[42851]],[[42851,42851],\"valid\"],[[42852,42852],\"mapped\",[42853]],[[42853,42853],\"valid\"],[[42854,42854],\"mapped\",[42855]],[[42855,42855],\"valid\"],[[42856,42856],\"mapped\",[42857]],[[42857,42857],\"valid\"],[[42858,42858],\"mapped\",[42859]],[[42859,42859],\"valid\"],[[42860,42860],\"mapped\",[42861]],[[42861,42861],\"valid\"],[[42862,42862],\"mapped\",[42863]],[[42863,42863],\"valid\"],[[42864,42864],\"mapped\",[42863]],[[42865,42872]
,\"valid\"],[[42873,42873],\"mapped\",[42874]],[[42874,42874],\"valid\"],[[42875,42875],\"mapped\",[42876]],[[42876,42876],\"valid\"],[[42877,42877],\"mapped\",[7545]],[[42878,42878],\"mapped\",[42879]],[[42879,42879],\"valid\"],[[42880,42880],\"mapped\",[42881]],[[42881,42881],\"valid\"],[[42882,42882],\"mapped\",[42883]],[[42883,42883],\"valid\"],[[42884,42884],\"mapped\",[42885]],[[42885,42885],\"valid\"],[[42886,42886],\"mapped\",[42887]],[[42887,42888],\"valid\"],[[42889,42890],\"valid\",[],\"NV8\"],[[42891,42891],\"mapped\",[42892]],[[42892,42892],\"valid\"],[[42893,42893],\"mapped\",[613]],[[42894,42894],\"valid\"],[[42895,42895],\"valid\"],[[42896,42896],\"mapped\",[42897]],[[42897,42897],\"valid\"],[[42898,42898],\"mapped\",[42899]],[[42899,42899],\"valid\"],[[42900,42901],\"valid\"],[[42902,42902],\"mapped\",[42903]],[[42903,42903],\"valid\"],[[42904,42904],\"mapped\",[42905]],[[42905,42905],\"valid\"],[[42906,42906],\"mapped\",[42907]],[[42907,42907],\"valid\"],[[42908,42908],\"mapped\",[42909]],[[42909,42909],\"valid\"],[[42910,42910],\"mapped\",[42911]],[[42911,42911],\"valid\"],[[42912,42912],\"mapped\",[42913]],[[42913,42913],\"valid\"],[[42914,42914],\"mapped\",[42915]],[[42915,42915],\"valid\"],[[42916,42916],\"mapped\",[42917]],[[42917,42917],\"valid\"],[[42918,42918],\"mapped\",[42919]],[[42919,42919],\"valid\"],[[42920,42920],\"mapped\",[42921]],[[42921,42921],\"valid\"],[[42922,42922],\"mapped\",[614]],[[42923,42923],\"mapped\",[604]],[[42924,42924],\"mapped\",[609]],[[42925,42925],\"mapped\",[620]],[[42926,42927],\"disallowed\"],[[42928,42928],\"mapped\",[670]],[[42929,42929],\"mapped\",[647]],[[42930,42930],\"mapped\",[669]],[[42931,42931],\"mapped\",[43859]],[[42932,42932],\"mapped\",[42933]],[[42933,42933],\"valid\"],[[42934,42934],\"mapped\",[42935]],[[42935,42935],\"valid\"],[[42936,42998],\"disallowed\"],[[42999,42999],\"valid\"],[[43000,43000],\"mapped\",[295]],[[43001,43001],\"mapped\",[339]],[[43002,43002],\"valid\"],[[43003,43007],\"v
alid\"],[[43008,43047],\"valid\"],[[43048,43051],\"valid\",[],\"NV8\"],[[43052,43055],\"disallowed\"],[[43056,43065],\"valid\",[],\"NV8\"],[[43066,43071],\"disallowed\"],[[43072,43123],\"valid\"],[[43124,43127],\"valid\",[],\"NV8\"],[[43128,43135],\"disallowed\"],[[43136,43204],\"valid\"],[[43205,43213],\"disallowed\"],[[43214,43215],\"valid\",[],\"NV8\"],[[43216,43225],\"valid\"],[[43226,43231],\"disallowed\"],[[43232,43255],\"valid\"],[[43256,43258],\"valid\",[],\"NV8\"],[[43259,43259],\"valid\"],[[43260,43260],\"valid\",[],\"NV8\"],[[43261,43261],\"valid\"],[[43262,43263],\"disallowed\"],[[43264,43309],\"valid\"],[[43310,43311],\"valid\",[],\"NV8\"],[[43312,43347],\"valid\"],[[43348,43358],\"disallowed\"],[[43359,43359],\"valid\",[],\"NV8\"],[[43360,43388],\"valid\",[],\"NV8\"],[[43389,43391],\"disallowed\"],[[43392,43456],\"valid\"],[[43457,43469],\"valid\",[],\"NV8\"],[[43470,43470],\"disallowed\"],[[43471,43481],\"valid\"],[[43482,43485],\"disallowed\"],[[43486,43487],\"valid\",[],\"NV8\"],[[43488,43518],\"valid\"],[[43519,43519],\"disallowed\"],[[43520,43574],\"valid\"],[[43575,43583],\"disallowed\"],[[43584,43597],\"valid\"],[[43598,43599],\"disallowed\"],[[43600,43609],\"valid\"],[[43610,43611],\"disallowed\"],[[43612,43615],\"valid\",[],\"NV8\"],[[43616,43638],\"valid\"],[[43639,43641],\"valid\",[],\"NV8\"],[[43642,43643],\"valid\"],[[43644,43647],\"valid\"],[[43648,43714],\"valid\"],[[43715,43738],\"disallowed\"],[[43739,43741],\"valid\"],[[43742,43743],\"valid\",[],\"NV8\"],[[43744,43759],\"valid\"],[[43760,43761],\"valid\",[],\"NV8\"],[[43762,43766],\"valid\"],[[43767,43776],\"disallowed\"],[[43777,43782],\"valid\"],[[43783,43784],\"disallowed\"],[[43785,43790],\"valid\"],[[43791,43792],\"disallowed\"],[[43793,43798],\"valid\"],[[43799,43807],\"disallowed\"],[[43808,43814],\"valid\"],[[43815,43815],\"disallowed\"],[[43816,43822],\"valid\"],[[43823,43823],\"disallowed\"],[[43824,43866],\"valid\"],[[43867,43867],\"valid\",[],\"NV8\"],[[43868,43868],\"mapp
ed\",[42791]],[[43869,43869],\"mapped\",[43831]],[[43870,43870],\"mapped\",[619]],[[43871,43871],\"mapped\",[43858]],[[43872,43875],\"valid\"],[[43876,43877],\"valid\"],[[43878,43887],\"disallowed\"],[[43888,43888],\"mapped\",[5024]],[[43889,43889],\"mapped\",[5025]],[[43890,43890],\"mapped\",[5026]],[[43891,43891],\"mapped\",[5027]],[[43892,43892],\"mapped\",[5028]],[[43893,43893],\"mapped\",[5029]],[[43894,43894],\"mapped\",[5030]],[[43895,43895],\"mapped\",[5031]],[[43896,43896],\"mapped\",[5032]],[[43897,43897],\"mapped\",[5033]],[[43898,43898],\"mapped\",[5034]],[[43899,43899],\"mapped\",[5035]],[[43900,43900],\"mapped\",[5036]],[[43901,43901],\"mapped\",[5037]],[[43902,43902],\"mapped\",[5038]],[[43903,43903],\"mapped\",[5039]],[[43904,43904],\"mapped\",[5040]],[[43905,43905],\"mapped\",[5041]],[[43906,43906],\"mapped\",[5042]],[[43907,43907],\"mapped\",[5043]],[[43908,43908],\"mapped\",[5044]],[[43909,43909],\"mapped\",[5045]],[[43910,43910],\"mapped\",[5046]],[[43911,43911],\"mapped\",[5047]],[[43912,43912],\"mapped\",[5048]],[[43913,43913],\"mapped\",[5049]],[[43914,43914],\"mapped\",[5050]],[[43915,43915],\"mapped\",[5051]],[[43916,43916],\"mapped\",[5052]],[[43917,43917],\"mapped\",[5053]],[[43918,43918],\"mapped\",[5054]],[[43919,43919],\"mapped\",[5055]],[[43920,43920],\"mapped\",[5056]],[[43921,43921],\"mapped\",[5057]],[[43922,43922],\"mapped\",[5058]],[[43923,43923],\"mapped\",[5059]],[[43924,43924],\"mapped\",[5060]],[[43925,43925],\"mapped\",[5061]],[[43926,43926],\"mapped\",[5062]],[[43927,43927],\"mapped\",[5063]],[[43928,43928],\"mapped\",[5064]],[[43929,43929],\"mapped\",[5065]],[[43930,43930],\"mapped\",[5066]],[[43931,43931],\"mapped\",[5067]],[[43932,43932],\"mapped\",[5068]],[[43933,43933],\"mapped\",[5069]],[[43934,43934],\"mapped\",[5070]],[[43935,43935],\"mapped\",[5071]],[[43936,43936],\"mapped\",[5072]],[[43937,43937],\"mapped\",[5073]],[[43938,43938],\"mapped\",[5074]],[[43939,43939],\"mapped\",[5075]],[[43940,43940],\"mapped\",[5076]
],[[43941,43941],\"mapped\",[5077]],[[43942,43942],\"mapped\",[5078]],[[43943,43943],\"mapped\",[5079]],[[43944,43944],\"mapped\",[5080]],[[43945,43945],\"mapped\",[5081]],[[43946,43946],\"mapped\",[5082]],[[43947,43947],\"mapped\",[5083]],[[43948,43948],\"mapped\",[5084]],[[43949,43949],\"mapped\",[5085]],[[43950,43950],\"mapped\",[5086]],[[43951,43951],\"mapped\",[5087]],[[43952,43952],\"mapped\",[5088]],[[43953,43953],\"mapped\",[5089]],[[43954,43954],\"mapped\",[5090]],[[43955,43955],\"mapped\",[5091]],[[43956,43956],\"mapped\",[5092]],[[43957,43957],\"mapped\",[5093]],[[43958,43958],\"mapped\",[5094]],[[43959,43959],\"mapped\",[5095]],[[43960,43960],\"mapped\",[5096]],[[43961,43961],\"mapped\",[5097]],[[43962,43962],\"mapped\",[5098]],[[43963,43963],\"mapped\",[5099]],[[43964,43964],\"mapped\",[5100]],[[43965,43965],\"mapped\",[5101]],[[43966,43966],\"mapped\",[5102]],[[43967,43967],\"mapped\",[5103]],[[43968,44010],\"valid\"],[[44011,44011],\"valid\",[],\"NV8\"],[[44012,44013],\"valid\"],[[44014,44015],\"disallowed\"],[[44016,44025],\"valid\"],[[44026,44031],\"disallowed\"],[[44032,55203],\"valid\"],[[55204,55215],\"disallowed\"],[[55216,55238],\"valid\",[],\"NV8\"],[[55239,55242],\"disallowed\"],[[55243,55291],\"valid\",[],\"NV8\"],[[55292,55295],\"disallowed\"],[[55296,57343],\"disallowed\"],[[57344,63743],\"disallowed\"],[[63744,63744],\"mapped\",[35912]],[[63745,63745],\"mapped\",[26356]],[[63746,63746],\"mapped\",[36554]],[[63747,63747],\"mapped\",[36040]],[[63748,63748],\"mapped\",[28369]],[[63749,63749],\"mapped\",[20018]],[[63750,63750],\"mapped\",[21477]],[[63751,63752],\"mapped\",[40860]],[[63753,63753],\"mapped\",[22865]],[[63754,63754],\"mapped\",[37329]],[[63755,63755],\"mapped\",[21895]],[[63756,63756],\"mapped\",[22856]],[[63757,63757],\"mapped\",[25078]],[[63758,63758],\"mapped\",[30313]],[[63759,63759],\"mapped\",[32645]],[[63760,63760],\"mapped\",[34367]],[[63761,63761],\"mapped\",[34746]],[[63762,63762],\"mapped\",[35064]],[[63763,63763],\"m
apped\",[37007]],[[63764,63764],\"mapped\",[27138]],[[63765,63765],\"mapped\",[27931]],[[63766,63766],\"mapped\",[28889]],[[63767,63767],\"mapped\",[29662]],[[63768,63768],\"mapped\",[33853]],[[63769,63769],\"mapped\",[37226]],[[63770,63770],\"mapped\",[39409]],[[63771,63771],\"mapped\",[20098]],[[63772,63772],\"mapped\",[21365]],[[63773,63773],\"mapped\",[27396]],[[63774,63774],\"mapped\",[29211]],[[63775,63775],\"mapped\",[34349]],[[63776,63776],\"mapped\",[40478]],[[63777,63777],\"mapped\",[23888]],[[63778,63778],\"mapped\",[28651]],[[63779,63779],\"mapped\",[34253]],[[63780,63780],\"mapped\",[35172]],[[63781,63781],\"mapped\",[25289]],[[63782,63782],\"mapped\",[33240]],[[63783,63783],\"mapped\",[34847]],[[63784,63784],\"mapped\",[24266]],[[63785,63785],\"mapped\",[26391]],[[63786,63786],\"mapped\",[28010]],[[63787,63787],\"mapped\",[29436]],[[63788,63788],\"mapped\",[37070]],[[63789,63789],\"mapped\",[20358]],[[63790,63790],\"mapped\",[20919]],[[63791,63791],\"mapped\",[21214]],[[63792,63792],\"mapped\",[25796]],[[63793,63793],\"mapped\",[27347]],[[63794,63794],\"mapped\",[29200]],[[63795,63795],\"mapped\",[30439]],[[63796,63796],\"mapped\",[32769]],[[63797,63797],\"mapped\",[34310]],[[63798,63798],\"mapped\",[34396]],[[63799,63799],\"mapped\",[36335]],[[63800,63800],\"mapped\",[38706]],[[63801,63801],\"mapped\",[39791]],[[63802,63802],\"mapped\",[40442]],[[63803,63803],\"mapped\",[30860]],[[63804,63804],\"mapped\",[31103]],[[63805,63805],\"mapped\",[32160]],[[63806,63806],\"mapped\",[33737]],[[63807,63807],\"mapped\",[37636]],[[63808,63808],\"mapped\",[40575]],[[63809,63809],\"mapped\",[35542]],[[63810,63810],\"mapped\",[22751]],[[63811,63811],\"mapped\",[24324]],[[63812,63812],\"mapped\",[31840]],[[63813,63813],\"mapped\",[32894]],[[63814,63814],\"mapped\",[29282]],[[63815,63815],\"mapped\",[30922]],[[63816,63816],\"mapped\",[36034]],[[63817,63817],\"mapped\",[38647]],[[63818,63818],\"mapped\",[22744]],[[63819,63819],\"mapped\",[23650]],[[63820,63820],\"mapped
\",[27155]],[[63821,63821],\"mapped\",[28122]],[[63822,63822],\"mapped\",[28431]],[[63823,63823],\"mapped\",[32047]],[[63824,63824],\"mapped\",[32311]],[[63825,63825],\"mapped\",[38475]],[[63826,63826],\"mapped\",[21202]],[[63827,63827],\"mapped\",[32907]],[[63828,63828],\"mapped\",[20956]],[[63829,63829],\"mapped\",[20940]],[[63830,63830],\"mapped\",[31260]],[[63831,63831],\"mapped\",[32190]],[[63832,63832],\"mapped\",[33777]],[[63833,63833],\"mapped\",[38517]],[[63834,63834],\"mapped\",[35712]],[[63835,63835],\"mapped\",[25295]],[[63836,63836],\"mapped\",[27138]],[[63837,63837],\"mapped\",[35582]],[[63838,63838],\"mapped\",[20025]],[[63839,63839],\"mapped\",[23527]],[[63840,63840],\"mapped\",[24594]],[[63841,63841],\"mapped\",[29575]],[[63842,63842],\"mapped\",[30064]],[[63843,63843],\"mapped\",[21271]],[[63844,63844],\"mapped\",[30971]],[[63845,63845],\"mapped\",[20415]],[[63846,63846],\"mapped\",[24489]],[[63847,63847],\"mapped\",[19981]],[[63848,63848],\"mapped\",[27852]],[[63849,63849],\"mapped\",[25976]],[[63850,63850],\"mapped\",[32034]],[[63851,63851],\"mapped\",[21443]],[[63852,63852],\"mapped\",[22622]],[[63853,63853],\"mapped\",[30465]],[[63854,63854],\"mapped\",[33865]],[[63855,63855],\"mapped\",[35498]],[[63856,63856],\"mapped\",[27578]],[[63857,63857],\"mapped\",[36784]],[[63858,63858],\"mapped\",[27784]],[[63859,63859],\"mapped\",[25342]],[[63860,63860],\"mapped\",[33509]],[[63861,63861],\"mapped\",[25504]],[[63862,63862],\"mapped\",[30053]],[[63863,63863],\"mapped\",[20142]],[[63864,63864],\"mapped\",[20841]],[[63865,63865],\"mapped\",[20937]],[[63866,63866],\"mapped\",[26753]],[[63867,63867],\"mapped\",[31975]],[[63868,63868],\"mapped\",[33391]],[[63869,63869],\"mapped\",[35538]],[[63870,63870],\"mapped\",[37327]],[[63871,63871],\"mapped\",[21237]],[[63872,63872],\"mapped\",[21570]],[[63873,63873],\"mapped\",[22899]],[[63874,63874],\"mapped\",[24300]],[[63875,63875],\"mapped\",[26053]],[[63876,63876],\"mapped\",[28670]],[[63877,63877],\"mapped\",[3
1018]],[[63878,63878],\"mapped\",[38317]],[[63879,63879],\"mapped\",[39530]],[[63880,63880],\"mapped\",[40599]],[[63881,63881],\"mapped\",[40654]],[[63882,63882],\"mapped\",[21147]],[[63883,63883],\"mapped\",[26310]],[[63884,63884],\"mapped\",[27511]],[[63885,63885],\"mapped\",[36706]],[[63886,63886],\"mapped\",[24180]],[[63887,63887],\"mapped\",[24976]],[[63888,63888],\"mapped\",[25088]],[[63889,63889],\"mapped\",[25754]],[[63890,63890],\"mapped\",[28451]],[[63891,63891],\"mapped\",[29001]],[[63892,63892],\"mapped\",[29833]],[[63893,63893],\"mapped\",[31178]],[[63894,63894],\"mapped\",[32244]],[[63895,63895],\"mapped\",[32879]],[[63896,63896],\"mapped\",[36646]],[[63897,63897],\"mapped\",[34030]],[[63898,63898],\"mapped\",[36899]],[[63899,63899],\"mapped\",[37706]],[[63900,63900],\"mapped\",[21015]],[[63901,63901],\"mapped\",[21155]],[[63902,63902],\"mapped\",[21693]],[[63903,63903],\"mapped\",[28872]],[[63904,63904],\"mapped\",[35010]],[[63905,63905],\"mapped\",[35498]],[[63906,63906],\"mapped\",[24265]],[[63907,63907],\"mapped\",[24565]],[[63908,63908],\"mapped\",[25467]],[[63909,63909],\"mapped\",[27566]],[[63910,63910],\"mapped\",[31806]],[[63911,63911],\"mapped\",[29557]],[[63912,63912],\"mapped\",[20196]],[[63913,63913],\"mapped\",[22265]],[[63914,63914],\"mapped\",[23527]],[[63915,63915],\"mapped\",[23994]],[[63916,63916],\"mapped\",[24604]],[[63917,63917],\"mapped\",[29618]],[[63918,63918],\"mapped\",[29801]],[[63919,63919],\"mapped\",[32666]],[[63920,63920],\"mapped\",[32838]],[[63921,63921],\"mapped\",[37428]],[[63922,63922],\"mapped\",[38646]],[[63923,63923],\"mapped\",[38728]],[[63924,63924],\"mapped\",[38936]],[[63925,63925],\"mapped\",[20363]],[[63926,63926],\"mapped\",[31150]],[[63927,63927],\"mapped\",[37300]],[[63928,63928],\"mapped\",[38584]],[[63929,63929],\"mapped\",[24801]],[[63930,63930],\"mapped\",[20102]],[[63931,63931],\"mapped\",[20698]],[[63932,63932],\"mapped\",[23534]],[[63933,63933],\"mapped\",[23615]],[[63934,63934],\"mapped\",[26009]
],[[63935,63935],\"mapped\",[27138]],[[63936,63936],\"mapped\",[29134]],[[63937,63937],\"mapped\",[30274]],[[63938,63938],\"mapped\",[34044]],[[63939,63939],\"mapped\",[36988]],[[63940,63940],\"mapped\",[40845]],[[63941,63941],\"mapped\",[26248]],[[63942,63942],\"mapped\",[38446]],[[63943,63943],\"mapped\",[21129]],[[63944,63944],\"mapped\",[26491]],[[63945,63945],\"mapped\",[26611]],[[63946,63946],\"mapped\",[27969]],[[63947,63947],\"mapped\",[28316]],[[63948,63948],\"mapped\",[29705]],[[63949,63949],\"mapped\",[30041]],[[63950,63950],\"mapped\",[30827]],[[63951,63951],\"mapped\",[32016]],[[63952,63952],\"mapped\",[39006]],[[63953,63953],\"mapped\",[20845]],[[63954,63954],\"mapped\",[25134]],[[63955,63955],\"mapped\",[38520]],[[63956,63956],\"mapped\",[20523]],[[63957,63957],\"mapped\",[23833]],[[63958,63958],\"mapped\",[28138]],[[63959,63959],\"mapped\",[36650]],[[63960,63960],\"mapped\",[24459]],[[63961,63961],\"mapped\",[24900]],[[63962,63962],\"mapped\",[26647]],[[63963,63963],\"mapped\",[29575]],[[63964,63964],\"mapped\",[38534]],[[63965,63965],\"mapped\",[21033]],[[63966,63966],\"mapped\",[21519]],[[63967,63967],\"mapped\",[23653]],[[63968,63968],\"mapped\",[26131]],[[63969,63969],\"mapped\",[26446]],[[63970,63970],\"mapped\",[26792]],[[63971,63971],\"mapped\",[27877]],[[63972,63972],\"mapped\",[29702]],[[63973,63973],\"mapped\",[30178]],[[63974,63974],\"mapped\",[32633]],[[63975,63975],\"mapped\",[35023]],[[63976,63976],\"mapped\",[35041]],[[63977,63977],\"mapped\",[37324]],[[63978,63978],\"mapped\",[38626]],[[63979,63979],\"mapped\",[21311]],[[63980,63980],\"mapped\",[28346]],[[63981,63981],\"mapped\",[21533]],[[63982,63982],\"mapped\",[29136]],[[63983,63983],\"mapped\",[29848]],[[63984,63984],\"mapped\",[34298]],[[63985,63985],\"mapped\",[38563]],[[63986,63986],\"mapped\",[40023]],[[63987,63987],\"mapped\",[40607]],[[63988,63988],\"mapped\",[26519]],[[63989,63989],\"mapped\",[28107]],[[63990,63990],\"mapped\",[33256]],[[63991,63991],\"mapped\",[31435]],[[6
3992,63992],\"mapped\",[31520]],[[63993,63993],\"mapped\",[31890]],[[63994,63994],\"mapped\",[29376]],[[63995,63995],\"mapped\",[28825]],[[63996,63996],\"mapped\",[35672]],[[63997,63997],\"mapped\",[20160]],[[63998,63998],\"mapped\",[33590]],[[63999,63999],\"mapped\",[21050]],[[64000,64000],\"mapped\",[20999]],[[64001,64001],\"mapped\",[24230]],[[64002,64002],\"mapped\",[25299]],[[64003,64003],\"mapped\",[31958]],[[64004,64004],\"mapped\",[23429]],[[64005,64005],\"mapped\",[27934]],[[64006,64006],\"mapped\",[26292]],[[64007,64007],\"mapped\",[36667]],[[64008,64008],\"mapped\",[34892]],[[64009,64009],\"mapped\",[38477]],[[64010,64010],\"mapped\",[35211]],[[64011,64011],\"mapped\",[24275]],[[64012,64012],\"mapped\",[20800]],[[64013,64013],\"mapped\",[21952]],[[64014,64015],\"valid\"],[[64016,64016],\"mapped\",[22618]],[[64017,64017],\"valid\"],[[64018,64018],\"mapped\",[26228]],[[64019,64020],\"valid\"],[[64021,64021],\"mapped\",[20958]],[[64022,64022],\"mapped\",[29482]],[[64023,64023],\"mapped\",[30410]],[[64024,64024],\"mapped\",[31036]],[[64025,64025],\"mapped\",[31070]],[[64026,64026],\"mapped\",[31077]],[[64027,64027],\"mapped\",[31119]],[[64028,64028],\"mapped\",[38742]],[[64029,64029],\"mapped\",[31934]],[[64030,64030],\"mapped\",[32701]],[[64031,64031],\"valid\"],[[64032,64032],\"mapped\",[34322]],[[64033,64033],\"valid\"],[[64034,64034],\"mapped\",[35576]],[[64035,64036],\"valid\"],[[64037,64037],\"mapped\",[36920]],[[64038,64038],\"mapped\",[37117]],[[64039,64041],\"valid\"],[[64042,64042],\"mapped\",[39151]],[[64043,64043],\"mapped\",[39164]],[[64044,64044],\"mapped\",[39208]],[[64045,64045],\"mapped\",[40372]],[[64046,64046],\"mapped\",[37086]],[[64047,64047],\"mapped\",[38583]],[[64048,64048],\"mapped\",[20398]],[[64049,64049],\"mapped\",[20711]],[[64050,64050],\"mapped\",[20813]],[[64051,64051],\"mapped\",[21193]],[[64052,64052],\"mapped\",[21220]],[[64053,64053],\"mapped\",[21329]],[[64054,64054],\"mapped\",[21917]],[[64055,64055],\"mapped\",[22022]],[
[64056,64056],\"mapped\",[22120]],[[64057,64057],\"mapped\",[22592]],[[64058,64058],\"mapped\",[22696]],[[64059,64059],\"mapped\",[23652]],[[64060,64060],\"mapped\",[23662]],[[64061,64061],\"mapped\",[24724]],[[64062,64062],\"mapped\",[24936]],[[64063,64063],\"mapped\",[24974]],[[64064,64064],\"mapped\",[25074]],[[64065,64065],\"mapped\",[25935]],[[64066,64066],\"mapped\",[26082]],[[64067,64067],\"mapped\",[26257]],[[64068,64068],\"mapped\",[26757]],[[64069,64069],\"mapped\",[28023]],[[64070,64070],\"mapped\",[28186]],[[64071,64071],\"mapped\",[28450]],[[64072,64072],\"mapped\",[29038]],[[64073,64073],\"mapped\",[29227]],[[64074,64074],\"mapped\",[29730]],[[64075,64075],\"mapped\",[30865]],[[64076,64076],\"mapped\",[31038]],[[64077,64077],\"mapped\",[31049]],[[64078,64078],\"mapped\",[31048]],[[64079,64079],\"mapped\",[31056]],[[64080,64080],\"mapped\",[31062]],[[64081,64081],\"mapped\",[31069]],[[64082,64082],\"mapped\",[31117]],[[64083,64083],\"mapped\",[31118]],[[64084,64084],\"mapped\",[31296]],[[64085,64085],\"mapped\",[31361]],[[64086,64086],\"mapped\",[31680]],[[64087,64087],\"mapped\",[32244]],[[64088,64088],\"mapped\",[32265]],[[64089,64089],\"mapped\",[32321]],[[64090,64090],\"mapped\",[32626]],[[64091,64091],\"mapped\",[32773]],[[64092,64092],\"mapped\",[33261]],[[64093,64094],\"mapped\",[33401]],[[64095,64095],\"mapped\",[33879]],[[64096,64096],\"mapped\",[35088]],[[64097,64097],\"mapped\",[35222]],[[64098,64098],\"mapped\",[35585]],[[64099,64099],\"mapped\",[35641]],[[64100,64100],\"mapped\",[36051]],[[64101,64101],\"mapped\",[36104]],[[64102,64102],\"mapped\",[36790]],[[64103,64103],\"mapped\",[36920]],[[64104,64104],\"mapped\",[38627]],[[64105,64105],\"mapped\",[38911]],[[64106,64106],\"mapped\",[38971]],[[64107,64107],\"mapped\",[24693]],[[64108,64108],\"mapped\",[148206]],[[64109,64109],\"mapped\",[33304]],[[64110,64111],\"disallowed\"],[[64112,64112],\"mapped\",[20006]],[[64113,64113],\"mapped\",[20917]],[[64114,64114],\"mapped\",[20840]],[[64115,6
4115],\"mapped\",[20352]],[[64116,64116],\"mapped\",[20805]],[[64117,64117],\"mapped\",[20864]],[[64118,64118],\"mapped\",[21191]],[[64119,64119],\"mapped\",[21242]],[[64120,64120],\"mapped\",[21917]],[[64121,64121],\"mapped\",[21845]],[[64122,64122],\"mapped\",[21913]],[[64123,64123],\"mapped\",[21986]],[[64124,64124],\"mapped\",[22618]],[[64125,64125],\"mapped\",[22707]],[[64126,64126],\"mapped\",[22852]],[[64127,64127],\"mapped\",[22868]],[[64128,64128],\"mapped\",[23138]],[[64129,64129],\"mapped\",[23336]],[[64130,64130],\"mapped\",[24274]],[[64131,64131],\"mapped\",[24281]],[[64132,64132],\"mapped\",[24425]],[[64133,64133],\"mapped\",[24493]],[[64134,64134],\"mapped\",[24792]],[[64135,64135],\"mapped\",[24910]],[[64136,64136],\"mapped\",[24840]],[[64137,64137],\"mapped\",[24974]],[[64138,64138],\"mapped\",[24928]],[[64139,64139],\"mapped\",[25074]],[[64140,64140],\"mapped\",[25140]],[[64141,64141],\"mapped\",[25540]],[[64142,64142],\"mapped\",[25628]],[[64143,64143],\"mapped\",[25682]],[[64144,64144],\"mapped\",[25942]],[[64145,64145],\"mapped\",[26228]],[[64146,64146],\"mapped\",[26391]],[[64147,64147],\"mapped\",[26395]],[[64148,64148],\"mapped\",[26454]],[[64149,64149],\"mapped\",[27513]],[[64150,64150],\"mapped\",[27578]],[[64151,64151],\"mapped\",[27969]],[[64152,64152],\"mapped\",[28379]],[[64153,64153],\"mapped\",[28363]],[[64154,64154],\"mapped\",[28450]],[[64155,64155],\"mapped\",[28702]],[[64156,64156],\"mapped\",[29038]],[[64157,64157],\"mapped\",[30631]],[[64158,64158],\"mapped\",[29237]],[[64159,64159],\"mapped\",[29359]],[[64160,64160],\"mapped\",[29482]],[[64161,64161],\"mapped\",[29809]],[[64162,64162],\"mapped\",[29958]],[[64163,64163],\"mapped\",[30011]],[[64164,64164],\"mapped\",[30237]],[[64165,64165],\"mapped\",[30239]],[[64166,64166],\"mapped\",[30410]],[[64167,64167],\"mapped\",[30427]],[[64168,64168],\"mapped\",[30452]],[[64169,64169],\"mapped\",[30538]],[[64170,64170],\"mapped\",[30528]],[[64171,64171],\"mapped\",[30924]],[[64172,64172]
,\"mapped\",[31409]],[[64173,64173],\"mapped\",[31680]],[[64174,64174],\"mapped\",[31867]],[[64175,64175],\"mapped\",[32091]],[[64176,64176],\"mapped\",[32244]],[[64177,64177],\"mapped\",[32574]],[[64178,64178],\"mapped\",[32773]],[[64179,64179],\"mapped\",[33618]],[[64180,64180],\"mapped\",[33775]],[[64181,64181],\"mapped\",[34681]],[[64182,64182],\"mapped\",[35137]],[[64183,64183],\"mapped\",[35206]],[[64184,64184],\"mapped\",[35222]],[[64185,64185],\"mapped\",[35519]],[[64186,64186],\"mapped\",[35576]],[[64187,64187],\"mapped\",[35531]],[[64188,64188],\"mapped\",[35585]],[[64189,64189],\"mapped\",[35582]],[[64190,64190],\"mapped\",[35565]],[[64191,64191],\"mapped\",[35641]],[[64192,64192],\"mapped\",[35722]],[[64193,64193],\"mapped\",[36104]],[[64194,64194],\"mapped\",[36664]],[[64195,64195],\"mapped\",[36978]],[[64196,64196],\"mapped\",[37273]],[[64197,64197],\"mapped\",[37494]],[[64198,64198],\"mapped\",[38524]],[[64199,64199],\"mapped\",[38627]],[[64200,64200],\"mapped\",[38742]],[[64201,64201],\"mapped\",[38875]],[[64202,64202],\"mapped\",[38911]],[[64203,64203],\"mapped\",[38923]],[[64204,64204],\"mapped\",[38971]],[[64205,64205],\"mapped\",[39698]],[[64206,64206],\"mapped\",[40860]],[[64207,64207],\"mapped\",[141386]],[[64208,64208],\"mapped\",[141380]],[[64209,64209],\"mapped\",[144341]],[[64210,64210],\"mapped\",[15261]],[[64211,64211],\"mapped\",[16408]],[[64212,64212],\"mapped\",[16441]],[[64213,64213],\"mapped\",[152137]],[[64214,64214],\"mapped\",[154832]],[[64215,64215],\"mapped\",[163539]],[[64216,64216],\"mapped\",[40771]],[[64217,64217],\"mapped\",[40846]],[[64218,64255],\"disallowed\"],[[64256,64256],\"mapped\",[102,102]],[[64257,64257],\"mapped\",[102,105]],[[64258,64258],\"mapped\",[102,108]],[[64259,64259],\"mapped\",[102,102,105]],[[64260,64260],\"mapped\",[102,102,108]],[[64261,64262],\"mapped\",[115,116]],[[64263,64274],\"disallowed\"],[[64275,64275],\"mapped\",[1396,1398]],[[64276,64276],\"mapped\",[1396,1381]],[[64277,64277],\"mapped\",[1
396,1387]],[[64278,64278],\"mapped\",[1406,1398]],[[64279,64279],\"mapped\",[1396,1389]],[[64280,64284],\"disallowed\"],[[64285,64285],\"mapped\",[1497,1460]],[[64286,64286],\"valid\"],[[64287,64287],\"mapped\",[1522,1463]],[[64288,64288],\"mapped\",[1506]],[[64289,64289],\"mapped\",[1488]],[[64290,64290],\"mapped\",[1491]],[[64291,64291],\"mapped\",[1492]],[[64292,64292],\"mapped\",[1499]],[[64293,64293],\"mapped\",[1500]],[[64294,64294],\"mapped\",[1501]],[[64295,64295],\"mapped\",[1512]],[[64296,64296],\"mapped\",[1514]],[[64297,64297],\"disallowed_STD3_mapped\",[43]],[[64298,64298],\"mapped\",[1513,1473]],[[64299,64299],\"mapped\",[1513,1474]],[[64300,64300],\"mapped\",[1513,1468,1473]],[[64301,64301],\"mapped\",[1513,1468,1474]],[[64302,64302],\"mapped\",[1488,1463]],[[64303,64303],\"mapped\",[1488,1464]],[[64304,64304],\"mapped\",[1488,1468]],[[64305,64305],\"mapped\",[1489,1468]],[[64306,64306],\"mapped\",[1490,1468]],[[64307,64307],\"mapped\",[1491,1468]],[[64308,64308],\"mapped\",[1492,1468]],[[64309,64309],\"mapped\",[1493,1468]],[[64310,64310],\"mapped\",[1494,1468]],[[64311,64311],\"disallowed\"],[[64312,64312],\"mapped\",[1496,1468]],[[64313,64313],\"mapped\",[1497,1468]],[[64314,64314],\"mapped\",[1498,1468]],[[64315,64315],\"mapped\",[1499,1468]],[[64316,64316],\"mapped\",[1500,1468]],[[64317,64317],\"disallowed\"],[[64318,64318],\"mapped\",[1502,1468]],[[64319,64319],\"disallowed\"],[[64320,64320],\"mapped\",[1504,1468]],[[64321,64321],\"mapped\",[1505,1468]],[[64322,64322],\"disallowed\"],[[64323,64323],\"mapped\",[1507,1468]],[[64324,64324],\"mapped\",[1508,1468]],[[64325,64325],\"disallowed\"],[[64326,64326],\"mapped\",[1510,1468]],[[64327,64327],\"mapped\",[1511,1468]],[[64328,64328],\"mapped\",[1512,1468]],[[64329,64329],\"mapped\",[1513,1468]],[[64330,64330],\"mapped\",[1514,1468]],[[64331,64331],\"mapped\",[1493,1465]],[[64332,64332],\"mapped\",[1489,1471]],[[64333,64333],\"mapped\",[1499,1471]],[[64334,64334],\"mapped\",[1508,1471]],[[64335,6
4335],\"mapped\",[1488,1500]],[[64336,64337],\"mapped\",[1649]],[[64338,64341],\"mapped\",[1659]],[[64342,64345],\"mapped\",[1662]],[[64346,64349],\"mapped\",[1664]],[[64350,64353],\"mapped\",[1658]],[[64354,64357],\"mapped\",[1663]],[[64358,64361],\"mapped\",[1657]],[[64362,64365],\"mapped\",[1700]],[[64366,64369],\"mapped\",[1702]],[[64370,64373],\"mapped\",[1668]],[[64374,64377],\"mapped\",[1667]],[[64378,64381],\"mapped\",[1670]],[[64382,64385],\"mapped\",[1671]],[[64386,64387],\"mapped\",[1677]],[[64388,64389],\"mapped\",[1676]],[[64390,64391],\"mapped\",[1678]],[[64392,64393],\"mapped\",[1672]],[[64394,64395],\"mapped\",[1688]],[[64396,64397],\"mapped\",[1681]],[[64398,64401],\"mapped\",[1705]],[[64402,64405],\"mapped\",[1711]],[[64406,64409],\"mapped\",[1715]],[[64410,64413],\"mapped\",[1713]],[[64414,64415],\"mapped\",[1722]],[[64416,64419],\"mapped\",[1723]],[[64420,64421],\"mapped\",[1728]],[[64422,64425],\"mapped\",[1729]],[[64426,64429],\"mapped\",[1726]],[[64430,64431],\"mapped\",[1746]],[[64432,64433],\"mapped\",[1747]],[[64434,64449],\"valid\",[],\"NV8\"],[[64450,64466],\"disallowed\"],[[64467,64470],\"mapped\",[1709]],[[64471,64472],\"mapped\",[1735]],[[64473,64474],\"mapped\",[1734]],[[64475,64476],\"mapped\",[1736]],[[64477,64477],\"mapped\",[1735,1652]],[[64478,64479],\"mapped\",[1739]],[[64480,64481],\"mapped\",[1733]],[[64482,64483],\"mapped\",[1737]],[[64484,64487],\"mapped\",[1744]],[[64488,64489],\"mapped\",[1609]],[[64490,64491],\"mapped\",[1574,1575]],[[64492,64493],\"mapped\",[1574,1749]],[[64494,64495],\"mapped\",[1574,1608]],[[64496,64497],\"mapped\",[1574,1735]],[[64498,64499],\"mapped\",[1574,1734]],[[64500,64501],\"mapped\",[1574,1736]],[[64502,64504],\"mapped\",[1574,1744]],[[64505,64507],\"mapped\",[1574,1609]],[[64508,64511],\"mapped\",[1740]],[[64512,64512],\"mapped\",[1574,1580]],[[64513,64513],\"mapped\",[1574,1581]],[[64514,64514],\"mapped\",[1574,1605]],[[64515,64515],\"mapped\",[1574,1609]],[[64516,64516],\"mapped\",[1574,161
0]],[[64517,64517],\"mapped\",[1576,1580]],[[64518,64518],\"mapped\",[1576,1581]],[[64519,64519],\"mapped\",[1576,1582]],[[64520,64520],\"mapped\",[1576,1605]],[[64521,64521],\"mapped\",[1576,1609]],[[64522,64522],\"mapped\",[1576,1610]],[[64523,64523],\"mapped\",[1578,1580]],[[64524,64524],\"mapped\",[1578,1581]],[[64525,64525],\"mapped\",[1578,1582]],[[64526,64526],\"mapped\",[1578,1605]],[[64527,64527],\"mapped\",[1578,1609]],[[64528,64528],\"mapped\",[1578,1610]],[[64529,64529],\"mapped\",[1579,1580]],[[64530,64530],\"mapped\",[1579,1605]],[[64531,64531],\"mapped\",[1579,1609]],[[64532,64532],\"mapped\",[1579,1610]],[[64533,64533],\"mapped\",[1580,1581]],[[64534,64534],\"mapped\",[1580,1605]],[[64535,64535],\"mapped\",[1581,1580]],[[64536,64536],\"mapped\",[1581,1605]],[[64537,64537],\"mapped\",[1582,1580]],[[64538,64538],\"mapped\",[1582,1581]],[[64539,64539],\"mapped\",[1582,1605]],[[64540,64540],\"mapped\",[1587,1580]],[[64541,64541],\"mapped\",[1587,1581]],[[64542,64542],\"mapped\",[1587,1582]],[[64543,64543],\"mapped\",[1587,1605]],[[64544,64544],\"mapped\",[1589,1581]],[[64545,64545],\"mapped\",[1589,1605]],[[64546,64546],\"mapped\",[1590,1580]],[[64547,64547],\"mapped\",[1590,1581]],[[64548,64548],\"mapped\",[1590,1582]],[[64549,64549],\"mapped\",[1590,1605]],[[64550,64550],\"mapped\",[1591,1581]],[[64551,64551],\"mapped\",[1591,1605]],[[64552,64552],\"mapped\",[1592,1605]],[[64553,64553],\"mapped\",[1593,1580]],[[64554,64554],\"mapped\",[1593,1605]],[[64555,64555],\"mapped\",[1594,1580]],[[64556,64556],\"mapped\",[1594,1605]],[[64557,64557],\"mapped\",[1601,1580]],[[64558,64558],\"mapped\",[1601,1581]],[[64559,64559],\"mapped\",[1601,1582]],[[64560,64560],\"mapped\",[1601,1605]],[[64561,64561],\"mapped\",[1601,1609]],[[64562,64562],\"mapped\",[1601,1610]],[[64563,64563],\"mapped\",[1602,1581]],[[64564,64564],\"mapped\",[1602,1605]],[[64565,64565],\"mapped\",[1602,1609]],[[64566,64566],\"mapped\",[1602,1610]],[[64567,64567],\"mapped\",[1603,1575]],[[64568
,64568],\"mapped\",[1603,1580]],[[64569,64569],\"mapped\",[1603,1581]],[[64570,64570],\"mapped\",[1603,1582]],[[64571,64571],\"mapped\",[1603,1604]],[[64572,64572],\"mapped\",[1603,1605]],[[64573,64573],\"mapped\",[1603,1609]],[[64574,64574],\"mapped\",[1603,1610]],[[64575,64575],\"mapped\",[1604,1580]],[[64576,64576],\"mapped\",[1604,1581]],[[64577,64577],\"mapped\",[1604,1582]],[[64578,64578],\"mapped\",[1604,1605]],[[64579,64579],\"mapped\",[1604,1609]],[[64580,64580],\"mapped\",[1604,1610]],[[64581,64581],\"mapped\",[1605,1580]],[[64582,64582],\"mapped\",[1605,1581]],[[64583,64583],\"mapped\",[1605,1582]],[[64584,64584],\"mapped\",[1605,1605]],[[64585,64585],\"mapped\",[1605,1609]],[[64586,64586],\"mapped\",[1605,1610]],[[64587,64587],\"mapped\",[1606,1580]],[[64588,64588],\"mapped\",[1606,1581]],[[64589,64589],\"mapped\",[1606,1582]],[[64590,64590],\"mapped\",[1606,1605]],[[64591,64591],\"mapped\",[1606,1609]],[[64592,64592],\"mapped\",[1606,1610]],[[64593,64593],\"mapped\",[1607,1580]],[[64594,64594],\"mapped\",[1607,1605]],[[64595,64595],\"mapped\",[1607,1609]],[[64596,64596],\"mapped\",[1607,1610]],[[64597,64597],\"mapped\",[1610,1580]],[[64598,64598],\"mapped\",[1610,1581]],[[64599,64599],\"mapped\",[1610,1582]],[[64600,64600],\"mapped\",[1610,1605]],[[64601,64601],\"mapped\",[1610,1609]],[[64602,64602],\"mapped\",[1610,1610]],[[64603,64603],\"mapped\",[1584,1648]],[[64604,64604],\"mapped\",[1585,1648]],[[64605,64605],\"mapped\",[1609,1648]],[[64606,64606],\"disallowed_STD3_mapped\",[32,1612,1617]],[[64607,64607],\"disallowed_STD3_mapped\",[32,1613,1617]],[[64608,64608],\"disallowed_STD3_mapped\",[32,1614,1617]],[[64609,64609],\"disallowed_STD3_mapped\",[32,1615,1617]],[[64610,64610],\"disallowed_STD3_mapped\",[32,1616,1617]],[[64611,64611],\"disallowed_STD3_mapped\",[32,1617,1648]],[[64612,64612],\"mapped\",[1574,1585]],[[64613,64613],\"mapped\",[1574,1586]],[[64614,64614],\"mapped\",[1574,1605]],[[64615,64615],\"mapped\",[1574,1606]],[[64616,64616],\"mapp
ed\",[1574,1609]],[[64617,64617],\"mapped\",[1574,1610]],[[64618,64618],\"mapped\",[1576,1585]],[[64619,64619],\"mapped\",[1576,1586]],[[64620,64620],\"mapped\",[1576,1605]],[[64621,64621],\"mapped\",[1576,1606]],[[64622,64622],\"mapped\",[1576,1609]],[[64623,64623],\"mapped\",[1576,1610]],[[64624,64624],\"mapped\",[1578,1585]],[[64625,64625],\"mapped\",[1578,1586]],[[64626,64626],\"mapped\",[1578,1605]],[[64627,64627],\"mapped\",[1578,1606]],[[64628,64628],\"mapped\",[1578,1609]],[[64629,64629],\"mapped\",[1578,1610]],[[64630,64630],\"mapped\",[1579,1585]],[[64631,64631],\"mapped\",[1579,1586]],[[64632,64632],\"mapped\",[1579,1605]],[[64633,64633],\"mapped\",[1579,1606]],[[64634,64634],\"mapped\",[1579,1609]],[[64635,64635],\"mapped\",[1579,1610]],[[64636,64636],\"mapped\",[1601,1609]],[[64637,64637],\"mapped\",[1601,1610]],[[64638,64638],\"mapped\",[1602,1609]],[[64639,64639],\"mapped\",[1602,1610]],[[64640,64640],\"mapped\",[1603,1575]],[[64641,64641],\"mapped\",[1603,1604]],[[64642,64642],\"mapped\",[1603,1605]],[[64643,64643],\"mapped\",[1603,1609]],[[64644,64644],\"mapped\",[1603,1610]],[[64645,64645],\"mapped\",[1604,1605]],[[64646,64646],\"mapped\",[1604,1609]],[[64647,64647],\"mapped\",[1604,1610]],[[64648,64648],\"mapped\",[1605,1575]],[[64649,64649],\"mapped\",[1605,1605]],[[64650,64650],\"mapped\",[1606,1585]],[[64651,64651],\"mapped\",[1606,1586]],[[64652,64652],\"mapped\",[1606,1605]],[[64653,64653],\"mapped\",[1606,1606]],[[64654,64654],\"mapped\",[1606,1609]],[[64655,64655],\"mapped\",[1606,1610]],[[64656,64656],\"mapped\",[1609,1648]],[[64657,64657],\"mapped\",[1610,1585]],[[64658,64658],\"mapped\",[1610,1586]],[[64659,64659],\"mapped\",[1610,1605]],[[64660,64660],\"mapped\",[1610,1606]],[[64661,64661],\"mapped\",[1610,1609]],[[64662,64662],\"mapped\",[1610,1610]],[[64663,64663],\"mapped\",[1574,1580]],[[64664,64664],\"mapped\",[1574,1581]],[[64665,64665],\"mapped\",[1574,1582]],[[64666,64666],\"mapped\",[1574,1605]],[[64667,64667],\"mapped\",[1574,
1607]],[[64668,64668],\"mapped\",[1576,1580]],[[64669,64669],\"mapped\",[1576,1581]],[[64670,64670],\"mapped\",[1576,1582]],[[64671,64671],\"mapped\",[1576,1605]],[[64672,64672],\"mapped\",[1576,1607]],[[64673,64673],\"mapped\",[1578,1580]],[[64674,64674],\"mapped\",[1578,1581]],[[64675,64675],\"mapped\",[1578,1582]],[[64676,64676],\"mapped\",[1578,1605]],[[64677,64677],\"mapped\",[1578,1607]],[[64678,64678],\"mapped\",[1579,1605]],[[64679,64679],\"mapped\",[1580,1581]],[[64680,64680],\"mapped\",[1580,1605]],[[64681,64681],\"mapped\",[1581,1580]],[[64682,64682],\"mapped\",[1581,1605]],[[64683,64683],\"mapped\",[1582,1580]],[[64684,64684],\"mapped\",[1582,1605]],[[64685,64685],\"mapped\",[1587,1580]],[[64686,64686],\"mapped\",[1587,1581]],[[64687,64687],\"mapped\",[1587,1582]],[[64688,64688],\"mapped\",[1587,1605]],[[64689,64689],\"mapped\",[1589,1581]],[[64690,64690],\"mapped\",[1589,1582]],[[64691,64691],\"mapped\",[1589,1605]],[[64692,64692],\"mapped\",[1590,1580]],[[64693,64693],\"mapped\",[1590,1581]],[[64694,64694],\"mapped\",[1590,1582]],[[64695,64695],\"mapped\",[1590,1605]],[[64696,64696],\"mapped\",[1591,1581]],[[64697,64697],\"mapped\",[1592,1605]],[[64698,64698],\"mapped\",[1593,1580]],[[64699,64699],\"mapped\",[1593,1605]],[[64700,64700],\"mapped\",[1594,1580]],[[64701,64701],\"mapped\",[1594,1605]],[[64702,64702],\"mapped\",[1601,1580]],[[64703,64703],\"mapped\",[1601,1581]],[[64704,64704],\"mapped\",[1601,1582]],[[64705,64705],\"mapped\",[1601,1605]],[[64706,64706],\"mapped\",[1602,1581]],[[64707,64707],\"mapped\",[1602,1605]],[[64708,64708],\"mapped\",[1603,1580]],[[64709,64709],\"mapped\",[1603,1581]],[[64710,64710],\"mapped\",[1603,1582]],[[64711,64711],\"mapped\",[1603,1604]],[[64712,64712],\"mapped\",[1603,1605]],[[64713,64713],\"mapped\",[1604,1580]],[[64714,64714],\"mapped\",[1604,1581]],[[64715,64715],\"mapped\",[1604,1582]],[[64716,64716],\"mapped\",[1604,1605]],[[64717,64717],\"mapped\",[1604,1607]],[[64718,64718],\"mapped\",[1605,1580]],[[64
719,64719],\"mapped\",[1605,1581]],[[64720,64720],\"mapped\",[1605,1582]],[[64721,64721],\"mapped\",[1605,1605]],[[64722,64722],\"mapped\",[1606,1580]],[[64723,64723],\"mapped\",[1606,1581]],[[64724,64724],\"mapped\",[1606,1582]],[[64725,64725],\"mapped\",[1606,1605]],[[64726,64726],\"mapped\",[1606,1607]],[[64727,64727],\"mapped\",[1607,1580]],[[64728,64728],\"mapped\",[1607,1605]],[[64729,64729],\"mapped\",[1607,1648]],[[64730,64730],\"mapped\",[1610,1580]],[[64731,64731],\"mapped\",[1610,1581]],[[64732,64732],\"mapped\",[1610,1582]],[[64733,64733],\"mapped\",[1610,1605]],[[64734,64734],\"mapped\",[1610,1607]],[[64735,64735],\"mapped\",[1574,1605]],[[64736,64736],\"mapped\",[1574,1607]],[[64737,64737],\"mapped\",[1576,1605]],[[64738,64738],\"mapped\",[1576,1607]],[[64739,64739],\"mapped\",[1578,1605]],[[64740,64740],\"mapped\",[1578,1607]],[[64741,64741],\"mapped\",[1579,1605]],[[64742,64742],\"mapped\",[1579,1607]],[[64743,64743],\"mapped\",[1587,1605]],[[64744,64744],\"mapped\",[1587,1607]],[[64745,64745],\"mapped\",[1588,1605]],[[64746,64746],\"mapped\",[1588,1607]],[[64747,64747],\"mapped\",[1603,1604]],[[64748,64748],\"mapped\",[1603,1605]],[[64749,64749],\"mapped\",[1604,1605]],[[64750,64750],\"mapped\",[1606,1605]],[[64751,64751],\"mapped\",[1606,1607]],[[64752,64752],\"mapped\",[1610,1605]],[[64753,64753],\"mapped\",[1610,1607]],[[64754,64754],\"mapped\",[1600,1614,1617]],[[64755,64755],\"mapped\",[1600,1615,1617]],[[64756,64756],\"mapped\",[1600,1616,1617]],[[64757,64757],\"mapped\",[1591,1609]],[[64758,64758],\"mapped\",[1591,1610]],[[64759,64759],\"mapped\",[1593,1609]],[[64760,64760],\"mapped\",[1593,1610]],[[64761,64761],\"mapped\",[1594,1609]],[[64762,64762],\"mapped\",[1594,1610]],[[64763,64763],\"mapped\",[1587,1609]],[[64764,64764],\"mapped\",[1587,1610]],[[64765,64765],\"mapped\",[1588,1609]],[[64766,64766],\"mapped\",[1588,1610]],[[64767,64767],\"mapped\",[1581,1609]],[[64768,64768],\"mapped\",[1581,1610]],[[64769,64769],\"mapped\",[1580,1609]],
[[64770,64770],\"mapped\",[1580,1610]],[[64771,64771],\"mapped\",[1582,1609]],[[64772,64772],\"mapped\",[1582,1610]],[[64773,64773],\"mapped\",[1589,1609]],[[64774,64774],\"mapped\",[1589,1610]],[[64775,64775],\"mapped\",[1590,1609]],[[64776,64776],\"mapped\",[1590,1610]],[[64777,64777],\"mapped\",[1588,1580]],[[64778,64778],\"mapped\",[1588,1581]],[[64779,64779],\"mapped\",[1588,1582]],[[64780,64780],\"mapped\",[1588,1605]],[[64781,64781],\"mapped\",[1588,1585]],[[64782,64782],\"mapped\",[1587,1585]],[[64783,64783],\"mapped\",[1589,1585]],[[64784,64784],\"mapped\",[1590,1585]],[[64785,64785],\"mapped\",[1591,1609]],[[64786,64786],\"mapped\",[1591,1610]],[[64787,64787],\"mapped\",[1593,1609]],[[64788,64788],\"mapped\",[1593,1610]],[[64789,64789],\"mapped\",[1594,1609]],[[64790,64790],\"mapped\",[1594,1610]],[[64791,64791],\"mapped\",[1587,1609]],[[64792,64792],\"mapped\",[1587,1610]],[[64793,64793],\"mapped\",[1588,1609]],[[64794,64794],\"mapped\",[1588,1610]],[[64795,64795],\"mapped\",[1581,1609]],[[64796,64796],\"mapped\",[1581,1610]],[[64797,64797],\"mapped\",[1580,1609]],[[64798,64798],\"mapped\",[1580,1610]],[[64799,64799],\"mapped\",[1582,1609]],[[64800,64800],\"mapped\",[1582,1610]],[[64801,64801],\"mapped\",[1589,1609]],[[64802,64802],\"mapped\",[1589,1610]],[[64803,64803],\"mapped\",[1590,1609]],[[64804,64804],\"mapped\",[1590,1610]],[[64805,64805],\"mapped\",[1588,1580]],[[64806,64806],\"mapped\",[1588,1581]],[[64807,64807],\"mapped\",[1588,1582]],[[64808,64808],\"mapped\",[1588,1605]],[[64809,64809],\"mapped\",[1588,1585]],[[64810,64810],\"mapped\",[1587,1585]],[[64811,64811],\"mapped\",[1589,1585]],[[64812,64812],\"mapped\",[1590,1585]],[[64813,64813],\"mapped\",[1588,1580]],[[64814,64814],\"mapped\",[1588,1581]],[[64815,64815],\"mapped\",[1588,1582]],[[64816,64816],\"mapped\",[1588,1605]],[[64817,64817],\"mapped\",[1587,1607]],[[64818,64818],\"mapped\",[1588,1607]],[[64819,64819],\"mapped\",[1591,1605]],[[64820,64820],\"mapped\",[1587,1580]],[[64821,648
21],\"mapped\",[1587,1581]],[[64822,64822],\"mapped\",[1587,1582]],[[64823,64823],\"mapped\",[1588,1580]],[[64824,64824],\"mapped\",[1588,1581]],[[64825,64825],\"mapped\",[1588,1582]],[[64826,64826],\"mapped\",[1591,1605]],[[64827,64827],\"mapped\",[1592,1605]],[[64828,64829],\"mapped\",[1575,1611]],[[64830,64831],\"valid\",[],\"NV8\"],[[64832,64847],\"disallowed\"],[[64848,64848],\"mapped\",[1578,1580,1605]],[[64849,64850],\"mapped\",[1578,1581,1580]],[[64851,64851],\"mapped\",[1578,1581,1605]],[[64852,64852],\"mapped\",[1578,1582,1605]],[[64853,64853],\"mapped\",[1578,1605,1580]],[[64854,64854],\"mapped\",[1578,1605,1581]],[[64855,64855],\"mapped\",[1578,1605,1582]],[[64856,64857],\"mapped\",[1580,1605,1581]],[[64858,64858],\"mapped\",[1581,1605,1610]],[[64859,64859],\"mapped\",[1581,1605,1609]],[[64860,64860],\"mapped\",[1587,1581,1580]],[[64861,64861],\"mapped\",[1587,1580,1581]],[[64862,64862],\"mapped\",[1587,1580,1609]],[[64863,64864],\"mapped\",[1587,1605,1581]],[[64865,64865],\"mapped\",[1587,1605,1580]],[[64866,64867],\"mapped\",[1587,1605,1605]],[[64868,64869],\"mapped\",[1589,1581,1581]],[[64870,64870],\"mapped\",[1589,1605,1605]],[[64871,64872],\"mapped\",[1588,1581,1605]],[[64873,64873],\"mapped\",[1588,1580,1610]],[[64874,64875],\"mapped\",[1588,1605,1582]],[[64876,64877],\"mapped\",[1588,1605,1605]],[[64878,64878],\"mapped\",[1590,1581,1609]],[[64879,64880],\"mapped\",[1590,1582,1605]],[[64881,64882],\"mapped\",[1591,1605,1581]],[[64883,64883],\"mapped\",[1591,1605,1605]],[[64884,64884],\"mapped\",[1591,1605,1610]],[[64885,64885],\"mapped\",[1593,1580,1605]],[[64886,64887],\"mapped\",[1593,1605,1605]],[[64888,64888],\"mapped\",[1593,1605,1609]],[[64889,64889],\"mapped\",[1594,1605,1605]],[[64890,64890],\"mapped\",[1594,1605,1610]],[[64891,64891],\"mapped\",[1594,1605,1609]],[[64892,64893],\"mapped\",[1601,1582,1605]],[[64894,64894],\"mapped\",[1602,1605,1581]],[[64895,64895],\"mapped\",[1602,1605,1605]],[[64896,64896],\"mapped\",[1604,1581,1605]],[[6
4897,64897],\"mapped\",[1604,1581,1610]],[[64898,64898],\"mapped\",[1604,1581,1609]],[[64899,64900],\"mapped\",[1604,1580,1580]],[[64901,64902],\"mapped\",[1604,1582,1605]],[[64903,64904],\"mapped\",[1604,1605,1581]],[[64905,64905],\"mapped\",[1605,1581,1580]],[[64906,64906],\"mapped\",[1605,1581,1605]],[[64907,64907],\"mapped\",[1605,1581,1610]],[[64908,64908],\"mapped\",[1605,1580,1581]],[[64909,64909],\"mapped\",[1605,1580,1605]],[[64910,64910],\"mapped\",[1605,1582,1580]],[[64911,64911],\"mapped\",[1605,1582,1605]],[[64912,64913],\"disallowed\"],[[64914,64914],\"mapped\",[1605,1580,1582]],[[64915,64915],\"mapped\",[1607,1605,1580]],[[64916,64916],\"mapped\",[1607,1605,1605]],[[64917,64917],\"mapped\",[1606,1581,1605]],[[64918,64918],\"mapped\",[1606,1581,1609]],[[64919,64920],\"mapped\",[1606,1580,1605]],[[64921,64921],\"mapped\",[1606,1580,1609]],[[64922,64922],\"mapped\",[1606,1605,1610]],[[64923,64923],\"mapped\",[1606,1605,1609]],[[64924,64925],\"mapped\",[1610,1605,1605]],[[64926,64926],\"mapped\",[1576,1582,1610]],[[64927,64927],\"mapped\",[1578,1580,1610]],[[64928,64928],\"mapped\",[1578,1580,1609]],[[64929,64929],\"mapped\",[1578,1582,1610]],[[64930,64930],\"mapped\",[1578,1582,1609]],[[64931,64931],\"mapped\",[1578,1605,1610]],[[64932,64932],\"mapped\",[1578,1605,1609]],[[64933,64933],\"mapped\",[1580,1605,1610]],[[64934,64934],\"mapped\",[1580,1581,1609]],[[64935,64935],\"mapped\",[1580,1605,1609]],[[64936,64936],\"mapped\",[1587,1582,1609]],[[64937,64937],\"mapped\",[1589,1581,1610]],[[64938,64938],\"mapped\",[1588,1581,1610]],[[64939,64939],\"mapped\",[1590,1581,1610]],[[64940,64940],\"mapped\",[1604,1580,1610]],[[64941,64941],\"mapped\",[1604,1605,1610]],[[64942,64942],\"mapped\",[1610,1581,1610]],[[64943,64943],\"mapped\",[1610,1580,1610]],[[64944,64944],\"mapped\",[1610,1605,1610]],[[64945,64945],\"mapped\",[1605,1605,1610]],[[64946,64946],\"mapped\",[1602,1605,1610]],[[64947,64947],\"mapped\",[1606,1581,1610]],[[64948,64948],\"mapped\",[1602,1605
,1581]],[[64949,64949],\"mapped\",[1604,1581,1605]],[[64950,64950],\"mapped\",[1593,1605,1610]],[[64951,64951],\"mapped\",[1603,1605,1610]],[[64952,64952],\"mapped\",[1606,1580,1581]],[[64953,64953],\"mapped\",[1605,1582,1610]],[[64954,64954],\"mapped\",[1604,1580,1605]],[[64955,64955],\"mapped\",[1603,1605,1605]],[[64956,64956],\"mapped\",[1604,1580,1605]],[[64957,64957],\"mapped\",[1606,1580,1581]],[[64958,64958],\"mapped\",[1580,1581,1610]],[[64959,64959],\"mapped\",[1581,1580,1610]],[[64960,64960],\"mapped\",[1605,1580,1610]],[[64961,64961],\"mapped\",[1601,1605,1610]],[[64962,64962],\"mapped\",[1576,1581,1610]],[[64963,64963],\"mapped\",[1603,1605,1605]],[[64964,64964],\"mapped\",[1593,1580,1605]],[[64965,64965],\"mapped\",[1589,1605,1605]],[[64966,64966],\"mapped\",[1587,1582,1610]],[[64967,64967],\"mapped\",[1606,1580,1610]],[[64968,64975],\"disallowed\"],[[64976,65007],\"disallowed\"],[[65008,65008],\"mapped\",[1589,1604,1746]],[[65009,65009],\"mapped\",[1602,1604,1746]],[[65010,65010],\"mapped\",[1575,1604,1604,1607]],[[65011,65011],\"mapped\",[1575,1603,1576,1585]],[[65012,65012],\"mapped\",[1605,1581,1605,1583]],[[65013,65013],\"mapped\",[1589,1604,1593,1605]],[[65014,65014],\"mapped\",[1585,1587,1608,1604]],[[65015,65015],\"mapped\",[1593,1604,1610,1607]],[[65016,65016],\"mapped\",[1608,1587,1604,1605]],[[65017,65017],\"mapped\",[1589,1604,1609]],[[65018,65018],\"disallowed_STD3_mapped\",[1589,1604,1609,32,1575,1604,1604,1607,32,1593,1604,1610,1607,32,1608,1587,1604,1605]],[[65019,65019],\"disallowed_STD3_mapped\",[1580,1604,32,1580,1604,1575,1604,1607]],[[65020,65020],\"mapped\",[1585,1740,1575,1604]],[[65021,65021],\"valid\",[],\"NV8\"],[[65022,65023],\"disallowed\"],[[65024,65039],\"ignored\"],[[65040,65040],\"disallowed_STD3_mapped\",[44]],[[65041,65041],\"mapped\",[12289]],[[65042,65042],\"disallowed\"],[[65043,65043],\"disallowed_STD3_mapped\",[58]],[[65044,65044],\"disallowed_STD3_mapped\",[59]],[[65045,65045],\"disallowed_STD3_mapped\",[33]],[[65
046,65046],\"disallowed_STD3_mapped\",[63]],[[65047,65047],\"mapped\",[12310]],[[65048,65048],\"mapped\",[12311]],[[65049,65049],\"disallowed\"],[[65050,65055],\"disallowed\"],[[65056,65059],\"valid\"],[[65060,65062],\"valid\"],[[65063,65069],\"valid\"],[[65070,65071],\"valid\"],[[65072,65072],\"disallowed\"],[[65073,65073],\"mapped\",[8212]],[[65074,65074],\"mapped\",[8211]],[[65075,65076],\"disallowed_STD3_mapped\",[95]],[[65077,65077],\"disallowed_STD3_mapped\",[40]],[[65078,65078],\"disallowed_STD3_mapped\",[41]],[[65079,65079],\"disallowed_STD3_mapped\",[123]],[[65080,65080],\"disallowed_STD3_mapped\",[125]],[[65081,65081],\"mapped\",[12308]],[[65082,65082],\"mapped\",[12309]],[[65083,65083],\"mapped\",[12304]],[[65084,65084],\"mapped\",[12305]],[[65085,65085],\"mapped\",[12298]],[[65086,65086],\"mapped\",[12299]],[[65087,65087],\"mapped\",[12296]],[[65088,65088],\"mapped\",[12297]],[[65089,65089],\"mapped\",[12300]],[[65090,65090],\"mapped\",[12301]],[[65091,65091],\"mapped\",[12302]],[[65092,65092],\"mapped\",[12303]],[[65093,65094],\"valid\",[],\"NV8\"],[[65095,65095],\"disallowed_STD3_mapped\",[91]],[[65096,65096],\"disallowed_STD3_mapped\",[93]],[[65097,65100],\"disallowed_STD3_mapped\",[32,773]],[[65101,65103],\"disallowed_STD3_mapped\",[95]],[[65104,65104],\"disallowed_STD3_mapped\",[44]],[[65105,65105],\"mapped\",[12289]],[[65106,65106],\"disallowed\"],[[65107,65107],\"disallowed\"],[[65108,65108],\"disallowed_STD3_mapped\",[59]],[[65109,65109],\"disallowed_STD3_mapped\",[58]],[[65110,65110],\"disallowed_STD3_mapped\",[63]],[[65111,65111],\"disallowed_STD3_mapped\",[33]],[[65112,65112],\"mapped\",[8212]],[[65113,65113],\"disallowed_STD3_mapped\",[40]],[[65114,65114],\"disallowed_STD3_mapped\",[41]],[[65115,65115],\"disallowed_STD3_mapped\",[123]],[[65116,65116],\"disallowed_STD3_mapped\",[125]],[[65117,65117],\"mapped\",[12308]],[[65118,65118],\"mapped\",[12309]],[[65119,65119],\"disallowed_STD3_mapped\",[35]],[[65120,65120],\"disallowed_STD3_mapped\",[
38]],[[65121,65121],\"disallowed_STD3_mapped\",[42]],[[65122,65122],\"disallowed_STD3_mapped\",[43]],[[65123,65123],\"mapped\",[45]],[[65124,65124],\"disallowed_STD3_mapped\",[60]],[[65125,65125],\"disallowed_STD3_mapped\",[62]],[[65126,65126],\"disallowed_STD3_mapped\",[61]],[[65127,65127],\"disallowed\"],[[65128,65128],\"disallowed_STD3_mapped\",[92]],[[65129,65129],\"disallowed_STD3_mapped\",[36]],[[65130,65130],\"disallowed_STD3_mapped\",[37]],[[65131,65131],\"disallowed_STD3_mapped\",[64]],[[65132,65135],\"disallowed\"],[[65136,65136],\"disallowed_STD3_mapped\",[32,1611]],[[65137,65137],\"mapped\",[1600,1611]],[[65138,65138],\"disallowed_STD3_mapped\",[32,1612]],[[65139,65139],\"valid\"],[[65140,65140],\"disallowed_STD3_mapped\",[32,1613]],[[65141,65141],\"disallowed\"],[[65142,65142],\"disallowed_STD3_mapped\",[32,1614]],[[65143,65143],\"mapped\",[1600,1614]],[[65144,65144],\"disallowed_STD3_mapped\",[32,1615]],[[65145,65145],\"mapped\",[1600,1615]],[[65146,65146],\"disallowed_STD3_mapped\",[32,1616]],[[65147,65147],\"mapped\",[1600,1616]],[[65148,65148],\"disallowed_STD3_mapped\",[32,1617]],[[65149,65149],\"mapped\",[1600,1617]],[[65150,65150],\"disallowed_STD3_mapped\",[32,1618]],[[65151,65151],\"mapped\",[1600,1618]],[[65152,65152],\"mapped\",[1569]],[[65153,65154],\"mapped\",[1570]],[[65155,65156],\"mapped\",[1571]],[[65157,65158],\"mapped\",[1572]],[[65159,65160],\"mapped\",[1573]],[[65161,65164],\"mapped\",[1574]],[[65165,65166],\"mapped\",[1575]],[[65167,65170],\"mapped\",[1576]],[[65171,65172],\"mapped\",[1577]],[[65173,65176],\"mapped\",[1578]],[[65177,65180],\"mapped\",[1579]],[[65181,65184],\"mapped\",[1580]],[[65185,65188],\"mapped\",[1581]],[[65189,65192],\"mapped\",[1582]],[[65193,65194],\"mapped\",[1583]],[[65195,65196],\"mapped\",[1584]],[[65197,65198],\"mapped\",[1585]],[[65199,65200],\"mapped\",[1586]],[[65201,65204],\"mapped\",[1587]],[[65205,65208],\"mapped\",[1588]],[[65209,65212],\"mapped\",[1589]],[[65213,65216],\"mapped\",[1590]],[[6521
7,65220],\"mapped\",[1591]],[[65221,65224],\"mapped\",[1592]],[[65225,65228],\"mapped\",[1593]],[[65229,65232],\"mapped\",[1594]],[[65233,65236],\"mapped\",[1601]],[[65237,65240],\"mapped\",[1602]],[[65241,65244],\"mapped\",[1603]],[[65245,65248],\"mapped\",[1604]],[[65249,65252],\"mapped\",[1605]],[[65253,65256],\"mapped\",[1606]],[[65257,65260],\"mapped\",[1607]],[[65261,65262],\"mapped\",[1608]],[[65263,65264],\"mapped\",[1609]],[[65265,65268],\"mapped\",[1610]],[[65269,65270],\"mapped\",[1604,1570]],[[65271,65272],\"mapped\",[1604,1571]],[[65273,65274],\"mapped\",[1604,1573]],[[65275,65276],\"mapped\",[1604,1575]],[[65277,65278],\"disallowed\"],[[65279,65279],\"ignored\"],[[65280,65280],\"disallowed\"],[[65281,65281],\"disallowed_STD3_mapped\",[33]],[[65282,65282],\"disallowed_STD3_mapped\",[34]],[[65283,65283],\"disallowed_STD3_mapped\",[35]],[[65284,65284],\"disallowed_STD3_mapped\",[36]],[[65285,65285],\"disallowed_STD3_mapped\",[37]],[[65286,65286],\"disallowed_STD3_mapped\",[38]],[[65287,65287],\"disallowed_STD3_mapped\",[39]],[[65288,65288],\"disallowed_STD3_mapped\",[40]],[[65289,65289],\"disallowed_STD3_mapped\",[41]],[[65290,65290],\"disallowed_STD3_mapped\",[42]],[[65291,65291],\"disallowed_STD3_mapped\",[43]],[[65292,65292],\"disallowed_STD3_mapped\",[44]],[[65293,65293],\"mapped\",[45]],[[65294,65294],\"mapped\",[46]],[[65295,65295],\"disallowed_STD3_mapped\",[47]],[[65296,65296],\"mapped\",[48]],[[65297,65297],\"mapped\",[49]],[[65298,65298],\"mapped\",[50]],[[65299,65299],\"mapped\",[51]],[[65300,65300],\"mapped\",[52]],[[65301,65301],\"mapped\",[53]],[[65302,65302],\"mapped\",[54]],[[65303,65303],\"mapped\",[55]],[[65304,65304],\"mapped\",[56]],[[65305,65305],\"mapped\",[57]],[[65306,65306],\"disallowed_STD3_mapped\",[58]],[[65307,65307],\"disallowed_STD3_mapped\",[59]],[[65308,65308],\"disallowed_STD3_mapped\",[60]],[[65309,65309],\"disallowed_STD3_mapped\",[61]],[[65310,65310],\"disallowed_STD3_mapped\",[62]],[[65311,65311],\"disallowed_STD3_map
ped\",[63]],[[65312,65312],\"disallowed_STD3_mapped\",[64]],[[65313,65313],\"mapped\",[97]],[[65314,65314],\"mapped\",[98]],[[65315,65315],\"mapped\",[99]],[[65316,65316],\"mapped\",[100]],[[65317,65317],\"mapped\",[101]],[[65318,65318],\"mapped\",[102]],[[65319,65319],\"mapped\",[103]],[[65320,65320],\"mapped\",[104]],[[65321,65321],\"mapped\",[105]],[[65322,65322],\"mapped\",[106]],[[65323,65323],\"mapped\",[107]],[[65324,65324],\"mapped\",[108]],[[65325,65325],\"mapped\",[109]],[[65326,65326],\"mapped\",[110]],[[65327,65327],\"mapped\",[111]],[[65328,65328],\"mapped\",[112]],[[65329,65329],\"mapped\",[113]],[[65330,65330],\"mapped\",[114]],[[65331,65331],\"mapped\",[115]],[[65332,65332],\"mapped\",[116]],[[65333,65333],\"mapped\",[117]],[[65334,65334],\"mapped\",[118]],[[65335,65335],\"mapped\",[119]],[[65336,65336],\"mapped\",[120]],[[65337,65337],\"mapped\",[121]],[[65338,65338],\"mapped\",[122]],[[65339,65339],\"disallowed_STD3_mapped\",[91]],[[65340,65340],\"disallowed_STD3_mapped\",[92]],[[65341,65341],\"disallowed_STD3_mapped\",[93]],[[65342,65342],\"disallowed_STD3_mapped\",[94]],[[65343,65343],\"disallowed_STD3_mapped\",[95]],[[65344,65344],\"disallowed_STD3_mapped\",[96]],[[65345,65345],\"mapped\",[97]],[[65346,65346],\"mapped\",[98]],[[65347,65347],\"mapped\",[99]],[[65348,65348],\"mapped\",[100]],[[65349,65349],\"mapped\",[101]],[[65350,65350],\"mapped\",[102]],[[65351,65351],\"mapped\",[103]],[[65352,65352],\"mapped\",[104]],[[65353,65353],\"mapped\",[105]],[[65354,65354],\"mapped\",[106]],[[65355,65355],\"mapped\",[107]],[[65356,65356],\"mapped\",[108]],[[65357,65357],\"mapped\",[109]],[[65358,65358],\"mapped\",[110]],[[65359,65359],\"mapped\",[111]],[[65360,65360],\"mapped\",[112]],[[65361,65361],\"mapped\",[113]],[[65362,65362],\"mapped\",[114]],[[65363,65363],\"mapped\",[115]],[[65364,65364],\"mapped\",[116]],[[65365,65365],\"mapped\",[117]],[[65366,65366],\"mapped\",[118]],[[65367,65367],\"mapped\",[119]],[[65368,65368],\"mapped\",[120]],[[65369,
65369],\"mapped\",[121]],[[65370,65370],\"mapped\",[122]],[[65371,65371],\"disallowed_STD3_mapped\",[123]],[[65372,65372],\"disallowed_STD3_mapped\",[124]],[[65373,65373],\"disallowed_STD3_mapped\",[125]],[[65374,65374],\"disallowed_STD3_mapped\",[126]],[[65375,65375],\"mapped\",[10629]],[[65376,65376],\"mapped\",[10630]],[[65377,65377],\"mapped\",[46]],[[65378,65378],\"mapped\",[12300]],[[65379,65379],\"mapped\",[12301]],[[65380,65380],\"mapped\",[12289]],[[65381,65381],\"mapped\",[12539]],[[65382,65382],\"mapped\",[12530]],[[65383,65383],\"mapped\",[12449]],[[65384,65384],\"mapped\",[12451]],[[65385,65385],\"mapped\",[12453]],[[65386,65386],\"mapped\",[12455]],[[65387,65387],\"mapped\",[12457]],[[65388,65388],\"mapped\",[12515]],[[65389,65389],\"mapped\",[12517]],[[65390,65390],\"mapped\",[12519]],[[65391,65391],\"mapped\",[12483]],[[65392,65392],\"mapped\",[12540]],[[65393,65393],\"mapped\",[12450]],[[65394,65394],\"mapped\",[12452]],[[65395,65395],\"mapped\",[12454]],[[65396,65396],\"mapped\",[12456]],[[65397,65397],\"mapped\",[12458]],[[65398,65398],\"mapped\",[12459]],[[65399,65399],\"mapped\",[12461]],[[65400,65400],\"mapped\",[12463]],[[65401,65401],\"mapped\",[12465]],[[65402,65402],\"mapped\",[12467]],[[65403,65403],\"mapped\",[12469]],[[65404,65404],\"mapped\",[12471]],[[65405,65405],\"mapped\",[12473]],[[65406,65406],\"mapped\",[12475]],[[65407,65407],\"mapped\",[12477]],[[65408,65408],\"mapped\",[12479]],[[65409,65409],\"mapped\",[12481]],[[65410,65410],\"mapped\",[12484]],[[65411,65411],\"mapped\",[12486]],[[65412,65412],\"mapped\",[12488]],[[65413,65413],\"mapped\",[12490]],[[65414,65414],\"mapped\",[12491]],[[65415,65415],\"mapped\",[12492]],[[65416,65416],\"mapped\",[12493]],[[65417,65417],\"mapped\",[12494]],[[65418,65418],\"mapped\",[12495]],[[65419,65419],\"mapped\",[12498]],[[65420,65420],\"mapped\",[12501]],[[65421,65421],\"mapped\",[12504]],[[65422,65422],\"mapped\",[12507]],[[65423,65423],\"mapped\",[12510]],[[65424,65424],\"mapped\",[12511]]
,[[65425,65425],\"mapped\",[12512]],[[65426,65426],\"mapped\",[12513]],[[65427,65427],\"mapped\",[12514]],[[65428,65428],\"mapped\",[12516]],[[65429,65429],\"mapped\",[12518]],[[65430,65430],\"mapped\",[12520]],[[65431,65431],\"mapped\",[12521]],[[65432,65432],\"mapped\",[12522]],[[65433,65433],\"mapped\",[12523]],[[65434,65434],\"mapped\",[12524]],[[65435,65435],\"mapped\",[12525]],[[65436,65436],\"mapped\",[12527]],[[65437,65437],\"mapped\",[12531]],[[65438,65438],\"mapped\",[12441]],[[65439,65439],\"mapped\",[12442]],[[65440,65440],\"disallowed\"],[[65441,65441],\"mapped\",[4352]],[[65442,65442],\"mapped\",[4353]],[[65443,65443],\"mapped\",[4522]],[[65444,65444],\"mapped\",[4354]],[[65445,65445],\"mapped\",[4524]],[[65446,65446],\"mapped\",[4525]],[[65447,65447],\"mapped\",[4355]],[[65448,65448],\"mapped\",[4356]],[[65449,65449],\"mapped\",[4357]],[[65450,65450],\"mapped\",[4528]],[[65451,65451],\"mapped\",[4529]],[[65452,65452],\"mapped\",[4530]],[[65453,65453],\"mapped\",[4531]],[[65454,65454],\"mapped\",[4532]],[[65455,65455],\"mapped\",[4533]],[[65456,65456],\"mapped\",[4378]],[[65457,65457],\"mapped\",[4358]],[[65458,65458],\"mapped\",[4359]],[[65459,65459],\"mapped\",[4360]],[[65460,65460],\"mapped\",[4385]],[[65461,65461],\"mapped\",[4361]],[[65462,65462],\"mapped\",[4362]],[[65463,65463],\"mapped\",[4363]],[[65464,65464],\"mapped\",[4364]],[[65465,65465],\"mapped\",[4365]],[[65466,65466],\"mapped\",[4366]],[[65467,65467],\"mapped\",[4367]],[[65468,65468],\"mapped\",[4368]],[[65469,65469],\"mapped\",[4369]],[[65470,65470],\"mapped\",[4370]],[[65471,65473],\"disallowed\"],[[65474,65474],\"mapped\",[4449]],[[65475,65475],\"mapped\",[4450]],[[65476,65476],\"mapped\",[4451]],[[65477,65477],\"mapped\",[4452]],[[65478,65478],\"mapped\",[4453]],[[65479,65479],\"mapped\",[4454]],[[65480,65481],\"disallowed\"],[[65482,65482],\"mapped\",[4455]],[[65483,65483],\"mapped\",[4456]],[[65484,65484],\"mapped\",[4457]],[[65485,65485],\"mapped\",[4458]],[[65486,65486],\"mapp
ed\",[4459]],[[65487,65487],\"mapped\",[4460]],[[65488,65489],\"disallowed\"],[[65490,65490],\"mapped\",[4461]],[[65491,65491],\"mapped\",[4462]],[[65492,65492],\"mapped\",[4463]],[[65493,65493],\"mapped\",[4464]],[[65494,65494],\"mapped\",[4465]],[[65495,65495],\"mapped\",[4466]],[[65496,65497],\"disallowed\"],[[65498,65498],\"mapped\",[4467]],[[65499,65499],\"mapped\",[4468]],[[65500,65500],\"mapped\",[4469]],[[65501,65503],\"disallowed\"],[[65504,65504],\"mapped\",[162]],[[65505,65505],\"mapped\",[163]],[[65506,65506],\"mapped\",[172]],[[65507,65507],\"disallowed_STD3_mapped\",[32,772]],[[65508,65508],\"mapped\",[166]],[[65509,65509],\"mapped\",[165]],[[65510,65510],\"mapped\",[8361]],[[65511,65511],\"disallowed\"],[[65512,65512],\"mapped\",[9474]],[[65513,65513],\"mapped\",[8592]],[[65514,65514],\"mapped\",[8593]],[[65515,65515],\"mapped\",[8594]],[[65516,65516],\"mapped\",[8595]],[[65517,65517],\"mapped\",[9632]],[[65518,65518],\"mapped\",[9675]],[[65519,65528],\"disallowed\"],[[65529,65531],\"disallowed\"],[[65532,65532],\"disallowed\"],[[65533,65533],\"disallowed\"],[[65534,65535],\"disallowed\"],[[65536,65547],\"valid\"],[[65548,65548],\"disallowed\"],[[65549,65574],\"valid\"],[[65575,65575],\"disallowed\"],[[65576,65594],\"valid\"],[[65595,65595],\"disallowed\"],[[65596,65597],\"valid\"],[[65598,65598],\"disallowed\"],[[65599,65613],\"valid\"],[[65614,65615],\"disallowed\"],[[65616,65629],\"valid\"],[[65630,65663],\"disallowed\"],[[65664,65786],\"valid\"],[[65787,65791],\"disallowed\"],[[65792,65794],\"valid\",[],\"NV8\"],[[65795,65798],\"disallowed\"],[[65799,65843],\"valid\",[],\"NV8\"],[[65844,65846],\"disallowed\"],[[65847,65855],\"valid\",[],\"NV8\"],[[65856,65930],\"valid\",[],\"NV8\"],[[65931,65932],\"valid\",[],\"NV8\"],[[65933,65935],\"disallowed\"],[[65936,65947],\"valid\",[],\"NV8\"],[[65948,65951],\"disallowed\"],[[65952,65952],\"valid\",[],\"NV8\"],[[65953,65999],\"disallowed\"],[[66000,66044],\"valid\",[],\"NV8\"],[[66045,66045],\"valid\"],[[6
6046,66175],\"disallowed\"],[[66176,66204],\"valid\"],[[66205,66207],\"disallowed\"],[[66208,66256],\"valid\"],[[66257,66271],\"disallowed\"],[[66272,66272],\"valid\"],[[66273,66299],\"valid\",[],\"NV8\"],[[66300,66303],\"disallowed\"],[[66304,66334],\"valid\"],[[66335,66335],\"valid\"],[[66336,66339],\"valid\",[],\"NV8\"],[[66340,66351],\"disallowed\"],[[66352,66368],\"valid\"],[[66369,66369],\"valid\",[],\"NV8\"],[[66370,66377],\"valid\"],[[66378,66378],\"valid\",[],\"NV8\"],[[66379,66383],\"disallowed\"],[[66384,66426],\"valid\"],[[66427,66431],\"disallowed\"],[[66432,66461],\"valid\"],[[66462,66462],\"disallowed\"],[[66463,66463],\"valid\",[],\"NV8\"],[[66464,66499],\"valid\"],[[66500,66503],\"disallowed\"],[[66504,66511],\"valid\"],[[66512,66517],\"valid\",[],\"NV8\"],[[66518,66559],\"disallowed\"],[[66560,66560],\"mapped\",[66600]],[[66561,66561],\"mapped\",[66601]],[[66562,66562],\"mapped\",[66602]],[[66563,66563],\"mapped\",[66603]],[[66564,66564],\"mapped\",[66604]],[[66565,66565],\"mapped\",[66605]],[[66566,66566],\"mapped\",[66606]],[[66567,66567],\"mapped\",[66607]],[[66568,66568],\"mapped\",[66608]],[[66569,66569],\"mapped\",[66609]],[[66570,66570],\"mapped\",[66610]],[[66571,66571],\"mapped\",[66611]],[[66572,66572],\"mapped\",[66612]],[[66573,66573],\"mapped\",[66613]],[[66574,66574],\"mapped\",[66614]],[[66575,66575],\"mapped\",[66615]],[[66576,66576],\"mapped\",[66616]],[[66577,66577],\"mapped\",[66617]],[[66578,66578],\"mapped\",[66618]],[[66579,66579],\"mapped\",[66619]],[[66580,66580],\"mapped\",[66620]],[[66581,66581],\"mapped\",[66621]],[[66582,66582],\"mapped\",[66622]],[[66583,66583],\"mapped\",[66623]],[[66584,66584],\"mapped\",[66624]],[[66585,66585],\"mapped\",[66625]],[[66586,66586],\"mapped\",[66626]],[[66587,66587],\"mapped\",[66627]],[[66588,66588],\"mapped\",[66628]],[[66589,66589],\"mapped\",[66629]],[[66590,66590],\"mapped\",[66630]],[[66591,66591],\"mapped\",[66631]],[[66592,66592],\"mapped\",[66632]],[[66593,66593],\"mapped\",[666
33]],[[66594,66594],\"mapped\",[66634]],[[66595,66595],\"mapped\",[66635]],[[66596,66596],\"mapped\",[66636]],[[66597,66597],\"mapped\",[66637]],[[66598,66598],\"mapped\",[66638]],[[66599,66599],\"mapped\",[66639]],[[66600,66637],\"valid\"],[[66638,66717],\"valid\"],[[66718,66719],\"disallowed\"],[[66720,66729],\"valid\"],[[66730,66815],\"disallowed\"],[[66816,66855],\"valid\"],[[66856,66863],\"disallowed\"],[[66864,66915],\"valid\"],[[66916,66926],\"disallowed\"],[[66927,66927],\"valid\",[],\"NV8\"],[[66928,67071],\"disallowed\"],[[67072,67382],\"valid\"],[[67383,67391],\"disallowed\"],[[67392,67413],\"valid\"],[[67414,67423],\"disallowed\"],[[67424,67431],\"valid\"],[[67432,67583],\"disallowed\"],[[67584,67589],\"valid\"],[[67590,67591],\"disallowed\"],[[67592,67592],\"valid\"],[[67593,67593],\"disallowed\"],[[67594,67637],\"valid\"],[[67638,67638],\"disallowed\"],[[67639,67640],\"valid\"],[[67641,67643],\"disallowed\"],[[67644,67644],\"valid\"],[[67645,67646],\"disallowed\"],[[67647,67647],\"valid\"],[[67648,67669],\"valid\"],[[67670,67670],\"disallowed\"],[[67671,67679],\"valid\",[],\"NV8\"],[[67680,67702],\"valid\"],[[67703,67711],\"valid\",[],\"NV8\"],[[67712,67742],\"valid\"],[[67743,67750],\"disallowed\"],[[67751,67759],\"valid\",[],\"NV8\"],[[67760,67807],\"disallowed\"],[[67808,67826],\"valid\"],[[67827,67827],\"disallowed\"],[[67828,67829],\"valid\"],[[67830,67834],\"disallowed\"],[[67835,67839],\"valid\",[],\"NV8\"],[[67840,67861],\"valid\"],[[67862,67865],\"valid\",[],\"NV8\"],[[67866,67867],\"valid\",[],\"NV8\"],[[67868,67870],\"disallowed\"],[[67871,67871],\"valid\",[],\"NV8\"],[[67872,67897],\"valid\"],[[67898,67902],\"disallowed\"],[[67903,67903],\"valid\",[],\"NV8\"],[[67904,67967],\"disallowed\"],[[67968,68023],\"valid\"],[[68024,68027],\"disallowed\"],[[68028,68029],\"valid\",[],\"NV8\"],[[68030,68031],\"valid\"],[[68032,68047],\"valid\",[],\"NV8\"],[[68048,68049],\"disallowed\"],[[68050,68095],\"valid\",[],\"NV8\"],[[68096,68099],\"valid\"],[[68
100,68100],\"disallowed\"],[[68101,68102],\"valid\"],[[68103,68107],\"disallowed\"],[[68108,68115],\"valid\"],[[68116,68116],\"disallowed\"],[[68117,68119],\"valid\"],[[68120,68120],\"disallowed\"],[[68121,68147],\"valid\"],[[68148,68151],\"disallowed\"],[[68152,68154],\"valid\"],[[68155,68158],\"disallowed\"],[[68159,68159],\"valid\"],[[68160,68167],\"valid\",[],\"NV8\"],[[68168,68175],\"disallowed\"],[[68176,68184],\"valid\",[],\"NV8\"],[[68185,68191],\"disallowed\"],[[68192,68220],\"valid\"],[[68221,68223],\"valid\",[],\"NV8\"],[[68224,68252],\"valid\"],[[68253,68255],\"valid\",[],\"NV8\"],[[68256,68287],\"disallowed\"],[[68288,68295],\"valid\"],[[68296,68296],\"valid\",[],\"NV8\"],[[68297,68326],\"valid\"],[[68327,68330],\"disallowed\"],[[68331,68342],\"valid\",[],\"NV8\"],[[68343,68351],\"disallowed\"],[[68352,68405],\"valid\"],[[68406,68408],\"disallowed\"],[[68409,68415],\"valid\",[],\"NV8\"],[[68416,68437],\"valid\"],[[68438,68439],\"disallowed\"],[[68440,68447],\"valid\",[],\"NV8\"],[[68448,68466],\"valid\"],[[68467,68471],\"disallowed\"],[[68472,68479],\"valid\",[],\"NV8\"],[[68480,68497],\"valid\"],[[68498,68504],\"disallowed\"],[[68505,68508],\"valid\",[],\"NV8\"],[[68509,68520],\"disallowed\"],[[68521,68527],\"valid\",[],\"NV8\"],[[68528,68607],\"disallowed\"],[[68608,68680],\"valid\"],[[68681,68735],\"disallowed\"],[[68736,68736],\"mapped\",[68800]],[[68737,68737],\"mapped\",[68801]],[[68738,68738],\"mapped\",[68802]],[[68739,68739],\"mapped\",[68803]],[[68740,68740],\"mapped\",[68804]],[[68741,68741],\"mapped\",[68805]],[[68742,68742],\"mapped\",[68806]],[[68743,68743],\"mapped\",[68807]],[[68744,68744],\"mapped\",[68808]],[[68745,68745],\"mapped\",[68809]],[[68746,68746],\"mapped\",[68810]],[[68747,68747],\"mapped\",[68811]],[[68748,68748],\"mapped\",[68812]],[[68749,68749],\"mapped\",[68813]],[[68750,68750],\"mapped\",[68814]],[[68751,68751],\"mapped\",[68815]],[[68752,68752],\"mapped\",[68816]],[[68753,68753],\"mapped\",[68817]],[[68754,68754],\"ma
pped\",[68818]],[[68755,68755],\"mapped\",[68819]],[[68756,68756],\"mapped\",[68820]],[[68757,68757],\"mapped\",[68821]],[[68758,68758],\"mapped\",[68822]],[[68759,68759],\"mapped\",[68823]],[[68760,68760],\"mapped\",[68824]],[[68761,68761],\"mapped\",[68825]],[[68762,68762],\"mapped\",[68826]],[[68763,68763],\"mapped\",[68827]],[[68764,68764],\"mapped\",[68828]],[[68765,68765],\"mapped\",[68829]],[[68766,68766],\"mapped\",[68830]],[[68767,68767],\"mapped\",[68831]],[[68768,68768],\"mapped\",[68832]],[[68769,68769],\"mapped\",[68833]],[[68770,68770],\"mapped\",[68834]],[[68771,68771],\"mapped\",[68835]],[[68772,68772],\"mapped\",[68836]],[[68773,68773],\"mapped\",[68837]],[[68774,68774],\"mapped\",[68838]],[[68775,68775],\"mapped\",[68839]],[[68776,68776],\"mapped\",[68840]],[[68777,68777],\"mapped\",[68841]],[[68778,68778],\"mapped\",[68842]],[[68779,68779],\"mapped\",[68843]],[[68780,68780],\"mapped\",[68844]],[[68781,68781],\"mapped\",[68845]],[[68782,68782],\"mapped\",[68846]],[[68783,68783],\"mapped\",[68847]],[[68784,68784],\"mapped\",[68848]],[[68785,68785],\"mapped\",[68849]],[[68786,68786],\"mapped\",[68850]],[[68787,68799],\"disallowed\"],[[68800,68850],\"valid\"],[[68851,68857],\"disallowed\"],[[68858,68863],\"valid\",[],\"NV8\"],[[68864,69215],\"disallowed\"],[[69216,69246],\"valid\",[],\"NV8\"],[[69247,69631],\"disallowed\"],[[69632,69702],\"valid\"],[[69703,69709],\"valid\",[],\"NV8\"],[[69710,69713],\"disallowed\"],[[69714,69733],\"valid\",[],\"NV8\"],[[69734,69743],\"valid\"],[[69744,69758],\"disallowed\"],[[69759,69759],\"valid\"],[[69760,69818],\"valid\"],[[69819,69820],\"valid\",[],\"NV8\"],[[69821,69821],\"disallowed\"],[[69822,69825],\"valid\",[],\"NV8\"],[[69826,69839],\"disallowed\"],[[69840,69864],\"valid\"],[[69865,69871],\"disallowed\"],[[69872,69881],\"valid\"],[[69882,69887],\"disallowed\"],[[69888,69940],\"valid\"],[[69941,69941],\"disallowed\"],[[69942,69951],\"valid\"],[[69952,69955],\"valid\",[],\"NV8\"],[[69956,69967],\"disallowed\"]
,[[69968,70003],\"valid\"],[[70004,70005],\"valid\",[],\"NV8\"],[[70006,70006],\"valid\"],[[70007,70015],\"disallowed\"],[[70016,70084],\"valid\"],[[70085,70088],\"valid\",[],\"NV8\"],[[70089,70089],\"valid\",[],\"NV8\"],[[70090,70092],\"valid\"],[[70093,70093],\"valid\",[],\"NV8\"],[[70094,70095],\"disallowed\"],[[70096,70105],\"valid\"],[[70106,70106],\"valid\"],[[70107,70107],\"valid\",[],\"NV8\"],[[70108,70108],\"valid\"],[[70109,70111],\"valid\",[],\"NV8\"],[[70112,70112],\"disallowed\"],[[70113,70132],\"valid\",[],\"NV8\"],[[70133,70143],\"disallowed\"],[[70144,70161],\"valid\"],[[70162,70162],\"disallowed\"],[[70163,70199],\"valid\"],[[70200,70205],\"valid\",[],\"NV8\"],[[70206,70271],\"disallowed\"],[[70272,70278],\"valid\"],[[70279,70279],\"disallowed\"],[[70280,70280],\"valid\"],[[70281,70281],\"disallowed\"],[[70282,70285],\"valid\"],[[70286,70286],\"disallowed\"],[[70287,70301],\"valid\"],[[70302,70302],\"disallowed\"],[[70303,70312],\"valid\"],[[70313,70313],\"valid\",[],\"NV8\"],[[70314,70319],\"disallowed\"],[[70320,70378],\"valid\"],[[70379,70383],\"disallowed\"],[[70384,70393],\"valid\"],[[70394,70399],\"disallowed\"],[[70400,70400],\"valid\"],[[70401,70403],\"valid\"],[[70404,70404],\"disallowed\"],[[70405,70412],\"valid\"],[[70413,70414],\"disallowed\"],[[70415,70416],\"valid\"],[[70417,70418],\"disallowed\"],[[70419,70440],\"valid\"],[[70441,70441],\"disallowed\"],[[70442,70448],\"valid\"],[[70449,70449],\"disallowed\"],[[70450,70451],\"valid\"],[[70452,70452],\"disallowed\"],[[70453,70457],\"valid\"],[[70458,70459],\"disallowed\"],[[70460,70468],\"valid\"],[[70469,70470],\"disallowed\"],[[70471,70472],\"valid\"],[[70473,70474],\"disallowed\"],[[70475,70477],\"valid\"],[[70478,70479],\"disallowed\"],[[70480,70480],\"valid\"],[[70481,70486],\"disallowed\"],[[70487,70487],\"valid\"],[[70488,70492],\"disallowed\"],[[70493,70499],\"valid\"],[[70500,70501],\"disallowed\"],[[70502,70508],\"valid\"],[[70509,70511],\"disallowed\"],[[70512,70516],\"valid\
"],[[70517,70783],\"disallowed\"],[[70784,70853],\"valid\"],[[70854,70854],\"valid\",[],\"NV8\"],[[70855,70855],\"valid\"],[[70856,70863],\"disallowed\"],[[70864,70873],\"valid\"],[[70874,71039],\"disallowed\"],[[71040,71093],\"valid\"],[[71094,71095],\"disallowed\"],[[71096,71104],\"valid\"],[[71105,71113],\"valid\",[],\"NV8\"],[[71114,71127],\"valid\",[],\"NV8\"],[[71128,71133],\"valid\"],[[71134,71167],\"disallowed\"],[[71168,71232],\"valid\"],[[71233,71235],\"valid\",[],\"NV8\"],[[71236,71236],\"valid\"],[[71237,71247],\"disallowed\"],[[71248,71257],\"valid\"],[[71258,71295],\"disallowed\"],[[71296,71351],\"valid\"],[[71352,71359],\"disallowed\"],[[71360,71369],\"valid\"],[[71370,71423],\"disallowed\"],[[71424,71449],\"valid\"],[[71450,71452],\"disallowed\"],[[71453,71467],\"valid\"],[[71468,71471],\"disallowed\"],[[71472,71481],\"valid\"],[[71482,71487],\"valid\",[],\"NV8\"],[[71488,71839],\"disallowed\"],[[71840,71840],\"mapped\",[71872]],[[71841,71841],\"mapped\",[71873]],[[71842,71842],\"mapped\",[71874]],[[71843,71843],\"mapped\",[71875]],[[71844,71844],\"mapped\",[71876]],[[71845,71845],\"mapped\",[71877]],[[71846,71846],\"mapped\",[71878]],[[71847,71847],\"mapped\",[71879]],[[71848,71848],\"mapped\",[71880]],[[71849,71849],\"mapped\",[71881]],[[71850,71850],\"mapped\",[71882]],[[71851,71851],\"mapped\",[71883]],[[71852,71852],\"mapped\",[71884]],[[71853,71853],\"mapped\",[71885]],[[71854,71854],\"mapped\",[71886]],[[71855,71855],\"mapped\",[71887]],[[71856,71856],\"mapped\",[71888]],[[71857,71857],\"mapped\",[71889]],[[71858,71858],\"mapped\",[71890]],[[71859,71859],\"mapped\",[71891]],[[71860,71860],\"mapped\",[71892]],[[71861,71861],\"mapped\",[71893]],[[71862,71862],\"mapped\",[71894]],[[71863,71863],\"mapped\",[71895]],[[71864,71864],\"mapped\",[71896]],[[71865,71865],\"mapped\",[71897]],[[71866,71866],\"mapped\",[71898]],[[71867,71867],\"mapped\",[71899]],[[71868,71868],\"mapped\",[71900]],[[71869,71869],\"mapped\",[71901]],[[71870,71870],\"mapped\",
[71902]],[[71871,71871],\"mapped\",[71903]],[[71872,71913],\"valid\"],[[71914,71922],\"valid\",[],\"NV8\"],[[71923,71934],\"disallowed\"],[[71935,71935],\"valid\"],[[71936,72383],\"disallowed\"],[[72384,72440],\"valid\"],[[72441,73727],\"disallowed\"],[[73728,74606],\"valid\"],[[74607,74648],\"valid\"],[[74649,74649],\"valid\"],[[74650,74751],\"disallowed\"],[[74752,74850],\"valid\",[],\"NV8\"],[[74851,74862],\"valid\",[],\"NV8\"],[[74863,74863],\"disallowed\"],[[74864,74867],\"valid\",[],\"NV8\"],[[74868,74868],\"valid\",[],\"NV8\"],[[74869,74879],\"disallowed\"],[[74880,75075],\"valid\"],[[75076,77823],\"disallowed\"],[[77824,78894],\"valid\"],[[78895,82943],\"disallowed\"],[[82944,83526],\"valid\"],[[83527,92159],\"disallowed\"],[[92160,92728],\"valid\"],[[92729,92735],\"disallowed\"],[[92736,92766],\"valid\"],[[92767,92767],\"disallowed\"],[[92768,92777],\"valid\"],[[92778,92781],\"disallowed\"],[[92782,92783],\"valid\",[],\"NV8\"],[[92784,92879],\"disallowed\"],[[92880,92909],\"valid\"],[[92910,92911],\"disallowed\"],[[92912,92916],\"valid\"],[[92917,92917],\"valid\",[],\"NV8\"],[[92918,92927],\"disallowed\"],[[92928,92982],\"valid\"],[[92983,92991],\"valid\",[],\"NV8\"],[[92992,92995],\"valid\"],[[92996,92997],\"valid\",[],\"NV8\"],[[92998,93007],\"disallowed\"],[[93008,93017],\"valid\"],[[93018,93018],\"disallowed\"],[[93019,93025],\"valid\",[],\"NV8\"],[[93026,93026],\"disallowed\"],[[93027,93047],\"valid\"],[[93048,93052],\"disallowed\"],[[93053,93071],\"valid\"],[[93072,93951],\"disallowed\"],[[93952,94020],\"valid\"],[[94021,94031],\"disallowed\"],[[94032,94078],\"valid\"],[[94079,94094],\"disallowed\"],[[94095,94111],\"valid\"],[[94112,110591],\"disallowed\"],[[110592,110593],\"valid\"],[[110594,113663],\"disallowed\"],[[113664,113770],\"valid\"],[[113771,113775],\"disallowed\"],[[113776,113788],\"valid\"],[[113789,113791],\"disallowed\"],[[113792,113800],\"valid\"],[[113801,113807],\"disallowed\"],[[113808,113817],\"valid\"],[[113818,113819],\"disallowe
d\"],[[113820,113820],\"valid\",[],\"NV8\"],[[113821,113822],\"valid\"],[[113823,113823],\"valid\",[],\"NV8\"],[[113824,113827],\"ignored\"],[[113828,118783],\"disallowed\"],[[118784,119029],\"valid\",[],\"NV8\"],[[119030,119039],\"disallowed\"],[[119040,119078],\"valid\",[],\"NV8\"],[[119079,119080],\"disallowed\"],[[119081,119081],\"valid\",[],\"NV8\"],[[119082,119133],\"valid\",[],\"NV8\"],[[119134,119134],\"mapped\",[119127,119141]],[[119135,119135],\"mapped\",[119128,119141]],[[119136,119136],\"mapped\",[119128,119141,119150]],[[119137,119137],\"mapped\",[119128,119141,119151]],[[119138,119138],\"mapped\",[119128,119141,119152]],[[119139,119139],\"mapped\",[119128,119141,119153]],[[119140,119140],\"mapped\",[119128,119141,119154]],[[119141,119154],\"valid\",[],\"NV8\"],[[119155,119162],\"disallowed\"],[[119163,119226],\"valid\",[],\"NV8\"],[[119227,119227],\"mapped\",[119225,119141]],[[119228,119228],\"mapped\",[119226,119141]],[[119229,119229],\"mapped\",[119225,119141,119150]],[[119230,119230],\"mapped\",[119226,119141,119150]],[[119231,119231],\"mapped\",[119225,119141,119151]],[[119232,119232],\"mapped\",[119226,119141,119151]],[[119233,119261],\"valid\",[],\"NV8\"],[[119262,119272],\"valid\",[],\"NV8\"],[[119273,119295],\"disallowed\"],[[119296,119365],\"valid\",[],\"NV8\"],[[119366,119551],\"disallowed\"],[[119552,119638],\"valid\",[],\"NV8\"],[[119639,119647],\"disallowed\"],[[119648,119665],\"valid\",[],\"NV8\"],[[119666,119807],\"disallowed\"],[[119808,119808],\"mapped\",[97]],[[119809,119809],\"mapped\",[98]],[[119810,119810],\"mapped\",[99]],[[119811,119811],\"mapped\",[100]],[[119812,119812],\"mapped\",[101]],[[119813,119813],\"mapped\",[102]],[[119814,119814],\"mapped\",[103]],[[119815,119815],\"mapped\",[104]],[[119816,119816],\"mapped\",[105]],[[119817,119817],\"mapped\",[106]],[[119818,119818],\"mapped\",[107]],[[119819,119819],\"mapped\",[108]],[[119820,119820],\"mapped\",[109]],[[119821,119821],\"mapped\",[110]],[[119822,119822],\"mapped\",[11
1]],[[119823,119823],\"mapped\",[112]],[[119824,119824],\"mapped\",[113]],[[119825,119825],\"mapped\",[114]],[[119826,119826],\"mapped\",[115]],[[119827,119827],\"mapped\",[116]],[[119828,119828],\"mapped\",[117]],[[119829,119829],\"mapped\",[118]],[[119830,119830],\"mapped\",[119]],[[119831,119831],\"mapped\",[120]],[[119832,119832],\"mapped\",[121]],[[119833,119833],\"mapped\",[122]],[[119834,119834],\"mapped\",[97]],[[119835,119835],\"mapped\",[98]],[[119836,119836],\"mapped\",[99]],[[119837,119837],\"mapped\",[100]],[[119838,119838],\"mapped\",[101]],[[119839,119839],\"mapped\",[102]],[[119840,119840],\"mapped\",[103]],[[119841,119841],\"mapped\",[104]],[[119842,119842],\"mapped\",[105]],[[119843,119843],\"mapped\",[106]],[[119844,119844],\"mapped\",[107]],[[119845,119845],\"mapped\",[108]],[[119846,119846],\"mapped\",[109]],[[119847,119847],\"mapped\",[110]],[[119848,119848],\"mapped\",[111]],[[119849,119849],\"mapped\",[112]],[[119850,119850],\"mapped\",[113]],[[119851,119851],\"mapped\",[114]],[[119852,119852],\"mapped\",[115]],[[119853,119853],\"mapped\",[116]],[[119854,119854],\"mapped\",[117]],[[119855,119855],\"mapped\",[118]],[[119856,119856],\"mapped\",[119]],[[119857,119857],\"mapped\",[120]],[[119858,119858],\"mapped\",[121]],[[119859,119859],\"mapped\",[122]],[[119860,119860],\"mapped\",[97]],[[119861,119861],\"mapped\",[98]],[[119862,119862],\"mapped\",[99]],[[119863,119863],\"mapped\",[100]],[[119864,119864],\"mapped\",[101]],[[119865,119865],\"mapped\",[102]],[[119866,119866],\"mapped\",[103]],[[119867,119867],\"mapped\",[104]],[[119868,119868],\"mapped\",[105]],[[119869,119869],\"mapped\",[106]],[[119870,119870],\"mapped\",[107]],[[119871,119871],\"mapped\",[108]],[[119872,119872],\"mapped\",[109]],[[119873,119873],\"mapped\",[110]],[[119874,119874],\"mapped\",[111]],[[119875,119875],\"mapped\",[112]],[[119876,119876],\"mapped\",[113]],[[119877,119877],\"mapped\",[114]],[[119878,119878],\"mapped\",[115]],[[119879,119879],\"mapped\",[116]],[[11988
0,119880],\"mapped\",[117]],[[119881,119881],\"mapped\",[118]],[[119882,119882],\"mapped\",[119]],[[119883,119883],\"mapped\",[120]],[[119884,119884],\"mapped\",[121]],[[119885,119885],\"mapped\",[122]],[[119886,119886],\"mapped\",[97]],[[119887,119887],\"mapped\",[98]],[[119888,119888],\"mapped\",[99]],[[119889,119889],\"mapped\",[100]],[[119890,119890],\"mapped\",[101]],[[119891,119891],\"mapped\",[102]],[[119892,119892],\"mapped\",[103]],[[119893,119893],\"disallowed\"],[[119894,119894],\"mapped\",[105]],[[119895,119895],\"mapped\",[106]],[[119896,119896],\"mapped\",[107]],[[119897,119897],\"mapped\",[108]],[[119898,119898],\"mapped\",[109]],[[119899,119899],\"mapped\",[110]],[[119900,119900],\"mapped\",[111]],[[119901,119901],\"mapped\",[112]],[[119902,119902],\"mapped\",[113]],[[119903,119903],\"mapped\",[114]],[[119904,119904],\"mapped\",[115]],[[119905,119905],\"mapped\",[116]],[[119906,119906],\"mapped\",[117]],[[119907,119907],\"mapped\",[118]],[[119908,119908],\"mapped\",[119]],[[119909,119909],\"mapped\",[120]],[[119910,119910],\"mapped\",[121]],[[119911,119911],\"mapped\",[122]],[[119912,119912],\"mapped\",[97]],[[119913,119913],\"mapped\",[98]],[[119914,119914],\"mapped\",[99]],[[119915,119915],\"mapped\",[100]],[[119916,119916],\"mapped\",[101]],[[119917,119917],\"mapped\",[102]],[[119918,119918],\"mapped\",[103]],[[119919,119919],\"mapped\",[104]],[[119920,119920],\"mapped\",[105]],[[119921,119921],\"mapped\",[106]],[[119922,119922],\"mapped\",[107]],[[119923,119923],\"mapped\",[108]],[[119924,119924],\"mapped\",[109]],[[119925,119925],\"mapped\",[110]],[[119926,119926],\"mapped\",[111]],[[119927,119927],\"mapped\",[112]],[[119928,119928],\"mapped\",[113]],[[119929,119929],\"mapped\",[114]],[[119930,119930],\"mapped\",[115]],[[119931,119931],\"mapped\",[116]],[[119932,119932],\"mapped\",[117]],[[119933,119933],\"mapped\",[118]],[[119934,119934],\"mapped\",[119]],[[119935,119935],\"mapped\",[120]],[[119936,119936],\"mapped\",[121]],[[119937,119937],\"m
apped\",[122]],[[119938,119938],\"mapped\",[97]],[[119939,119939],\"mapped\",[98]],[[119940,119940],\"mapped\",[99]],[[119941,119941],\"mapped\",[100]],[[119942,119942],\"mapped\",[101]],[[119943,119943],\"mapped\",[102]],[[119944,119944],\"mapped\",[103]],[[119945,119945],\"mapped\",[104]],[[119946,119946],\"mapped\",[105]],[[119947,119947],\"mapped\",[106]],[[119948,119948],\"mapped\",[107]],[[119949,119949],\"mapped\",[108]],[[119950,119950],\"mapped\",[109]],[[119951,119951],\"mapped\",[110]],[[119952,119952],\"mapped\",[111]],[[119953,119953],\"mapped\",[112]],[[119954,119954],\"mapped\",[113]],[[119955,119955],\"mapped\",[114]],[[119956,119956],\"mapped\",[115]],[[119957,119957],\"mapped\",[116]],[[119958,119958],\"mapped\",[117]],[[119959,119959],\"mapped\",[118]],[[119960,119960],\"mapped\",[119]],[[119961,119961],\"mapped\",[120]],[[119962,119962],\"mapped\",[121]],[[119963,119963],\"mapped\",[122]],[[119964,119964],\"mapped\",[97]],[[119965,119965],\"disallowed\"],[[119966,119966],\"mapped\",[99]],[[119967,119967],\"mapped\",[100]],[[119968,119969],\"disallowed\"],[[119970,119970],\"mapped\",[103]],[[119971,119972],\"disallowed\"],[[119973,119973],\"mapped\",[106]],[[119974,119974],\"mapped\",[107]],[[119975,119976],\"disallowed\"],[[119977,119977],\"mapped\",[110]],[[119978,119978],\"mapped\",[111]],[[119979,119979],\"mapped\",[112]],[[119980,119980],\"mapped\",[113]],[[119981,119981],\"disallowed\"],[[119982,119982],\"mapped\",[115]],[[119983,119983],\"mapped\",[116]],[[119984,119984],\"mapped\",[117]],[[119985,119985],\"mapped\",[118]],[[119986,119986],\"mapped\",[119]],[[119987,119987],\"mapped\",[120]],[[119988,119988],\"mapped\",[121]],[[119989,119989],\"mapped\",[122]],[[119990,119990],\"mapped\",[97]],[[119991,119991],\"mapped\",[98]],[[119992,119992],\"mapped\",[99]],[[119993,119993],\"mapped\",[100]],[[119994,119994],\"disallowed\"],[[119995,119995],\"mapped\",[102]],[[119996,119996],\"disallowed\"],[[119997,119997],\"mapped\",[104]],[[119998,119
998],\"mapped\",[105]],[[119999,119999],\"mapped\",[106]],[[120000,120000],\"mapped\",[107]],[[120001,120001],\"mapped\",[108]],[[120002,120002],\"mapped\",[109]],[[120003,120003],\"mapped\",[110]],[[120004,120004],\"disallowed\"],[[120005,120005],\"mapped\",[112]],[[120006,120006],\"mapped\",[113]],[[120007,120007],\"mapped\",[114]],[[120008,120008],\"mapped\",[115]],[[120009,120009],\"mapped\",[116]],[[120010,120010],\"mapped\",[117]],[[120011,120011],\"mapped\",[118]],[[120012,120012],\"mapped\",[119]],[[120013,120013],\"mapped\",[120]],[[120014,120014],\"mapped\",[121]],[[120015,120015],\"mapped\",[122]],[[120016,120016],\"mapped\",[97]],[[120017,120017],\"mapped\",[98]],[[120018,120018],\"mapped\",[99]],[[120019,120019],\"mapped\",[100]],[[120020,120020],\"mapped\",[101]],[[120021,120021],\"mapped\",[102]],[[120022,120022],\"mapped\",[103]],[[120023,120023],\"mapped\",[104]],[[120024,120024],\"mapped\",[105]],[[120025,120025],\"mapped\",[106]],[[120026,120026],\"mapped\",[107]],[[120027,120027],\"mapped\",[108]],[[120028,120028],\"mapped\",[109]],[[120029,120029],\"mapped\",[110]],[[120030,120030],\"mapped\",[111]],[[120031,120031],\"mapped\",[112]],[[120032,120032],\"mapped\",[113]],[[120033,120033],\"mapped\",[114]],[[120034,120034],\"mapped\",[115]],[[120035,120035],\"mapped\",[116]],[[120036,120036],\"mapped\",[117]],[[120037,120037],\"mapped\",[118]],[[120038,120038],\"mapped\",[119]],[[120039,120039],\"mapped\",[120]],[[120040,120040],\"mapped\",[121]],[[120041,120041],\"mapped\",[122]],[[120042,120042],\"mapped\",[97]],[[120043,120043],\"mapped\",[98]],[[120044,120044],\"mapped\",[99]],[[120045,120045],\"mapped\",[100]],[[120046,120046],\"mapped\",[101]],[[120047,120047],\"mapped\",[102]],[[120048,120048],\"mapped\",[103]],[[120049,120049],\"mapped\",[104]],[[120050,120050],\"mapped\",[105]],[[120051,120051],\"mapped\",[106]],[[120052,120052],\"mapped\",[107]],[[120053,120053],\"mapped\",[108]],[[120054,120054],\"mapped\",[109]],[[120055,120055],\"mapped
\",[110]],[[120056,120056],\"mapped\",[111]],[[120057,120057],\"mapped\",[112]],[[120058,120058],\"mapped\",[113]],[[120059,120059],\"mapped\",[114]],[[120060,120060],\"mapped\",[115]],[[120061,120061],\"mapped\",[116]],[[120062,120062],\"mapped\",[117]],[[120063,120063],\"mapped\",[118]],[[120064,120064],\"mapped\",[119]],[[120065,120065],\"mapped\",[120]],[[120066,120066],\"mapped\",[121]],[[120067,120067],\"mapped\",[122]],[[120068,120068],\"mapped\",[97]],[[120069,120069],\"mapped\",[98]],[[120070,120070],\"disallowed\"],[[120071,120071],\"mapped\",[100]],[[120072,120072],\"mapped\",[101]],[[120073,120073],\"mapped\",[102]],[[120074,120074],\"mapped\",[103]],[[120075,120076],\"disallowed\"],[[120077,120077],\"mapped\",[106]],[[120078,120078],\"mapped\",[107]],[[120079,120079],\"mapped\",[108]],[[120080,120080],\"mapped\",[109]],[[120081,120081],\"mapped\",[110]],[[120082,120082],\"mapped\",[111]],[[120083,120083],\"mapped\",[112]],[[120084,120084],\"mapped\",[113]],[[120085,120085],\"disallowed\"],[[120086,120086],\"mapped\",[115]],[[120087,120087],\"mapped\",[116]],[[120088,120088],\"mapped\",[117]],[[120089,120089],\"mapped\",[118]],[[120090,120090],\"mapped\",[119]],[[120091,120091],\"mapped\",[120]],[[120092,120092],\"mapped\",[121]],[[120093,120093],\"disallowed\"],[[120094,120094],\"mapped\",[97]],[[120095,120095],\"mapped\",[98]],[[120096,120096],\"mapped\",[99]],[[120097,120097],\"mapped\",[100]],[[120098,120098],\"mapped\",[101]],[[120099,120099],\"mapped\",[102]],[[120100,120100],\"mapped\",[103]],[[120101,120101],\"mapped\",[104]],[[120102,120102],\"mapped\",[105]],[[120103,120103],\"mapped\",[106]],[[120104,120104],\"mapped\",[107]],[[120105,120105],\"mapped\",[108]],[[120106,120106],\"mapped\",[109]],[[120107,120107],\"mapped\",[110]],[[120108,120108],\"mapped\",[111]],[[120109,120109],\"mapped\",[112]],[[120110,120110],\"mapped\",[113]],[[120111,120111],\"mapped\",[114]],[[120112,120112],\"mapped\",[115]],[[120113,120113],\"mapped\",[116]],[[120114
,120114],\"mapped\",[117]],[[120115,120115],\"mapped\",[118]],[[120116,120116],\"mapped\",[119]],[[120117,120117],\"mapped\",[120]],[[120118,120118],\"mapped\",[121]],[[120119,120119],\"mapped\",[122]],[[120120,120120],\"mapped\",[97]],[[120121,120121],\"mapped\",[98]],[[120122,120122],\"disallowed\"],[[120123,120123],\"mapped\",[100]],[[120124,120124],\"mapped\",[101]],[[120125,120125],\"mapped\",[102]],[[120126,120126],\"mapped\",[103]],[[120127,120127],\"disallowed\"],[[120128,120128],\"mapped\",[105]],[[120129,120129],\"mapped\",[106]],[[120130,120130],\"mapped\",[107]],[[120131,120131],\"mapped\",[108]],[[120132,120132],\"mapped\",[109]],[[120133,120133],\"disallowed\"],[[120134,120134],\"mapped\",[111]],[[120135,120137],\"disallowed\"],[[120138,120138],\"mapped\",[115]],[[120139,120139],\"mapped\",[116]],[[120140,120140],\"mapped\",[117]],[[120141,120141],\"mapped\",[118]],[[120142,120142],\"mapped\",[119]],[[120143,120143],\"mapped\",[120]],[[120144,120144],\"mapped\",[121]],[[120145,120145],\"disallowed\"],[[120146,120146],\"mapped\",[97]],[[120147,120147],\"mapped\",[98]],[[120148,120148],\"mapped\",[99]],[[120149,120149],\"mapped\",[100]],[[120150,120150],\"mapped\",[101]],[[120151,120151],\"mapped\",[102]],[[120152,120152],\"mapped\",[103]],[[120153,120153],\"mapped\",[104]],[[120154,120154],\"mapped\",[105]],[[120155,120155],\"mapped\",[106]],[[120156,120156],\"mapped\",[107]],[[120157,120157],\"mapped\",[108]],[[120158,120158],\"mapped\",[109]],[[120159,120159],\"mapped\",[110]],[[120160,120160],\"mapped\",[111]],[[120161,120161],\"mapped\",[112]],[[120162,120162],\"mapped\",[113]],[[120163,120163],\"mapped\",[114]],[[120164,120164],\"mapped\",[115]],[[120165,120165],\"mapped\",[116]],[[120166,120166],\"mapped\",[117]],[[120167,120167],\"mapped\",[118]],[[120168,120168],\"mapped\",[119]],[[120169,120169],\"mapped\",[120]],[[120170,120170],\"mapped\",[121]],[[120171,120171],\"mapped\",[122]],[[120172,120172],\"mapped\",[97]],[[120173,120173],\"mapped\",[
98]],[[120174,120174],\"mapped\",[99]],[[120175,120175],\"mapped\",[100]],[[120176,120176],\"mapped\",[101]],[[120177,120177],\"mapped\",[102]],[[120178,120178],\"mapped\",[103]],[[120179,120179],\"mapped\",[104]],[[120180,120180],\"mapped\",[105]],[[120181,120181],\"mapped\",[106]],[[120182,120182],\"mapped\",[107]],[[120183,120183],\"mapped\",[108]],[[120184,120184],\"mapped\",[109]],[[120185,120185],\"mapped\",[110]],[[120186,120186],\"mapped\",[111]],[[120187,120187],\"mapped\",[112]],[[120188,120188],\"mapped\",[113]],[[120189,120189],\"mapped\",[114]],[[120190,120190],\"mapped\",[115]],[[120191,120191],\"mapped\",[116]],[[120192,120192],\"mapped\",[117]],[[120193,120193],\"mapped\",[118]],[[120194,120194],\"mapped\",[119]],[[120195,120195],\"mapped\",[120]],[[120196,120196],\"mapped\",[121]],[[120197,120197],\"mapped\",[122]],[[120198,120198],\"mapped\",[97]],[[120199,120199],\"mapped\",[98]],[[120200,120200],\"mapped\",[99]],[[120201,120201],\"mapped\",[100]],[[120202,120202],\"mapped\",[101]],[[120203,120203],\"mapped\",[102]],[[120204,120204],\"mapped\",[103]],[[120205,120205],\"mapped\",[104]],[[120206,120206],\"mapped\",[105]],[[120207,120207],\"mapped\",[106]],[[120208,120208],\"mapped\",[107]],[[120209,120209],\"mapped\",[108]],[[120210,120210],\"mapped\",[109]],[[120211,120211],\"mapped\",[110]],[[120212,120212],\"mapped\",[111]],[[120213,120213],\"mapped\",[112]],[[120214,120214],\"mapped\",[113]],[[120215,120215],\"mapped\",[114]],[[120216,120216],\"mapped\",[115]],[[120217,120217],\"mapped\",[116]],[[120218,120218],\"mapped\",[117]],[[120219,120219],\"mapped\",[118]],[[120220,120220],\"mapped\",[119]],[[120221,120221],\"mapped\",[120]],[[120222,120222],\"mapped\",[121]],[[120223,120223],\"mapped\",[122]],[[120224,120224],\"mapped\",[97]],[[120225,120225],\"mapped\",[98]],[[120226,120226],\"mapped\",[99]],[[120227,120227],\"mapped\",[100]],[[120228,120228],\"mapped\",[101]],[[120229,120229],\"mapped\",[102]],[[120230,120230],\"mapped\",[103]],[[12023
1,120231],\"mapped\",[104]],[[120232,120232],\"mapped\",[105]],[[120233,120233],\"mapped\",[106]],[[120234,120234],\"mapped\",[107]],[[120235,120235],\"mapped\",[108]],[[120236,120236],\"mapped\",[109]],[[120237,120237],\"mapped\",[110]],[[120238,120238],\"mapped\",[111]],[[120239,120239],\"mapped\",[112]],[[120240,120240],\"mapped\",[113]],[[120241,120241],\"mapped\",[114]],[[120242,120242],\"mapped\",[115]],[[120243,120243],\"mapped\",[116]],[[120244,120244],\"mapped\",[117]],[[120245,120245],\"mapped\",[118]],[[120246,120246],\"mapped\",[119]],[[120247,120247],\"mapped\",[120]],[[120248,120248],\"mapped\",[121]],[[120249,120249],\"mapped\",[122]],[[120250,120250],\"mapped\",[97]],[[120251,120251],\"mapped\",[98]],[[120252,120252],\"mapped\",[99]],[[120253,120253],\"mapped\",[100]],[[120254,120254],\"mapped\",[101]],[[120255,120255],\"mapped\",[102]],[[120256,120256],\"mapped\",[103]],[[120257,120257],\"mapped\",[104]],[[120258,120258],\"mapped\",[105]],[[120259,120259],\"mapped\",[106]],[[120260,120260],\"mapped\",[107]],[[120261,120261],\"mapped\",[108]],[[120262,120262],\"mapped\",[109]],[[120263,120263],\"mapped\",[110]],[[120264,120264],\"mapped\",[111]],[[120265,120265],\"mapped\",[112]],[[120266,120266],\"mapped\",[113]],[[120267,120267],\"mapped\",[114]],[[120268,120268],\"mapped\",[115]],[[120269,120269],\"mapped\",[116]],[[120270,120270],\"mapped\",[117]],[[120271,120271],\"mapped\",[118]],[[120272,120272],\"mapped\",[119]],[[120273,120273],\"mapped\",[120]],[[120274,120274],\"mapped\",[121]],[[120275,120275],\"mapped\",[122]],[[120276,120276],\"mapped\",[97]],[[120277,120277],\"mapped\",[98]],[[120278,120278],\"mapped\",[99]],[[120279,120279],\"mapped\",[100]],[[120280,120280],\"mapped\",[101]],[[120281,120281],\"mapped\",[102]],[[120282,120282],\"mapped\",[103]],[[120283,120283],\"mapped\",[104]],[[120284,120284],\"mapped\",[105]],[[120285,120285],\"mapped\",[106]],[[120286,120286],\"mapped\",[107]],[[120287,120287],\"mapped\",[108]],[[120288,120288],\
"mapped\",[109]],[[120289,120289],\"mapped\",[110]],[[120290,120290],\"mapped\",[111]],[[120291,120291],\"mapped\",[112]],[[120292,120292],\"mapped\",[113]],[[120293,120293],\"mapped\",[114]],[[120294,120294],\"mapped\",[115]],[[120295,120295],\"mapped\",[116]],[[120296,120296],\"mapped\",[117]],[[120297,120297],\"mapped\",[118]],[[120298,120298],\"mapped\",[119]],[[120299,120299],\"mapped\",[120]],[[120300,120300],\"mapped\",[121]],[[120301,120301],\"mapped\",[122]],[[120302,120302],\"mapped\",[97]],[[120303,120303],\"mapped\",[98]],[[120304,120304],\"mapped\",[99]],[[120305,120305],\"mapped\",[100]],[[120306,120306],\"mapped\",[101]],[[120307,120307],\"mapped\",[102]],[[120308,120308],\"mapped\",[103]],[[120309,120309],\"mapped\",[104]],[[120310,120310],\"mapped\",[105]],[[120311,120311],\"mapped\",[106]],[[120312,120312],\"mapped\",[107]],[[120313,120313],\"mapped\",[108]],[[120314,120314],\"mapped\",[109]],[[120315,120315],\"mapped\",[110]],[[120316,120316],\"mapped\",[111]],[[120317,120317],\"mapped\",[112]],[[120318,120318],\"mapped\",[113]],[[120319,120319],\"mapped\",[114]],[[120320,120320],\"mapped\",[115]],[[120321,120321],\"mapped\",[116]],[[120322,120322],\"mapped\",[117]],[[120323,120323],\"mapped\",[118]],[[120324,120324],\"mapped\",[119]],[[120325,120325],\"mapped\",[120]],[[120326,120326],\"mapped\",[121]],[[120327,120327],\"mapped\",[122]],[[120328,120328],\"mapped\",[97]],[[120329,120329],\"mapped\",[98]],[[120330,120330],\"mapped\",[99]],[[120331,120331],\"mapped\",[100]],[[120332,120332],\"mapped\",[101]],[[120333,120333],\"mapped\",[102]],[[120334,120334],\"mapped\",[103]],[[120335,120335],\"mapped\",[104]],[[120336,120336],\"mapped\",[105]],[[120337,120337],\"mapped\",[106]],[[120338,120338],\"mapped\",[107]],[[120339,120339],\"mapped\",[108]],[[120340,120340],\"mapped\",[109]],[[120341,120341],\"mapped\",[110]],[[120342,120342],\"mapped\",[111]],[[120343,120343],\"mapped\",[112]],[[120344,120344],\"mapped\",[113]],[[120345,120345],\"mapped\",[
114]],[[120346,120346],\"mapped\",[115]],[[120347,120347],\"mapped\",[116]],[[120348,120348],\"mapped\",[117]],[[120349,120349],\"mapped\",[118]],[[120350,120350],\"mapped\",[119]],[[120351,120351],\"mapped\",[120]],[[120352,120352],\"mapped\",[121]],[[120353,120353],\"mapped\",[122]],[[120354,120354],\"mapped\",[97]],[[120355,120355],\"mapped\",[98]],[[120356,120356],\"mapped\",[99]],[[120357,120357],\"mapped\",[100]],[[120358,120358],\"mapped\",[101]],[[120359,120359],\"mapped\",[102]],[[120360,120360],\"mapped\",[103]],[[120361,120361],\"mapped\",[104]],[[120362,120362],\"mapped\",[105]],[[120363,120363],\"mapped\",[106]],[[120364,120364],\"mapped\",[107]],[[120365,120365],\"mapped\",[108]],[[120366,120366],\"mapped\",[109]],[[120367,120367],\"mapped\",[110]],[[120368,120368],\"mapped\",[111]],[[120369,120369],\"mapped\",[112]],[[120370,120370],\"mapped\",[113]],[[120371,120371],\"mapped\",[114]],[[120372,120372],\"mapped\",[115]],[[120373,120373],\"mapped\",[116]],[[120374,120374],\"mapped\",[117]],[[120375,120375],\"mapped\",[118]],[[120376,120376],\"mapped\",[119]],[[120377,120377],\"mapped\",[120]],[[120378,120378],\"mapped\",[121]],[[120379,120379],\"mapped\",[122]],[[120380,120380],\"mapped\",[97]],[[120381,120381],\"mapped\",[98]],[[120382,120382],\"mapped\",[99]],[[120383,120383],\"mapped\",[100]],[[120384,120384],\"mapped\",[101]],[[120385,120385],\"mapped\",[102]],[[120386,120386],\"mapped\",[103]],[[120387,120387],\"mapped\",[104]],[[120388,120388],\"mapped\",[105]],[[120389,120389],\"mapped\",[106]],[[120390,120390],\"mapped\",[107]],[[120391,120391],\"mapped\",[108]],[[120392,120392],\"mapped\",[109]],[[120393,120393],\"mapped\",[110]],[[120394,120394],\"mapped\",[111]],[[120395,120395],\"mapped\",[112]],[[120396,120396],\"mapped\",[113]],[[120397,120397],\"mapped\",[114]],[[120398,120398],\"mapped\",[115]],[[120399,120399],\"mapped\",[116]],[[120400,120400],\"mapped\",[117]],[[120401,120401],\"mapped\",[118]],[[120402,120402],\"mapped\",[119]],[[120
403,120403],\"mapped\",[120]],[[120404,120404],\"mapped\",[121]],[[120405,120405],\"mapped\",[122]],[[120406,120406],\"mapped\",[97]],[[120407,120407],\"mapped\",[98]],[[120408,120408],\"mapped\",[99]],[[120409,120409],\"mapped\",[100]],[[120410,120410],\"mapped\",[101]],[[120411,120411],\"mapped\",[102]],[[120412,120412],\"mapped\",[103]],[[120413,120413],\"mapped\",[104]],[[120414,120414],\"mapped\",[105]],[[120415,120415],\"mapped\",[106]],[[120416,120416],\"mapped\",[107]],[[120417,120417],\"mapped\",[108]],[[120418,120418],\"mapped\",[109]],[[120419,120419],\"mapped\",[110]],[[120420,120420],\"mapped\",[111]],[[120421,120421],\"mapped\",[112]],[[120422,120422],\"mapped\",[113]],[[120423,120423],\"mapped\",[114]],[[120424,120424],\"mapped\",[115]],[[120425,120425],\"mapped\",[116]],[[120426,120426],\"mapped\",[117]],[[120427,120427],\"mapped\",[118]],[[120428,120428],\"mapped\",[119]],[[120429,120429],\"mapped\",[120]],[[120430,120430],\"mapped\",[121]],[[120431,120431],\"mapped\",[122]],[[120432,120432],\"mapped\",[97]],[[120433,120433],\"mapped\",[98]],[[120434,120434],\"mapped\",[99]],[[120435,120435],\"mapped\",[100]],[[120436,120436],\"mapped\",[101]],[[120437,120437],\"mapped\",[102]],[[120438,120438],\"mapped\",[103]],[[120439,120439],\"mapped\",[104]],[[120440,120440],\"mapped\",[105]],[[120441,120441],\"mapped\",[106]],[[120442,120442],\"mapped\",[107]],[[120443,120443],\"mapped\",[108]],[[120444,120444],\"mapped\",[109]],[[120445,120445],\"mapped\",[110]],[[120446,120446],\"mapped\",[111]],[[120447,120447],\"mapped\",[112]],[[120448,120448],\"mapped\",[113]],[[120449,120449],\"mapped\",[114]],[[120450,120450],\"mapped\",[115]],[[120451,120451],\"mapped\",[116]],[[120452,120452],\"mapped\",[117]],[[120453,120453],\"mapped\",[118]],[[120454,120454],\"mapped\",[119]],[[120455,120455],\"mapped\",[120]],[[120456,120456],\"mapped\",[121]],[[120457,120457],\"mapped\",[122]],[[120458,120458],\"mapped\",[97]],[[120459,120459],\"mapped\",[98]],[[120460,120460],\
"mapped\",[99]],[[120461,120461],\"mapped\",[100]],[[120462,120462],\"mapped\",[101]],[[120463,120463],\"mapped\",[102]],[[120464,120464],\"mapped\",[103]],[[120465,120465],\"mapped\",[104]],[[120466,120466],\"mapped\",[105]],[[120467,120467],\"mapped\",[106]],[[120468,120468],\"mapped\",[107]],[[120469,120469],\"mapped\",[108]],[[120470,120470],\"mapped\",[109]],[[120471,120471],\"mapped\",[110]],[[120472,120472],\"mapped\",[111]],[[120473,120473],\"mapped\",[112]],[[120474,120474],\"mapped\",[113]],[[120475,120475],\"mapped\",[114]],[[120476,120476],\"mapped\",[115]],[[120477,120477],\"mapped\",[116]],[[120478,120478],\"mapped\",[117]],[[120479,120479],\"mapped\",[118]],[[120480,120480],\"mapped\",[119]],[[120481,120481],\"mapped\",[120]],[[120482,120482],\"mapped\",[121]],[[120483,120483],\"mapped\",[122]],[[120484,120484],\"mapped\",[305]],[[120485,120485],\"mapped\",[567]],[[120486,120487],\"disallowed\"],[[120488,120488],\"mapped\",[945]],[[120489,120489],\"mapped\",[946]],[[120490,120490],\"mapped\",[947]],[[120491,120491],\"mapped\",[948]],[[120492,120492],\"mapped\",[949]],[[120493,120493],\"mapped\",[950]],[[120494,120494],\"mapped\",[951]],[[120495,120495],\"mapped\",[952]],[[120496,120496],\"mapped\",[953]],[[120497,120497],\"mapped\",[954]],[[120498,120498],\"mapped\",[955]],[[120499,120499],\"mapped\",[956]],[[120500,120500],\"mapped\",[957]],[[120501,120501],\"mapped\",[958]],[[120502,120502],\"mapped\",[959]],[[120503,120503],\"mapped\",[960]],[[120504,120504],\"mapped\",[961]],[[120505,120505],\"mapped\",[952]],[[120506,120506],\"mapped\",[963]],[[120507,120507],\"mapped\",[964]],[[120508,120508],\"mapped\",[965]],[[120509,120509],\"mapped\",[966]],[[120510,120510],\"mapped\",[967]],[[120511,120511],\"mapped\",[968]],[[120512,120512],\"mapped\",[969]],[[120513,120513],\"mapped\",[8711]],[[120514,120514],\"mapped\",[945]],[[120515,120515],\"mapped\",[946]],[[120516,120516],\"mapped\",[947]],[[120517,120517],\"mapped\",[948]],[[120518,120518],\"mapped
\",[949]],[[120519,120519],\"mapped\",[950]],[[120520,120520],\"mapped\",[951]],[[120521,120521],\"mapped\",[952]],[[120522,120522],\"mapped\",[953]],[[120523,120523],\"mapped\",[954]],[[120524,120524],\"mapped\",[955]],[[120525,120525],\"mapped\",[956]],[[120526,120526],\"mapped\",[957]],[[120527,120527],\"mapped\",[958]],[[120528,120528],\"mapped\",[959]],[[120529,120529],\"mapped\",[960]],[[120530,120530],\"mapped\",[961]],[[120531,120532],\"mapped\",[963]],[[120533,120533],\"mapped\",[964]],[[120534,120534],\"mapped\",[965]],[[120535,120535],\"mapped\",[966]],[[120536,120536],\"mapped\",[967]],[[120537,120537],\"mapped\",[968]],[[120538,120538],\"mapped\",[969]],[[120539,120539],\"mapped\",[8706]],[[120540,120540],\"mapped\",[949]],[[120541,120541],\"mapped\",[952]],[[120542,120542],\"mapped\",[954]],[[120543,120543],\"mapped\",[966]],[[120544,120544],\"mapped\",[961]],[[120545,120545],\"mapped\",[960]],[[120546,120546],\"mapped\",[945]],[[120547,120547],\"mapped\",[946]],[[120548,120548],\"mapped\",[947]],[[120549,120549],\"mapped\",[948]],[[120550,120550],\"mapped\",[949]],[[120551,120551],\"mapped\",[950]],[[120552,120552],\"mapped\",[951]],[[120553,120553],\"mapped\",[952]],[[120554,120554],\"mapped\",[953]],[[120555,120555],\"mapped\",[954]],[[120556,120556],\"mapped\",[955]],[[120557,120557],\"mapped\",[956]],[[120558,120558],\"mapped\",[957]],[[120559,120559],\"mapped\",[958]],[[120560,120560],\"mapped\",[959]],[[120561,120561],\"mapped\",[960]],[[120562,120562],\"mapped\",[961]],[[120563,120563],\"mapped\",[952]],[[120564,120564],\"mapped\",[963]],[[120565,120565],\"mapped\",[964]],[[120566,120566],\"mapped\",[965]],[[120567,120567],\"mapped\",[966]],[[120568,120568],\"mapped\",[967]],[[120569,120569],\"mapped\",[968]],[[120570,120570],\"mapped\",[969]],[[120571,120571],\"mapped\",[8711]],[[120572,120572],\"mapped\",[945]],[[120573,120573],\"mapped\",[946]],[[120574,120574],\"mapped\",[947]],[[120575,120575],\"mapped\",[948]],[[120576,120576],\"mapped\",
[949]],[[120577,120577],\"mapped\",[950]],[[120578,120578],\"mapped\",[951]],[[120579,120579],\"mapped\",[952]],[[120580,120580],\"mapped\",[953]],[[120581,120581],\"mapped\",[954]],[[120582,120582],\"mapped\",[955]],[[120583,120583],\"mapped\",[956]],[[120584,120584],\"mapped\",[957]],[[120585,120585],\"mapped\",[958]],[[120586,120586],\"mapped\",[959]],[[120587,120587],\"mapped\",[960]],[[120588,120588],\"mapped\",[961]],[[120589,120590],\"mapped\",[963]],[[120591,120591],\"mapped\",[964]],[[120592,120592],\"mapped\",[965]],[[120593,120593],\"mapped\",[966]],[[120594,120594],\"mapped\",[967]],[[120595,120595],\"mapped\",[968]],[[120596,120596],\"mapped\",[969]],[[120597,120597],\"mapped\",[8706]],[[120598,120598],\"mapped\",[949]],[[120599,120599],\"mapped\",[952]],[[120600,120600],\"mapped\",[954]],[[120601,120601],\"mapped\",[966]],[[120602,120602],\"mapped\",[961]],[[120603,120603],\"mapped\",[960]],[[120604,120604],\"mapped\",[945]],[[120605,120605],\"mapped\",[946]],[[120606,120606],\"mapped\",[947]],[[120607,120607],\"mapped\",[948]],[[120608,120608],\"mapped\",[949]],[[120609,120609],\"mapped\",[950]],[[120610,120610],\"mapped\",[951]],[[120611,120611],\"mapped\",[952]],[[120612,120612],\"mapped\",[953]],[[120613,120613],\"mapped\",[954]],[[120614,120614],\"mapped\",[955]],[[120615,120615],\"mapped\",[956]],[[120616,120616],\"mapped\",[957]],[[120617,120617],\"mapped\",[958]],[[120618,120618],\"mapped\",[959]],[[120619,120619],\"mapped\",[960]],[[120620,120620],\"mapped\",[961]],[[120621,120621],\"mapped\",[952]],[[120622,120622],\"mapped\",[963]],[[120623,120623],\"mapped\",[964]],[[120624,120624],\"mapped\",[965]],[[120625,120625],\"mapped\",[966]],[[120626,120626],\"mapped\",[967]],[[120627,120627],\"mapped\",[968]],[[120628,120628],\"mapped\",[969]],[[120629,120629],\"mapped\",[8711]],[[120630,120630],\"mapped\",[945]],[[120631,120631],\"mapped\",[946]],[[120632,120632],\"mapped\",[947]],[[120633,120633],\"mapped\",[948]],[[120634,120634],\"mapped\",[94
9]],[[120635,120635],\"mapped\",[950]],[[120636,120636],\"mapped\",[951]],[[120637,120637],\"mapped\",[952]],[[120638,120638],\"mapped\",[953]],[[120639,120639],\"mapped\",[954]],[[120640,120640],\"mapped\",[955]],[[120641,120641],\"mapped\",[956]],[[120642,120642],\"mapped\",[957]],[[120643,120643],\"mapped\",[958]],[[120644,120644],\"mapped\",[959]],[[120645,120645],\"mapped\",[960]],[[120646,120646],\"mapped\",[961]],[[120647,120648],\"mapped\",[963]],[[120649,120649],\"mapped\",[964]],[[120650,120650],\"mapped\",[965]],[[120651,120651],\"mapped\",[966]],[[120652,120652],\"mapped\",[967]],[[120653,120653],\"mapped\",[968]],[[120654,120654],\"mapped\",[969]],[[120655,120655],\"mapped\",[8706]],[[120656,120656],\"mapped\",[949]],[[120657,120657],\"mapped\",[952]],[[120658,120658],\"mapped\",[954]],[[120659,120659],\"mapped\",[966]],[[120660,120660],\"mapped\",[961]],[[120661,120661],\"mapped\",[960]],[[120662,120662],\"mapped\",[945]],[[120663,120663],\"mapped\",[946]],[[120664,120664],\"mapped\",[947]],[[120665,120665],\"mapped\",[948]],[[120666,120666],\"mapped\",[949]],[[120667,120667],\"mapped\",[950]],[[120668,120668],\"mapped\",[951]],[[120669,120669],\"mapped\",[952]],[[120670,120670],\"mapped\",[953]],[[120671,120671],\"mapped\",[954]],[[120672,120672],\"mapped\",[955]],[[120673,120673],\"mapped\",[956]],[[120674,120674],\"mapped\",[957]],[[120675,120675],\"mapped\",[958]],[[120676,120676],\"mapped\",[959]],[[120677,120677],\"mapped\",[960]],[[120678,120678],\"mapped\",[961]],[[120679,120679],\"mapped\",[952]],[[120680,120680],\"mapped\",[963]],[[120681,120681],\"mapped\",[964]],[[120682,120682],\"mapped\",[965]],[[120683,120683],\"mapped\",[966]],[[120684,120684],\"mapped\",[967]],[[120685,120685],\"mapped\",[968]],[[120686,120686],\"mapped\",[969]],[[120687,120687],\"mapped\",[8711]],[[120688,120688],\"mapped\",[945]],[[120689,120689],\"mapped\",[946]],[[120690,120690],\"mapped\",[947]],[[120691,120691],\"mapped\",[948]],[[120692,120692],\"mapped\",[949]]
,[[120693,120693],\"mapped\",[950]],[[120694,120694],\"mapped\",[951]],[[120695,120695],\"mapped\",[952]],[[120696,120696],\"mapped\",[953]],[[120697,120697],\"mapped\",[954]],[[120698,120698],\"mapped\",[955]],[[120699,120699],\"mapped\",[956]],[[120700,120700],\"mapped\",[957]],[[120701,120701],\"mapped\",[958]],[[120702,120702],\"mapped\",[959]],[[120703,120703],\"mapped\",[960]],[[120704,120704],\"mapped\",[961]],[[120705,120706],\"mapped\",[963]],[[120707,120707],\"mapped\",[964]],[[120708,120708],\"mapped\",[965]],[[120709,120709],\"mapped\",[966]],[[120710,120710],\"mapped\",[967]],[[120711,120711],\"mapped\",[968]],[[120712,120712],\"mapped\",[969]],[[120713,120713],\"mapped\",[8706]],[[120714,120714],\"mapped\",[949]],[[120715,120715],\"mapped\",[952]],[[120716,120716],\"mapped\",[954]],[[120717,120717],\"mapped\",[966]],[[120718,120718],\"mapped\",[961]],[[120719,120719],\"mapped\",[960]],[[120720,120720],\"mapped\",[945]],[[120721,120721],\"mapped\",[946]],[[120722,120722],\"mapped\",[947]],[[120723,120723],\"mapped\",[948]],[[120724,120724],\"mapped\",[949]],[[120725,120725],\"mapped\",[950]],[[120726,120726],\"mapped\",[951]],[[120727,120727],\"mapped\",[952]],[[120728,120728],\"mapped\",[953]],[[120729,120729],\"mapped\",[954]],[[120730,120730],\"mapped\",[955]],[[120731,120731],\"mapped\",[956]],[[120732,120732],\"mapped\",[957]],[[120733,120733],\"mapped\",[958]],[[120734,120734],\"mapped\",[959]],[[120735,120735],\"mapped\",[960]],[[120736,120736],\"mapped\",[961]],[[120737,120737],\"mapped\",[952]],[[120738,120738],\"mapped\",[963]],[[120739,120739],\"mapped\",[964]],[[120740,120740],\"mapped\",[965]],[[120741,120741],\"mapped\",[966]],[[120742,120742],\"mapped\",[967]],[[120743,120743],\"mapped\",[968]],[[120744,120744],\"mapped\",[969]],[[120745,120745],\"mapped\",[8711]],[[120746,120746],\"mapped\",[945]],[[120747,120747],\"mapped\",[946]],[[120748,120748],\"mapped\",[947]],[[120749,120749],\"mapped\",[948]],[[120750,120750],\"mapped\",[949]],[[
120751,120751],\"mapped\",[950]],[[120752,120752],\"mapped\",[951]],[[120753,120753],\"mapped\",[952]],[[120754,120754],\"mapped\",[953]],[[120755,120755],\"mapped\",[954]],[[120756,120756],\"mapped\",[955]],[[120757,120757],\"mapped\",[956]],[[120758,120758],\"mapped\",[957]],[[120759,120759],\"mapped\",[958]],[[120760,120760],\"mapped\",[959]],[[120761,120761],\"mapped\",[960]],[[120762,120762],\"mapped\",[961]],[[120763,120764],\"mapped\",[963]],[[120765,120765],\"mapped\",[964]],[[120766,120766],\"mapped\",[965]],[[120767,120767],\"mapped\",[966]],[[120768,120768],\"mapped\",[967]],[[120769,120769],\"mapped\",[968]],[[120770,120770],\"mapped\",[969]],[[120771,120771],\"mapped\",[8706]],[[120772,120772],\"mapped\",[949]],[[120773,120773],\"mapped\",[952]],[[120774,120774],\"mapped\",[954]],[[120775,120775],\"mapped\",[966]],[[120776,120776],\"mapped\",[961]],[[120777,120777],\"mapped\",[960]],[[120778,120779],\"mapped\",[989]],[[120780,120781],\"disallowed\"],[[120782,120782],\"mapped\",[48]],[[120783,120783],\"mapped\",[49]],[[120784,120784],\"mapped\",[50]],[[120785,120785],\"mapped\",[51]],[[120786,120786],\"mapped\",[52]],[[120787,120787],\"mapped\",[53]],[[120788,120788],\"mapped\",[54]],[[120789,120789],\"mapped\",[55]],[[120790,120790],\"mapped\",[56]],[[120791,120791],\"mapped\",[57]],[[120792,120792],\"mapped\",[48]],[[120793,120793],\"mapped\",[49]],[[120794,120794],\"mapped\",[50]],[[120795,120795],\"mapped\",[51]],[[120796,120796],\"mapped\",[52]],[[120797,120797],\"mapped\",[53]],[[120798,120798],\"mapped\",[54]],[[120799,120799],\"mapped\",[55]],[[120800,120800],\"mapped\",[56]],[[120801,120801],\"mapped\",[57]],[[120802,120802],\"mapped\",[48]],[[120803,120803],\"mapped\",[49]],[[120804,120804],\"mapped\",[50]],[[120805,120805],\"mapped\",[51]],[[120806,120806],\"mapped\",[52]],[[120807,120807],\"mapped\",[53]],[[120808,120808],\"mapped\",[54]],[[120809,120809],\"mapped\",[55]],[[120810,120810],\"mapped\",[56]],[[120811,120811],\"mapped\",[57]],[[1
20812,120812],\"mapped\",[48]],[[120813,120813],\"mapped\",[49]],[[120814,120814],\"mapped\",[50]],[[120815,120815],\"mapped\",[51]],[[120816,120816],\"mapped\",[52]],[[120817,120817],\"mapped\",[53]],[[120818,120818],\"mapped\",[54]],[[120819,120819],\"mapped\",[55]],[[120820,120820],\"mapped\",[56]],[[120821,120821],\"mapped\",[57]],[[120822,120822],\"mapped\",[48]],[[120823,120823],\"mapped\",[49]],[[120824,120824],\"mapped\",[50]],[[120825,120825],\"mapped\",[51]],[[120826,120826],\"mapped\",[52]],[[120827,120827],\"mapped\",[53]],[[120828,120828],\"mapped\",[54]],[[120829,120829],\"mapped\",[55]],[[120830,120830],\"mapped\",[56]],[[120831,120831],\"mapped\",[57]],[[120832,121343],\"valid\",[],\"NV8\"],[[121344,121398],\"valid\"],[[121399,121402],\"valid\",[],\"NV8\"],[[121403,121452],\"valid\"],[[121453,121460],\"valid\",[],\"NV8\"],[[121461,121461],\"valid\"],[[121462,121475],\"valid\",[],\"NV8\"],[[121476,121476],\"valid\"],[[121477,121483],\"valid\",[],\"NV8\"],[[121484,121498],\"disallowed\"],[[121499,121503],\"valid\"],[[121504,121504],\"disallowed\"],[[121505,121519],\"valid\"],[[121520,124927],\"disallowed\"],[[124928,125124],\"valid\"],[[125125,125126],\"disallowed\"],[[125127,125135],\"valid\",[],\"NV8\"],[[125136,125142],\"valid\"],[[125143,126463],\"disallowed\"],[[126464,126464],\"mapped\",[1575]],[[126465,126465],\"mapped\",[1576]],[[126466,126466],\"mapped\",[1580]],[[126467,126467],\"mapped\",[1583]],[[126468,126468],\"disallowed\"],[[126469,126469],\"mapped\",[1608]],[[126470,126470],\"mapped\",[1586]],[[126471,126471],\"mapped\",[1581]],[[126472,126472],\"mapped\",[1591]],[[126473,126473],\"mapped\",[1610]],[[126474,126474],\"mapped\",[1603]],[[126475,126475],\"mapped\",[1604]],[[126476,126476],\"mapped\",[1605]],[[126477,126477],\"mapped\",[1606]],[[126478,126478],\"mapped\",[1587]],[[126479,126479],\"mapped\",[1593]],[[126480,126480],\"mapped\",[1601]],[[126481,126481],\"mapped\",[1589]],[[126482,126482],\"mapped\",[1602]],[[126483,126483],\"
mapped\",[1585]],[[126484,126484],\"mapped\",[1588]],[[126485,126485],\"mapped\",[1578]],[[126486,126486],\"mapped\",[1579]],[[126487,126487],\"mapped\",[1582]],[[126488,126488],\"mapped\",[1584]],[[126489,126489],\"mapped\",[1590]],[[126490,126490],\"mapped\",[1592]],[[126491,126491],\"mapped\",[1594]],[[126492,126492],\"mapped\",[1646]],[[126493,126493],\"mapped\",[1722]],[[126494,126494],\"mapped\",[1697]],[[126495,126495],\"mapped\",[1647]],[[126496,126496],\"disallowed\"],[[126497,126497],\"mapped\",[1576]],[[126498,126498],\"mapped\",[1580]],[[126499,126499],\"disallowed\"],[[126500,126500],\"mapped\",[1607]],[[126501,126502],\"disallowed\"],[[126503,126503],\"mapped\",[1581]],[[126504,126504],\"disallowed\"],[[126505,126505],\"mapped\",[1610]],[[126506,126506],\"mapped\",[1603]],[[126507,126507],\"mapped\",[1604]],[[126508,126508],\"mapped\",[1605]],[[126509,126509],\"mapped\",[1606]],[[126510,126510],\"mapped\",[1587]],[[126511,126511],\"mapped\",[1593]],[[126512,126512],\"mapped\",[1601]],[[126513,126513],\"mapped\",[1589]],[[126514,126514],\"mapped\",[1602]],[[126515,126515],\"disallowed\"],[[126516,126516],\"mapped\",[1588]],[[126517,126517],\"mapped\",[1578]],[[126518,126518],\"mapped\",[1579]],[[126519,126519],\"mapped\",[1582]],[[126520,126520],\"disallowed\"],[[126521,126521],\"mapped\",[1590]],[[126522,126522],\"disallowed\"],[[126523,126523],\"mapped\",[1594]],[[126524,126529],\"disallowed\"],[[126530,126530],\"mapped\",[1580]],[[126531,126534],\"disallowed\"],[[126535,126535],\"mapped\",[1581]],[[126536,126536],\"disallowed\"],[[126537,126537],\"mapped\",[1610]],[[126538,126538],\"disallowed\"],[[126539,126539],\"mapped\",[1604]],[[126540,126540],\"disallowed\"],[[126541,126541],\"mapped\",[1606]],[[126542,126542],\"mapped\",[1587]],[[126543,126543],\"mapped\",[1593]],[[126544,126544],\"disallowed\"],[[126545,126545],\"mapped\",[1589]],[[126546,126546],\"mapped\",[1602]],[[126547,126547],\"disallowed\"],[[126548,126548],\"mapped\",[1588]],[[126549,
126550],\"disallowed\"],[[126551,126551],\"mapped\",[1582]],[[126552,126552],\"disallowed\"],[[126553,126553],\"mapped\",[1590]],[[126554,126554],\"disallowed\"],[[126555,126555],\"mapped\",[1594]],[[126556,126556],\"disallowed\"],[[126557,126557],\"mapped\",[1722]],[[126558,126558],\"disallowed\"],[[126559,126559],\"mapped\",[1647]],[[126560,126560],\"disallowed\"],[[126561,126561],\"mapped\",[1576]],[[126562,126562],\"mapped\",[1580]],[[126563,126563],\"disallowed\"],[[126564,126564],\"mapped\",[1607]],[[126565,126566],\"disallowed\"],[[126567,126567],\"mapped\",[1581]],[[126568,126568],\"mapped\",[1591]],[[126569,126569],\"mapped\",[1610]],[[126570,126570],\"mapped\",[1603]],[[126571,126571],\"disallowed\"],[[126572,126572],\"mapped\",[1605]],[[126573,126573],\"mapped\",[1606]],[[126574,126574],\"mapped\",[1587]],[[126575,126575],\"mapped\",[1593]],[[126576,126576],\"mapped\",[1601]],[[126577,126577],\"mapped\",[1589]],[[126578,126578],\"mapped\",[1602]],[[126579,126579],\"disallowed\"],[[126580,126580],\"mapped\",[1588]],[[126581,126581],\"mapped\",[1578]],[[126582,126582],\"mapped\",[1579]],[[126583,126583],\"mapped\",[1582]],[[126584,126584],\"disallowed\"],[[126585,126585],\"mapped\",[1590]],[[126586,126586],\"mapped\",[1592]],[[126587,126587],\"mapped\",[1594]],[[126588,126588],\"mapped\",[1646]],[[126589,126589],\"disallowed\"],[[126590,126590],\"mapped\",[1697]],[[126591,126591],\"disallowed\"],[[126592,126592],\"mapped\",[1575]],[[126593,126593],\"mapped\",[1576]],[[126594,126594],\"mapped\",[1580]],[[126595,126595],\"mapped\",[1583]],[[126596,126596],\"mapped\",[1607]],[[126597,126597],\"mapped\",[1608]],[[126598,126598],\"mapped\",[1586]],[[126599,126599],\"mapped\",[1581]],[[126600,126600],\"mapped\",[1591]],[[126601,126601],\"mapped\",[1610]],[[126602,126602],\"disallowed\"],[[126603,126603],\"mapped\",[1604]],[[126604,126604],\"mapped\",[1605]],[[126605,126605],\"mapped\",[1606]],[[126606,126606],\"mapped\",[1587]],[[126607,126607],\"mapped\",[1593]]
,[[126608,126608],\"mapped\",[1601]],[[126609,126609],\"mapped\",[1589]],[[126610,126610],\"mapped\",[1602]],[[126611,126611],\"mapped\",[1585]],[[126612,126612],\"mapped\",[1588]],[[126613,126613],\"mapped\",[1578]],[[126614,126614],\"mapped\",[1579]],[[126615,126615],\"mapped\",[1582]],[[126616,126616],\"mapped\",[1584]],[[126617,126617],\"mapped\",[1590]],[[126618,126618],\"mapped\",[1592]],[[126619,126619],\"mapped\",[1594]],[[126620,126624],\"disallowed\"],[[126625,126625],\"mapped\",[1576]],[[126626,126626],\"mapped\",[1580]],[[126627,126627],\"mapped\",[1583]],[[126628,126628],\"disallowed\"],[[126629,126629],\"mapped\",[1608]],[[126630,126630],\"mapped\",[1586]],[[126631,126631],\"mapped\",[1581]],[[126632,126632],\"mapped\",[1591]],[[126633,126633],\"mapped\",[1610]],[[126634,126634],\"disallowed\"],[[126635,126635],\"mapped\",[1604]],[[126636,126636],\"mapped\",[1605]],[[126637,126637],\"mapped\",[1606]],[[126638,126638],\"mapped\",[1587]],[[126639,126639],\"mapped\",[1593]],[[126640,126640],\"mapped\",[1601]],[[126641,126641],\"mapped\",[1589]],[[126642,126642],\"mapped\",[1602]],[[126643,126643],\"mapped\",[1585]],[[126644,126644],\"mapped\",[1588]],[[126645,126645],\"mapped\",[1578]],[[126646,126646],\"mapped\",[1579]],[[126647,126647],\"mapped\",[1582]],[[126648,126648],\"mapped\",[1584]],[[126649,126649],\"mapped\",[1590]],[[126650,126650],\"mapped\",[1592]],[[126651,126651],\"mapped\",[1594]],[[126652,126703],\"disallowed\"],[[126704,126705],\"valid\",[],\"NV8\"],[[126706,126975],\"disallowed\"],[[126976,127019],\"valid\",[],\"NV8\"],[[127020,127023],\"disallowed\"],[[127024,127123],\"valid\",[],\"NV8\"],[[127124,127135],\"disallowed\"],[[127136,127150],\"valid\",[],\"NV8\"],[[127151,127152],\"disallowed\"],[[127153,127166],\"valid\",[],\"NV8\"],[[127167,127167],\"valid\",[],\"NV8\"],[[127168,127168],\"disallowed\"],[[127169,127183],\"valid\",[],\"NV8\"],[[127184,127184],\"disallowed\"],[[127185,127199],\"valid\",[],\"NV8\"],[[127200,127221],\"valid\
",[],\"NV8\"],[[127222,127231],\"disallowed\"],[[127232,127232],\"disallowed\"],[[127233,127233],\"disallowed_STD3_mapped\",[48,44]],[[127234,127234],\"disallowed_STD3_mapped\",[49,44]],[[127235,127235],\"disallowed_STD3_mapped\",[50,44]],[[127236,127236],\"disallowed_STD3_mapped\",[51,44]],[[127237,127237],\"disallowed_STD3_mapped\",[52,44]],[[127238,127238],\"disallowed_STD3_mapped\",[53,44]],[[127239,127239],\"disallowed_STD3_mapped\",[54,44]],[[127240,127240],\"disallowed_STD3_mapped\",[55,44]],[[127241,127241],\"disallowed_STD3_mapped\",[56,44]],[[127242,127242],\"disallowed_STD3_mapped\",[57,44]],[[127243,127244],\"valid\",[],\"NV8\"],[[127245,127247],\"disallowed\"],[[127248,127248],\"disallowed_STD3_mapped\",[40,97,41]],[[127249,127249],\"disallowed_STD3_mapped\",[40,98,41]],[[127250,127250],\"disallowed_STD3_mapped\",[40,99,41]],[[127251,127251],\"disallowed_STD3_mapped\",[40,100,41]],[[127252,127252],\"disallowed_STD3_mapped\",[40,101,41]],[[127253,127253],\"disallowed_STD3_mapped\",[40,102,41]],[[127254,127254],\"disallowed_STD3_mapped\",[40,103,41]],[[127255,127255],\"disallowed_STD3_mapped\",[40,104,41]],[[127256,127256],\"disallowed_STD3_mapped\",[40,105,41]],[[127257,127257],\"disallowed_STD3_mapped\",[40,106,41]],[[127258,127258],\"disallowed_STD3_mapped\",[40,107,41]],[[127259,127259],\"disallowed_STD3_mapped\",[40,108,41]],[[127260,127260],\"disallowed_STD3_mapped\",[40,109,41]],[[127261,127261],\"disallowed_STD3_mapped\",[40,110,41]],[[127262,127262],\"disallowed_STD3_mapped\",[40,111,41]],[[127263,127263],\"disallowed_STD3_mapped\",[40,112,41]],[[127264,127264],\"disallowed_STD3_mapped\",[40,113,41]],[[127265,127265],\"disallowed_STD3_mapped\",[40,114,41]],[[127266,127266],\"disallowed_STD3_mapped\",[40,115,41]],[[127267,127267],\"disallowed_STD3_mapped\",[40,116,41]],[[127268,127268],\"disallowed_STD3_mapped\",[40,117,41]],[[127269,127269],\"disallowed_STD3_mapped\",[40,118,41]],[[127270,127270],\"disallowed_STD3_mapped\",[40,119,41]],[[127271,1
27271],\"disallowed_STD3_mapped\",[40,120,41]],[[127272,127272],\"disallowed_STD3_mapped\",[40,121,41]],[[127273,127273],\"disallowed_STD3_mapped\",[40,122,41]],[[127274,127274],\"mapped\",[12308,115,12309]],[[127275,127275],\"mapped\",[99]],[[127276,127276],\"mapped\",[114]],[[127277,127277],\"mapped\",[99,100]],[[127278,127278],\"mapped\",[119,122]],[[127279,127279],\"disallowed\"],[[127280,127280],\"mapped\",[97]],[[127281,127281],\"mapped\",[98]],[[127282,127282],\"mapped\",[99]],[[127283,127283],\"mapped\",[100]],[[127284,127284],\"mapped\",[101]],[[127285,127285],\"mapped\",[102]],[[127286,127286],\"mapped\",[103]],[[127287,127287],\"mapped\",[104]],[[127288,127288],\"mapped\",[105]],[[127289,127289],\"mapped\",[106]],[[127290,127290],\"mapped\",[107]],[[127291,127291],\"mapped\",[108]],[[127292,127292],\"mapped\",[109]],[[127293,127293],\"mapped\",[110]],[[127294,127294],\"mapped\",[111]],[[127295,127295],\"mapped\",[112]],[[127296,127296],\"mapped\",[113]],[[127297,127297],\"mapped\",[114]],[[127298,127298],\"mapped\",[115]],[[127299,127299],\"mapped\",[116]],[[127300,127300],\"mapped\",[117]],[[127301,127301],\"mapped\",[118]],[[127302,127302],\"mapped\",[119]],[[127303,127303],\"mapped\",[120]],[[127304,127304],\"mapped\",[121]],[[127305,127305],\"mapped\",[122]],[[127306,127306],\"mapped\",[104,118]],[[127307,127307],\"mapped\",[109,118]],[[127308,127308],\"mapped\",[115,100]],[[127309,127309],\"mapped\",[115,115]],[[127310,127310],\"mapped\",[112,112,118]],[[127311,127311],\"mapped\",[119,99]],[[127312,127318],\"valid\",[],\"NV8\"],[[127319,127319],\"valid\",[],\"NV8\"],[[127320,127326],\"valid\",[],\"NV8\"],[[127327,127327],\"valid\",[],\"NV8\"],[[127328,127337],\"valid\",[],\"NV8\"],[[127338,127338],\"mapped\",[109,99]],[[127339,127339],\"mapped\",[109,100]],[[127340,127343],\"disallowed\"],[[127344,127352],\"valid\",[],\"NV8\"],[[127353,127353],\"valid\",[],\"NV8\"],[[127354,127354],\"valid\",[],\"NV8\"],[[127355,127356],\"valid\",[],\"NV8\"],[[127357
,127358],\"valid\",[],\"NV8\"],[[127359,127359],\"valid\",[],\"NV8\"],[[127360,127369],\"valid\",[],\"NV8\"],[[127370,127373],\"valid\",[],\"NV8\"],[[127374,127375],\"valid\",[],\"NV8\"],[[127376,127376],\"mapped\",[100,106]],[[127377,127386],\"valid\",[],\"NV8\"],[[127387,127461],\"disallowed\"],[[127462,127487],\"valid\",[],\"NV8\"],[[127488,127488],\"mapped\",[12411,12363]],[[127489,127489],\"mapped\",[12467,12467]],[[127490,127490],\"mapped\",[12469]],[[127491,127503],\"disallowed\"],[[127504,127504],\"mapped\",[25163]],[[127505,127505],\"mapped\",[23383]],[[127506,127506],\"mapped\",[21452]],[[127507,127507],\"mapped\",[12487]],[[127508,127508],\"mapped\",[20108]],[[127509,127509],\"mapped\",[22810]],[[127510,127510],\"mapped\",[35299]],[[127511,127511],\"mapped\",[22825]],[[127512,127512],\"mapped\",[20132]],[[127513,127513],\"mapped\",[26144]],[[127514,127514],\"mapped\",[28961]],[[127515,127515],\"mapped\",[26009]],[[127516,127516],\"mapped\",[21069]],[[127517,127517],\"mapped\",[24460]],[[127518,127518],\"mapped\",[20877]],[[127519,127519],\"mapped\",[26032]],[[127520,127520],\"mapped\",[21021]],[[127521,127521],\"mapped\",[32066]],[[127522,127522],\"mapped\",[29983]],[[127523,127523],\"mapped\",[36009]],[[127524,127524],\"mapped\",[22768]],[[127525,127525],\"mapped\",[21561]],[[127526,127526],\"mapped\",[28436]],[[127527,127527],\"mapped\",[25237]],[[127528,127528],\"mapped\",[25429]],[[127529,127529],\"mapped\",[19968]],[[127530,127530],\"mapped\",[19977]],[[127531,127531],\"mapped\",[36938]],[[127532,127532],\"mapped\",[24038]],[[127533,127533],\"mapped\",[20013]],[[127534,127534],\"mapped\",[21491]],[[127535,127535],\"mapped\",[25351]],[[127536,127536],\"mapped\",[36208]],[[127537,127537],\"mapped\",[25171]],[[127538,127538],\"mapped\",[31105]],[[127539,127539],\"mapped\",[31354]],[[127540,127540],\"mapped\",[21512]],[[127541,127541],\"mapped\",[28288]],[[127542,127542],\"mapped\",[26377]],[[127543,127543],\"mapped\",[26376]],[[127544,127544],\"mapped\"
,[30003]],[[127545,127545],\"mapped\",[21106]],[[127546,127546],\"mapped\",[21942]],[[127547,127551],\"disallowed\"],[[127552,127552],\"mapped\",[12308,26412,12309]],[[127553,127553],\"mapped\",[12308,19977,12309]],[[127554,127554],\"mapped\",[12308,20108,12309]],[[127555,127555],\"mapped\",[12308,23433,12309]],[[127556,127556],\"mapped\",[12308,28857,12309]],[[127557,127557],\"mapped\",[12308,25171,12309]],[[127558,127558],\"mapped\",[12308,30423,12309]],[[127559,127559],\"mapped\",[12308,21213,12309]],[[127560,127560],\"mapped\",[12308,25943,12309]],[[127561,127567],\"disallowed\"],[[127568,127568],\"mapped\",[24471]],[[127569,127569],\"mapped\",[21487]],[[127570,127743],\"disallowed\"],[[127744,127776],\"valid\",[],\"NV8\"],[[127777,127788],\"valid\",[],\"NV8\"],[[127789,127791],\"valid\",[],\"NV8\"],[[127792,127797],\"valid\",[],\"NV8\"],[[127798,127798],\"valid\",[],\"NV8\"],[[127799,127868],\"valid\",[],\"NV8\"],[[127869,127869],\"valid\",[],\"NV8\"],[[127870,127871],\"valid\",[],\"NV8\"],[[127872,127891],\"valid\",[],\"NV8\"],[[127892,127903],\"valid\",[],\"NV8\"],[[127904,127940],\"valid\",[],\"NV8\"],[[127941,127941],\"valid\",[],\"NV8\"],[[127942,127946],\"valid\",[],\"NV8\"],[[127947,127950],\"valid\",[],\"NV8\"],[[127951,127955],\"valid\",[],\"NV8\"],[[127956,127967],\"valid\",[],\"NV8\"],[[127968,127984],\"valid\",[],\"NV8\"],[[127985,127991],\"valid\",[],\"NV8\"],[[127992,127999],\"valid\",[],\"NV8\"],[[128000,128062],\"valid\",[],\"NV8\"],[[128063,128063],\"valid\",[],\"NV8\"],[[128064,128064],\"valid\",[],\"NV8\"],[[128065,128065],\"valid\",[],\"NV8\"],[[128066,128247],\"valid\",[],\"NV8\"],[[128248,128248],\"valid\",[],\"NV8\"],[[128249,128252],\"valid\",[],\"NV8\"],[[128253,128254],\"valid\",[],\"NV8\"],[[128255,128255],\"valid\",[],\"NV8\"],[[128256,128317],\"valid\",[],\"NV8\"],[[128318,128319],\"valid\",[],\"NV8\"],[[128320,128323],\"valid\",[],\"NV8\"],[[128324,128330],\"valid\",[],\"NV8\"],[[128331,128335],\"valid\",[],\"NV8\"],[[128336,128359
],\"valid\",[],\"NV8\"],[[128360,128377],\"valid\",[],\"NV8\"],[[128378,128378],\"disallowed\"],[[128379,128419],\"valid\",[],\"NV8\"],[[128420,128420],\"disallowed\"],[[128421,128506],\"valid\",[],\"NV8\"],[[128507,128511],\"valid\",[],\"NV8\"],[[128512,128512],\"valid\",[],\"NV8\"],[[128513,128528],\"valid\",[],\"NV8\"],[[128529,128529],\"valid\",[],\"NV8\"],[[128530,128532],\"valid\",[],\"NV8\"],[[128533,128533],\"valid\",[],\"NV8\"],[[128534,128534],\"valid\",[],\"NV8\"],[[128535,128535],\"valid\",[],\"NV8\"],[[128536,128536],\"valid\",[],\"NV8\"],[[128537,128537],\"valid\",[],\"NV8\"],[[128538,128538],\"valid\",[],\"NV8\"],[[128539,128539],\"valid\",[],\"NV8\"],[[128540,128542],\"valid\",[],\"NV8\"],[[128543,128543],\"valid\",[],\"NV8\"],[[128544,128549],\"valid\",[],\"NV8\"],[[128550,128551],\"valid\",[],\"NV8\"],[[128552,128555],\"valid\",[],\"NV8\"],[[128556,128556],\"valid\",[],\"NV8\"],[[128557,128557],\"valid\",[],\"NV8\"],[[128558,128559],\"valid\",[],\"NV8\"],[[128560,128563],\"valid\",[],\"NV8\"],[[128564,128564],\"valid\",[],\"NV8\"],[[128565,128576],\"valid\",[],\"NV8\"],[[128577,128578],\"valid\",[],\"NV8\"],[[128579,128580],\"valid\",[],\"NV8\"],[[128581,128591],\"valid\",[],\"NV8\"],[[128592,128639],\"valid\",[],\"NV8\"],[[128640,128709],\"valid\",[],\"NV8\"],[[128710,128719],\"valid\",[],\"NV8\"],[[128720,128720],\"valid\",[],\"NV8\"],[[128721,128735],\"disallowed\"],[[128736,128748],\"valid\",[],\"NV8\"],[[128749,128751],\"disallowed\"],[[128752,128755],\"valid\",[],\"NV8\"],[[128756,128767],\"disallowed\"],[[128768,128883],\"valid\",[],\"NV8\"],[[128884,128895],\"disallowed\"],[[128896,128980],\"valid\",[],\"NV8\"],[[128981,129023],\"disallowed\"],[[129024,129035],\"valid\",[],\"NV8\"],[[129036,129039],\"disallowed\"],[[129040,129095],\"valid\",[],\"NV8\"],[[129096,129103],\"disallowed\"],[[129104,129113],\"valid\",[],\"NV8\"],[[129114,129119],\"disallowed\"],[[129120,129159],\"valid\",[],\"NV8\"],[[129160,129167],\"disallowed\"],[[129168,12919
7],\"valid\",[],\"NV8\"],[[129198,129295],\"disallowed\"],[[129296,129304],\"valid\",[],\"NV8\"],[[129305,129407],\"disallowed\"],[[129408,129412],\"valid\",[],\"NV8\"],[[129413,129471],\"disallowed\"],[[129472,129472],\"valid\",[],\"NV8\"],[[129473,131069],\"disallowed\"],[[131070,131071],\"disallowed\"],[[131072,173782],\"valid\"],[[173783,173823],\"disallowed\"],[[173824,177972],\"valid\"],[[177973,177983],\"disallowed\"],[[177984,178205],\"valid\"],[[178206,178207],\"disallowed\"],[[178208,183969],\"valid\"],[[183970,194559],\"disallowed\"],[[194560,194560],\"mapped\",[20029]],[[194561,194561],\"mapped\",[20024]],[[194562,194562],\"mapped\",[20033]],[[194563,194563],\"mapped\",[131362]],[[194564,194564],\"mapped\",[20320]],[[194565,194565],\"mapped\",[20398]],[[194566,194566],\"mapped\",[20411]],[[194567,194567],\"mapped\",[20482]],[[194568,194568],\"mapped\",[20602]],[[194569,194569],\"mapped\",[20633]],[[194570,194570],\"mapped\",[20711]],[[194571,194571],\"mapped\",[20687]],[[194572,194572],\"mapped\",[13470]],[[194573,194573],\"mapped\",[132666]],[[194574,194574],\"mapped\",[20813]],[[194575,194575],\"mapped\",[20820]],[[194576,194576],\"mapped\",[20836]],[[194577,194577],\"mapped\",[20855]],[[194578,194578],\"mapped\",[132380]],[[194579,194579],\"mapped\",[13497]],[[194580,194580],\"mapped\",[20839]],[[194581,194581],\"mapped\",[20877]],[[194582,194582],\"mapped\",[132427]],[[194583,194583],\"mapped\",[20887]],[[194584,194584],\"mapped\",[20900]],[[194585,194585],\"mapped\",[20172]],[[194586,194586],\"mapped\",[20908]],[[194587,194587],\"mapped\",[20917]],[[194588,194588],\"mapped\",[168415]],[[194589,194589],\"mapped\",[20981]],[[194590,194590],\"mapped\",[20995]],[[194591,194591],\"mapped\",[13535]],[[194592,194592],\"mapped\",[21051]],[[194593,194593],\"mapped\",[21062]],[[194594,194594],\"mapped\",[21106]],[[194595,194595],\"mapped\",[21111]],[[194596,194596],\"mapped\",[13589]],[[194597,194597],\"mapped\",[21191]],[[194598,194598],\"mapped\",[21193]],[
[194599,194599],\"mapped\",[21220]],[[194600,194600],\"mapped\",[21242]],[[194601,194601],\"mapped\",[21253]],[[194602,194602],\"mapped\",[21254]],[[194603,194603],\"mapped\",[21271]],[[194604,194604],\"mapped\",[21321]],[[194605,194605],\"mapped\",[21329]],[[194606,194606],\"mapped\",[21338]],[[194607,194607],\"mapped\",[21363]],[[194608,194608],\"mapped\",[21373]],[[194609,194611],\"mapped\",[21375]],[[194612,194612],\"mapped\",[133676]],[[194613,194613],\"mapped\",[28784]],[[194614,194614],\"mapped\",[21450]],[[194615,194615],\"mapped\",[21471]],[[194616,194616],\"mapped\",[133987]],[[194617,194617],\"mapped\",[21483]],[[194618,194618],\"mapped\",[21489]],[[194619,194619],\"mapped\",[21510]],[[194620,194620],\"mapped\",[21662]],[[194621,194621],\"mapped\",[21560]],[[194622,194622],\"mapped\",[21576]],[[194623,194623],\"mapped\",[21608]],[[194624,194624],\"mapped\",[21666]],[[194625,194625],\"mapped\",[21750]],[[194626,194626],\"mapped\",[21776]],[[194627,194627],\"mapped\",[21843]],[[194628,194628],\"mapped\",[21859]],[[194629,194630],\"mapped\",[21892]],[[194631,194631],\"mapped\",[21913]],[[194632,194632],\"mapped\",[21931]],[[194633,194633],\"mapped\",[21939]],[[194634,194634],\"mapped\",[21954]],[[194635,194635],\"mapped\",[22294]],[[194636,194636],\"mapped\",[22022]],[[194637,194637],\"mapped\",[22295]],[[194638,194638],\"mapped\",[22097]],[[194639,194639],\"mapped\",[22132]],[[194640,194640],\"mapped\",[20999]],[[194641,194641],\"mapped\",[22766]],[[194642,194642],\"mapped\",[22478]],[[194643,194643],\"mapped\",[22516]],[[194644,194644],\"mapped\",[22541]],[[194645,194645],\"mapped\",[22411]],[[194646,194646],\"mapped\",[22578]],[[194647,194647],\"mapped\",[22577]],[[194648,194648],\"mapped\",[22700]],[[194649,194649],\"mapped\",[136420]],[[194650,194650],\"mapped\",[22770]],[[194651,194651],\"mapped\",[22775]],[[194652,194652],\"mapped\",[22790]],[[194653,194653],\"mapped\",[22810]],[[194654,194654],\"mapped\",[22818]],[[194655,194655],\"mapped\",[22882]],
[[194656,194656],\"mapped\",[136872]],[[194657,194657],\"mapped\",[136938]],[[194658,194658],\"mapped\",[23020]],[[194659,194659],\"mapped\",[23067]],[[194660,194660],\"mapped\",[23079]],[[194661,194661],\"mapped\",[23000]],[[194662,194662],\"mapped\",[23142]],[[194663,194663],\"mapped\",[14062]],[[194664,194664],\"disallowed\"],[[194665,194665],\"mapped\",[23304]],[[194666,194667],\"mapped\",[23358]],[[194668,194668],\"mapped\",[137672]],[[194669,194669],\"mapped\",[23491]],[[194670,194670],\"mapped\",[23512]],[[194671,194671],\"mapped\",[23527]],[[194672,194672],\"mapped\",[23539]],[[194673,194673],\"mapped\",[138008]],[[194674,194674],\"mapped\",[23551]],[[194675,194675],\"mapped\",[23558]],[[194676,194676],\"disallowed\"],[[194677,194677],\"mapped\",[23586]],[[194678,194678],\"mapped\",[14209]],[[194679,194679],\"mapped\",[23648]],[[194680,194680],\"mapped\",[23662]],[[194681,194681],\"mapped\",[23744]],[[194682,194682],\"mapped\",[23693]],[[194683,194683],\"mapped\",[138724]],[[194684,194684],\"mapped\",[23875]],[[194685,194685],\"mapped\",[138726]],[[194686,194686],\"mapped\",[23918]],[[194687,194687],\"mapped\",[23915]],[[194688,194688],\"mapped\",[23932]],[[194689,194689],\"mapped\",[24033]],[[194690,194690],\"mapped\",[24034]],[[194691,194691],\"mapped\",[14383]],[[194692,194692],\"mapped\",[24061]],[[194693,194693],\"mapped\",[24104]],[[194694,194694],\"mapped\",[24125]],[[194695,194695],\"mapped\",[24169]],[[194696,194696],\"mapped\",[14434]],[[194697,194697],\"mapped\",[139651]],[[194698,194698],\"mapped\",[14460]],[[194699,194699],\"mapped\",[24240]],[[194700,194700],\"mapped\",[24243]],[[194701,194701],\"mapped\",[24246]],[[194702,194702],\"mapped\",[24266]],[[194703,194703],\"mapped\",[172946]],[[194704,194704],\"mapped\",[24318]],[[194705,194706],\"mapped\",[140081]],[[194707,194707],\"mapped\",[33281]],[[194708,194709],\"mapped\",[24354]],[[194710,194710],\"mapped\",[14535]],[[194711,194711],\"mapped\",[144056]],[[194712,194712],\"mapped\",[156122]]
,[[194713,194713],\"mapped\",[24418]],[[194714,194714],\"mapped\",[24427]],[[194715,194715],\"mapped\",[14563]],[[194716,194716],\"mapped\",[24474]],[[194717,194717],\"mapped\",[24525]],[[194718,194718],\"mapped\",[24535]],[[194719,194719],\"mapped\",[24569]],[[194720,194720],\"mapped\",[24705]],[[194721,194721],\"mapped\",[14650]],[[194722,194722],\"mapped\",[14620]],[[194723,194723],\"mapped\",[24724]],[[194724,194724],\"mapped\",[141012]],[[194725,194725],\"mapped\",[24775]],[[194726,194726],\"mapped\",[24904]],[[194727,194727],\"mapped\",[24908]],[[194728,194728],\"mapped\",[24910]],[[194729,194729],\"mapped\",[24908]],[[194730,194730],\"mapped\",[24954]],[[194731,194731],\"mapped\",[24974]],[[194732,194732],\"mapped\",[25010]],[[194733,194733],\"mapped\",[24996]],[[194734,194734],\"mapped\",[25007]],[[194735,194735],\"mapped\",[25054]],[[194736,194736],\"mapped\",[25074]],[[194737,194737],\"mapped\",[25078]],[[194738,194738],\"mapped\",[25104]],[[194739,194739],\"mapped\",[25115]],[[194740,194740],\"mapped\",[25181]],[[194741,194741],\"mapped\",[25265]],[[194742,194742],\"mapped\",[25300]],[[194743,194743],\"mapped\",[25424]],[[194744,194744],\"mapped\",[142092]],[[194745,194745],\"mapped\",[25405]],[[194746,194746],\"mapped\",[25340]],[[194747,194747],\"mapped\",[25448]],[[194748,194748],\"mapped\",[25475]],[[194749,194749],\"mapped\",[25572]],[[194750,194750],\"mapped\",[142321]],[[194751,194751],\"mapped\",[25634]],[[194752,194752],\"mapped\",[25541]],[[194753,194753],\"mapped\",[25513]],[[194754,194754],\"mapped\",[14894]],[[194755,194755],\"mapped\",[25705]],[[194756,194756],\"mapped\",[25726]],[[194757,194757],\"mapped\",[25757]],[[194758,194758],\"mapped\",[25719]],[[194759,194759],\"mapped\",[14956]],[[194760,194760],\"mapped\",[25935]],[[194761,194761],\"mapped\",[25964]],[[194762,194762],\"mapped\",[143370]],[[194763,194763],\"mapped\",[26083]],[[194764,194764],\"mapped\",[26360]],[[194765,194765],\"mapped\",[26185]],[[194766,194766],\"mapped\",[15129
]],[[194767,194767],\"mapped\",[26257]],[[194768,194768],\"mapped\",[15112]],[[194769,194769],\"mapped\",[15076]],[[194770,194770],\"mapped\",[20882]],[[194771,194771],\"mapped\",[20885]],[[194772,194772],\"mapped\",[26368]],[[194773,194773],\"mapped\",[26268]],[[194774,194774],\"mapped\",[32941]],[[194775,194775],\"mapped\",[17369]],[[194776,194776],\"mapped\",[26391]],[[194777,194777],\"mapped\",[26395]],[[194778,194778],\"mapped\",[26401]],[[194779,194779],\"mapped\",[26462]],[[194780,194780],\"mapped\",[26451]],[[194781,194781],\"mapped\",[144323]],[[194782,194782],\"mapped\",[15177]],[[194783,194783],\"mapped\",[26618]],[[194784,194784],\"mapped\",[26501]],[[194785,194785],\"mapped\",[26706]],[[194786,194786],\"mapped\",[26757]],[[194787,194787],\"mapped\",[144493]],[[194788,194788],\"mapped\",[26766]],[[194789,194789],\"mapped\",[26655]],[[194790,194790],\"mapped\",[26900]],[[194791,194791],\"mapped\",[15261]],[[194792,194792],\"mapped\",[26946]],[[194793,194793],\"mapped\",[27043]],[[194794,194794],\"mapped\",[27114]],[[194795,194795],\"mapped\",[27304]],[[194796,194796],\"mapped\",[145059]],[[194797,194797],\"mapped\",[27355]],[[194798,194798],\"mapped\",[15384]],[[194799,194799],\"mapped\",[27425]],[[194800,194800],\"mapped\",[145575]],[[194801,194801],\"mapped\",[27476]],[[194802,194802],\"mapped\",[15438]],[[194803,194803],\"mapped\",[27506]],[[194804,194804],\"mapped\",[27551]],[[194805,194805],\"mapped\",[27578]],[[194806,194806],\"mapped\",[27579]],[[194807,194807],\"mapped\",[146061]],[[194808,194808],\"mapped\",[138507]],[[194809,194809],\"mapped\",[146170]],[[194810,194810],\"mapped\",[27726]],[[194811,194811],\"mapped\",[146620]],[[194812,194812],\"mapped\",[27839]],[[194813,194813],\"mapped\",[27853]],[[194814,194814],\"mapped\",[27751]],[[194815,194815],\"mapped\",[27926]],[[194816,194816],\"mapped\",[27966]],[[194817,194817],\"mapped\",[28023]],[[194818,194818],\"mapped\",[27969]],[[194819,194819],\"mapped\",[28009]],[[194820,194820],\"mapped\",
[28024]],[[194821,194821],\"mapped\",[28037]],[[194822,194822],\"mapped\",[146718]],[[194823,194823],\"mapped\",[27956]],[[194824,194824],\"mapped\",[28207]],[[194825,194825],\"mapped\",[28270]],[[194826,194826],\"mapped\",[15667]],[[194827,194827],\"mapped\",[28363]],[[194828,194828],\"mapped\",[28359]],[[194829,194829],\"mapped\",[147153]],[[194830,194830],\"mapped\",[28153]],[[194831,194831],\"mapped\",[28526]],[[194832,194832],\"mapped\",[147294]],[[194833,194833],\"mapped\",[147342]],[[194834,194834],\"mapped\",[28614]],[[194835,194835],\"mapped\",[28729]],[[194836,194836],\"mapped\",[28702]],[[194837,194837],\"mapped\",[28699]],[[194838,194838],\"mapped\",[15766]],[[194839,194839],\"mapped\",[28746]],[[194840,194840],\"mapped\",[28797]],[[194841,194841],\"mapped\",[28791]],[[194842,194842],\"mapped\",[28845]],[[194843,194843],\"mapped\",[132389]],[[194844,194844],\"mapped\",[28997]],[[194845,194845],\"mapped\",[148067]],[[194846,194846],\"mapped\",[29084]],[[194847,194847],\"disallowed\"],[[194848,194848],\"mapped\",[29224]],[[194849,194849],\"mapped\",[29237]],[[194850,194850],\"mapped\",[29264]],[[194851,194851],\"mapped\",[149000]],[[194852,194852],\"mapped\",[29312]],[[194853,194853],\"mapped\",[29333]],[[194854,194854],\"mapped\",[149301]],[[194855,194855],\"mapped\",[149524]],[[194856,194856],\"mapped\",[29562]],[[194857,194857],\"mapped\",[29579]],[[194858,194858],\"mapped\",[16044]],[[194859,194859],\"mapped\",[29605]],[[194860,194861],\"mapped\",[16056]],[[194862,194862],\"mapped\",[29767]],[[194863,194863],\"mapped\",[29788]],[[194864,194864],\"mapped\",[29809]],[[194865,194865],\"mapped\",[29829]],[[194866,194866],\"mapped\",[29898]],[[194867,194867],\"mapped\",[16155]],[[194868,194868],\"mapped\",[29988]],[[194869,194869],\"mapped\",[150582]],[[194870,194870],\"mapped\",[30014]],[[194871,194871],\"mapped\",[150674]],[[194872,194872],\"mapped\",[30064]],[[194873,194873],\"mapped\",[139679]],[[194874,194874],\"mapped\",[30224]],[[194875,194875],\"map
ped\",[151457]],[[194876,194876],\"mapped\",[151480]],[[194877,194877],\"mapped\",[151620]],[[194878,194878],\"mapped\",[16380]],[[194879,194879],\"mapped\",[16392]],[[194880,194880],\"mapped\",[30452]],[[194881,194881],\"mapped\",[151795]],[[194882,194882],\"mapped\",[151794]],[[194883,194883],\"mapped\",[151833]],[[194884,194884],\"mapped\",[151859]],[[194885,194885],\"mapped\",[30494]],[[194886,194887],\"mapped\",[30495]],[[194888,194888],\"mapped\",[30538]],[[194889,194889],\"mapped\",[16441]],[[194890,194890],\"mapped\",[30603]],[[194891,194891],\"mapped\",[16454]],[[194892,194892],\"mapped\",[16534]],[[194893,194893],\"mapped\",[152605]],[[194894,194894],\"mapped\",[30798]],[[194895,194895],\"mapped\",[30860]],[[194896,194896],\"mapped\",[30924]],[[194897,194897],\"mapped\",[16611]],[[194898,194898],\"mapped\",[153126]],[[194899,194899],\"mapped\",[31062]],[[194900,194900],\"mapped\",[153242]],[[194901,194901],\"mapped\",[153285]],[[194902,194902],\"mapped\",[31119]],[[194903,194903],\"mapped\",[31211]],[[194904,194904],\"mapped\",[16687]],[[194905,194905],\"mapped\",[31296]],[[194906,194906],\"mapped\",[31306]],[[194907,194907],\"mapped\",[31311]],[[194908,194908],\"mapped\",[153980]],[[194909,194910],\"mapped\",[154279]],[[194911,194911],\"disallowed\"],[[194912,194912],\"mapped\",[16898]],[[194913,194913],\"mapped\",[154539]],[[194914,194914],\"mapped\",[31686]],[[194915,194915],\"mapped\",[31689]],[[194916,194916],\"mapped\",[16935]],[[194917,194917],\"mapped\",[154752]],[[194918,194918],\"mapped\",[31954]],[[194919,194919],\"mapped\",[17056]],[[194920,194920],\"mapped\",[31976]],[[194921,194921],\"mapped\",[31971]],[[194922,194922],\"mapped\",[32000]],[[194923,194923],\"mapped\",[155526]],[[194924,194924],\"mapped\",[32099]],[[194925,194925],\"mapped\",[17153]],[[194926,194926],\"mapped\",[32199]],[[194927,194927],\"mapped\",[32258]],[[194928,194928],\"mapped\",[32325]],[[194929,194929],\"mapped\",[17204]],[[194930,194930],\"mapped\",[156200]],[[194931,19
4931],\"mapped\",[156231]],[[194932,194932],\"mapped\",[17241]],[[194933,194933],\"mapped\",[156377]],[[194934,194934],\"mapped\",[32634]],[[194935,194935],\"mapped\",[156478]],[[194936,194936],\"mapped\",[32661]],[[194937,194937],\"mapped\",[32762]],[[194938,194938],\"mapped\",[32773]],[[194939,194939],\"mapped\",[156890]],[[194940,194940],\"mapped\",[156963]],[[194941,194941],\"mapped\",[32864]],[[194942,194942],\"mapped\",[157096]],[[194943,194943],\"mapped\",[32880]],[[194944,194944],\"mapped\",[144223]],[[194945,194945],\"mapped\",[17365]],[[194946,194946],\"mapped\",[32946]],[[194947,194947],\"mapped\",[33027]],[[194948,194948],\"mapped\",[17419]],[[194949,194949],\"mapped\",[33086]],[[194950,194950],\"mapped\",[23221]],[[194951,194951],\"mapped\",[157607]],[[194952,194952],\"mapped\",[157621]],[[194953,194953],\"mapped\",[144275]],[[194954,194954],\"mapped\",[144284]],[[194955,194955],\"mapped\",[33281]],[[194956,194956],\"mapped\",[33284]],[[194957,194957],\"mapped\",[36766]],[[194958,194958],\"mapped\",[17515]],[[194959,194959],\"mapped\",[33425]],[[194960,194960],\"mapped\",[33419]],[[194961,194961],\"mapped\",[33437]],[[194962,194962],\"mapped\",[21171]],[[194963,194963],\"mapped\",[33457]],[[194964,194964],\"mapped\",[33459]],[[194965,194965],\"mapped\",[33469]],[[194966,194966],\"mapped\",[33510]],[[194967,194967],\"mapped\",[158524]],[[194968,194968],\"mapped\",[33509]],[[194969,194969],\"mapped\",[33565]],[[194970,194970],\"mapped\",[33635]],[[194971,194971],\"mapped\",[33709]],[[194972,194972],\"mapped\",[33571]],[[194973,194973],\"mapped\",[33725]],[[194974,194974],\"mapped\",[33767]],[[194975,194975],\"mapped\",[33879]],[[194976,194976],\"mapped\",[33619]],[[194977,194977],\"mapped\",[33738]],[[194978,194978],\"mapped\",[33740]],[[194979,194979],\"mapped\",[33756]],[[194980,194980],\"mapped\",[158774]],[[194981,194981],\"mapped\",[159083]],[[194982,194982],\"mapped\",[158933]],[[194983,194983],\"mapped\",[17707]],[[194984,194984],\"mapped\",[34033]
],[[194985,194985],\"mapped\",[34035]],[[194986,194986],\"mapped\",[34070]],[[194987,194987],\"mapped\",[160714]],[[194988,194988],\"mapped\",[34148]],[[194989,194989],\"mapped\",[159532]],[[194990,194990],\"mapped\",[17757]],[[194991,194991],\"mapped\",[17761]],[[194992,194992],\"mapped\",[159665]],[[194993,194993],\"mapped\",[159954]],[[194994,194994],\"mapped\",[17771]],[[194995,194995],\"mapped\",[34384]],[[194996,194996],\"mapped\",[34396]],[[194997,194997],\"mapped\",[34407]],[[194998,194998],\"mapped\",[34409]],[[194999,194999],\"mapped\",[34473]],[[195000,195000],\"mapped\",[34440]],[[195001,195001],\"mapped\",[34574]],[[195002,195002],\"mapped\",[34530]],[[195003,195003],\"mapped\",[34681]],[[195004,195004],\"mapped\",[34600]],[[195005,195005],\"mapped\",[34667]],[[195006,195006],\"mapped\",[34694]],[[195007,195007],\"disallowed\"],[[195008,195008],\"mapped\",[34785]],[[195009,195009],\"mapped\",[34817]],[[195010,195010],\"mapped\",[17913]],[[195011,195011],\"mapped\",[34912]],[[195012,195012],\"mapped\",[34915]],[[195013,195013],\"mapped\",[161383]],[[195014,195014],\"mapped\",[35031]],[[195015,195015],\"mapped\",[35038]],[[195016,195016],\"mapped\",[17973]],[[195017,195017],\"mapped\",[35066]],[[195018,195018],\"mapped\",[13499]],[[195019,195019],\"mapped\",[161966]],[[195020,195020],\"mapped\",[162150]],[[195021,195021],\"mapped\",[18110]],[[195022,195022],\"mapped\",[18119]],[[195023,195023],\"mapped\",[35488]],[[195024,195024],\"mapped\",[35565]],[[195025,195025],\"mapped\",[35722]],[[195026,195026],\"mapped\",[35925]],[[195027,195027],\"mapped\",[162984]],[[195028,195028],\"mapped\",[36011]],[[195029,195029],\"mapped\",[36033]],[[195030,195030],\"mapped\",[36123]],[[195031,195031],\"mapped\",[36215]],[[195032,195032],\"mapped\",[163631]],[[195033,195033],\"mapped\",[133124]],[[195034,195034],\"mapped\",[36299]],[[195035,195035],\"mapped\",[36284]],[[195036,195036],\"mapped\",[36336]],[[195037,195037],\"mapped\",[133342]],[[195038,195038],\"mapped\",[3
6564]],[[195039,195039],\"mapped\",[36664]],[[195040,195040],\"mapped\",[165330]],[[195041,195041],\"mapped\",[165357]],[[195042,195042],\"mapped\",[37012]],[[195043,195043],\"mapped\",[37105]],[[195044,195044],\"mapped\",[37137]],[[195045,195045],\"mapped\",[165678]],[[195046,195046],\"mapped\",[37147]],[[195047,195047],\"mapped\",[37432]],[[195048,195048],\"mapped\",[37591]],[[195049,195049],\"mapped\",[37592]],[[195050,195050],\"mapped\",[37500]],[[195051,195051],\"mapped\",[37881]],[[195052,195052],\"mapped\",[37909]],[[195053,195053],\"mapped\",[166906]],[[195054,195054],\"mapped\",[38283]],[[195055,195055],\"mapped\",[18837]],[[195056,195056],\"mapped\",[38327]],[[195057,195057],\"mapped\",[167287]],[[195058,195058],\"mapped\",[18918]],[[195059,195059],\"mapped\",[38595]],[[195060,195060],\"mapped\",[23986]],[[195061,195061],\"mapped\",[38691]],[[195062,195062],\"mapped\",[168261]],[[195063,195063],\"mapped\",[168474]],[[195064,195064],\"mapped\",[19054]],[[195065,195065],\"mapped\",[19062]],[[195066,195066],\"mapped\",[38880]],[[195067,195067],\"mapped\",[168970]],[[195068,195068],\"mapped\",[19122]],[[195069,195069],\"mapped\",[169110]],[[195070,195071],\"mapped\",[38923]],[[195072,195072],\"mapped\",[38953]],[[195073,195073],\"mapped\",[169398]],[[195074,195074],\"mapped\",[39138]],[[195075,195075],\"mapped\",[19251]],[[195076,195076],\"mapped\",[39209]],[[195077,195077],\"mapped\",[39335]],[[195078,195078],\"mapped\",[39362]],[[195079,195079],\"mapped\",[39422]],[[195080,195080],\"mapped\",[19406]],[[195081,195081],\"mapped\",[170800]],[[195082,195082],\"mapped\",[39698]],[[195083,195083],\"mapped\",[40000]],[[195084,195084],\"mapped\",[40189]],[[195085,195085],\"mapped\",[19662]],[[195086,195086],\"mapped\",[19693]],[[195087,195087],\"mapped\",[40295]],[[195088,195088],\"mapped\",[172238]],[[195089,195089],\"mapped\",[19704]],[[195090,195090],\"mapped\",[172293]],[[195091,195091],\"mapped\",[172558]],[[195092,195092],\"mapped\",[172689]],[[195093,195093],
\"mapped\",[40635]],[[195094,195094],\"mapped\",[19798]],[[195095,195095],\"mapped\",[40697]],[[195096,195096],\"mapped\",[40702]],[[195097,195097],\"mapped\",[40709]],[[195098,195098],\"mapped\",[40719]],[[195099,195099],\"mapped\",[40726]],[[195100,195100],\"mapped\",[40763]],[[195101,195101],\"mapped\",[173568]],[[195102,196605],\"disallowed\"],[[196606,196607],\"disallowed\"],[[196608,262141],\"disallowed\"],[[262142,262143],\"disallowed\"],[[262144,327677],\"disallowed\"],[[327678,327679],\"disallowed\"],[[327680,393213],\"disallowed\"],[[393214,393215],\"disallowed\"],[[393216,458749],\"disallowed\"],[[458750,458751],\"disallowed\"],[[458752,524285],\"disallowed\"],[[524286,524287],\"disallowed\"],[[524288,589821],\"disallowed\"],[[589822,589823],\"disallowed\"],[[589824,655357],\"disallowed\"],[[655358,655359],\"disallowed\"],[[655360,720893],\"disallowed\"],[[720894,720895],\"disallowed\"],[[720896,786429],\"disallowed\"],[[786430,786431],\"disallowed\"],[[786432,851965],\"disallowed\"],[[851966,851967],\"disallowed\"],[[851968,917501],\"disallowed\"],[[917502,917503],\"disallowed\"],[[917504,917504],\"disallowed\"],[[917505,917505],\"disallowed\"],[[917506,917535],\"disallowed\"],[[917536,917631],\"disallowed\"],[[917632,917759],\"disallowed\"],[[917760,917999],\"ignored\"],[[918000,983037],\"disallowed\"],[[983038,983039],\"disallowed\"],[[983040,1048573],\"disallowed\"],[[1048574,1048575],\"disallowed\"],[[1048576,1114109],\"disallowed\"],[[1114110,1114111],\"disallowed\"]]"); + /***/ }), /***/ 2357: @@ -16068,6 +20018,14 @@ module.exports = require("child_process"); /***/ }), +/***/ 6417: +/***/ ((module) => { + +"use strict"; +module.exports = require("crypto"); + +/***/ }), + /***/ 8614: /***/ ((module) => { @@ -16124,6 +20082,14 @@ module.exports = require("path"); /***/ }), +/***/ 4213: +/***/ ((module) => { + +"use strict"; +module.exports = require("punycode"); + +/***/ }), + /***/ 2413: /***/ ((module) => { @@ -16196,6 +20162,46 @@ module.exports 
= require("zlib"); /******/ } /******/ /************************************************************************/ +/******/ /* webpack/runtime/compat get default export */ +/******/ (() => { +/******/ // getDefaultExport function for compatibility with non-harmony modules +/******/ __webpack_require__.n = (module) => { +/******/ var getter = module && module.__esModule ? +/******/ () => module['default'] : +/******/ () => module; +/******/ __webpack_require__.d(getter, { a: getter }); +/******/ return getter; +/******/ }; +/******/ })(); +/******/ +/******/ /* webpack/runtime/define property getters */ +/******/ (() => { +/******/ // define getter functions for harmony exports +/******/ __webpack_require__.d = (exports, definition) => { +/******/ for(var key in definition) { +/******/ if(__webpack_require__.o(definition, key) && !__webpack_require__.o(exports, key)) { +/******/ Object.defineProperty(exports, key, { enumerable: true, get: definition[key] }); +/******/ } +/******/ } +/******/ }; +/******/ })(); +/******/ +/******/ /* webpack/runtime/hasOwnProperty shorthand */ +/******/ (() => { +/******/ __webpack_require__.o = (obj, prop) => Object.prototype.hasOwnProperty.call(obj, prop) +/******/ })(); +/******/ +/******/ /* webpack/runtime/make namespace object */ +/******/ (() => { +/******/ // define __esModule on exports +/******/ __webpack_require__.r = (exports) => { +/******/ if(typeof Symbol !== 'undefined' && Symbol.toStringTag) { +/******/ Object.defineProperty(exports, Symbol.toStringTag, { value: 'Module' }); +/******/ } +/******/ Object.defineProperty(exports, '__esModule', { value: true }); +/******/ }; +/******/ })(); +/******/ /******/ /* webpack/runtime/compat */ /******/ /******/ __webpack_require__.ab = __dirname + "/";/************************************************************************/ diff --git a/.github/workflows/actions/release-notes/package-lock.json b/.github/workflows/actions/release-notes/package-lock.json index 
46fd56e188..fd686df336 100644 --- a/.github/workflows/actions/release-notes/package-lock.json +++ b/.github/workflows/actions/release-notes/package-lock.json @@ -1,145 +1,718 @@ { "name": "release-notes", "version": "1.0.0", - "lockfileVersion": 1, + "lockfileVersion": 2, "requires": true, + "packages": { + "": { + "name": "release-notes", + "version": "1.0.0", + "dependencies": { + "@actions/core": "^1.10.0", + "@actions/github": "^6.0.1", + "@octokit/rest": "^22.0.0", + "yaml": "^1.10.0" + }, + "devDependencies": { + "@vercel/ncc": "^0.24.1" + } + }, + "node_modules/@actions/core": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.10.0.tgz", + "integrity": "sha512-2aZDDa3zrrZbP5ZYg159sNoLRb61nQ7awl5pSvIq5Qpj81vwDzdMRKzkWJGJuwVvWpvZKx7vspJALyvaaIQyug==", + "dependencies": { + "@actions/http-client": "^2.0.1", + "uuid": "^8.3.2" + } + }, + "node_modules/@actions/github": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/@actions/github/-/github-6.0.1.tgz", + "integrity": "sha512-xbZVcaqD4XnQAe35qSQqskb3SqIAfRyLBrHMd/8TuL7hJSz2QtbDwnNM8zWx4zO5l2fnGtseNE3MbEvD7BxVMw==", + "dependencies": { + "@actions/http-client": "^2.2.0", + "@octokit/core": "^5.0.1", + "@octokit/plugin-paginate-rest": "^9.2.2", + "@octokit/plugin-rest-endpoint-methods": "^10.4.0", + "@octokit/request": "^8.4.1", + "@octokit/request-error": "^5.1.1", + "undici": "^5.28.5" + } + }, + "node_modules/@actions/http-client": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.2.3.tgz", + "integrity": "sha512-mx8hyJi/hjFvbPokCg4uRd4ZX78t+YyRPtnKWwIl+RzNaVuFpQHfmlGVfsKEJN8LwTCvL+DfVgAM04XaHkm6bA==", + "dependencies": { + "tunnel": "^0.0.6", + "undici": "^5.25.4" + } + }, + "node_modules/@fastify/busboy": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.1.1.tgz", + "integrity": 
"sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==", + "engines": { + "node": ">=14" + } + }, + "node_modules/@octokit/auth-token": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-4.0.0.tgz", + "integrity": "sha512-tY/msAuJo6ARbK6SPIxZrPBms3xPbfwBrulZe0Wtr/DIY9lje2HeV1uoebShn6mx7SjCHif6EjMvoREj+gZ+SA==", + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/core": { + "version": "5.2.2", + "resolved": "https://registry.npmjs.org/@octokit/core/-/core-5.2.2.tgz", + "integrity": "sha512-/g2d4sW9nUDJOMz3mabVQvOGhVa4e/BN/Um7yca9Bb2XTzPPnfTWHWQg+IsEYO7M3Vx+EXvaM/I2pJWIMun1bg==", + "dependencies": { + "@octokit/auth-token": "^4.0.0", + "@octokit/graphql": "^7.1.0", + "@octokit/request": "^8.4.1", + "@octokit/request-error": "^5.1.1", + "@octokit/types": "^13.0.0", + "before-after-hook": "^2.2.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/endpoint": { + "version": "9.0.6", + "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-9.0.6.tgz", + "integrity": "sha512-H1fNTMA57HbkFESSt3Y9+FBICv+0jFceJFPWDePYlR/iMGrwM5ph+Dd4XRQs+8X+PUFURLQgX9ChPfhJ/1uNQw==", + "dependencies": { + "@octokit/types": "^13.1.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/graphql": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-7.1.1.tgz", + "integrity": "sha512-3mkDltSfcDUoa176nlGoA32RGjeWjl3K7F/BwHwRMJUW/IteSa4bnSV8p2ThNkcIcZU2umkZWxwETSSCJf2Q7g==", + "dependencies": { + "@octokit/request": "^8.4.1", + "@octokit/types": "^13.0.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/openapi-types": { + "version": "24.2.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-24.2.0.tgz", + "integrity": 
"sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==" + }, + "node_modules/@octokit/plugin-paginate-rest": { + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-9.2.2.tgz", + "integrity": "sha512-u3KYkGF7GcZnSD/3UP0S7K5XUFT2FkOQdcfXZGZQPGv3lm4F2Xbf71lvjldr8c1H3nNbF+33cLEkWYbokGWqiQ==", + "dependencies": { + "@octokit/types": "^12.6.0" + }, + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "@octokit/core": "5" + } + }, + "node_modules/@octokit/plugin-paginate-rest/node_modules/@octokit/openapi-types": { + "version": "20.0.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-20.0.0.tgz", + "integrity": "sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA==" + }, + "node_modules/@octokit/plugin-paginate-rest/node_modules/@octokit/types": { + "version": "12.6.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-12.6.0.tgz", + "integrity": "sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw==", + "dependencies": { + "@octokit/openapi-types": "^20.0.0" + } + }, + "node_modules/@octokit/plugin-rest-endpoint-methods": { + "version": "10.4.1", + "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-10.4.1.tgz", + "integrity": "sha512-xV1b+ceKV9KytQe3zCVqjg+8GTGfDYwaT1ATU5isiUyVtlVAO3HNdzpS4sr4GBx4hxQ46s7ITtZrAsxG22+rVg==", + "dependencies": { + "@octokit/types": "^12.6.0" + }, + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "@octokit/core": "5" + } + }, + "node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/openapi-types": { + "version": "20.0.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-20.0.0.tgz", + "integrity": 
"sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA==" + }, + "node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types": { + "version": "12.6.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-12.6.0.tgz", + "integrity": "sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw==", + "dependencies": { + "@octokit/openapi-types": "^20.0.0" + } + }, + "node_modules/@octokit/request": { + "version": "8.4.1", + "resolved": "https://registry.npmjs.org/@octokit/request/-/request-8.4.1.tgz", + "integrity": "sha512-qnB2+SY3hkCmBxZsR/MPCybNmbJe4KAlfWErXq+rBKkQJlbjdJeS85VI9r8UqeLYLvnAenU8Q1okM/0MBsAGXw==", + "dependencies": { + "@octokit/endpoint": "^9.0.6", + "@octokit/request-error": "^5.1.1", + "@octokit/types": "^13.1.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/request-error": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-5.1.1.tgz", + "integrity": "sha512-v9iyEQJH6ZntoENr9/yXxjuezh4My67CBSu9r6Ve/05Iu5gNgnisNWOsoJHTP6k0Rr0+HQIpnH+kyammu90q/g==", + "dependencies": { + "@octokit/types": "^13.1.0", + "deprecation": "^2.0.0", + "once": "^1.4.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/rest": { + "version": "22.0.0", + "resolved": "https://registry.npmjs.org/@octokit/rest/-/rest-22.0.0.tgz", + "integrity": "sha512-z6tmTu9BTnw51jYGulxrlernpsQYXpui1RK21vmXn8yF5bp6iX16yfTtJYGK5Mh1qDkvDOmp2n8sRMcQmR8jiA==", + "dependencies": { + "@octokit/core": "^7.0.2", + "@octokit/plugin-paginate-rest": "^13.0.1", + "@octokit/plugin-request-log": "^6.0.0", + "@octokit/plugin-rest-endpoint-methods": "^16.0.0" + }, + "engines": { + "node": ">= 20" + } + }, + "node_modules/@octokit/rest/node_modules/@octokit/auth-token": { + "version": "6.0.0", + "resolved": 
"https://registry.npmjs.org/@octokit/auth-token/-/auth-token-6.0.0.tgz", + "integrity": "sha512-P4YJBPdPSpWTQ1NU4XYdvHvXJJDxM6YwpS0FZHRgP7YFkdVxsWcpWGy/NVqlAA7PcPCnMacXlRm1y2PFZRWL/w==", + "engines": { + "node": ">= 20" + } + }, + "node_modules/@octokit/rest/node_modules/@octokit/core": { + "version": "7.0.5", + "resolved": "https://registry.npmjs.org/@octokit/core/-/core-7.0.5.tgz", + "integrity": "sha512-t54CUOsFMappY1Jbzb7fetWeO0n6K0k/4+/ZpkS+3Joz8I4VcvY9OiEBFRYISqaI2fq5sCiPtAjRDOzVYG8m+Q==", + "dependencies": { + "@octokit/auth-token": "^6.0.0", + "@octokit/graphql": "^9.0.2", + "@octokit/request": "^10.0.4", + "@octokit/request-error": "^7.0.1", + "@octokit/types": "^15.0.0", + "before-after-hook": "^4.0.0", + "universal-user-agent": "^7.0.0" + }, + "engines": { + "node": ">= 20" + } + }, + "node_modules/@octokit/rest/node_modules/@octokit/endpoint": { + "version": "11.0.1", + "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-11.0.1.tgz", + "integrity": "sha512-7P1dRAZxuWAOPI7kXfio88trNi/MegQ0IJD3vfgC3b+LZo1Qe6gRJc2v0mz2USWWJOKrB2h5spXCzGbw+fAdqA==", + "dependencies": { + "@octokit/types": "^15.0.0", + "universal-user-agent": "^7.0.2" + }, + "engines": { + "node": ">= 20" + } + }, + "node_modules/@octokit/rest/node_modules/@octokit/graphql": { + "version": "9.0.2", + "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-9.0.2.tgz", + "integrity": "sha512-iz6KzZ7u95Fzy9Nt2L8cG88lGRMr/qy1Q36ih/XVzMIlPDMYwaNLE/ENhqmIzgPrlNWiYJkwmveEetvxAgFBJw==", + "dependencies": { + "@octokit/request": "^10.0.4", + "@octokit/types": "^15.0.0", + "universal-user-agent": "^7.0.0" + }, + "engines": { + "node": ">= 20" + } + }, + "node_modules/@octokit/rest/node_modules/@octokit/openapi-types": { + "version": "26.0.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-26.0.0.tgz", + "integrity": "sha512-7AtcfKtpo77j7Ts73b4OWhOZHTKo/gGY8bB3bNBQz4H+GRSWqx2yvj8TXRsbdTE0eRmYmXOEY66jM7mJ7LzfsA==" + }, + 
"node_modules/@octokit/rest/node_modules/@octokit/plugin-paginate-rest": { + "version": "13.2.0", + "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-13.2.0.tgz", + "integrity": "sha512-YuAlyjR8o5QoRSOvMHxSJzPtogkNMgeMv2mpccrvdUGeC3MKyfi/hS+KiFwyH/iRKIKyx+eIMsDjbt3p9r2GYA==", + "dependencies": { + "@octokit/types": "^15.0.0" + }, + "engines": { + "node": ">= 20" + }, + "peerDependencies": { + "@octokit/core": ">=6" + } + }, + "node_modules/@octokit/rest/node_modules/@octokit/plugin-request-log": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/@octokit/plugin-request-log/-/plugin-request-log-6.0.0.tgz", + "integrity": "sha512-UkOzeEN3W91/eBq9sPZNQ7sUBvYCqYbrrD8gTbBuGtHEuycE4/awMXcYvx6sVYo7LypPhmQwwpUe4Yyu4QZN5Q==", + "engines": { + "node": ">= 20" + }, + "peerDependencies": { + "@octokit/core": ">=6" + } + }, + "node_modules/@octokit/rest/node_modules/@octokit/plugin-rest-endpoint-methods": { + "version": "16.1.0", + "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-16.1.0.tgz", + "integrity": "sha512-nCsyiKoGRnhH5LkH8hJEZb9swpqOcsW+VXv1QoyUNQXJeVODG4+xM6UICEqyqe9XFr6LkL8BIiFCPev8zMDXPw==", + "dependencies": { + "@octokit/types": "^15.0.0" + }, + "engines": { + "node": ">= 20" + }, + "peerDependencies": { + "@octokit/core": ">=6" + } + }, + "node_modules/@octokit/rest/node_modules/@octokit/request": { + "version": "10.0.5", + "resolved": "https://registry.npmjs.org/@octokit/request/-/request-10.0.5.tgz", + "integrity": "sha512-TXnouHIYLtgDhKo+N6mXATnDBkV05VwbR0TtMWpgTHIoQdRQfCSzmy/LGqR1AbRMbijq/EckC/E3/ZNcU92NaQ==", + "dependencies": { + "@octokit/endpoint": "^11.0.1", + "@octokit/request-error": "^7.0.1", + "@octokit/types": "^15.0.0", + "fast-content-type-parse": "^3.0.0", + "universal-user-agent": "^7.0.2" + }, + "engines": { + "node": ">= 20" + } + }, + "node_modules/@octokit/rest/node_modules/@octokit/request-error": { + "version": "7.0.1", + 
"resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-7.0.1.tgz", + "integrity": "sha512-CZpFwV4+1uBrxu7Cw8E5NCXDWFNf18MSY23TdxCBgjw1tXXHvTrZVsXlW8hgFTOLw8RQR1BBrMvYRtuyaijHMA==", + "dependencies": { + "@octokit/types": "^15.0.0" + }, + "engines": { + "node": ">= 20" + } + }, + "node_modules/@octokit/rest/node_modules/@octokit/types": { + "version": "15.0.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-15.0.0.tgz", + "integrity": "sha512-8o6yDfmoGJUIeR9OfYU0/TUJTnMPG2r68+1yEdUeG2Fdqpj8Qetg0ziKIgcBm0RW/j29H41WP37CYCEhp6GoHQ==", + "dependencies": { + "@octokit/openapi-types": "^26.0.0" + } + }, + "node_modules/@octokit/rest/node_modules/before-after-hook": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-4.0.0.tgz", + "integrity": "sha512-q6tR3RPqIB1pMiTRMFcZwuG5T8vwp+vUvEG0vuI6B+Rikh5BfPp2fQ82c925FOs+b0lcFQ8CFrL+KbilfZFhOQ==" + }, + "node_modules/@octokit/rest/node_modules/universal-user-agent": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-7.0.3.tgz", + "integrity": "sha512-TmnEAEAsBJVZM/AADELsK76llnwcf9vMKuPz8JflO1frO8Lchitr0fNaN9d+Ap0BjKtqWqd/J17qeDnXh8CL2A==" + }, + "node_modules/@octokit/types": { + "version": "13.10.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.10.0.tgz", + "integrity": "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==", + "dependencies": { + "@octokit/openapi-types": "^24.2.0" + } + }, + "node_modules/@vercel/ncc": { + "version": "0.24.1", + "resolved": "https://registry.npmjs.org/@vercel/ncc/-/ncc-0.24.1.tgz", + "integrity": "sha512-r9m7brz2hNmq5TF3sxrK4qR/FhXn44XIMglQUir4sT7Sh5GOaYXlMYikHFwJStf8rmQGTlvOoBXt4yHVonRG8A==", + "dev": true, + "bin": { + "ncc": "dist/ncc/cli.js" + } + }, + "node_modules/before-after-hook": { + "version": "2.2.3", + "resolved": 
"https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.3.tgz", + "integrity": "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==" + }, + "node_modules/deprecation": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", + "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==" + }, + "node_modules/fast-content-type-parse": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/fast-content-type-parse/-/fast-content-type-parse-3.0.0.tgz", + "integrity": "sha512-ZvLdcY8P+N8mGQJahJV5G4U88CSvT1rP8ApL6uETe88MBXrBHAkZlSEySdUlyztF7ccb+Znos3TFqaepHxdhBg==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ] + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/tunnel": { + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", + "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==", + "engines": { + "node": ">=0.6.11 <=0.7.0 || >=0.7.3" + } + }, + "node_modules/undici": { + "version": "5.29.0", + "resolved": "https://registry.npmjs.org/undici/-/undici-5.29.0.tgz", + "integrity": "sha512-raqeBD6NQK4SkWhQzeYKd1KmIG6dllBOTt55Rmkt4HtI9mwdWtJljnrXjAFUBLTSN67HWrOIZ3EPF4kjUw80Bg==", + "dependencies": { + "@fastify/busboy": "^2.0.0" + }, + "engines": { + "node": ">=14.0" + } + }, + "node_modules/universal-user-agent": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.1.tgz", + "integrity": 
"sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ==" + }, + "node_modules/uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" + }, + "node_modules/yaml": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.0.tgz", + "integrity": "sha512-yr2icI4glYaNG+KWONODapy2/jDdMSDnrONSjblABjD9B4Z5LgiircSt8m8sRZFNi08kG9Sm0uSHtEmP3zaEGg==", + "engines": { + "node": ">= 6" + } + } + }, "dependencies": { "@actions/core": { - "version": "1.2.6", - "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.2.6.tgz", - "integrity": "sha512-ZQYitnqiyBc3D+k7LsgSBmMDVkOVidaagDG7j3fOym77jNunWRuYx7VSHa9GNfFZh+zh61xsCjRj4JxMZlDqTA==" + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.10.0.tgz", + "integrity": "sha512-2aZDDa3zrrZbP5ZYg159sNoLRb61nQ7awl5pSvIq5Qpj81vwDzdMRKzkWJGJuwVvWpvZKx7vspJALyvaaIQyug==", + "requires": { + "@actions/http-client": "^2.0.1", + "uuid": "^8.3.2" + } }, "@actions/github": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/@actions/github/-/github-4.0.0.tgz", - "integrity": "sha512-Ej/Y2E+VV6sR9X7pWL5F3VgEWrABaT292DRqRU6R4hnQjPtC/zD3nagxVdXWiRQvYDh8kHXo7IDmG42eJ/dOMA==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/@actions/github/-/github-6.0.1.tgz", + "integrity": "sha512-xbZVcaqD4XnQAe35qSQqskb3SqIAfRyLBrHMd/8TuL7hJSz2QtbDwnNM8zWx4zO5l2fnGtseNE3MbEvD7BxVMw==", "requires": { - "@actions/http-client": "^1.0.8", - "@octokit/core": "^3.0.0", - "@octokit/plugin-paginate-rest": "^2.2.3", 
- "@octokit/plugin-rest-endpoint-methods": "^4.0.0" + "@actions/http-client": "^2.2.0", + "@octokit/core": "^5.0.1", + "@octokit/plugin-paginate-rest": "^9.2.2", + "@octokit/plugin-rest-endpoint-methods": "^10.4.0", + "@octokit/request": "^8.4.1", + "@octokit/request-error": "^5.1.1", + "undici": "^5.28.5" } }, "@actions/http-client": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-1.0.8.tgz", - "integrity": "sha512-G4JjJ6f9Hb3Zvejj+ewLLKLf99ZC+9v+yCxoYf9vSyH+WkzPLB2LuUtRMGNkooMqdugGBFStIKXOuvH1W+EctA==", + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.2.3.tgz", + "integrity": "sha512-mx8hyJi/hjFvbPokCg4uRd4ZX78t+YyRPtnKWwIl+RzNaVuFpQHfmlGVfsKEJN8LwTCvL+DfVgAM04XaHkm6bA==", "requires": { - "tunnel": "0.0.6" + "tunnel": "^0.0.6", + "undici": "^5.25.4" } }, + "@fastify/busboy": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.1.1.tgz", + "integrity": "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==" + }, "@octokit/auth-token": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-2.4.2.tgz", - "integrity": "sha512-jE/lE/IKIz2v1+/P0u4fJqv0kYwXOTujKemJMFr6FeopsxlIK3+wKDCJGnysg81XID5TgZQbIfuJ5J0lnTiuyQ==", - "requires": { - "@octokit/types": "^5.0.0" - } + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-4.0.0.tgz", + "integrity": "sha512-tY/msAuJo6ARbK6SPIxZrPBms3xPbfwBrulZe0Wtr/DIY9lje2HeV1uoebShn6mx7SjCHif6EjMvoREj+gZ+SA==" }, "@octokit/core": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/@octokit/core/-/core-3.1.0.tgz", - "integrity": "sha512-yPyQSmxIXLieEIRikk2w8AEtWkFdfG/LXcw1KvEtK3iP0ENZLW/WYQmdzOKqfSaLhooz4CJ9D+WY79C8ZliACw==", + "version": "5.2.2", + "resolved": "https://registry.npmjs.org/@octokit/core/-/core-5.2.2.tgz", + "integrity": 
"sha512-/g2d4sW9nUDJOMz3mabVQvOGhVa4e/BN/Um7yca9Bb2XTzPPnfTWHWQg+IsEYO7M3Vx+EXvaM/I2pJWIMun1bg==", "requires": { - "@octokit/auth-token": "^2.4.0", - "@octokit/graphql": "^4.3.1", - "@octokit/request": "^5.4.0", - "@octokit/types": "^5.0.0", - "before-after-hook": "^2.1.0", - "universal-user-agent": "^5.0.0" + "@octokit/auth-token": "^4.0.0", + "@octokit/graphql": "^7.1.0", + "@octokit/request": "^8.4.1", + "@octokit/request-error": "^5.1.1", + "@octokit/types": "^13.0.0", + "before-after-hook": "^2.2.0", + "universal-user-agent": "^6.0.0" } }, "@octokit/endpoint": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-6.0.3.tgz", - "integrity": "sha512-Y900+r0gIz+cWp6ytnkibbD95ucEzDSKzlEnaWS52hbCDNcCJYO5mRmWW7HRAnDc7am+N/5Lnd8MppSaTYx1Yg==", + "version": "9.0.6", + "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-9.0.6.tgz", + "integrity": "sha512-H1fNTMA57HbkFESSt3Y9+FBICv+0jFceJFPWDePYlR/iMGrwM5ph+Dd4XRQs+8X+PUFURLQgX9ChPfhJ/1uNQw==", "requires": { - "@octokit/types": "^5.0.0", - "is-plain-object": "^3.0.0", - "universal-user-agent": "^5.0.0" + "@octokit/types": "^13.1.0", + "universal-user-agent": "^6.0.0" } }, "@octokit/graphql": { - "version": "4.5.1", - "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-4.5.1.tgz", - "integrity": "sha512-qgMsROG9K2KxDs12CO3bySJaYoUu2aic90qpFrv7A8sEBzZ7UFGvdgPKiLw5gOPYEYbS0Xf8Tvf84tJutHPulQ==", + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-7.1.1.tgz", + "integrity": "sha512-3mkDltSfcDUoa176nlGoA32RGjeWjl3K7F/BwHwRMJUW/IteSa4bnSV8p2ThNkcIcZU2umkZWxwETSSCJf2Q7g==", "requires": { - "@octokit/request": "^5.3.0", - "@octokit/types": "^5.0.0", - "universal-user-agent": "^5.0.0" + "@octokit/request": "^8.4.1", + "@octokit/types": "^13.0.0", + "universal-user-agent": "^6.0.0" } }, + "@octokit/openapi-types": { + "version": "24.2.0", + "resolved": 
"https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-24.2.0.tgz", + "integrity": "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==" + }, "@octokit/plugin-paginate-rest": { - "version": "2.2.3", - "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-2.2.3.tgz", - "integrity": "sha512-eKTs91wXnJH8Yicwa30jz6DF50kAh7vkcqCQ9D7/tvBAP5KKkg6I2nNof8Mp/65G0Arjsb4QcOJcIEQY+rK1Rg==", + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-9.2.2.tgz", + "integrity": "sha512-u3KYkGF7GcZnSD/3UP0S7K5XUFT2FkOQdcfXZGZQPGv3lm4F2Xbf71lvjldr8c1H3nNbF+33cLEkWYbokGWqiQ==", "requires": { - "@octokit/types": "^5.0.0" + "@octokit/types": "^12.6.0" + }, + "dependencies": { + "@octokit/openapi-types": { + "version": "20.0.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-20.0.0.tgz", + "integrity": "sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA==" + }, + "@octokit/types": { + "version": "12.6.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-12.6.0.tgz", + "integrity": "sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw==", + "requires": { + "@octokit/openapi-types": "^20.0.0" + } + } } }, - "@octokit/plugin-request-log": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@octokit/plugin-request-log/-/plugin-request-log-1.0.0.tgz", - "integrity": "sha512-ywoxP68aOT3zHCLgWZgwUJatiENeHE7xJzYjfz8WI0goynp96wETBF+d95b8g/uL4QmS6owPVlaxiz3wyMAzcw==" - }, "@octokit/plugin-rest-endpoint-methods": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-4.0.0.tgz", - "integrity": "sha512-emS6gysz4E9BNi9IrCl7Pm4kR+Az3MmVB0/DoDCmF4U48NbYG3weKyDlgkrz6Jbl4Mu4nDx8YWZwC4HjoTdcCA==", + "version": "10.4.1", + 
"resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-10.4.1.tgz", + "integrity": "sha512-xV1b+ceKV9KytQe3zCVqjg+8GTGfDYwaT1ATU5isiUyVtlVAO3HNdzpS4sr4GBx4hxQ46s7ITtZrAsxG22+rVg==", "requires": { - "@octokit/types": "^5.0.0", - "deprecation": "^2.3.1" + "@octokit/types": "^12.6.0" + }, + "dependencies": { + "@octokit/openapi-types": { + "version": "20.0.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-20.0.0.tgz", + "integrity": "sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA==" + }, + "@octokit/types": { + "version": "12.6.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-12.6.0.tgz", + "integrity": "sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw==", + "requires": { + "@octokit/openapi-types": "^20.0.0" + } + } } }, "@octokit/request": { - "version": "5.4.5", - "resolved": "https://registry.npmjs.org/@octokit/request/-/request-5.4.5.tgz", - "integrity": "sha512-atAs5GAGbZedvJXXdjtKljin+e2SltEs48B3naJjqWupYl2IUBbB/CJisyjbNHcKpHzb3E+OYEZ46G8eakXgQg==", + "version": "8.4.1", + "resolved": "https://registry.npmjs.org/@octokit/request/-/request-8.4.1.tgz", + "integrity": "sha512-qnB2+SY3hkCmBxZsR/MPCybNmbJe4KAlfWErXq+rBKkQJlbjdJeS85VI9r8UqeLYLvnAenU8Q1okM/0MBsAGXw==", "requires": { - "@octokit/endpoint": "^6.0.1", - "@octokit/request-error": "^2.0.0", - "@octokit/types": "^5.0.0", - "deprecation": "^2.0.0", - "is-plain-object": "^3.0.0", - "node-fetch": "^2.3.0", - "once": "^1.4.0", - "universal-user-agent": "^5.0.0" + "@octokit/endpoint": "^9.0.6", + "@octokit/request-error": "^5.1.1", + "@octokit/types": "^13.1.0", + "universal-user-agent": "^6.0.0" } }, "@octokit/request-error": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-2.0.2.tgz", - "integrity": 
"sha512-2BrmnvVSV1MXQvEkrb9zwzP0wXFNbPJij922kYBTLIlIafukrGOb+ABBT2+c6wZiuyWDH1K1zmjGQ0toN/wMWw==", + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-5.1.1.tgz", + "integrity": "sha512-v9iyEQJH6ZntoENr9/yXxjuezh4My67CBSu9r6Ve/05Iu5gNgnisNWOsoJHTP6k0Rr0+HQIpnH+kyammu90q/g==", "requires": { - "@octokit/types": "^5.0.1", + "@octokit/types": "^13.1.0", "deprecation": "^2.0.0", "once": "^1.4.0" } }, "@octokit/rest": { - "version": "18.0.0", - "resolved": "https://registry.npmjs.org/@octokit/rest/-/rest-18.0.0.tgz", - "integrity": "sha512-4G/a42lry9NFGuuECnua1R1eoKkdBYJap97jYbWDNYBOUboWcM75GJ1VIcfvwDV/pW0lMPs7CEmhHoVrSV5shg==", + "version": "22.0.0", + "resolved": "https://registry.npmjs.org/@octokit/rest/-/rest-22.0.0.tgz", + "integrity": "sha512-z6tmTu9BTnw51jYGulxrlernpsQYXpui1RK21vmXn8yF5bp6iX16yfTtJYGK5Mh1qDkvDOmp2n8sRMcQmR8jiA==", "requires": { - "@octokit/core": "^3.0.0", - "@octokit/plugin-paginate-rest": "^2.2.0", - "@octokit/plugin-request-log": "^1.0.0", - "@octokit/plugin-rest-endpoint-methods": "4.0.0" + "@octokit/core": "^7.0.2", + "@octokit/plugin-paginate-rest": "^13.0.1", + "@octokit/plugin-request-log": "^6.0.0", + "@octokit/plugin-rest-endpoint-methods": "^16.0.0" + }, + "dependencies": { + "@octokit/auth-token": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-6.0.0.tgz", + "integrity": "sha512-P4YJBPdPSpWTQ1NU4XYdvHvXJJDxM6YwpS0FZHRgP7YFkdVxsWcpWGy/NVqlAA7PcPCnMacXlRm1y2PFZRWL/w==" + }, + "@octokit/core": { + "version": "7.0.5", + "resolved": "https://registry.npmjs.org/@octokit/core/-/core-7.0.5.tgz", + "integrity": "sha512-t54CUOsFMappY1Jbzb7fetWeO0n6K0k/4+/ZpkS+3Joz8I4VcvY9OiEBFRYISqaI2fq5sCiPtAjRDOzVYG8m+Q==", + "requires": { + "@octokit/auth-token": "^6.0.0", + "@octokit/graphql": "^9.0.2", + "@octokit/request": "^10.0.4", + "@octokit/request-error": "^7.0.1", + "@octokit/types": "^15.0.0", + "before-after-hook": "^4.0.0", + 
"universal-user-agent": "^7.0.0" + } + }, + "@octokit/endpoint": { + "version": "11.0.1", + "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-11.0.1.tgz", + "integrity": "sha512-7P1dRAZxuWAOPI7kXfio88trNi/MegQ0IJD3vfgC3b+LZo1Qe6gRJc2v0mz2USWWJOKrB2h5spXCzGbw+fAdqA==", + "requires": { + "@octokit/types": "^15.0.0", + "universal-user-agent": "^7.0.2" + } + }, + "@octokit/graphql": { + "version": "9.0.2", + "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-9.0.2.tgz", + "integrity": "sha512-iz6KzZ7u95Fzy9Nt2L8cG88lGRMr/qy1Q36ih/XVzMIlPDMYwaNLE/ENhqmIzgPrlNWiYJkwmveEetvxAgFBJw==", + "requires": { + "@octokit/request": "^10.0.4", + "@octokit/types": "^15.0.0", + "universal-user-agent": "^7.0.0" + } + }, + "@octokit/openapi-types": { + "version": "26.0.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-26.0.0.tgz", + "integrity": "sha512-7AtcfKtpo77j7Ts73b4OWhOZHTKo/gGY8bB3bNBQz4H+GRSWqx2yvj8TXRsbdTE0eRmYmXOEY66jM7mJ7LzfsA==" + }, + "@octokit/plugin-paginate-rest": { + "version": "13.2.0", + "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-13.2.0.tgz", + "integrity": "sha512-YuAlyjR8o5QoRSOvMHxSJzPtogkNMgeMv2mpccrvdUGeC3MKyfi/hS+KiFwyH/iRKIKyx+eIMsDjbt3p9r2GYA==", + "requires": { + "@octokit/types": "^15.0.0" + } + }, + "@octokit/plugin-request-log": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/@octokit/plugin-request-log/-/plugin-request-log-6.0.0.tgz", + "integrity": "sha512-UkOzeEN3W91/eBq9sPZNQ7sUBvYCqYbrrD8gTbBuGtHEuycE4/awMXcYvx6sVYo7LypPhmQwwpUe4Yyu4QZN5Q==", + "requires": {} + }, + "@octokit/plugin-rest-endpoint-methods": { + "version": "16.1.0", + "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-16.1.0.tgz", + "integrity": "sha512-nCsyiKoGRnhH5LkH8hJEZb9swpqOcsW+VXv1QoyUNQXJeVODG4+xM6UICEqyqe9XFr6LkL8BIiFCPev8zMDXPw==", + "requires": { + "@octokit/types": "^15.0.0" + } + 
}, + "@octokit/request": { + "version": "10.0.5", + "resolved": "https://registry.npmjs.org/@octokit/request/-/request-10.0.5.tgz", + "integrity": "sha512-TXnouHIYLtgDhKo+N6mXATnDBkV05VwbR0TtMWpgTHIoQdRQfCSzmy/LGqR1AbRMbijq/EckC/E3/ZNcU92NaQ==", + "requires": { + "@octokit/endpoint": "^11.0.1", + "@octokit/request-error": "^7.0.1", + "@octokit/types": "^15.0.0", + "fast-content-type-parse": "^3.0.0", + "universal-user-agent": "^7.0.2" + } + }, + "@octokit/request-error": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-7.0.1.tgz", + "integrity": "sha512-CZpFwV4+1uBrxu7Cw8E5NCXDWFNf18MSY23TdxCBgjw1tXXHvTrZVsXlW8hgFTOLw8RQR1BBrMvYRtuyaijHMA==", + "requires": { + "@octokit/types": "^15.0.0" + } + }, + "@octokit/types": { + "version": "15.0.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-15.0.0.tgz", + "integrity": "sha512-8o6yDfmoGJUIeR9OfYU0/TUJTnMPG2r68+1yEdUeG2Fdqpj8Qetg0ziKIgcBm0RW/j29H41WP37CYCEhp6GoHQ==", + "requires": { + "@octokit/openapi-types": "^26.0.0" + } + }, + "before-after-hook": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-4.0.0.tgz", + "integrity": "sha512-q6tR3RPqIB1pMiTRMFcZwuG5T8vwp+vUvEG0vuI6B+Rikh5BfPp2fQ82c925FOs+b0lcFQ8CFrL+KbilfZFhOQ==" + }, + "universal-user-agent": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-7.0.3.tgz", + "integrity": "sha512-TmnEAEAsBJVZM/AADELsK76llnwcf9vMKuPz8JflO1frO8Lchitr0fNaN9d+Ap0BjKtqWqd/J17qeDnXh8CL2A==" + } } }, "@octokit/types": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/@octokit/types/-/types-5.0.1.tgz", - "integrity": "sha512-GorvORVwp244fGKEt3cgt/P+M0MGy4xEDbckw+K5ojEezxyMDgCaYPKVct+/eWQfZXOT7uq0xRpmrl/+hliabA==", + "version": "13.10.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.10.0.tgz", + "integrity": 
"sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==", "requires": { - "@types/node": ">= 8" + "@octokit/openapi-types": "^24.2.0" } }, - "@types/node": { - "version": "14.0.19", - "resolved": "https://registry.npmjs.org/@types/node/-/node-14.0.19.tgz", - "integrity": "sha512-yf3BP/NIXF37BjrK5klu//asUWitOEoUP5xE1mhSUjazotwJ/eJDgEmMQNlOeWOVv72j24QQ+3bqXHE++CFGag==" - }, "@vercel/ncc": { "version": "0.24.1", "resolved": "https://registry.npmjs.org/@vercel/ncc/-/ncc-0.24.1.tgz", @@ -147,214 +720,55 @@ "dev": true }, "before-after-hook": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.1.0.tgz", - "integrity": "sha512-IWIbu7pMqyw3EAJHzzHbWa85b6oud/yfKYg5rqB5hNE8CeMi3nX+2C2sj0HswfblST86hpVEOAb9x34NZd6P7A==" - }, - "cross-spawn": { - "version": "6.0.5", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", - "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", - "requires": { - "nice-try": "^1.0.4", - "path-key": "^2.0.1", - "semver": "^5.5.0", - "shebang-command": "^1.2.0", - "which": "^1.2.9" - } + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.3.tgz", + "integrity": "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==" }, "deprecation": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==" }, - "end-of-stream": { - "version": "1.4.4", - "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", - "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", - "requires": { - "once": "^1.4.0" - } - }, - "execa": { - "version": "1.0.0", - "resolved": 
"https://registry.npmjs.org/execa/-/execa-1.0.0.tgz", - "integrity": "sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA==", - "requires": { - "cross-spawn": "^6.0.0", - "get-stream": "^4.0.0", - "is-stream": "^1.1.0", - "npm-run-path": "^2.0.0", - "p-finally": "^1.0.0", - "signal-exit": "^3.0.0", - "strip-eof": "^1.0.0" - } - }, - "get-stream": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-4.1.0.tgz", - "integrity": "sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==", - "requires": { - "pump": "^3.0.0" - } - }, - "is-plain-object": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-3.0.1.tgz", - "integrity": "sha512-Xnpx182SBMrr/aBik8y+GuR4U1L9FqMSojwDQwPMmxyC6bvEqly9UBCxhauBF5vNh2gwWJNX6oDV7O+OM4z34g==" - }, - "is-stream": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", - "integrity": "sha1-EtSj3U5o4Lec6428hBc66A2RykQ=" - }, - "isexe": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=" - }, - "macos-release": { - "version": "2.4.0", - "resolved": "https://registry.npmjs.org/macos-release/-/macos-release-2.4.0.tgz", - "integrity": "sha512-ko6deozZYiAkqa/0gmcsz+p4jSy3gY7/ZsCEokPaYd8k+6/aXGkiTgr61+Owup7Sf+xjqW8u2ElhoM9SEcEfuA==" - }, - "nice-try": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz", - "integrity": "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==" - }, - "node-fetch": { - "version": "2.6.7", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz", - "integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==", - "requires": { - "whatwg-url": "^5.0.0" - } - }, - 
"npm-run-path": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-2.0.2.tgz", - "integrity": "sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8=", - "requires": { - "path-key": "^2.0.0" - } + "fast-content-type-parse": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/fast-content-type-parse/-/fast-content-type-parse-3.0.0.tgz", + "integrity": "sha512-ZvLdcY8P+N8mGQJahJV5G4U88CSvT1rP8ApL6uETe88MBXrBHAkZlSEySdUlyztF7ccb+Znos3TFqaepHxdhBg==" }, "once": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", "requires": { "wrappy": "1" } }, - "os-name": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/os-name/-/os-name-3.1.0.tgz", - "integrity": "sha512-h8L+8aNjNcMpo/mAIBPn5PXCM16iyPGjHNWo6U1YO8sJTMHtEtyczI6QJnLoplswm6goopQkqc7OAnjhWcugVg==", - "requires": { - "macos-release": "^2.2.0", - "windows-release": "^3.1.0" - } - }, - "p-finally": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/p-finally/-/p-finally-1.0.0.tgz", - "integrity": "sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4=" - }, - "path-key": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", - "integrity": "sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=" - }, - "pump": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", - "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", - "requires": { - "end-of-stream": "^1.1.0", - "once": "^1.3.1" - } - }, - "semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" - }, - "shebang-command": { - "version": "1.2.0", - "resolved": 
"https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", - "integrity": "sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=", - "requires": { - "shebang-regex": "^1.0.0" - } - }, - "shebang-regex": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", - "integrity": "sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=" - }, - "signal-exit": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.3.tgz", - "integrity": "sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA==" - }, - "strip-eof": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/strip-eof/-/strip-eof-1.0.0.tgz", - "integrity": "sha1-u0P/VZim6wXYm1n80SnJgzE2Br8=" - }, - "tr46": { - "version": "0.0.3", - "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", - "integrity": "sha1-gYT9NH2snNwYWZLzpmIuFLnZq2o=" - }, "tunnel": { "version": "0.0.6", "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==" }, - "universal-user-agent": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-5.0.0.tgz", - "integrity": "sha512-B5TPtzZleXyPrUMKCpEHFmVhMN6EhmJYjG5PQna9s7mXeSqGTLap4OpqLl5FCEFUI3UBmllkETwKf/db66Y54Q==", + "undici": { + "version": "5.29.0", + "resolved": "https://registry.npmjs.org/undici/-/undici-5.29.0.tgz", + "integrity": "sha512-raqeBD6NQK4SkWhQzeYKd1KmIG6dllBOTt55Rmkt4HtI9mwdWtJljnrXjAFUBLTSN67HWrOIZ3EPF4kjUw80Bg==", "requires": { - "os-name": "^3.1.0" + "@fastify/busboy": "^2.0.0" } }, - "webidl-conversions": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", - "integrity": "sha1-JFNCdeKnvGvnvIZhHMFq4KVlSHE=" - }, - "whatwg-url": { - "version": "5.0.0", - "resolved": 
"https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", - "integrity": "sha1-lmRU6HZUYuN2RNNib2dCzotwll0=", - "requires": { - "tr46": "~0.0.3", - "webidl-conversions": "^3.0.0" - } - }, - "which": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", - "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", - "requires": { - "isexe": "^2.0.0" - } + "universal-user-agent": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.1.tgz", + "integrity": "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ==" }, - "windows-release": { - "version": "3.3.1", - "resolved": "https://registry.npmjs.org/windows-release/-/windows-release-3.3.1.tgz", - "integrity": "sha512-Pngk/RDCaI/DkuHPlGTdIkDiTAnAkyMjoQMZqRsxydNl1qGXNIoZrB7RK8g53F2tEgQBMqQJHQdYZuQEEAu54A==", - "requires": { - "execa": "^1.0.0" - } + "uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==" }, "wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=" + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" }, "yaml": { "version": "1.10.0", diff --git a/.github/workflows/actions/release-notes/package.json b/.github/workflows/actions/release-notes/package.json index 8a170a5070..dadf27a540 100644 --- a/.github/workflows/actions/release-notes/package.json +++ b/.github/workflows/actions/release-notes/package.json @@ -6,9 +6,9 @@ "build": "ncc build action.js -o dist/" }, "dependencies": { - "@actions/core": "^1.2.6", - "@actions/github": "^4.0.0", - "@octokit/rest": "^18.0.0", + "@actions/core": "^1.10.0", + 
"@actions/github": "^6.0.1", + "@octokit/rest": "^22.0.0", "yaml": "^1.10.0" }, "devDependencies": { diff --git a/.github/workflows/benchmark.yml b/.github/workflows/benchmark.yml new file mode 100644 index 0000000000..52dcdd194a --- /dev/null +++ b/.github/workflows/benchmark.yml @@ -0,0 +1,46 @@ +name: benchmark +on: + push: + branches: + - main + +permissions: + contents: write + deployments: write + +jobs: + benchmark: + name: Run Go benchmark example + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Set up go + uses: actions/setup-go@v6 + with: + go-version: 1.25 + check-latest: true + go-version-file: 'go.mod' + - name: Set up go env + run: | + echo "GOPATH=$(go env GOPATH)" >> $GITHUB_ENV + echo "$(go env GOPATH)/bin" >> $GITHUB_PATH + shell: bash + - name: Run benchmark + run: | + git clone https://github.com/buildpacks/samples.git + mkdir out || (exit 0) + go test -bench=. -benchtime=1s ./benchmarks/... -tags=benchmarks | tee ./out/benchmark.txt + + - name: Store benchmark result + uses: benchmark-action/github-action-benchmark@v1 + with: + name: Go Benchmark + tool: 'go' + output-file-path: ./out/benchmark.txt + github-token: ${{ secrets.GITHUB_TOKEN }} + auto-push: true + # Show alert with commit comment on detecting possible performance regression + alert-threshold: '200%' + comment-on-alert: true + fail-on-alert: true + alert-comment-cc-users: '@buildpacks/platform-maintainers' diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index d5f2601a1a..2e8d7685dc 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -20,10 +20,11 @@ jobs: strategy: fail-fast: false matrix: - config: [macos, linux, windows-lcow, windows-wcow] + config: [macos, linux, windows] include: - config: macos - os: macos + # since macos-14 the latest runner is arm64 + os: macos-arm64 runner: macos-latest no_docker: "true" pack_bin: pack @@ -32,15 +33,10 @@ jobs: runner: ubuntu-latest no_docker: "false" pack_bin: pack - 
- config: windows-lcow + - config: windows os: windows - runner: [self-hosted, windows, lcow] - no_docker: "false" - pack_bin: pack.exe - - config: windows-wcow - os: windows - runner: [self-hosted, windows, wcow] - no_docker: "false" + runner: windows-latest + no_docker: "true" pack_bin: pack.exe runs-on: ${{ matrix.runner }} env: @@ -53,7 +49,7 @@ jobs: git config --global core.autocrlf false git config --global core.eol lf git config --global core.symlinks true - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Derive pack version from branch name Unix if: runner.os != 'Windows' run: | @@ -72,9 +68,10 @@ jobs: } shell: powershell - name: Set up go - uses: actions/setup-go@v2 + uses: actions/setup-go@v6 with: - go-version: '1.17' + check-latest: true + go-version-file: 'go.mod' - name: Set up go env for Unix if: runner.os != 'Windows' run: | @@ -87,22 +84,27 @@ jobs: echo "GOPATH=$(go env GOPATH)"| Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append echo "$(go env GOPATH)\bin" | Out-File -FilePath $Env:GITHUB_PATH -Encoding utf8 -Append shell: powershell + - name: Install Make on Windows + if: runner.os == 'Windows' + run: choco install make -y - name: Verify run: make verify - name: Test env: TEST_COVERAGE: 1 + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: make test - name: Upload Coverage uses: codecov/codecov-action@v3 with: + token: ${{ secrets.CODECOV_TOKEN }} file: ./out/tests/coverage-unit.txt flags: unit,os_${{ matrix.os }} - fail_ci_if_error: true + fail_ci_if_error: false verbose: true - name: Prepare Codecov if: matrix.os == 'windows' - uses: crazy-max/ghaction-chocolatey@v1 + uses: crazy-max/ghaction-chocolatey@v3 with: args: install codecov -y - name: run Codecov @@ -123,8 +125,7 @@ jobs: env: PACK_BUILD: ${{ github.run_number }} shell: powershell - - uses: actions/upload-artifact@v2 - if: matrix.config != 'windows-wcow' + - uses: actions/upload-artifact@v4 with: name: pack-${{ matrix.os }} path: out/${{ env.PACK_BIN }} @@ 
-134,28 +135,41 @@ jobs: fail-fast: false matrix: include: + - name: freebsd + goarch: amd64 + goos: freebsd + - name: freebsd-arm64 + goarch: arm64 + goos: freebsd - name: linux-arm64 goarch: arm64 goos: linux - - name: macos-arm64 - goarch: arm64 + - name: macos + # since macos-14 default runner is arm, we need to build for intel architecture later + goarch: amd64 goos: darwin + - name: linux-s390x + goarch: s390x + goos: linux + - name: linux-ppc64le + goarch: ppc64le + goos: linux needs: test runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up go - uses: actions/setup-go@v2 + uses: actions/setup-go@v6 with: - stable: false - go-version: '1.16.2' + check-latest: true + go-version-file: 'go.mod' - name: Build run: | [[ $GITHUB_REF =~ ^refs\/heads\/release/(.*)$ ]] && version=${BASH_REMATCH[1]} || version=0.0.0 env PACK_VERSION=${version} GOARCH=${{ matrix.goarch }} GOOS=${{ matrix.goos }} make build env: PACK_BUILD: ${{ github.run_number }} - - uses: actions/upload-artifact@v2 + - uses: actions/upload-artifact@v4 with: name: pack-${{ matrix.name }} path: out/${{ env.PACK_BIN }} @@ -164,7 +178,7 @@ jobs: needs: build-additional-archs runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Derive pack version from branch name shell: bash run: | @@ -184,19 +198,43 @@ jobs: echo "PACK_MILESTONE=${milestone}" >> $GITHUB_ENV - name: Download artifacts - uses: actions/download-artifact@v2 + uses: actions/download-artifact@v5 - name: Package artifacts - macos run: | chmod +x pack-macos/pack filename=pack-v${{ env.PACK_VERSION }}-macos.tgz tar -C pack-macos -vzcf $filename pack shasum -a 256 $filename > $filename.sha256 + - name: Package artifacts - freebsd + run: | + chmod +x pack-freebsd/pack + filename=pack-v${{ env.PACK_VERSION }}-freebsd.tgz + tar -C pack-freebsd -vzcf $filename pack + shasum -a 256 $filename > $filename.sha256 + - name: Package artifacts - freebsd-arm64 + 
run: | + chmod +x pack-freebsd-arm64/pack + filename=pack-v${{ env.PACK_VERSION }}-freebsd-arm64.tgz + tar -C pack-freebsd-arm64 -vzcf $filename pack + shasum -a 256 $filename > $filename.sha256 - name: Package artifacts - linux-arm64 run: | chmod +x pack-linux-arm64/pack filename=pack-v${{ env.PACK_VERSION }}-linux-arm64.tgz tar -C pack-linux-arm64 -vzcf $filename pack shasum -a 256 $filename > $filename.sha256 + - name: Package artifacts - linux-s390x + run: | + chmod +x pack-linux-s390x/pack + filename=pack-v${{ env.PACK_VERSION }}-linux-s390x.tgz + tar -C pack-linux-s390x -vzcf $filename pack + shasum -a 256 $filename > $filename.sha256 + - name: Package artifacts - linux-ppc64le + run: | + chmod +x pack-linux-ppc64le/pack + filename=pack-v${{ env.PACK_VERSION }}-linux-ppc64le.tgz + tar -C pack-linux-ppc64le -vzcf $filename pack + shasum -a 256 $filename > $filename.sha256 - name: Package artifacts - macos-arm64 run: | chmod +x pack-macos-arm64/pack @@ -218,17 +256,16 @@ jobs: id: lifecycle_version run: | LIFECYCLE_VERSION=$(./pack-linux/pack report | grep 'Default Lifecycle Version:' | grep -o '[^ ]*$') - echo "::set-output name=version::$LIFECYCLE_VERSION" + echo "version=$LIFECYCLE_VERSION" >> $GITHUB_OUTPUT - name: Extract pack help id: pack_help - # Replacements have to do with multiline output. - # See https://github.community/t5/GitHub-Actions/set-output-Truncates-Multiline-Strings/m-p/38372/highlight/true#M3322 + # Multiline output use a syntax similar to heredocs. 
+ # see https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#multiline-strings run: | - PACK_HELP=$(./pack-linux/pack --help) - PACK_HELP="${PACK_HELP//'%'/'%25'}" - PACK_HELP="${PACK_HELP//$'\n'/'%0A'}" - PACK_HELP="${PACK_HELP//$'\r'/'%0D'}" - echo "::set-output name=help::$PACK_HELP" + DELIMITER="$(uuidgen)" + echo "help<<${DELIMITER}" >> $GITHUB_OUTPUT + ./pack-linux/pack --help >> $GITHUB_OUTPUT + echo "${DELIMITER}" >> $GITHUB_OUTPUT - name: Generate changelog uses: ./.github/workflows/actions/release-notes id: changelog @@ -237,7 +274,7 @@ jobs: milestone: ${{ env.PACK_MILESTONE }} - name: Create Pre-Release if: ${{ env.PACK_VERSION != env.PACK_MILESTONE }} - uses: softprops/action-gh-release@v1 + uses: softprops/action-gh-release@v2 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: @@ -248,23 +285,64 @@ jobs: prerelease: true files: pack-v${{ env.PACK_VERSION }}-* body: | - # pack v${{ env.PACK_VERSION }} - > This is a **beta** pre-release of the Cloud Native Buildpack local CLI. This platform implementation should be relatively stable and reliable, but breaking changes in the underlying [specification](https://github.com/buildpack/spec) may be implemented without notice. Note that pack is intended for local image builds, and thus requires a Docker daemon. The [lifecycle](https://github.com/buildpack/lifecycle) should be used directly when building on cloud platforms. - ## Prerequisites - - The [Docker daemon](https://www.docker.com/get-started) must be installed on your workstation or accessible over the network. + - A container runtime such as [Docker](https://www.docker.com/get-started) or [podman](https://podman.io/get-started) must be available to execute builds. 
## Install + #### FreeBSD + + ##### AMD64 + + ```bash + (curl -sSL "https://github.com/buildpacks/pack/releases/download/v${{ env.PACK_VERSION }}/pack-v${{ env.PACK_VERSION }}-freebsd.tgz" | sudo tar -C /usr/local/bin/ --no-same-owner -xzv pack) + ``` + + ##### ARM64 + + ```bash + (curl -sSL "https://github.com/buildpacks/pack/releases/download/v${{ env.PACK_VERSION }}/pack-v${{ env.PACK_VERSION }}-freebsd-arm64.tgz" | sudo tar -C /usr/local/bin/ --no-same-owner -xzv pack) + ``` + #### Linux - ##### Command + ##### AMD64 ```bash (curl -sSL "https://github.com/buildpacks/pack/releases/download/v${{ env.PACK_VERSION }}/pack-v${{ env.PACK_VERSION }}-linux.tgz" | sudo tar -C /usr/local/bin/ --no-same-owner -xzv pack) ``` + ##### ARM64 + + ```bash + (curl -sSL "https://github.com/buildpacks/pack/releases/download/v${{ env.PACK_VERSION }}/pack-v${{ env.PACK_VERSION }}-linux-arm64.tgz" | sudo tar -C /usr/local/bin/ --no-same-owner -xzv pack) + ``` + + ##### S390X + + ```bash + (curl -sSL "https://github.com/buildpacks/pack/releases/download/v${{ env.PACK_VERSION }}/pack-v${{ env.PACK_VERSION }}-linux-s390x.tgz" | sudo tar -C /usr/local/bin/ --no-same-owner -xzv pack) + ``` + ##### PPC64LE + + ```bash + (curl -sSL "https://github.com/buildpacks/pack/releases/download/v${{ env.PACK_VERSION }}/pack-v${{ env.PACK_VERSION }}-linux-ppc64le.tgz" | sudo tar -C /usr/local/bin/ --no-same-owner -xzv pack) + ``` + + #### MacOS + + ##### Intel + + ```bash + (curl -sSL "https://github.com/buildpacks/pack/releases/download/v${{ env.PACK_VERSION }}/pack-v${{ env.PACK_VERSION }}-macos.tgz" | sudo tar -C /usr/local/bin/ --no-same-owner -xzv pack) + ``` + + ##### Apple Silicon + + ```bash + (curl -sSL "https://github.com/buildpacks/pack/releases/download/v${{ env.PACK_VERSION }}/pack-v${{ env.PACK_VERSION }}-macos-arm64.tgz" | sudo tar -C /usr/local/bin/ --no-same-owner -xzv pack) + ``` #### Manually 1. 
Download the `.tgz` or `.zip` file for your platform @@ -291,7 +369,7 @@ jobs: - name: Create Beta Release if: ${{ env.PACK_VERSION == env.PACK_MILESTONE }} - uses: softprops/action-gh-release@v1 + uses: softprops/action-gh-release@v2 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: @@ -300,12 +378,9 @@ jobs: draft: true files: pack-v${{ env.PACK_VERSION }}-* body: | - # pack v${{ env.PACK_VERSION }} - > This is a **beta** release of the Cloud Native Buildpack local CLI. This platform implementation should be relatively stable and reliable, but breaking changes in the underlying [specification](https://github.com/buildpack/spec) may be implemented without notice. Note that pack is intended for local image builds, and thus requires a Docker daemon. The [lifecycle](https://github.com/buildpack/lifecycle) should be used directly when building on cloud platforms. - ## Prerequisites - - The [Docker daemon](https://www.docker.com/get-started) must be installed on your workstation or accessible over the network. + - A container runtime such as [Docker](https://www.docker.com/get-started) or [podman](https://podman.io/get-started) must be available to execute builds. 
## Install diff --git a/.github/workflows/check-latest-release.yml b/.github/workflows/check-latest-release.yml new file mode 100644 index 0000000000..0e6b39dcde --- /dev/null +++ b/.github/workflows/check-latest-release.yml @@ -0,0 +1,108 @@ +name: Check latest pack release + +on: + schedule: + - cron: 0 2 * * 1,4 + workflow_dispatch: {} + +jobs: + check-release: + runs-on: + - ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-go@v6 + with: + go-version-file: 'go.mod' + - name: Read go versions + id: read-go + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + #!/usr/bin/env bash + + set -euo pipefail + + LATEST_GO_VERSION=$(go version | cut -d ' ' -f 3) + + LATEST_RELEASE_VERSION=$(gh release list --exclude-drafts --exclude-pre-releases -L 1 | cut -d $'\t' -f 1 | cut -d ' ' -f 2) + + wget https://github.com/$GITHUB_REPOSITORY/releases/download/$LATEST_RELEASE_VERSION/pack-$LATEST_RELEASE_VERSION-linux.tgz -O out.tgz + tar xzf out.tgz + LATEST_RELEASE_GO_VERSION=$(go version ./pack | cut -d ' ' -f 2) + + echo "latest-go-version=${LATEST_GO_VERSION}" >> "$GITHUB_OUTPUT" + echo "latest-release-go-version=${LATEST_RELEASE_GO_VERSION}" >> "$GITHUB_OUTPUT" + + LATEST_RELEASE_VERSION=$(echo $LATEST_RELEASE_VERSION | cut -d \v -f 2) + echo "latest-release-version=${LATEST_RELEASE_VERSION}" >> "$GITHUB_OUTPUT" + - name: Create issue if needed + if: ${{ steps.read-go.outputs.latest-go-version != steps.read-go.outputs.latest-release-go-version }} + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + #!/usr/bin/env bash + + set -euo pipefail + + title="Upgrade $GITHUB_REPOSITORY to ${{ steps.read-go.outputs.latest-go-version }}" + label=${{ steps.read-go.outputs.latest-go-version }} + + # Create label to use for exact search + gh label create "$label" || true + + search_output=$(gh issue list --search "$title" --label "$label") + + body="Latest $GITHUB_REPOSITORY release v${{ steps.read-go.outputs.latest-release-version }} is built with Go 
version ${{ steps.read-go.outputs.latest-release-go-version }}; newer version ${{ steps.read-go.outputs.latest-go-version }} is available." + + if [ -z "${search_output// }" ] + then + echo "No issues matched search; creating new issue..." + gh issue create \ + --label "type/bug" \ + --label "status/triage" \ + --label "$label" \ + --title "$title" \ + --body "$body" + else + echo "Found matching issues:" + echo $search_output + fi + - name: Scan latest release image + id: scan-image + uses: anchore/scan-action@v6 + with: + image: docker.io/buildpacksio/pack:${{ steps.read-go.outputs.latest-release-version }} + - name: Create issue if needed + if: failure() && steps.scan-image.outcome == 'failure' + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + #!/usr/bin/env bash + + set -euo pipefail + + title="CVE(s) found" + label=cve + + # Create label to use for exact search + gh label create "$label" || true + + search_output=$(gh issue list --search "$title" --label "$label") + + GITHUB_WORKFLOW_URL=https://github.com/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID + body="Latest docker.io/buildpacksio/pack v${{ steps.read-go.outputs.latest-release-version }} triggered CVE(s) from Grype. For further details, see: $GITHUB_WORKFLOW_URL" + + if [ -z "${search_output// }" ] + then + echo "No issues matched search; creating new issue..." + gh issue create \ + --label "type/bug" \ + --label "status/triage" \ + --label "$label" \ + --title "$title" \ + --body "$body" + else + echo "Found matching issues:" + echo $search_output + fi diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 0298fac2d7..e6d39ebb4b 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -30,11 +30,11 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 # Initializes the CodeQL tools for scanning. 
- name: Initialize CodeQL - uses: github/codeql-action/init@v1 + uses: github/codeql-action/init@v3 with: languages: ${{ matrix.language }} # If you wish to specify custom queries, you can do so here or in a config file. @@ -45,7 +45,7 @@ jobs: # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). # If this step fails, then you should remove it and run the build manually (see below) - name: Autobuild - uses: github/codeql-action/autobuild@v1 + uses: github/codeql-action/autobuild@v3 # ℹ️ Command-line programs to run using the OS shell. # 📚 https://git.io/JvXDl @@ -59,4 +59,4 @@ jobs: # make release - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v1 + uses: github/codeql-action/analyze@v3 diff --git a/.github/workflows/compatibility.yml b/.github/workflows/compatibility.yml index b76bdcae0a..ddd4facb46 100644 --- a/.github/workflows/compatibility.yml +++ b/.github/workflows/compatibility.yml @@ -42,11 +42,12 @@ jobs: lifecycle_kind: current runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up go - uses: actions/setup-go@v2.1.3 + uses: actions/setup-go@v6 with: - go-version: '1.17' + check-latest: true + go-version-file: 'go.mod' - name: Set up go env run: | echo "GOPATH=$(go env GOPATH)" >> $GITHUB_ENV @@ -58,3 +59,4 @@ jobs: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | make acceptance + docker network prune diff --git a/.github/workflows/deliver-ubuntu.yml b/.github/workflows/deliver-ubuntu.yml deleted file mode 100644 index f36cbcce13..0000000000 --- a/.github/workflows/deliver-ubuntu.yml +++ /dev/null @@ -1,128 +0,0 @@ -name: delivery / ubuntu - -on: - release: - types: - - released - workflow_dispatch: - inputs: - tag_name: - description: The release tag to distribute - required: true - ubuntu_ppa: - description: flag indicating this is an ubuntu release - required: true - - -jobs: - create-ppa: - #if: github.event_name == 'release' - strategy: - fail-fast: false - 
matrix: - target: [xenial, bionic, focal] - include: - - target: xenial - runner: ubuntu-16.04 - image: docker://ubuntu:16.04 - i386-go-version: 1.13 - - target: bionic - runner: ubuntu-18.04 - image: docker://ubuntu:18.04 - i386-go-version: 1.15 - - target: focal - runner: ubuntu-20.04 - image: docker://ubuntu:20.04 - i386-go-version: none - name: create-ppa - runs-on: ${{ matrix.runner }} - - steps: - - name: Determine version - uses: actions/github-script@v6 - id: version - with: - result-encoding: string - script: | - let payload = context.payload; - let tag = (payload.release && payload.release.tag_name) || (payload.inputs && payload.inputs.tag_name); - if (!tag) { - throw "ERROR: unable to determine tag" - } - return tag.replace(/^v/, ''); - - name: Checkout code - uses: actions/checkout@v3 - - - name: Metadata - id: metadata - run: | - echo "::set-output name=date::$(date +"%a, %d %b %Y %T %z")" - - - name: Install packaging tools - run: | - sudo apt-get update - sudo apt-get install software-properties-common -y - sudo add-apt-repository ppa:longsleep/golang-backports -y - sudo apt-get update - sudo apt-get install gnupg dput dh-make devscripts lintian golang -y - - - name: Fill debian/* - uses: cschleiden/replace-tokens@v1 - with: - files: '[".github/workflows/delivery/ubuntu/debian/*"]' - tokenPrefix: '{{' - tokenSuffix: '}}' - env: - ARCH: "any" - I386_GO_VERSION: ${{matrix.i386-go-version}} - PACKAGE_NAME: "pack-cli" - UBUNTU_VERSION: ${{ matrix.target }} - HOMEPAGE: "https://buildpacks.io" - PACK_VERSION: ${{ steps.version.outputs.result }} - REPO: "buildpacks/pack" - DESCRIPTION: "CLI for building apps using Cloud Native Buildpacks" - DATE_TIME: ${{ steps.metadata.outputs.date }} - MAINTAINER_NAME: "cncf-buildpacks" - MAINTAINER_EMAIL: "cncf-buildpacks-maintainers@lists.cncf.io" - - - name: Create PPA - run: ./.github/workflows/scripts/create_ppa.sh - env: - PACKAGE_NAME: "pack-cli" - PACKAGE_VERSION: ${{ steps.version.outputs.result }} - MAINTAINER: 
"cncf-buildpacks" - MAINTANER_EMAIL: "cncf-buildpacks-maintainers@lists.cncf.io" - GPG_PRIVATE_KEY: ${{ secrets.GPG_PRIVATE_KEY }} - GPG_PUBLIC_KEY: ${{ secrets.GPG_PUBLIC_KEY }} - - - name: Test Xenial - if: matrix.target == 'xenial' - uses: docker://ubuntu:16.04 - env: - PACKAGE_NAME: "pack-cli" - with: - entrypoint: .github/workflows/delivery/ubuntu/test-install-package.sh - - - name: Test Bionic - if: matrix.target == 'bionic' - uses: docker://ubuntu:18.04 - env: - PACKAGE_NAME: "pack-cli" - with: - entrypoint: .github/workflows/delivery/ubuntu/test-install-package.sh - - - name: Test Focal - if: matrix.target == 'focal' - uses: docker://ubuntu:20.04 - env: - PACKAGE_NAME: "pack-cli" - with: - entrypoint: .github/workflows/delivery/ubuntu/test-install-package.sh - - - name: Upload - run: | - dput "${PPA_DEST}" ./../*.changes - env: - PPA_DEST: "ppa:cncf-buildpacks/pack-cli" - - diff --git a/.github/workflows/delivery-archlinux-git.yml b/.github/workflows/delivery-archlinux-git.yml index 938367bcc0..7f6360c156 100644 --- a/.github/workflows/delivery-archlinux-git.yml +++ b/.github/workflows/delivery-archlinux-git.yml @@ -1,9 +1,9 @@ name: delivery / archlinux / git on: - push: - branches: - - main +# push: +# branches: +# - main workflow_dispatch: jobs: @@ -12,7 +12,7 @@ jobs: env: PACKAGE_NAME: pack-cli-git steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 - name: Setup working dir @@ -26,9 +26,9 @@ jobs: version=$(echo "${git_description}" | awk -F- '{print $(1)}' | sed 's/^v//') revision=$(echo "${git_description}" | awk -F- '{print $(NF-1)}') commit=$(echo "${git_description}" | awk -F- '{print $(NF)}' | sed 's/^g//') - echo "::set-output name=version::$version" - echo "::set-output name=revision::$revision" - echo "::set-output name=commit::$commit" + echo "version=$version" >> $GITHUB_OUTPUT + echo "revision=$revision" >> $GITHUB_OUTPUT + echo "commit=$commit" >> $GITHUB_OUTPUT - name: Fill PKGBUILD uses: 
cschleiden/replace-tokens@v1 with: @@ -51,4 +51,4 @@ jobs: PACK_VERSION: ${{ steps.metadata.outputs.version }} AUR_KEY: ${{ secrets.AUR_KEY }} with: - entrypoint: .github/workflows/delivery/archlinux/publish-package.sh \ No newline at end of file + entrypoint: .github/workflows/delivery/archlinux/publish-package.sh diff --git a/.github/workflows/delivery-archlinux.yml b/.github/workflows/delivery-archlinux.yml index 7fe37650f7..ce7829c095 100644 --- a/.github/workflows/delivery-archlinux.yml +++ b/.github/workflows/delivery-archlinux.yml @@ -11,72 +11,14 @@ on: required: true jobs: - pack-cli: - runs-on: ubuntu-latest - env: - PACKAGE_NAME: pack-cli - steps: - - uses: actions/checkout@v3 - - name: Determine version - uses: actions/github-script@v6 - id: version - with: - result-encoding: string - script: | - let payload = context.payload; - let tag = (payload.release && payload.release.tag_name) || (payload.inputs && payload.inputs.tag_name); - if (!tag) { - throw "ERROR: unable to determine tag" - } - return tag.replace(/^v/, ''); - - name: Set PACK_VERSION - run: echo "PACK_VERSION=${{ steps.version.outputs.result }}" >> $GITHUB_ENV - shell: bash - - name: Setup working dir - run: | - mkdir -p ${{ env.PACKAGE_NAME }} - cp .github/workflows/delivery/archlinux/${{ env.PACKAGE_NAME }}/PKGBUILD ${{ env.PACKAGE_NAME }}/PKGBUILD - - name: Metadata - id: metadata - run: | - url=https://github.com/buildpacks/pack/archive/v${{ env.PACK_VERSION }}.tar.gz - filename=pack-${{ env.PACK_VERSION }}.tgz - fullpath=`pwd`/$filename - - curl -sSL "$url" -o "$fullpath" - sha512=$(sha512sum "$fullpath" | awk '{ print $1 }') - echo "::set-output name=url::$url" - echo "::set-output name=sha512::$sha512" - - name: Fill PKGBUILD - uses: cschleiden/replace-tokens@v1 - with: - files: ${{ env.PACKAGE_NAME }}/PKGBUILD - tokenPrefix: '{{' - tokenSuffix: '}}' - env: - PACK_VERSION: ${{ env.PACK_VERSION }} - SRC_TGZ_URL: ${{ steps.metadata.outputs.url }} - SRC_TGZ_SHA: ${{ 
steps.metadata.outputs.sha512 }} - - name: Print PKGBUILD - run: cat ${{ env.PACKAGE_NAME }}/PKGBUILD - - name: Test - uses: docker://archlinux:latest - with: - entrypoint: .github/workflows/delivery/archlinux/test-install-package.sh - - name: Publish - uses: docker://archlinux:latest - env: - AUR_KEY: ${{ secrets.AUR_KEY }} - with: - entrypoint: .github/workflows/delivery/archlinux/publish-package.sh pack-cli-bin: runs-on: ubuntu-latest env: PACKAGE_NAME: pack-cli-bin steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Determine version - uses: actions/github-script@v6 + uses: actions/github-script@v7 id: version with: result-encoding: string @@ -95,7 +37,7 @@ jobs: mkdir -p ${{ env.PACKAGE_NAME }}/ cp .github/workflows/delivery/archlinux/${{ env.PACKAGE_NAME }}/PKGBUILD ${{ env.PACKAGE_NAME }}/PKGBUILD - name: Lookup assets - uses: actions/github-script@v6 + uses: actions/github-script@v7 id: assets with: script: | @@ -120,9 +62,8 @@ jobs: run: | curl -sSL ${{ steps.assets.outputs.linux_url }} -o ${{ steps.assets.outputs.linux_name }} sha512=$(sha512sum ${{ steps.assets.outputs.linux_name }} | cut -d ' ' -f1) - - echo "::set-output name=url::${{ steps.assets.outputs.linux_url }}" - echo "::set-output name=sha512::$sha512" + echo "url=${{ steps.assets.outputs.linux_url }}" >> $GITHUB_OUTPUT + echo "sha512=$sha512" >> $GITHUB_OUTPUT - name: Fill PKGBUILD uses: cschleiden/replace-tokens@v1 with: diff --git a/.github/workflows/delivery-chocolatey.yml b/.github/workflows/delivery-chocolatey.yml index c18c2e9daf..799b754ac8 100644 --- a/.github/workflows/delivery-chocolatey.yml +++ b/.github/workflows/delivery-chocolatey.yml @@ -21,9 +21,9 @@ jobs: deliver-chocolatey: runs-on: windows-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Determine version - uses: actions/github-script@v6 + uses: actions/github-script@v7 id: version with: result-encoding: string @@ -40,7 +40,7 @@ jobs: return updatedTag; - name: Setup 
working dir run: | - mkdir ${{ env.CHOCO_PATH }} + mkdir -p ${{ env.CHOCO_PATH }}/source cp -r .github/workflows/delivery/chocolatey/. ${{ env.CHOCO_PATH }}/ ls -R ${{ env.CHOCO_PATH }} - name: Download and unzip Pack (Windows) @@ -67,20 +67,20 @@ jobs: cp LICENSE $file cat $file - name: build-release - uses: crazy-max/ghaction-chocolatey@v1 + uses: crazy-max/ghaction-chocolatey@v3 with: - args: pack ${{ env.CHOCO_PATH }}/pack.nuspec --outputdirectory ${{ env.CHOCO_PATH}} + args: pack ${{ env.CHOCO_PATH }}/pack.nuspec --outputdirectory ${{ env.CHOCO_PATH}}/source - name: list files run: | ls ${{ env.CHOCO_PATH }} ls ${{ env.CHOCO_PATH }}/tools - name: Test Release - uses: crazy-max/ghaction-chocolatey@v1 + uses: crazy-max/ghaction-chocolatey@v3 with: - args: install pack -s ${{ env.CHOCO_PATH }}/pack.${{ env.PACK_VERSION }}.nupkg + args: install pack -s ${{ env.CHOCO_PATH }}/source - name: Ensure Pack Installed run: pack help - name: Upload Release - uses: crazy-max/ghaction-chocolatey@v1 + uses: crazy-max/ghaction-chocolatey@v3 with: - args: push ${{ env.CHOCO_PATH }}/pack.${{ env.PACK_VERSION }}.nupkg -s https://push.chocolatey.org/ -k ${{ secrets.CHOCO_KEY }} + args: push ${{ env.CHOCO_PATH }}/source/pack.${{ env.PACK_VERSION }}.nupkg -s https://push.chocolatey.org/ -k ${{ secrets.CHOCO_KEY }} diff --git a/.github/workflows/delivery-docker.yml b/.github/workflows/delivery-docker.yml index c59bdd5391..1524a52f68 100644 --- a/.github/workflows/delivery-docker.yml +++ b/.github/workflows/delivery-docker.yml @@ -16,16 +16,26 @@ on: default: false env: - BUILDER: "paketobuildpacks/builder:tiny" + REGISTRY_NAME: 'docker.io' + USER_NAME: 'buildpacksio' IMG_NAME: 'pack' - USERNAME: 'buildpacksio' jobs: deliver-docker: + strategy: + matrix: + config: [tiny, base] + include: + - config: tiny + base_image: gcr.io/distroless/static + suffix: + - config: base + base_image: ubuntu:jammy + suffix: -base runs-on: ubuntu-latest steps: - name: Determine version - uses: 
actions/github-script@v6 + uses: actions/github-script@v7 id: version with: result-encoding: string @@ -39,35 +49,33 @@ jobs: return tag.replace(/^v/, ''); - name: Checkout source at tag - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: ref: v${{ steps.version.outputs.result }} - # This has to come after the first checkout, so it isn't clobbered - - name: Checkout delivery configuration - uses: actions/checkout@v3 - with: - path: ./head - - name: Setup Working Dir - shell: bash - run: | - cp head/.github/workflows/delivery/docker/project.toml project.toml - rm buildpack.yml - name: Determine App Name - run: 'echo "IMG_NAME=${{ env.USERNAME }}/${{ env.IMG_NAME }}" >> $GITHUB_ENV' + run: 'echo "IMG_NAME=${{ env.REGISTRY_NAME }}/${{ env.USER_NAME }}/${{ env.IMG_NAME }}" >> $GITHUB_ENV' - name: Login to Dockerhub - uses: docker/login-action@v1 + uses: docker/login-action@v3 with: username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_PASSWORD }} - - name: Pack Build/Publish - uses: dfreilich/pack-action@v2.1.1 - env: - BP_GO_BUILD_LDFLAGS: "-s -w -X 'github.com/buildpacks/pack.Version=${{ steps.version.outputs.result }}'" - with: - args: 'build ${{ env.IMG_NAME }}:${{ steps.version.outputs.result }} --builder ${{ env.BUILDER }} --env BP_GO_BUILD_LDFLAGS --publish' + - uses: docker/setup-qemu-action@v3 + - uses: docker/setup-buildx-action@v3 + - uses: buildpacks/github-actions/setup-tools@v5.9.4 + - name: Buildx Build/Publish + run: | + docker buildx build . 
\ + --tag ${{ env.IMG_NAME }}:${{ steps.version.outputs.result }}${{ matrix.suffix }} \ + --platform linux/amd64,linux/arm64,linux/s390x,linux/ppc64le \ + --build-arg pack_version=${{ steps.version.outputs.result }} \ + --build-arg base_image=${{ matrix.base_image }} \ + --provenance=false \ + --push + - name: Tag Image as Base + if: ${{ (github.event.release != '' || github.event.inputs.tag_latest) && matrix.config == 'base' }} + run: | + crane copy ${{ env.IMG_NAME }}:${{ steps.version.outputs.result }}${{ matrix.suffix }} ${{ env.IMG_NAME }}:base - name: Tag Image as Latest - if: ${{ github.event.release != '' || github.event.inputs.tag_latest }} + if: ${{ (github.event.release != '' || github.event.inputs.tag_latest) && matrix.config != 'base' }} run: | - docker pull ${{ env.IMG_NAME }}:${{ steps.version.outputs.result }} - docker tag ${{ env.IMG_NAME }}:${{ steps.version.outputs.result }} ${{ env.IMG_NAME }}:latest - docker push ${{ env.IMG_NAME }}:latest + crane copy ${{ env.IMG_NAME }}:${{ steps.version.outputs.result }}${{ matrix.suffix }} ${{ env.IMG_NAME }}:latest diff --git a/.github/workflows/delivery-homebrew.yml b/.github/workflows/delivery-homebrew.yml index 0eea8de3b3..636db0487b 100644 --- a/.github/workflows/delivery-homebrew.yml +++ b/.github/workflows/delivery-homebrew.yml @@ -14,9 +14,9 @@ jobs: update-tap: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Checkout tap - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: repository: buildpack/homebrew-tap path: homebrew-tap @@ -24,7 +24,7 @@ jobs: - name: Copy pack.rb run: cp .github/workflows/delivery/homebrew/pack.rb homebrew-tap/Formula/pack.rb - name: Lookup assets - uses: actions/github-script@v6 + uses: actions/github-script@v7 id: assets with: script: | @@ -50,6 +50,11 @@ jobs: core.setOutput("linux_url", asset.browser_download_url); } + if (asset.name.endsWith("linux-arm64.tgz")) { + core.setOutput("linux_arm64_name", asset.name); 
+ core.setOutput("linux_arm64_url", asset.browser_download_url); + } + if (asset.name.endsWith("macos.tgz")) { core.setOutput("macos_name", asset.name); core.setOutput("macos_url", asset.browser_download_url); @@ -65,15 +70,19 @@ jobs: run: | curl -sSL ${{ steps.assets.outputs.linux_url }} -o ${{ steps.assets.outputs.linux_name }} linux_sha256=$(sha256sum ${{ steps.assets.outputs.linux_name }} | cut -d ' ' -f1) - echo "::set-output name=linux_sha256::$linux_sha256" + echo "linux_sha256=$linux_sha256" >> $GITHUB_OUTPUT + + curl -sSL ${{ steps.assets.outputs.linux_arm64_url }} -o ${{ steps.assets.outputs.linux_arm64_name }} + linux_arm64_sha256=$(sha256sum ${{ steps.assets.outputs.linux_arm64_name }} | cut -d ' ' -f1) + echo "linux_arm64_sha256=$linux_arm64_sha256" >> $GITHUB_OUTPUT curl -sSL ${{ steps.assets.outputs.macos_url }} -o ${{ steps.assets.outputs.macos_name }} macos_sha256=$(sha256sum ${{ steps.assets.outputs.macos_name }} | cut -d ' ' -f1) - echo "::set-output name=macos_sha256::$macos_sha256" + echo "macos_sha256=$macos_sha256" >> $GITHUB_OUTPUT curl -sSL ${{ steps.assets.outputs.macos_arm64_url }} -o ${{ steps.assets.outputs.macos_arm64_name }} macos_arm64_sha256=$(sha256sum ${{ steps.assets.outputs.macos_arm64_name }} | cut -d ' ' -f1) - echo "::set-output name=macos_arm64_sha256::$macos_arm64_sha256" + echo "macos_arm64_sha256=$macos_arm64_sha256" >> $GITHUB_OUTPUT - name: Fill pack.rb uses: cschleiden/replace-tokens@v1 with: @@ -83,6 +92,8 @@ jobs: env: LINUX_URL: ${{ steps.assets.outputs.linux_url }} LINUX_SHA: ${{ steps.checksums.outputs.linux_sha256 }} + LINUX_ARM64_URL: ${{ steps.assets.outputs.linux_arm64_url }} + LINUX_ARM64_SHA: ${{ steps.checksums.outputs.linux_arm64_sha256 }} MACOS_URL: ${{ steps.assets.outputs.macos_url }} MACOS_SHA: ${{ steps.checksums.outputs.macos_sha256 }} MACOS_ARM64_URL: ${{ steps.assets.outputs.macos_arm64_url }} diff --git a/.github/workflows/delivery-release-dispatch.yml 
b/.github/workflows/delivery-release-dispatch.yml index 4fec0332cd..71a96be5d3 100644 --- a/.github/workflows/delivery-release-dispatch.yml +++ b/.github/workflows/delivery-release-dispatch.yml @@ -13,7 +13,7 @@ jobs: repo: ['buildpacks/docs', 'buildpacks/samples', 'buildpacks/pack-orb', 'buildpacks/github-actions'] steps: - name: Repository Dispatch - uses: peter-evans/repository-dispatch@v2 + uses: peter-evans/repository-dispatch@v3 with: token: ${{ secrets.PLATFORM_GITHUB_TOKEN }} event-type: pack-release diff --git a/.github/workflows/delivery-ubuntu.yml b/.github/workflows/delivery-ubuntu.yml new file mode 100644 index 0000000000..2f2a6dbe64 --- /dev/null +++ b/.github/workflows/delivery-ubuntu.yml @@ -0,0 +1,117 @@ +name: delivery / ubuntu + +on: + release: + types: + - released + workflow_dispatch: + inputs: + tag_name: + description: The release tag to distribute + required: true + +env: + MAINTAINER_NAME: "cncf-buildpacks" + MAINTAINER_EMAIL: "cncf-buildpacks-maintainers@lists.cncf.io" + PACKAGE_NAME: "pack-cli" + +jobs: + deliver-ppa: + strategy: + fail-fast: false + matrix: + target: [focal, jammy, noble, plucky] + runs-on: ubuntu-22.04 + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Metadata + id: metadata + run: | + echo "date=$(date +"%a, %d %b %Y %T %z")" >> $GITHUB_OUTPUT + + - name: Determine version + uses: actions/github-script@v7 + id: version + with: + result-encoding: string + script: | + let payload = context.payload; + let tag = (payload.release && payload.release.tag_name) || (payload.inputs && payload.inputs.tag_name); + if (!tag) { + throw "ERROR: unable to determine tag" + } + return tag.replace(/^v/, ''); + + - name: Fill debian/* + uses: cschleiden/replace-tokens@v1 + with: + files: '[".github/workflows/delivery/ubuntu/debian/*"]' + tokenPrefix: '{{' + tokenSuffix: '}}' + env: + ARCH: "any" + DATE_TIME: ${{ steps.metadata.outputs.date }} + DESCRIPTION: "CLI for building apps using Cloud Native Buildpacks" + 
GO_DEP_ENTRY: golang (>=1.24) + GO_DEP_LIB_PATH: /usr/lib/go + GO_DEP_PACKAGE_NAME: golang + HOMEPAGE: "https://buildpacks.io" + PACK_VERSION: ${{ steps.version.outputs.result }} + REPO: "buildpacks/pack" + UBUNTU_VERSION: ${{ matrix.target }} + + ### + # NOTE: 'uses' does not support interpolation so we have to manually define the + # following steps per variant. + ### + + + - name: Deliver focal + if: matrix.target == 'focal' + uses: docker://ubuntu:focal + with: + entrypoint: .github/workflows/delivery/ubuntu/deliver.sh + env: + DEBIAN_FRONTEND: "noninteractive" + GO_DEP_PACKAGE_NAME: golang + GPG_PRIVATE_KEY: ${{ secrets.GPG_PRIVATE_KEY }} + GPG_PUBLIC_KEY: ${{ secrets.GPG_PUBLIC_KEY }} + PACKAGE_VERSION: ${{ steps.version.outputs.result }} + + - name: Deliver jammy + if: matrix.target == 'jammy' + uses: docker://ubuntu:jammy + with: + entrypoint: .github/workflows/delivery/ubuntu/deliver.sh + env: + DEBIAN_FRONTEND: "noninteractive" + GO_DEP_PACKAGE_NAME: golang + GPG_PRIVATE_KEY: ${{ secrets.GPG_PRIVATE_KEY }} + GPG_PUBLIC_KEY: ${{ secrets.GPG_PUBLIC_KEY }} + PACKAGE_VERSION: ${{ steps.version.outputs.result }} + + - name: Deliver noble + if: matrix.target == 'noble' + uses: docker://ubuntu:noble + with: + entrypoint: .github/workflows/delivery/ubuntu/deliver.sh + env: + DEBIAN_FRONTEND: "noninteractive" + GO_DEP_PACKAGE_NAME: golang + GPG_PRIVATE_KEY: ${{ secrets.GPG_PRIVATE_KEY }} + GPG_PUBLIC_KEY: ${{ secrets.GPG_PUBLIC_KEY }} + PACKAGE_VERSION: ${{ steps.version.outputs.result }} + + - name: Deliver plucky + if: matrix.target == 'plucky' + uses: docker://ubuntu:plucky + with: + entrypoint: .github/workflows/delivery/ubuntu/deliver.sh + env: + DEBIAN_FRONTEND: "noninteractive" + GO_DEP_PACKAGE_NAME: golang + GPG_PRIVATE_KEY: ${{ secrets.GPG_PRIVATE_KEY }} + GPG_PUBLIC_KEY: ${{ secrets.GPG_PUBLIC_KEY }} + PACKAGE_VERSION: ${{ steps.version.outputs.result }} diff --git a/.github/workflows/delivery/archlinux/README.md 
b/.github/workflows/delivery/archlinux/README.md index df07ee6fa7..14279c3065 100644 --- a/.github/workflows/delivery/archlinux/README.md +++ b/.github/workflows/delivery/archlinux/README.md @@ -1,8 +1,8 @@ # Arch Linux -There are 3 maintained packages: +There are two maintained packages by us and one official archlinux package: -- [pack-cli](https://aur.archlinux.org/packages/pack-cli/): The latest release of `pack`, compiled from source. +- [pack-cli](https://archlinux.org/packages/extra/x86_64/pack-cli/): Official Archlinux package in the 'Extra' repo. - [pack-cli-bin](https://aur.archlinux.org/packages/pack-cli-bin/): The latest release of `pack`, precompiled. - [pack-cli-git](https://aur.archlinux.org/packages/pack-cli-git/): An unreleased version of `pack`, compiled from source of the `main` branch. diff --git a/.github/workflows/delivery/archlinux/pack-cli/PKGBUILD b/.github/workflows/delivery/archlinux/pack-cli/PKGBUILD deleted file mode 100644 index b56e418fb6..0000000000 --- a/.github/workflows/delivery/archlinux/pack-cli/PKGBUILD +++ /dev/null @@ -1,22 +0,0 @@ -# Maintainer: Michael William Le Nguyen -# Maintainer: Buildpacks Maintainers -pkgname=pack-cli -pkgver={{PACK_VERSION}} -pkgrel=1 -pkgdesc="CLI for building apps using Cloud Native Buildpacks" -arch=('x86_64') -url="https://buildpacks.io/" -license=('Apache') -makedepends=('go-pie') -source=("{{SRC_TGZ_URL}}") -sha512sums=("{{SRC_TGZ_SHA}}") -build() { - export GOPATH="${srcdir}/go" - cd "${srcdir}/pack-${pkgver}" - PACK_VERSION="v${pkgver}" make build -} -package() { - export GOPATH="${srcdir}/go" - go clean -modcache - install -D -m755 "${srcdir}/pack-${pkgver}/out/pack" "${pkgdir}/usr/bin/pack" -} diff --git a/.github/workflows/delivery/archlinux/publish-package.sh b/.github/workflows/delivery/archlinux/publish-package.sh index f096d7c650..40c28472dc 100755 --- a/.github/workflows/delivery/archlinux/publish-package.sh +++ b/.github/workflows/delivery/archlinux/publish-package.sh @@ -40,6 +40,9 
@@ echo '> Cloning aur...' git clone "ssh://aur@aur.archlinux.org/${PACKAGE_NAME}.git" "${PACKAGE_AUR_DIR}" chown -R archie "${PACKAGE_AUR_DIR}" pushd "${PACKAGE_AUR_DIR}" > /dev/null + echo '> Declare directory ${PACKAGE_AUR_DIR} as safe' + git config --global --add safe.directory "${PACKAGE_AUR_DIR}" + echo '> Checking out master...' git checkout master diff --git a/.github/workflows/delivery/docker/project.toml b/.github/workflows/delivery/docker/project.toml deleted file mode 100644 index da9c5c982c..0000000000 --- a/.github/workflows/delivery/docker/project.toml +++ /dev/null @@ -1,7 +0,0 @@ -[project] -version = "1.0.2" -source-url = "https://github.com/buildpacks/pack" - -[[build.env]] -name = "BP_GO_TARGETS" -value = "./cmd/pack" diff --git a/.github/workflows/delivery/homebrew/pack.rb b/.github/workflows/delivery/homebrew/pack.rb index b2d10af477..346dbe3dcc 100644 --- a/.github/workflows/delivery/homebrew/pack.rb +++ b/.github/workflows/delivery/homebrew/pack.rb @@ -14,7 +14,10 @@ class Pack < Formula elsif OS.mac? url "{{MACOS_URL}}" sha256 "{{MACOS_SHA}}" - else + elsif OS.linux? && Hardware::CPU.arm? + url "{{LINUX_ARM64_URL}}" + sha256 "{{LINUX_ARM64_SHA}}" + else url "{{LINUX_URL}}" sha256 "{{LINUX_SHA}}" end diff --git a/.github/workflows/delivery/ubuntu/1_dependencies.sh b/.github/workflows/delivery/ubuntu/1_dependencies.sh new file mode 100644 index 0000000000..771a994681 --- /dev/null +++ b/.github/workflows/delivery/ubuntu/1_dependencies.sh @@ -0,0 +1,15 @@ +function dependencies() { + : "$GO_DEP_PACKAGE_NAME" + + echo "> Installing dev tools..." + apt-get update + apt-get install gnupg debhelper dput dh-make devscripts lintian software-properties-common -y + + echo "> Installing git..." + apt-get install git -y + + echo "> Installing go..." 
+ add-apt-repository ppa:longsleep/golang-backports -y + apt-get update + apt-get install $GO_DEP_PACKAGE_NAME -y +} diff --git a/.github/workflows/delivery/ubuntu/2_create-ppa.sh b/.github/workflows/delivery/ubuntu/2_create-ppa.sh new file mode 100755 index 0000000000..f0544a7955 --- /dev/null +++ b/.github/workflows/delivery/ubuntu/2_create-ppa.sh @@ -0,0 +1,66 @@ +function create_ppa() { + # verify the following are set. + : "$GPG_PUBLIC_KEY" + : "$GPG_PRIVATE_KEY" + : "$PACKAGE_VERSION" + : "$PACKAGE_NAME" + : "$MAINTAINER_NAME" + : "$MAINTAINER_EMAIL" + : "$SCRIPT_DIR" + + echo "> Importing GPG keys..." + gpg --import <(echo "$GPG_PUBLIC_KEY") + gpg --allow-secret-key-import --import <(echo "$GPG_PRIVATE_KEY") + + # Dependencies fail to be pulled in during the Launchpad build process. + echo "> Vendoring dependencies..." + go mod vendor + + echo "> Creating package: ${PACKAGE_NAME}_${PACKAGE_VERSION}" + echo "> Generating skeleton of a debian package..." + export DEBEMAIL=$MAINTAINER_EMAIL + export DEBFULLNAME=$MAINTAINER_NAME + dh_make -p "${PACKAGE_NAME}_${PACKAGE_VERSION}" --single --native --copyright apache --email "${MAINTAINER_EMAIL}" -y + + echo "> Copying templated configuration files..." + cp "$SCRIPT_DIR/debian/"* debian/ + + echo "=======" + echo "compat" + echo "=======" + cat debian/compat + echo + echo "=======" + echo "changelog" + echo "=======" + cat debian/changelog + echo + echo "=======" + echo "control" + echo "=======" + cat debian/control + echo + echo "=======" + echo "rules" + echo "=======" + cat debian/rules + echo + echo "=======" + echo "copyright" + echo "=======" + cat debian/copyright + echo + + echo "> Removing empty default files created by dh_make..." + rm -f debian/*.ex + rm -f debian/*.EX + rm -f debian/README.* + + # Ubuntu ONLY accepts source packages. + echo "> Build a source based debian package..." + debuild -S + + # debuild places everything in parent directory + echo "> Files created in: ${PWD}/.." 
+ ls -al ${PWD}/.. +} diff --git a/.github/workflows/delivery/ubuntu/3_test-ppa.sh b/.github/workflows/delivery/ubuntu/3_test-ppa.sh new file mode 100755 index 0000000000..4e328951c9 --- /dev/null +++ b/.github/workflows/delivery/ubuntu/3_test-ppa.sh @@ -0,0 +1,21 @@ +function test_ppa { + : "$GITHUB_WORKSPACE" + + echo "> Creating a test directory..." + testdir="$(mktemp -d)" + + echo "> Source Dir: '$GITHUB_WORKSPACE'" + echo "> Test Dir: '$testdir'" + cp -R $GITHUB_WORKSPACE/* $testdir + + pushd $testdir + echo "> Building a debian binary package..." + debuild -b -us -uc + + echo "> Installing binary package..." + dpkg -i ../*.deb + + echo "> Contents installed by the built debian package:" + dpkg -L pack-cli + popd +} diff --git a/.github/workflows/delivery/ubuntu/4_upload-ppa.sh b/.github/workflows/delivery/ubuntu/4_upload-ppa.sh new file mode 100755 index 0000000000..4b2524b9aa --- /dev/null +++ b/.github/workflows/delivery/ubuntu/4_upload-ppa.sh @@ -0,0 +1,4 @@ +function upload_ppa { + echo "> Uploading PPA..." 
+ dput "ppa:cncf-buildpacks/pack-cli" ./../*.changes +} \ No newline at end of file diff --git a/.github/workflows/delivery/ubuntu/debian/README b/.github/workflows/delivery/ubuntu/debian/README index 4fc1f480d4..65c9727aaa 100644 --- a/.github/workflows/delivery/ubuntu/debian/README +++ b/.github/workflows/delivery/ubuntu/debian/README @@ -7,4 +7,4 @@ For extensive documentation see: https://buildpacks.io/docs/tools/pack/cli/pack/ Please file issues and bugs at https://github.com/buildpacks/pack/ - -- {{MAINTAINER_NAME}} <{{MAINTAINER_EMAIL}}> Mon, 05 Oct 2020 21:46:51 +0000 \ No newline at end of file + -- {{MAINTAINER_NAME}} <{{MAINTAINER_EMAIL}}> {{DATE_TIME}} \ No newline at end of file diff --git a/.github/workflows/delivery/ubuntu/debian/compat b/.github/workflows/delivery/ubuntu/debian/compat index f11c82a4cb..9d607966b7 100644 --- a/.github/workflows/delivery/ubuntu/debian/compat +++ b/.github/workflows/delivery/ubuntu/debian/compat @@ -1 +1 @@ -9 \ No newline at end of file +11 \ No newline at end of file diff --git a/.github/workflows/delivery/ubuntu/debian/control b/.github/workflows/delivery/ubuntu/debian/control index aa0d21c2c5..176e52a5c2 100644 --- a/.github/workflows/delivery/ubuntu/debian/control +++ b/.github/workflows/delivery/ubuntu/debian/control @@ -2,7 +2,7 @@ Source: {{PACKAGE_NAME}} Section: utils Priority: optional Maintainer: {{MAINTAINER_NAME}} <{{MAINTAINER_EMAIL}}> -Build-Depends: debhelper (>=9), git, golang (>=1.13) [!i386], golang-{{I386_GO_VERSION}} [i386] +Build-Depends: debhelper (>= 11), git, {{GO_DEP_ENTRY}} Standards-Version: 3.9.8 Vcs-Git: git@github.com/{{REPO}}.git Vcs-Browser: https://github.com/{{REPO}} diff --git a/.github/workflows/delivery/ubuntu/debian/rules b/.github/workflows/delivery/ubuntu/debian/rules index 5d842dbb93..4ca1f6aa0b 100644 --- a/.github/workflows/delivery/ubuntu/debian/rules +++ b/.github/workflows/delivery/ubuntu/debian/rules @@ -10,12 +10,5 @@ override_dh_auto_test: override_dh_auto_build: mkdir 
-p /tmp/.cache/go-build - GOCACHE=/tmp/.cache/go-build GOFLAGS="-mod=vendor" LDFLAGS="" PACK_VERSION='{{PACK_VERSION}}' PATH="${PATH}:/usr/lib/go-{{I386_GO_VERSION}}/bin" dh_auto_build -- build + GOCACHE=/tmp/.cache/go-build GOFLAGS="-mod=vendor" LDFLAGS="" PACK_VERSION='{{PACK_VERSION}}' PATH="${PATH}:{{GO_DEP_LIB_PATH}}/bin" dh_auto_build -- build rm -r /tmp/.cache/go-build - - - -# dh_make generated override targets -# This is example for Cmake (See https://bugs.debian.org/641051 ) -#override_dh_auto_configure: -# dh_auto_configure -- # -DCMAKE_LIBRARY_PATH=$(DEB_HOST_MULTIARCH) \ No newline at end of file diff --git a/.github/workflows/delivery/ubuntu/deliver.sh b/.github/workflows/delivery/ubuntu/deliver.sh new file mode 100755 index 0000000000..7374f93da5 --- /dev/null +++ b/.github/workflows/delivery/ubuntu/deliver.sh @@ -0,0 +1,42 @@ +#!/usr/bin/env bash + +set -e +set -o pipefail + +readonly SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" + +echo "PWD=${PWD}" +echo "SCRIPT_DIR=${SCRIPT_DIR}" + +source "$SCRIPT_DIR/1_dependencies.sh" +source "$SCRIPT_DIR/2_create-ppa.sh" +source "$SCRIPT_DIR/3_test-ppa.sh" +source "$SCRIPT_DIR/4_upload-ppa.sh" + +echo +echo "++++++++++++++++++++++++++++" +echo "> Installing dependencies..." +echo "++++++++++++++++++++++++++++" +echo +dependencies + +echo +echo "++++++++++++++++++++++++++++" +echo "> Creating PPA..." +echo "++++++++++++++++++++++++++++" +echo +create_ppa + +echo +echo "++++++++++++++++++++++++++++" +echo "> Testing PPA..." +echo "++++++++++++++++++++++++++++" +echo +test_ppa + +echo +echo "++++++++++++++++++++++++++++" +echo "> Uploading PPA..." 
+echo "++++++++++++++++++++++++++++" +echo +upload_ppa diff --git a/.github/workflows/delivery/ubuntu/test-install-package.sh b/.github/workflows/delivery/ubuntu/test-install-package.sh deleted file mode 100755 index 3c258f4bd2..0000000000 --- a/.github/workflows/delivery/ubuntu/test-install-package.sh +++ /dev/null @@ -1,32 +0,0 @@ -#!/usr/bin/env bash -set -e -set -o pipefail - -apt-get update -# install neede packaging utilities -DEBIAN_FRONTEND=noninteractive apt-get install git devscripts debhelper software-properties-common -y - -# verify GITHUB_WORKSPACE is set up (we are in an action) -: "$GITHUB_WORKSPACE" - -# make and move package source into a testing directory -testdir="$(mktemp -d)" - -cp -R $GITHUB_WORKSPACE/* $testdir -pushd $testdir - -# install golang using ppa -add-apt-repository ppa:longsleep/golang-backports -y -apt-get update -apt-get install golang -y - -# build a debian binary package -debuild -b -us -uc - -# install the binary package -dpkg -i ../*.deb - -# list contents installed by the build debain package -dpkg -L ${PACKAGE_NAME} - -popd diff --git a/.github/workflows/privileged-pr-process.yml b/.github/workflows/privileged-pr-process.yml index 3b783d4e66..b1bcd376c5 100644 --- a/.github/workflows/privileged-pr-process.yml +++ b/.github/workflows/privileged-pr-process.yml @@ -8,14 +8,14 @@ jobs: label: runs-on: ubuntu-latest steps: - - uses: actions/labeler@main + - uses: actions/labeler@v4 with: repo-token: "${{ secrets.GITHUB_TOKEN }}" add-milestone: runs-on: ubuntu-latest steps: - name: Add milestone - uses: actions/github-script@v6 + uses: actions/github-script@v7 id: assets with: script: | diff --git a/.github/workflows/scripts/create_ppa.sh b/.github/workflows/scripts/create_ppa.sh deleted file mode 100755 index 40d7868611..0000000000 --- a/.github/workflows/scripts/create_ppa.sh +++ /dev/null @@ -1,74 +0,0 @@ -#!/usr/bin/env bash - -set -e -set -o pipefail - -readonly PROG_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" 
-readonly PACK_DIR="$(cd "${PROG_DIR}/../../.." && pwd)" - -# verify the following are set. -: "$PACKAGE_VERSION" -: "$PACKAGE_NAME" -: "$GITHUB_WORKSPACE" - -function main() { - - # import secrets needed to sign packages we build with debuild - import_gpg - - # vendor local dependencies. Otherwise these fail to be pulled in during - # the Launchpad build process - go mod vendor - - # set up debian user info. - export DEBEMAIL=$MAINTAINER_EMAIL - export DEBFULLNAME=$MAINTAINER - echo "creating package: ${PACKAGE_NAME}_${PACKAGE_VERSION}" - - # generate the skeleton of a debian package. - dh_make -p "${PACKAGE_NAME}_${PACKAGE_VERSION}" --single --native --copyright apache --email "${MAINTAINER_EMAIL}" -y - - # copy our templated configuration files. - cp "$PACK_DIR/.github/workflows/delivery/ubuntu/debian/"* debian/ - - echo "compat" - cat debian/compat - echo - - echo "changelog" - cat debian/changelog - echo - - echo "control" - cat debian/control - echo - - echo "rules" - cat debian/rules - echo - - echo "copyright" - cat debian/copyright - echo - - # Remove empty default files created by dh_make - rm debian/*.ex - rm debian/*.EX - rm debian/README.* - - # build a source based debian package, Ubuntu ONLY accepts source packages. - debuild -S - -} - -# import gpg keys from env -function import_gpg() { - # verify the following are set. - : "$GPG_PUBLIC_KEY" - : "$GPG_PRIVATE_KEY" - - gpg --import <(echo "$GPG_PUBLIC_KEY") - gpg --allow-secret-key-import --import <(echo "$GPG_PRIVATE_KEY") -} - -main diff --git a/CODEOWNERS b/CODEOWNERS index 3daccc65fa..64985732c3 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -1 +1 @@ -* @buildpacks/platform-maintainers +* @buildpacks/platform-maintainers @buildpacks/toc diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 1cdbc6aef1..4366d306bb 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -32,8 +32,9 @@ Running user acceptance on a pull request is just as critical as reviewing the c #### Downloading PR binaries 1. 
On GitHub's Pull Request view, click on the **Checks** tab. -2. On the top-right, click **Artifacts**. -3. Click on the zip file for the platform you are running. +2. On the left panel, click on the **build** step. +3. On the bottom, there is **Artifacts** section. +4. Click on the zip file for the platform you are running. #### Setup diff --git a/DEVELOPMENT.md b/DEVELOPMENT.md index d7882a4368..ea3a09b4be 100644 --- a/DEVELOPMENT.md +++ b/DEVELOPMENT.md @@ -26,6 +26,29 @@ Alternatively, you can use Gitpod to run pre-configured dev environment in the c * Symlinks - Some of our tests attempt to create symlinks. On Windows, this requires the [permission to be provided](https://stackoverflow.com/a/24353758). +### Testing GitHub actions on forks + +The pack release process involves chaining a series of GitHub actions together, such as: +* The "build" workflow, which creates: + * .tgz files containing the pack binaries and shasums for the .tgz files + * a draft release with the above artifacts +* The "delivery-docker" workflow, which builds and pushes OCI images containing the pack binary +* The "benchmark" workflow, which runs performance checks for each commit and uploads reports to GitHub Pages + +It can be rather cumbersome to test changes to these workflows, as they are heavily intertwined. Thus, we recommend forking the buildpacks/pack repository on GitHub and running through the entire release process end-to-end. 
+ +For the fork, it is necessary to complete the following preparations: + +* Add the following secrets: + * `DOCKER_PASSWORD` for the delivery-docker workflow, if not using ghcr.io + * `DOCKER_USERNAME` for the delivery-docker workflow, if not using ghcr.io + * `DEPLOY_KEY` for the release-merge workflow, as a SSH private key for repository access +* Enable the issues feature on the repository and create `status/triage` and `type/bug` labels for the check-latest-release workflow +* Create a branch named `gh-pages` for uploading benchmark reports for the benchmark workflow + +The `tools/test-fork.sh` script can be used to update the source code to reflect the state of the fork and disable workflows that should not run on the fork repository. +It can be invoked like so: `./tools/test-fork.sh ` + ## Tasks ### Building @@ -55,18 +78,22 @@ To run unit and integration tests: ```shell make unit ``` +Test output will be streamed to your terminal and also saved to the file +out/unit To run acceptance tests: ```shell make acceptance ``` +Test output will be streamed to your terminal and also saved to the file +out/acceptance Alternately, to run all tests: ```shell make test ``` -To run our full acceptance suite (including cross-compatibility for n-1 `pack` and `lifecycl`): +To run our full acceptance suite (including cross-compatibility for n-1 `pack` and `lifecycle`): ```shell make acceptance-all ``` @@ -100,15 +127,15 @@ make prepare-for-pr ### Acceptance Tests Some options users can provide to our acceptance tests are: -| ENV_VAR | Description | Default | -|--------------|------------------------------------------------------------------------|---------| -| ACCEPTANCE_SUITE_CONFIG | A set of configurations for how to run the acceptance tests, describing the version of `pack` used for testing, the version of `pack` used to create the builders used in the test, and the version of `lifecycle` binaries used to test with Github | `[{"pack": "current", "pack_create_builder": 
"current", "lifecycle": "default"}]'` | -| COMPILE_PACK_WITH_VERSION | Tell `pack` what version to consider itself | `dev` | -| GITHUB_TOKEN | A Github Token, used when downloading `pack` and `lifecycle` releases from Github during the test setup | "" | -| LIFECYCLE_IMAGE | Image reference to be used in untrusted builder workflows | buildpacksio/lifecycle: | -| LIFECYCLE_PATH | Path to a `.tgz` file filled with a set of `lifecycle` binaries | The Github release for the default version of lifecycle in `pack` | -| PACK_PATH | Path to a `pack` executable. | A compiled version of the current branch | -| PREVIOUS_LIFECYCLE_IMAGE | Image reference to be used in untrusted builder workflows, used to test compatibility of `pack` with the n-1 version of the `lifecycle` | buildpacksio/lifecycle:, buildpacksio/lifecycle: | -| PREVIOUS_LIFECYCLE_PATH | Path to a `.tgz` file filled with a set of `lifecycle` binaries, used to test compatibility of `pack` with the n-1 version of the `lifecycle` | The Github release for n-1 release of `lifecycle` | -| PREVIOUS_PACK_FIXTURES_PATH | Path to a set of fixtures, used to override the most up-to-date fixtures, in case of changed functionality | `acceptance/testdata/pack_previous_fixtures_overrides` | -| PREVIOUS_PACK_PATH | Path to a `pack` executable, used to test compatibility with n-1 version of `pack` | The most recent release from `pack`'s Github release | +| ENV_VAR | Description | Default | +|--------------|------------------------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------------| +| ACCEPTANCE_SUITE_CONFIG | A set of configurations for how to run the acceptance tests, describing the version of `pack` used for testing, the version of `pack` used to create the builders used in the test, and the version of `lifecycle` binaries used to test with Github | `[{"pack": "current", "pack_create_builder": "current", 
"lifecycle": "default"}]'` | +| COMPILE_PACK_WITH_VERSION | Tell `pack` what version to consider itself | `dev` | +| GITHUB_TOKEN | A Github Token, used when downloading `pack` and `lifecycle` releases from Github during the test setup | "" | +| LIFECYCLE_IMAGE | Image reference to be used in untrusted builder workflows | docker.io/buildpacksio/lifecycle: | +| LIFECYCLE_PATH | Path to a `.tgz` file filled with a set of `lifecycle` binaries | The Github release for the default version of lifecycle in `pack` | +| PACK_PATH | Path to a `pack` executable. | A compiled version of the current branch | +| PREVIOUS_LIFECYCLE_IMAGE | Image reference to be used in untrusted builder workflows, used to test compatibility of `pack` with the n-1 version of the `lifecycle` | docker.io/buildpacksio/lifecycle:, buildpacksio/lifecycle: | +| PREVIOUS_LIFECYCLE_PATH | Path to a `.tgz` file filled with a set of `lifecycle` binaries, used to test compatibility of `pack` with the n-1 version of the `lifecycle` | The Github release for n-1 release of `lifecycle` | +| PREVIOUS_PACK_FIXTURES_PATH | Path to a set of fixtures, used to override the most up-to-date fixtures, in case of changed functionality | `acceptance/testdata/pack_previous_fixtures_overrides` | +| PREVIOUS_PACK_PATH | Path to a `pack` executable, used to test compatibility with n-1 version of `pack` | The most recent release from `pack`'s Github release | diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000000..43f2c3974f --- /dev/null +++ b/Dockerfile @@ -0,0 +1,12 @@ +ARG base_image=gcr.io/distroless/static + +FROM golang:1.25 as builder +ARG pack_version +ENV PACK_VERSION=$pack_version +WORKDIR /app +COPY . . 
+RUN make build + +FROM ${base_image} +COPY --from=builder /app/out/pack /usr/local/bin/pack +ENTRYPOINT [ "/usr/local/bin/pack" ] diff --git a/Makefile b/Makefile index 33b04baa12..f7368c803e 100644 --- a/Makefile +++ b/Makefile @@ -33,10 +33,11 @@ TEST_TIMEOUT?=1200s UNIT_TIMEOUT?=$(TEST_TIMEOUT) NO_DOCKER?= +# Clean build flags clean_build := $(strip ${PACK_BUILD}) clean_sha := $(strip ${PACK_GITSHA1}) -# append build number and git sha to version, if not-empty +# Append build number and git sha to version, if not-empty ifneq ($(and $(clean_build),$(clean_sha)),) PACK_VERSION:=${PACK_VERSION}+git-${clean_sha}.build-${clean_build} else ifneq ($(clean_build),) @@ -50,53 +51,43 @@ export CGO_ENABLED=0 BINDIR:=/usr/bin/ +.DEFAULT_GOAL := build + # this target must be listed first in order for it to be a default target, # so that ubuntu_ppa's may be constructed using default build tools. +## build: Build the program build: out - @echo "> Building..." - $(GOCMD) build -ldflags "-s -w -X 'github.com/buildpacks/pack.Version=${PACK_VERSION}' -extldflags ${LDFLAGS}" -trimpath -o ./out/$(PACK_BIN) -a ./cmd/pack + @echo "=====> Building..." + $(GOCMD) build -ldflags "-s -w -X 'github.com/buildpacks/pack/pkg/client.Version=${PACK_VERSION}' -extldflags '${LDFLAGS}'" -trimpath -o ./out/$(PACK_BIN) -a +## all: Run clean, verify, test, and build operations all: clean verify test build -# used by apt-get install when installing ubuntu ppa. -# move pack binary onto a path location. -install: - mkdir -p ${DESTDIR}${BINDIR} - cp ./out/$(PACK_BIN) ${DESTDIR}${BINDIR}/ - -mod-tidy: - $(GOCMD) mod tidy - cd tools && $(GOCMD) mod tidy - -tidy: mod-tidy format - -package: out - tar czf .$/out$/$(ARCHIVE_NAME).tgz -C .$/out$/ $(PACK_BIN) - -install-mockgen: - @echo "> Installing mockgen..." - cd tools && $(GOCMD) install github.com/golang/mock/mockgen - -install-goimports: - @echo "> Installing goimports..." 
- cd tools && $(GOCMD) install golang.org/x/tools/cmd/goimports +## clean: Clean the workspace +clean: + @echo "=====> Cleaning workspace..." + @$(RMRF) .$/out benchmarks.test || (exit 0) +## format: Format the code format: install-goimports - @echo "> Formating code..." + @echo "=====> Formatting code..." @goimports -l -w -local ${PACKAGE_BASE} ${SRC} @go run tools/pedantic_imports/main.go ${PACKAGE_BASE} ${SRC} -install-golangci-lint: - @echo "> Installing golangci-lint..." - cd tools && $(GOCMD) install github.com/golangci/golangci-lint/cmd/golangci-lint +## generate: Generate mocks +generate: install-mockgen + @echo "=====> Generating mocks..." + $(GOCMD) generate ./... +## lint: Check the code lint: install-golangci-lint - @echo "> Linting code..." + @echo "=====> Linting code..." @golangci-lint run -c golangci.yaml +## test: Run unit and acceptance tests test: unit acceptance -# append coverage arguments +## unit: Append coverage arguments ifeq ($(TEST_COVERAGE), 1) unit: GOTESTFLAGS:=$(GOTESTFLAGS) -coverprofile=./out/tests/coverage-unit.txt -covermode=atomic endif @@ -107,29 +98,18 @@ unit: out @echo "> Running unit/integration tests..." $(GOCMD) test $(GOTESTFLAGS) -timeout=$(UNIT_TIMEOUT) ./... +## acceptance: Run acceptance tests acceptance: out - @echo "> Running acceptance tests..." + @echo "=====> Running acceptance tests..." $(GOCMD) test $(GOTESTFLAGS) -timeout=$(ACCEPTANCE_TIMEOUT) -tags=acceptance ./acceptance +## acceptance-all: Run all acceptance tests acceptance-all: export ACCEPTANCE_SUITE_CONFIG:=$(shell $(CAT) .$/acceptance$/testconfig$/all.json) acceptance-all: - @echo "> Running acceptance tests..." + @echo "=====> Running acceptance tests..." $(GOCMD) test $(GOTESTFLAGS) -timeout=$(ACCEPTANCE_TIMEOUT) -tags=acceptance ./acceptance -clean: - @echo "> Cleaning workspace..." - @$(RMRF) .$/out benchmarks.test || (exit 0) - -verify: verify-format lint - -generate: install-mockgen - @echo "> Generating mocks..." - $(GOCMD) generate ./... 
- -verify-format: install-goimports - @echo "> Verifying format..." - $(if $(shell goimports -l -local ${PACKAGE_BASE} ${SRC}), @echo ERROR: Format verification failed! && goimports ${GOIMPORTS_DIFF_OPTION} -local ${PACKAGE_BASE} ${SRC} && exit 1) - +## prepare-for-pr: Run clean, verify, and test operations and check for uncommitted changes prepare-for-pr: tidy verify test @git diff-index --quiet HEAD -- ||\ (echo "-----------------" &&\ @@ -141,18 +121,71 @@ prepare-for-pr: tidy verify test echo "-----------------\n" &&\ exit 0) +## verify: Run format and lint checks +verify: verify-format lint + +## verify-format: Verify the format +verify-format: install-goimports + @echo "=====> Verifying format..." + $(if $(shell goimports -l -local ${PACKAGE_BASE} ${SRC}), @echo ERROR: Format verification failed! && goimports ${GOIMPORTS_DIFF_OPTION} -local ${PACKAGE_BASE} ${SRC} && exit 1) + +## benchmark: Run benchmark tests benchmark: out - @echo "> Running Benchmarks" + @echo "=====> Running benchmarks" $(GOCMD) test -run=^$ -bench=. -benchtime=1s -benchmem -memprofile=./out/bench_mem.out -cpuprofile=./out/bench_cpu.out -tags=benchmarks ./benchmarks/ -v # NOTE: You can analyze the results, using go tool pprof. For instance, you can start a server to see a graph of the cpu usage by running # go tool pprof -http=":8082" out/bench_cpu.out. Alternatively, you can run go tool pprof, and in the ensuing cli, run # commands like top10 or web to dig down into the cpu and memory usage # For more, see https://blog.golang.org/pprof +## package: Package the program +package: out + tar czf .$/out$/$(ARCHIVE_NAME).tgz -C .$/out$/ $(PACK_BIN) + +## install: Install the program to the system +install: + mkdir -p ${DESTDIR}${BINDIR} + cp ./out/$(PACK_BIN) ${DESTDIR}${BINDIR}/ + +## install-mockgen: Used only by apt-get install when installing ubuntu ppa +install-mockgen: + @echo "=====> Installing mockgen..." 
+ cd tools && $(GOCMD) install github.com/golang/mock/mockgen + +## install-goimports: Install goimports dependency +install-goimports: + @echo "=====> Installing goimports..." + cd tools && $(GOCMD) install golang.org/x/tools/cmd/goimports + +## install-golangci-lint: Install golangci-lint dependency +install-golangci-lint: + @echo "=====> Installing golangci-lint..." + cd tools && $(GOCMD) install github.com/golangci/golangci-lint/v2/cmd/golangci-lint@v2.0.2 + +## mod-tidy: Tidy Go modules +mod-tidy: + $(GOCMD) mod tidy -compat=1.25 + cd tools && $(GOCMD) mod tidy -compat=1.25 + +## tidy: Tidy modules and format the code +tidy: mod-tidy format + # NOTE: Windows doesn't support `-p` +## out: Make a directory for output out: @mkdir out || (exit 0) mkdir out$/tests || (exit 0) - -.PHONY: clean build format imports lint test unit acceptance prepare-for-pr verify verify-format benchmark +## help: Display help information +help: Makefile + @echo "" + @echo "Usage:" + @echo "" + @echo " make [target]" + @echo "" + @echo "Targets:" + @echo "" + @awk -F ':|##' '/^[^\.%\t][^\t]*:.*##/{printf " \033[36m%-20s\033[0m %s\n", $$1, $$NF}' $(MAKEFILE_LIST) | sort + @sed -n 's/^##//p' ${MAKEFILE_LIST} | column -t -s ':' | sed -e 's/^/ /' + +.PHONY: clean build format imports lint test unit acceptance prepare-for-pr verify verify-format benchmark diff --git a/README.md b/README.md index 495172845d..9a2504eaa3 100644 --- a/README.md +++ b/README.md @@ -6,7 +6,7 @@ [![GoDoc](https://godoc.org/github.com/buildpacks/pack?status.svg)](https://godoc.org/github.com/buildpacks/pack) [![GitHub license](https://img.shields.io/github/license/buildpacks/pack)](https://github.com/buildpacks/pack/blob/main/LICENSE) [![CII Best Practices](https://bestpractices.coreinfrastructure.org/projects/4748/badge)](https://bestpractices.coreinfrastructure.org/projects/4748) -[![Slack](https://img.shields.io/badge/slack-join-ff69b4.svg?logo=slack)](https://slack.buildpacks.io/) 
+[![Slack](https://img.shields.io/badge/slack-join-ff69b4.svg?logo=slack)](https://slack.cncf.io/) [![Gitpod ready-to-code](https://img.shields.io/badge/Gitpod-ready--to--code-blue?logo=gitpod)](https://gitpod.io/#https://github.com/buildpacks/pack) `pack` makes it easy for... @@ -34,9 +34,9 @@ Check out the command line documentation [here][pack-docs] To learn more about the details, check out the [specs repository][specs]. -[app-dev]: https://buildpacks.io/docs/app-developer-guide/ -[bp-author]: https://buildpacks.io/docs/buildpack-author-guide/ -[operator]: https://buildpacks.io/docs/operator-guide/ +[app-dev]: https://buildpacks.io/docs/for-app-developers/ +[bp-author]: https://buildpacks.io/docs/for-buildpack-authors/ +[operator]: https://buildpacks.io/docs/for-platform-operators/ [buildpacks.io]: https://buildpacks.io/ [install-pack]: https://buildpacks.io/docs/install-pack/ [getting-started]: https://buildpacks.io/docs/app-journey diff --git a/acceptance/acceptance_test.go b/acceptance/acceptance_test.go index 38f559eec8..c208876fa4 100644 --- a/acceptance/acceptance_test.go +++ b/acceptance/acceptance_test.go @@ -1,5 +1,4 @@ //go:build acceptance -// +build acceptance package acceptance @@ -10,8 +9,6 @@ import ( "encoding/hex" "encoding/json" "fmt" - "io/ioutil" - "math/rand" "os" "path/filepath" "regexp" @@ -20,22 +17,26 @@ import ( "testing" "time" - dockertypes "github.com/docker/docker/api/types" - "github.com/docker/docker/client" - "github.com/ghodss/yaml" + "github.com/buildpacks/imgutil" + "github.com/buildpacks/lifecycle/api" "github.com/google/go-containerregistry/pkg/name" + v1 "github.com/google/go-containerregistry/pkg/v1" + "github.com/google/go-containerregistry/pkg/v1/types" + "github.com/moby/moby/client" "github.com/pelletier/go-toml" "github.com/sclevine/spec" "github.com/sclevine/spec/report" + yaml "gopkg.in/yaml.v3" "github.com/buildpacks/pack/acceptance/assertions" "github.com/buildpacks/pack/acceptance/buildpacks" 
"github.com/buildpacks/pack/acceptance/config" "github.com/buildpacks/pack/acceptance/invoke" "github.com/buildpacks/pack/acceptance/managers" - "github.com/buildpacks/pack/internal/cache" "github.com/buildpacks/pack/internal/style" "github.com/buildpacks/pack/pkg/archive" + "github.com/buildpacks/pack/pkg/cache" + "github.com/buildpacks/pack/pkg/logging" h "github.com/buildpacks/pack/testhelpers" ) @@ -45,7 +46,7 @@ const ( ) var ( - dockerCli client.CommonAPIClient + dockerCli *client.Client registryConfig *h.TestRegistryConfig suiteManager *SuiteManager imageManager managers.ImageManager @@ -56,11 +57,10 @@ func TestAcceptance(t *testing.T) { var err error h.RequireDocker(t) - rand.Seed(time.Now().UTC().UnixNano()) assert := h.NewAssertionManager(t) - dockerCli, err = client.NewClientWithOpts(client.FromEnv, client.WithVersion("1.38")) + dockerCli, err = client.New(client.FromEnv) assert.Nil(err) imageManager = managers.NewImageManager(t, dockerCli) @@ -127,13 +127,13 @@ func testWithoutSpecificBuilderRequirement( var ( pack *invoke.PackInvoker assert = h.NewAssertionManager(t) - buildpackManager buildpacks.BuildpackManager + buildpackManager buildpacks.BuildModuleManager ) it.Before(func() { pack = invoke.NewPackInvoker(t, assert, packConfig, registryConfig.DockerConfigDir) pack.EnableExperimental() - buildpackManager = buildpacks.NewBuildpackManager(t, assert) + buildpackManager = buildpacks.NewBuildModuleManager(t, assert) }) it.After(func() { @@ -171,26 +171,26 @@ func testWithoutSpecificBuilderRequirement( when("package", func() { var ( tmpDir string - buildpackManager buildpacks.BuildpackManager + buildpackManager buildpacks.BuildModuleManager simplePackageConfigFixtureName = "package.toml" ) it.Before(func() { var err error - tmpDir, err = ioutil.TempDir("", "buildpack-package-tests") + tmpDir, err = os.MkdirTemp("", "buildpack-package-tests") assert.Nil(err) - buildpackManager = buildpacks.NewBuildpackManager(t, assert) - 
buildpackManager.PrepareBuildpacks(tmpDir, buildpacks.SimpleLayersParent, buildpacks.SimpleLayers) + buildpackManager = buildpacks.NewBuildModuleManager(t, assert) + buildpackManager.PrepareBuildModules(tmpDir, buildpacks.BpSimpleLayersParent, buildpacks.BpSimpleLayers) }) it.After(func() { assert.Nil(os.RemoveAll(tmpDir)) }) - generateAggregatePackageToml := func(buildpackURI, nestedPackageName, os string) string { + generateAggregatePackageToml := func(buildpackURI, nestedPackageName, operatingSystem string) string { t.Helper() - packageTomlFile, err := ioutil.TempFile(tmpDir, "package_aggregate-*.toml") + packageTomlFile, err := os.CreateTemp(tmpDir, "package_aggregate-*.toml") assert.Nil(err) pack.FixtureManager().TemplateFixtureToFile( @@ -199,7 +199,26 @@ func testWithoutSpecificBuilderRequirement( map[string]interface{}{ "BuildpackURI": buildpackURI, "PackageName": nestedPackageName, - "OS": os, + "OS": operatingSystem, + }, + ) + + assert.Nil(packageTomlFile.Close()) + + return packageTomlFile.Name() + } + + generateMultiPlatformCompositeBuildpackPackageToml := func(buildpackURI, dependencyURI string) string { + t.Helper() + packageTomlFile, err := os.CreateTemp(tmpDir, "package_multi_platform-*.toml") + assert.Nil(err) + + pack.FixtureManager().TemplateFixtureToFile( + "package_multi_platform.toml", + packageTomlFile, + map[string]interface{}{ + "BuildpackURI": buildpackURI, + "PackageName": dependencyURI, }, ) @@ -236,17 +255,17 @@ func testWithoutSpecificBuilderRequirement( packageName, aggregatePackageToml, buildpacks.WithRequiredBuildpacks( - buildpacks.SimpleLayersParent, + buildpacks.BpSimpleLayersParent, buildpacks.NewPackageImage( t, pack, nestedPackageName, packageTomlPath, - buildpacks.WithRequiredBuildpacks(buildpacks.SimpleLayers), + buildpacks.WithRequiredBuildpacks(buildpacks.BpSimpleLayers), ), ), ) - buildpackManager.PrepareBuildpacks(tmpDir, packageBuildpack) + buildpackManager.PrepareBuildModules(tmpDir, packageBuildpack) defer 
imageManager.CleanupImages(nestedPackageName, packageName) assertImage.ExistsLocally(nestedPackageName) @@ -254,34 +273,210 @@ func testWithoutSpecificBuilderRequirement( }) when("--publish", func() { - it("publishes image to registry", func() { - packageTomlPath := generatePackageTomlWithOS(t, assert, pack, tmpDir, simplePackageConfigFixtureName, imageManager.HostOS()) - nestedPackageName := registryConfig.RepoName("test/package-" + h.RandString(10)) + it.Before(func() { + // used to avoid authentication issues with the local registry + os.Setenv("DOCKER_CONFIG", registryConfig.DockerConfigDir) + }) - nestedPackage := buildpacks.NewPackageImage( - t, - pack, - nestedPackageName, - packageTomlPath, - buildpacks.WithRequiredBuildpacks(buildpacks.SimpleLayers), - buildpacks.WithPublish(), - ) - buildpackManager.PrepareBuildpacks(tmpDir, nestedPackage) + when("no --targets", func() { + it("publishes image to registry", func() { + packageTomlPath := generatePackageTomlWithOS(t, assert, pack, tmpDir, simplePackageConfigFixtureName, imageManager.HostOS()) + nestedPackageName := registryConfig.RepoName("test/package-" + h.RandString(10)) - aggregatePackageToml := generateAggregatePackageToml("simple-layers-parent-buildpack.tgz", nestedPackageName, imageManager.HostOS()) - packageName := registryConfig.RepoName("test/package-" + h.RandString(10)) + nestedPackage := buildpacks.NewPackageImage( + t, + pack, + nestedPackageName, + packageTomlPath, + buildpacks.WithRequiredBuildpacks(buildpacks.BpSimpleLayers), + buildpacks.WithPublish(), + ) + buildpackManager.PrepareBuildModules(tmpDir, nestedPackage) - output := pack.RunSuccessfully( - "buildpack", "package", packageName, - "-c", aggregatePackageToml, - "--publish", - ) + aggregatePackageToml := generateAggregatePackageToml("simple-layers-parent-buildpack.tgz", nestedPackageName, imageManager.HostOS()) + packageName := registryConfig.RepoName("test/package-" + h.RandString(10)) - defer 
imageManager.CleanupImages(packageName) - assertions.NewOutputAssertionManager(t, output).ReportsPackagePublished(packageName) + output := pack.RunSuccessfully( + "buildpack", "package", packageName, + "-c", aggregatePackageToml, + "--publish", + ) + + defer imageManager.CleanupImages(packageName) + assertions.NewOutputAssertionManager(t, output).ReportsPackagePublished(packageName) + + assertImage.NotExistsLocally(packageName) + assertImage.CanBePulledFromRegistry(packageName) + }) + }) + + when("--targets", func() { + var packageName string + + it.Before(func() { + h.SkipIf(t, !pack.SupportsFeature(invoke.MultiPlatformBuildersAndBuildPackages), "multi-platform builders and buildpack packages are available since 0.34.0") + packageName = registryConfig.RepoName("simple-multi-platform-buildpack" + h.RandString(8)) + }) + + when("simple buildpack on disk", func() { + var path string + + it.Before(func() { + // create a simple buildpack on disk + sourceDir := filepath.Join("testdata", "mock_buildpacks") + path = filepath.Join(tmpDir, "simple-layers-buildpack") + err := buildpacks.BpFolderSimpleLayers.Prepare(sourceDir, tmpDir) + h.AssertNil(t, err) + }) + + it("publishes images for each requested target to the registry and creates an image index", func() { + output := pack.RunSuccessfully( + "buildpack", "package", packageName, + "--path", path, + "--publish", + "--target", "linux/amd64", + "--target", "linux/arm64", + "--target", "windows/amd64", + ) + + defer imageManager.CleanupImages(packageName) + assertions.NewOutputAssertionManager(t, output).ReportsPackagePublished(packageName) + + assertImage.NotExistsLocally(packageName) + assertImage.CanBePulledFromRegistry(packageName) + + assertions.NewOutputAssertionManager(t, output).ReportsSuccessfulIndexPushed(packageName) + h.AssertRemoteImageIndex(t, packageName, types.OCIImageIndex, 3) + }) + }) + + when("composite buildpack on disk", func() { + var packageTomlPath string + + when("dependencies are not available in 
a registry", func() { + it.Before(func() { + // creates a composite buildpack on disk + sourceDir := filepath.Join("testdata", "mock_buildpacks") + + err := buildpacks.MetaBpDependency.Prepare(sourceDir, tmpDir) + h.AssertNil(t, err) + + err = buildpacks.MetaBpFolder.Prepare(sourceDir, tmpDir) + h.AssertNil(t, err) + + packageTomlPath = filepath.Join(tmpDir, "meta-buildpack", "package.toml") + }) + + it("errors with a descriptive message", func() { + output, err := pack.Run( + "buildpack", "package", packageName, + "--config", packageTomlPath, + "--publish", + "--target", "linux/amd64", + "--target", "linux/arm64", + "--target", "windows/amd64", + "--verbose", + ) + assert.NotNil(err) + h.AssertContains(t, output, "uri '../meta-buildpack-dependency' is not allowed when creating a composite multi-platform buildpack; push your dependencies to a registry and use 'docker://' instead") + }) + }) - assertImage.NotExistsLocally(packageName) - assertImage.CanBePulledFromRegistry(packageName) + when("dependencies are available in a registry", func() { + var depPackageName string + + it.Before(func() { + // multi-platform composite buildpacks require the dependencies to be available in a registry + // let's push it + + // first creates the simple buildpack dependency on disk + depSourceDir := filepath.Join("testdata", "mock_buildpacks") + depPath := filepath.Join(tmpDir, "meta-buildpack-dependency") + err := buildpacks.MetaBpDependency.Prepare(depSourceDir, tmpDir) + h.AssertNil(t, err) + + // push the dependency to a registry + depPackageName = registryConfig.RepoName("simple-multi-platform-buildpack" + h.RandString(8)) + output := pack.RunSuccessfully( + "buildpack", "package", depPackageName, + "--path", depPath, + "--publish", + "--target", "linux/amd64", + "--target", "linux/arm64", + "--target", "windows/amd64", + ) + assertions.NewOutputAssertionManager(t, output).ReportsPackagePublished(depPackageName) + assertImage.CanBePulledFromRegistry(depPackageName) + 
assertions.NewOutputAssertionManager(t, output).ReportsSuccessfulIndexPushed(depPackageName) + + // let's prepare the composite buildpack to use the simple buildpack dependency prepared above + packageTomlPath = generateMultiPlatformCompositeBuildpackPackageToml(".", depPackageName) + + // We need to copy the buildpack toml to the folder where the packageTomlPath was created + packageTomlDir := filepath.Dir(packageTomlPath) + sourceDir := filepath.Join("testdata", "mock_buildpacks", "meta-buildpack", "buildpack.toml") + h.CopyFile(t, sourceDir, filepath.Join(packageTomlDir, "buildpack.toml")) + }) + + it("publishes images for each requested target to the registry and creates an image index", func() { + output := pack.RunSuccessfully( + "buildpack", "package", packageName, + "--config", packageTomlPath, + "--publish", + "--target", "linux/amd64", + "--target", "linux/arm64", + "--target", "windows/amd64", + ) + + defer imageManager.CleanupImages(packageName) + assertions.NewOutputAssertionManager(t, output).ReportsPackagePublished(packageName) + + assertImage.NotExistsLocally(packageName) + assertImage.CanBePulledFromRegistry(packageName) + + assertions.NewOutputAssertionManager(t, output).ReportsSuccessfulIndexPushed(packageName) + h.AssertRemoteImageIndex(t, packageName, types.OCIImageIndex, 3) + }) + }) + }) + }) + + when("new multi-platform folder structure is used", func() { + var packageName string + + it.Before(func() { + h.SkipIf(t, !pack.SupportsFeature(invoke.MultiPlatformBuildersAndBuildPackages), "multi-platform builders and buildpack packages are available since 0.34.0") + packageName = registryConfig.RepoName("simple-multi-platform-buildpack" + h.RandString(8)) + }) + + when("simple buildpack on disk", func() { + var path string + + it.Before(func() { + // create a simple buildpack on disk + sourceDir := filepath.Join("testdata", "mock_buildpacks") + path = filepath.Join(tmpDir, "multi-platform-buildpack") + err := 
buildpacks.MultiPlatformFolderBP.Prepare(sourceDir, tmpDir) + h.AssertNil(t, err) + }) + + it("publishes images for each target specified in buildpack.toml to the registry and creates an image index", func() { + output := pack.RunSuccessfully( + "buildpack", "package", packageName, + "--path", path, + "--publish", "--verbose", + ) + + defer imageManager.CleanupImages(packageName) + assertions.NewOutputAssertionManager(t, output).ReportsPackagePublished(packageName) + + assertImage.NotExistsLocally(packageName) + assertImage.CanBePulledFromRegistry(packageName) + + assertions.NewOutputAssertionManager(t, output).ReportsSuccessfulIndexPushed(packageName) + h.AssertRemoteImageIndex(t, packageName, types.OCIImageIndex, 3) + }) + }) }) }) @@ -294,9 +489,9 @@ func testWithoutSpecificBuilderRequirement( pack, nestedPackageName, packageTomlPath, - buildpacks.WithRequiredBuildpacks(buildpacks.SimpleLayers), + buildpacks.WithRequiredBuildpacks(buildpacks.BpSimpleLayers), ) - buildpackManager.PrepareBuildpacks(tmpDir, nestedPackage) + buildpackManager.PrepareBuildModules(tmpDir, nestedPackage) defer imageManager.CleanupImages(nestedPackageName) aggregatePackageToml := generateAggregatePackageToml("simple-layers-parent-buildpack.tgz", nestedPackageName, imageManager.HostOS()) @@ -319,9 +514,9 @@ func testWithoutSpecificBuilderRequirement( nestedPackageName, packageTomlPath, buildpacks.WithPublish(), - buildpacks.WithRequiredBuildpacks(buildpacks.SimpleLayers), + buildpacks.WithRequiredBuildpacks(buildpacks.BpSimpleLayers), ) - buildpackManager.PrepareBuildpacks(tmpDir, nestedPackage) + buildpackManager.PrepareBuildModules(tmpDir, nestedPackage) aggregatePackageToml := generateAggregatePackageToml("simple-layers-parent-buildpack.tgz", nestedPackageName, imageManager.HostOS()) packageName := registryConfig.RepoName("test/package-" + h.RandString(10)) @@ -402,7 +597,7 @@ func testWithoutSpecificBuilderRequirement( it.Before(func() { var err error - tmpDir, err = 
ioutil.TempDir("", "buildpack-inspect-tests") + tmpDir, err = os.MkdirTemp("", "buildpack-inspect-tests") assert.Nil(err) }) @@ -426,12 +621,12 @@ func testWithoutSpecificBuilderRequirement( packageFileLocation, packageTomlPath, buildpacks.WithRequiredBuildpacks( - buildpacks.FolderSimpleLayersParent, - buildpacks.FolderSimpleLayers, + buildpacks.BpFolderSimpleLayersParent, + buildpacks.BpFolderSimpleLayers, ), ) - buildpackManager.PrepareBuildpacks(tmpDir, packageFile) + buildpackManager.PrepareBuildModules(tmpDir, packageFile) expectedOutput := pack.FixtureManager().TemplateFixture( "inspect_buildpack_output.txt", @@ -458,13 +653,13 @@ func testWithoutSpecificBuilderRequirement( packageImageName, packageTomlPath, buildpacks.WithRequiredBuildpacks( - buildpacks.FolderSimpleLayersParent, - buildpacks.FolderSimpleLayers, + buildpacks.BpFolderSimpleLayersParent, + buildpacks.BpFolderSimpleLayers, ), ) defer imageManager.CleanupImages(packageImageName) - buildpackManager.PrepareBuildpacks(tmpDir, packageImage) + buildpackManager.PrepareBuildModules(tmpDir, packageImage) expectedOutput := pack.FixtureManager().TemplateFixture( "inspect_buildpack_output.txt", @@ -499,7 +694,7 @@ func testWithoutSpecificBuilderRequirement( when("config", func() { when("default-builder", func() { it("sets the default builder in ~/.pack/config.toml", func() { - builderName := "paketobuildpacks/builder:base" + builderName := "paketobuildpacks/builder-jammy-base" output := pack.RunSuccessfully("config", "default-builder", builderName) assertions.NewOutputAssertionManager(t, output).ReportsSettingDefaultBuilder(builderName) @@ -577,12 +772,16 @@ func testWithoutSpecificBuilderRequirement( when("report", func() { when("default builder is set", func() { it("redacts default builder", func() { - pack.RunSuccessfully("config", "default-builder", "paketobuildpacks/builder:base") + pack.RunSuccessfully("config", "default-builder", "paketobuildpacks/builder-jammy-base") output := 
pack.RunSuccessfully("report") - version := pack.Version() + layoutRepoDir := filepath.Join(pack.Home(), "layout-repo") + if runtime.GOOS == "windows" { + layoutRepoDir = strings.ReplaceAll(layoutRepoDir, `\`, `\\`) + } + expectedOutput := pack.FixtureManager().TemplateFixture( "report_output.txt", map[string]interface{}{ @@ -590,31 +789,178 @@ func testWithoutSpecificBuilderRequirement( "Version": version, "OS": runtime.GOOS, "Arch": runtime.GOARCH, + "LayoutRepoDir": layoutRepoDir, }, ) assert.Equal(output, expectedOutput) }) it("explicit mode doesn't redact", func() { - pack.RunSuccessfully("config", "default-builder", "paketobuildpacks/builder:base") + pack.RunSuccessfully("config", "default-builder", "paketobuildpacks/builder-jammy-base") output := pack.RunSuccessfully("report", "--explicit") - version := pack.Version() + layoutRepoDir := filepath.Join(pack.Home(), "layout-repo") + if runtime.GOOS == "windows" { + layoutRepoDir = strings.ReplaceAll(layoutRepoDir, `\`, `\\`) + } + expectedOutput := pack.FixtureManager().TemplateFixture( "report_output.txt", map[string]interface{}{ - "DefaultBuilder": "paketobuildpacks/builder:base", + "DefaultBuilder": "paketobuildpacks/builder-jammy-base", "Version": version, "OS": runtime.GOOS, "Arch": runtime.GOARCH, + "LayoutRepoDir": layoutRepoDir, }, ) assert.Equal(output, expectedOutput) }) }) }) + + when("manifest", func() { + var ( + indexRepoName string + repoName1 string + repoName2 string + indexLocalPath string + tmpDir string + err error + ) + + it.Before(func() { + h.SkipIf(t, !pack.SupportsFeature(invoke.ManifestCommands), "pack manifest commands are available since 0.34.0") + + // local storage path + tmpDir, err = os.MkdirTemp("", "manifest-commands-test") + assert.Nil(err) + os.Setenv("XDG_RUNTIME_DIR", tmpDir) + + // manifest commands are experimental + pack.EnableExperimental() + + // used to avoid authentication issues with the local registry + os.Setenv("DOCKER_CONFIG", registryConfig.DockerConfigDir) + 
}) + + it.After(func() { + assert.Succeeds(os.RemoveAll(tmpDir)) + }) + + when("create", func() { + it.Before(func() { + it.Before(func() { + indexRepoName = registryConfig.RepoName(h.NewRandomIndexRepoName()) + + // Manifest 1 + repoName1 = fmt.Sprintf("%s:%s", indexRepoName, "busybox-amd64") + h.CreateRemoteImage(t, indexRepoName, "busybox-amd64", "busybox@sha256:a236a6469768c17ca1a6ac81a35fe6fbc1efd76b0dcdf5aebb1cf5f0774ee539") + + // Manifest 2 + repoName2 = fmt.Sprintf("%s:%s", indexRepoName, "busybox-arm64") + h.CreateRemoteImage(t, indexRepoName, "busybox-arm64", "busybox@sha256:0bcc1b827b855c65eaf6e031e894e682b6170160b8a676e1df7527a19d51fb1a") + }) + }) + when("--publish", func() { + it("creates and push the index to a remote registry", func() { + output := pack.RunSuccessfully("manifest", "create", "--publish", indexRepoName, repoName1, repoName2) + assertions.NewOutputAssertionManager(t, output).ReportsSuccessfulIndexPushed(indexRepoName) + h.AssertRemoteImageIndex(t, indexRepoName, types.OCIImageIndex, 2) + }) + }) + + when("no --publish", func() { + it("creates the index locally", func() { + output := pack.RunSuccessfully("manifest", "create", indexRepoName, repoName1, repoName2) + assertions.NewOutputAssertionManager(t, output).ReportsSuccessfulIndexLocallyCreated(indexRepoName) + + indexLocalPath = filepath.Join(tmpDir, imgutil.MakeFileSafeName(indexRepoName)) + index := h.ReadIndexManifest(t, indexLocalPath) + h.AssertEq(t, len(index.Manifests), 2) + h.AssertEq(t, index.MediaType, types.OCIImageIndex) + }) + }) + }) + + when("index is already created", func() { + var digest v1.Hash + + it.Before(func() { + indexRepoName = registryConfig.RepoName(h.NewRandomIndexRepoName()) + + // Manifest 1 + repoName1 = fmt.Sprintf("%s:%s", indexRepoName, "busybox-amd64") + image1 := h.CreateRemoteImage(t, indexRepoName, "busybox-amd64", "busybox@sha256:a236a6469768c17ca1a6ac81a35fe6fbc1efd76b0dcdf5aebb1cf5f0774ee539") + digest, err = image1.Digest() + 
assert.Nil(err) + + // Manifest 2 + repoName2 = fmt.Sprintf("%s:%s", indexRepoName, "busybox-arm64") + h.CreateRemoteImage(t, indexRepoName, "busybox-arm64", "busybox@sha256:0bcc1b827b855c65eaf6e031e894e682b6170160b8a676e1df7527a19d51fb1a") + + // create an index locally + pack.RunSuccessfully("manifest", "create", indexRepoName, repoName1) + indexLocalPath = filepath.Join(tmpDir, imgutil.MakeFileSafeName(indexRepoName)) + }) + + when("add", func() { + it("adds the manifest to the index", func() { + output := pack.RunSuccessfully("manifest", "add", indexRepoName, repoName2) + assertions.NewOutputAssertionManager(t, output).ReportsSuccessfulManifestAddedToIndex(repoName2) + + index := h.ReadIndexManifest(t, indexLocalPath) + h.AssertEq(t, len(index.Manifests), 2) + h.AssertEq(t, index.MediaType, types.OCIImageIndex) + }) + }) + + when("remove", func() { + it("removes the index from local storage", func() { + output := pack.RunSuccessfully("manifest", "remove", indexRepoName) + assertions.NewOutputAssertionManager(t, output).ReportsSuccessfulIndexDeleted() + + h.AssertPathDoesNotExists(t, indexLocalPath) + }) + }) + + when("annotate", func() { + it("adds annotations to the manifest in the index", func() { + output := pack.RunSuccessfully("manifest", "annotate", indexRepoName, repoName1, "--annotations", "foo=bar") + assertions.NewOutputAssertionManager(t, output).ReportsSuccessfulIndexAnnotated(repoName1, indexRepoName) + + index := h.ReadIndexManifest(t, indexLocalPath) + h.AssertEq(t, len(index.Manifests), 1) + h.AssertEq(t, len(index.Manifests[0].Annotations), 1) + }) + }) + + when("rm", func() { + it.Before(func() { + // we need to point to the manifest digest we want to delete + repoName1 = fmt.Sprintf("%s@%s", repoName1, digest.String()) + }) + + it("removes the manifest from the index", func() { + output := pack.RunSuccessfully("manifest", "rm", indexRepoName, repoName1) + assertions.NewOutputAssertionManager(t, 
output).ReportsSuccessfulRemoveManifestFromIndex(indexRepoName) + + index := h.ReadIndexManifest(t, indexLocalPath) + h.AssertEq(t, len(index.Manifests), 0) + }) + }) + + when("push", func() { + it("pushes the index to a remote registry", func() { + output := pack.RunSuccessfully("manifest", "push", indexRepoName) + assertions.NewOutputAssertionManager(t, output).ReportsSuccessfulIndexPushed(indexRepoName) + h.AssertRemoteImageIndex(t, indexRepoName, types.OCIImageIndex, 1) + }) + }) + }) + }) } func testAcceptance( @@ -626,8 +972,8 @@ func testAcceptance( ) { var ( pack, createBuilderPack *invoke.PackInvoker - buildpackManager buildpacks.BuildpackManager - bpDir = buildpacksDir(lifecycle.EarliestBuildpackAPIVersion()) + buildpackManager buildpacks.BuildModuleManager + bpDir = buildModulesDir() assert = h.NewAssertionManager(t) ) @@ -638,10 +984,9 @@ func testAcceptance( createBuilderPack = invoke.NewPackInvoker(t, assert, createBuilderPackConfig, registryConfig.DockerConfigDir) createBuilderPack.EnableExperimental() - buildpackManager = buildpacks.NewBuildpackManager( + buildpackManager = buildpacks.NewBuildModuleManager( t, assert, - buildpacks.WithBuildpackAPIVersion(lifecycle.EarliestBuildpackAPIVersion()), ) }) @@ -706,73 +1051,453 @@ func testAcceptance( }) when("complex builder", func() { - it.Before(func() { - // create our nested builder - h.SkipIf(t, imageManager.HostOS() == "windows", "These tests are not yet compatible with Windows-based containers") - - // create a task, handled by a 'task manager' which executes our pack commands during tests. 
- // looks like this is used to de-dup tasks - key := taskKey( - "create-complex-builder", - append( - []string{runImageMirror, createBuilderPackConfig.Path(), lifecycle.Identifier()}, - createBuilderPackConfig.FixturePaths()..., - )..., - ) + when("builder has duplicate buildpacks", func() { + it.Before(func() { + // create our nested builder + h.SkipIf(t, imageManager.HostOS() == "windows", "These tests are not yet compatible with Windows-based containers") - value, err := suiteManager.RunTaskOnceString(key, func() (string, error) { - return createComplexBuilder( - t, - assert, - createBuilderPack, - lifecycle, - buildpackManager, - runImageMirror, + // create a task, handled by a 'task manager' which executes our pack commands during tests. + // looks like this is used to de-dup tasks + key := taskKey( + "create-complex-builder", + append( + []string{runImageMirror, createBuilderPackConfig.Path(), lifecycle.Identifier()}, + createBuilderPackConfig.FixturePaths()..., + )..., ) - }) - assert.Nil(err) - // register task to be run to 'clean up' a task - suiteManager.RegisterCleanUp("clean-"+key, func() error { - imageManager.CleanupImages(value) - return nil + value, err := suiteManager.RunTaskOnceString(key, func() (string, error) { + return createComplexBuilder( + t, + assert, + createBuilderPack, + lifecycle, + buildpackManager, + runImageMirror, + ) + }) + assert.Nil(err) + + // register task to be run to 'clean up' a task + suiteManager.RegisterCleanUp("clean-"+key, func() error { + imageManager.CleanupImages(value) + return nil + }) + builderName = value + + output := pack.RunSuccessfully( + "config", "run-image-mirrors", "add", "pack-test/run", "--mirror", "some-registry.com/pack-test/run1") + assertOutput := assertions.NewOutputAssertionManager(t, output) + assertOutput.ReportsSuccesfulRunImageMirrorsAdd("pack-test/run", "some-registry.com/pack-test/run1") }) - builderName = value - output := pack.RunSuccessfully( - "config", "run-image-mirrors", "add", 
"pack-test/run", "--mirror", "some-registry.com/pack-test/run1") - assertOutput := assertions.NewOutputAssertionManager(t, output) - assertOutput.ReportsSuccesfulRunImageMirrorsAdd("pack-test/run", "some-registry.com/pack-test/run1") - }) - when("builder has duplicate buildpacks", func() { it("buildpack layers have no duplication", func() { assertImage.DoesNotHaveDuplicateLayers(builderName) }) }) - }) - when("builder.toml is invalid", func() { - it("displays an error", func() { - builderConfigPath := createBuilderPack.FixtureManager().FixtureLocation("invalid_builder.toml") + when("builder has extensions", func() { + it.Before(func() { + h.SkipIf(t, !createBuilderPack.SupportsFeature(invoke.BuildImageExtensions), "") + h.SkipIf(t, !pack.SupportsFeature(invoke.BuildImageExtensions), "") + h.SkipIf(t, !lifecycle.SupportsFeature(config.BuildImageExtensions), "") + // create a task, handled by a 'task manager' which executes our pack commands during tests. + // looks like this is used to de-dup tasks + key := taskKey( + "create-builder-with-extensions", + append( + []string{runImageMirror, createBuilderPackConfig.Path(), lifecycle.Identifier()}, + createBuilderPackConfig.FixturePaths()..., + )..., + ) - output, err := createBuilderPack.Run( - "builder", "create", "some-builder:build", - "--config", builderConfigPath, - ) + value, err := suiteManager.RunTaskOnceString(key, func() (string, error) { + return createBuilderWithExtensions( + t, + assert, + createBuilderPack, + lifecycle, + buildpackManager, + runImageMirror, + ) + }) + assert.Nil(err) - assert.NotNil(err) - assertOutput := assertions.NewOutputAssertionManager(t, output) - assertOutput.ReportsInvalidBuilderToml() - }) - }) + // register task to be run to 'clean up' a task + suiteManager.RegisterCleanUp("clean-"+key, func() error { + imageManager.CleanupImages(value) + return nil + }) + builderName = value + }) - when("build", func() { - var repo, repoName string + it("creates builder", func() { + if 
imageManager.HostOS() != "windows" { + // Linux containers (including Linux containers on Windows) + extSimpleLayersDiffID := "sha256:d24758b8b75b13292746fe7a06666f28a9499da31826a60afe6ee6b8cba29b73" + extReadEnvDiffID := "sha256:43072b16e96564a4dd6bd2e74c55c3c94af78cf99d869cab1e62c873e1fa6780" + bpSimpleLayersDiffID := "sha256:ade9da86859fa4ea50a513757f9b242bf1038667abf92dad3d018974a17f0ea7" + bpReadEnvDiffID := "sha256:db0797077ba8deff7054ab5578133b8f0206b6393de34b5bfd795cf50f6afdbd" + // extensions + assertImage.HasLabelContaining(builderName, "io.buildpacks.extension.layers", `{"read/env":{"read-env-version":{"api":"0.9","layerDiffID":"`+extReadEnvDiffID+`","name":"Read Env Extension"}},"simple/layers":{"simple-layers-version":{"api":"0.7","layerDiffID":"`+extSimpleLayersDiffID+`","name":"Simple Layers Extension"}}}`) + assertImage.HasLabelContaining(builderName, "io.buildpacks.buildpack.order-extensions", `[{"group":[{"id":"read/env","version":"read-env-version"},{"id":"simple/layers","version":"simple-layers-version"}]}]`) + // buildpacks + assertImage.HasLabelContaining(builderName, "io.buildpacks.buildpack.layers", `{"read/env":{"read-env-version":{"api":"0.7","stacks":[{"id":"pack.test.stack"}],"layerDiffID":"`+bpReadEnvDiffID+`","name":"Read Env Buildpack"}},"simple/layers":{"simple-layers-version":{"api":"0.7","stacks":[{"id":"pack.test.stack"}],"layerDiffID":"`+bpSimpleLayersDiffID+`","name":"Simple Layers Buildpack"}}}`) + assertImage.HasLabelContaining(builderName, "io.buildpacks.buildpack.order", `[{"group":[{"id":"read/env","version":"read-env-version","optional":true},{"id":"simple/layers","version":"simple-layers-version","optional":true}]}]`) + } + }) - it.Before(func() { - repo = "some-org/" + h.RandString(10) - repoName = registryConfig.RepoName(repo) - pack.JustRunSuccessfully("config", "lifecycle-image", lifecycle.Image()) + when("build", func() { + var repo, repoName string + + it.Before(func() { + h.SkipIf(t, imageManager.HostOS() == 
"windows", "") + + repo = "some-org/" + h.RandString(10) + repoName = registryConfig.RepoName(repo) + pack.JustRunSuccessfully("config", "lifecycle-image", lifecycle.Image()) + }) + + it.After(func() { + h.SkipIf(t, imageManager.HostOS() == "windows", "") + + imageManager.CleanupImages(repoName) + ref, err := name.ParseReference(repoName, name.WeakValidation) + assert.Nil(err) + cacheImage := cache.NewImageCache(ref, dockerCli) + logger := logging.NewSimpleLogger(&bytes.Buffer{}) + buildCacheVolume, _ := cache.NewVolumeCache(ref, cache.CacheInfo{}, "build", dockerCli, logger) + launchCacheVolume, _ := cache.NewVolumeCache(ref, cache.CacheInfo{}, "launch", dockerCli, logger) + cacheImage.Clear(context.TODO()) + buildCacheVolume.Clear(context.TODO()) + launchCacheVolume.Clear(context.TODO()) + }) + + when("there are build image extensions", func() { + it("uses the 5 phases, and runs the extender (build)", func() { + origLifecycle := lifecycle.Image() + + output := pack.RunSuccessfully( + "build", repoName, + "-p", filepath.Join("testdata", "mock_app"), + "--network", "host", // export target is the daemon, but we need to be able to reach the registry where the builder image is saved + "-B", builderName, + ) + + assertions.NewOutputAssertionManager(t, output).ReportsSuccessfulImageBuild(repoName) + + assertOutput := assertions.NewLifecycleOutputAssertionManager(t, output) + assertOutput.IncludesTagOrEphemeralLifecycle(origLifecycle) + assertOutput.IncludesSeparatePhasesWithBuildExtension() + + t.Log("inspecting image") + inspectCmd := "inspect" + if !pack.Supports("inspect") { + inspectCmd = "inspect-image" + } + + output = pack.RunSuccessfully(inspectCmd, repoName) + }) + }) + + when("there are run image extensions", func() { + when("switching the run image", func() { + it.Before(func() { + h.SkipIf(t, !pack.SupportsFeature(invoke.RunImageExtensions), "") + h.SkipIf(t, !lifecycle.SupportsFeature(config.RunImageExtensions), "") + }) + + it("uses the 5 phases, and 
tries to pull the new run image by name before restore, and by identifier after restore", func() { + output, _ := pack.Run( + "build", repoName, + "-p", filepath.Join("testdata", "mock_app"), + "--network", "host", + "-B", builderName, + "--env", "EXT_RUN_SWITCH=1", + ) + h.AssertContains(t, output, "Pulling image 'busybox:latest'") + }) + }) + + when("extending the run image", func() { + it.Before(func() { + h.SkipIf(t, !pack.SupportsFeature(invoke.RunImageExtensions), "") + h.SkipIf(t, !lifecycle.SupportsFeature(config.RunImageExtensions), "") + }) + + it("uses the 5 phases, and runs the extender (run)", func() { + origLifecycle := lifecycle.Image() + + output := pack.RunSuccessfully( + "build", repoName, + "-p", filepath.Join("testdata", "mock_app"), + "--network", "host", // export target is the daemon, but we need to be able to reach the registry where the builder image and run image are saved + "-B", builderName, + "--env", "EXT_RUN=1", + ) + + assertions.NewOutputAssertionManager(t, output).ReportsSuccessfulImageBuild(repoName) + + assertOutput := assertions.NewLifecycleOutputAssertionManager(t, output) + + assertOutput.IncludesTagOrEphemeralLifecycle(origLifecycle) + assertOutput.IncludesSeparatePhasesWithRunExtension() + + t.Log("inspecting image") + inspectCmd := "inspect" + if !pack.Supports("inspect") { + inspectCmd = "inspect-image" + } + + output = pack.RunSuccessfully(inspectCmd, repoName) + }) + }) + }) + }) + }) + }) + + when("builder.toml is invalid", func() { + it("displays an error", func() { + builderConfigPath := createBuilderPack.FixtureManager().FixtureLocation("invalid_builder.toml") + + output, err := createBuilderPack.Run( + "builder", "create", "some-builder:build", + "--config", builderConfigPath, + ) + + assert.NotNil(err) + assertOutput := assertions.NewOutputAssertionManager(t, output) + assertOutput.ReportsInvalidBuilderToml() + }) + }) + + when("system buildpacks", func() { + var ( + builderWithSystemBP string + 
builderWithFailingSystemBP string + builderWithOptionalFailingSystemBP string + regularBuilder string + ) + + it.Before(func() { + // Create builder with system buildpacks + builderWithSystemBP = fmt.Sprintf("pack.local/builder-with-system-bps/%s", h.RandString(10)) + h.SkipIf(t, !createBuilderPack.Supports("builder create"), "pack builder create not supported") + + createBuilderPack.JustRunSuccessfully( + "builder", "create", builderWithSystemBP, + "--config", createBuilderPack.FixtureManager().FixtureLocation("builder_with_system_buildpacks.toml"), + ) + + // Create builder with failing system buildpack + builderWithFailingSystemBP = fmt.Sprintf("pack.local/builder-fail-system/%s", h.RandString(10)) + createBuilderPack.JustRunSuccessfully( + "builder", "create", builderWithFailingSystemBP, + "--config", createBuilderPack.FixtureManager().FixtureLocation("builder_with_failing_system_buildpack.toml"), + ) + + // Create builder with optional failing system buildpack + builderWithOptionalFailingSystemBP = fmt.Sprintf("pack.local/builder-optional-fail/%s", h.RandString(10)) + createBuilderPack.JustRunSuccessfully( + "builder", "create", builderWithOptionalFailingSystemBP, + "--config", createBuilderPack.FixtureManager().FixtureLocation("builder_with_optional_failing_system_buildpack.toml"), + ) + + // Create regular builder for comparison + regularBuilder = fmt.Sprintf("pack.local/regular-builder/%s", h.RandString(10)) + createBuilderPack.JustRunSuccessfully( + "builder", "create", regularBuilder, + "--config", createBuilderPack.FixtureManager().FixtureLocation("builder.toml"), + ) + }) + + it.After(func() { + imageManager.CleanupImages(builderWithSystemBP) + imageManager.CleanupImages(builderWithFailingSystemBP) + imageManager.CleanupImages(builderWithOptionalFailingSystemBP) + imageManager.CleanupImages(regularBuilder) + }) + + when("inspecting builder with system buildpacks", func() { + it("shows system buildpacks in builder info", func() { + output := 
createBuilderPack.RunSuccessfully("builder", "inspect", builderWithSystemBP) + + // Verify system buildpacks are shown in the output + h.AssertContains(t, output, "system/pre") + h.AssertContains(t, output, "system/post") + }) + }) + + when("building with system buildpacks", func() { + var ( + appImage string + appPath string + ) + + it.Before(func() { + appPath = filepath.Join("testdata", "mock_app") + appImage = fmt.Sprintf("pack.local/app/%s", h.RandString(10)) + }) + + it.After(func() { + imageManager.CleanupImages(appImage) + }) + + when("system buildpacks are enabled (default)", func() { + it("runs pre-system buildpacks before regular buildpacks", func() { + output := pack.RunSuccessfully( + "build", appImage, + "--path", appPath, + "--builder", builderWithSystemBP, + "--no-color", + ) + + // Verify pre-system buildpack ran before the main buildpack + h.AssertContains(t, output, "DETECT: System Pre buildpack") + h.AssertContains(t, output, "BUILD: System Pre buildpack") + h.AssertContains(t, output, "Simple Layers Buildpack") + + // Verify order: system pre should come before main buildpack + systemPreIndex := strings.Index(output, "BUILD: System Pre buildpack") + mainBuildpackIndex := strings.Index(output, "Simple Layers Buildpack") + if systemPreIndex == -1 || mainBuildpackIndex == -1 || systemPreIndex >= mainBuildpackIndex { + t.Fatalf("Expected system pre buildpack to run before main buildpack") + } + }) + + it("runs post-system buildpacks after regular buildpacks", func() { + output := pack.RunSuccessfully( + "build", appImage, + "--path", appPath, + "--builder", builderWithSystemBP, + "--no-color", + ) + + // Verify post-system buildpack ran after the main buildpack + h.AssertContains(t, output, "BUILD: System Post buildpack") + + // Verify order: system post should come after main buildpack + mainBuildpackIndex := strings.Index(output, "Simple Layers Buildpack") + systemPostIndex := strings.Index(output, "BUILD: System Post buildpack") + if 
mainBuildpackIndex == -1 || systemPostIndex == -1 || mainBuildpackIndex >= systemPostIndex { + t.Fatalf("Expected system post buildpack to run after main buildpack") + } + }) + + it("builds successfully with system buildpacks", func() { + output := pack.RunSuccessfully( + "build", appImage, + "--path", appPath, + "--builder", builderWithSystemBP, + "--verbose", + ) + + // Verify system buildpack contributed during build + h.AssertContains(t, output, "BUILD: System Pre buildpack") + h.AssertContains(t, output, "BUILD: System Post buildpack") + + // Verify the image was successfully built + h.AssertContains(t, output, "Successfully built image") + assertImage.ExistsLocally(appImage) + }) + }) + + when("--disable-system-buildpacks flag is used", func() { + it("does not run system buildpacks", func() { + output := pack.RunSuccessfully( + "build", appImage, + "--path", appPath, + "--builder", builderWithSystemBP, + "--disable-system-buildpacks", + "--no-color", + ) + + // Verify system buildpacks did not run + h.AssertNotContains(t, output, "DETECT: System Pre buildpack") + h.AssertNotContains(t, output, "BUILD: System Pre buildpack") + h.AssertNotContains(t, output, "BUILD: System Post buildpack") + + // Verify main buildpack still runs + h.AssertContains(t, output, "Simple Layers Buildpack") + + // Verify the image was successfully built + h.AssertContains(t, output, "Successfully built image") + assertImage.ExistsLocally(appImage) + }) + }) + + when("builder has no system buildpacks", func() { + it("builds normally without system buildpacks", func() { + output := pack.RunSuccessfully( + "build", appImage, + "--path", appPath, + "--builder", regularBuilder, + "--no-color", + ) + + // Verify no system buildpacks ran + h.AssertNotContains(t, output, "System Pre buildpack") + h.AssertNotContains(t, output, "System Post buildpack") + + // Verify main buildpack runs + h.AssertContains(t, output, "Simple Layers Buildpack") + + // Verify the image was successfully built + 
h.AssertContains(t, output, "Successfully built image") + assertImage.ExistsLocally(appImage) + }) + }) + + when("required system buildpack fails detection", func() { + it("fails the build", func() { + output, err := pack.Run( + "build", appImage, + "--path", appPath, + "--builder", builderWithFailingSystemBP, + "--no-color", + ) + + // Build should fail + h.AssertNotNil(t, err) + h.AssertContains(t, output, "DETECT: System Fail Detect buildpack (will fail)") + h.AssertContains(t, output, "No buildpack groups passed detection") + }) + }) + + when("optional system buildpack fails detection", func() { + it("continues with the build", func() { + output := pack.RunSuccessfully( + "build", appImage, + "--path", appPath, + "--builder", builderWithOptionalFailingSystemBP, + "--no-color", + ) + + // Build should succeed despite optional system buildpack failing + h.AssertContains(t, output, "DETECT: System Fail Detect buildpack (will fail)") + h.AssertContains(t, output, "DETECT: System Pre buildpack") + h.AssertContains(t, output, "BUILD: System Pre buildpack") + h.AssertContains(t, output, "Simple Layers Buildpack") + + // Verify the failed optional buildpack didn't run build + h.AssertNotContains(t, output, "BUILD: System Fail Detect buildpack") + + // Verify the image was successfully built + h.AssertContains(t, output, "Successfully built image") + assertImage.ExistsLocally(appImage) + }) + }) + }) + }) + + when("build", func() { + var repo, repoName string + + it.Before(func() { + repo = "some-org/" + h.RandString(10) + repoName = registryConfig.RepoName(repo) + pack.JustRunSuccessfully("config", "lifecycle-image", lifecycle.Image()) }) it.After(func() { @@ -780,8 +1505,9 @@ func testAcceptance( ref, err := name.ParseReference(repoName, name.WeakValidation) assert.Nil(err) cacheImage := cache.NewImageCache(ref, dockerCli) - buildCacheVolume := cache.NewVolumeCache(ref, "build", dockerCli) - launchCacheVolume := cache.NewVolumeCache(ref, "launch", dockerCli) + logger 
:= logging.NewSimpleLogger(&bytes.Buffer{}) + buildCacheVolume, _ := cache.NewVolumeCache(ref, cache.CacheInfo{}, "build", dockerCli, logger) + launchCacheVolume, _ := cache.NewVolumeCache(ref, cache.CacheInfo{}, "launch", dockerCli, logger) cacheImage.Clear(context.TODO()) buildCacheVolume.Clear(context.TODO()) launchCacheVolume.Clear(context.TODO()) @@ -802,8 +1528,13 @@ func testAcceptance( assert.Nil(err) suiteManager.RegisterCleanUp("remove-lifecycle-"+lifecycle.Image(), func() error { - img := imageManager.GetImageID(lifecycle.Image()) - imageManager.CleanupImages(img) + // Try to get image ID, but ignore errors if image doesn't exist + // (e.g., if it was pulled by digest instead of tag) + inspect, err := imageManager.InspectLocal(lifecycle.Image()) + if err != nil { + return nil + } + imageManager.CleanupImages(inspect.ID) return nil }) }) @@ -814,6 +1545,8 @@ func testAcceptance( when("daemon", func() { it("uses the 5 phases", func() { + origLifecycle := lifecycle.Image() + output := pack.RunSuccessfully( "build", repoName, "-p", filepath.Join("testdata", "mock_app"), @@ -823,13 +1556,15 @@ func testAcceptance( assertions.NewOutputAssertionManager(t, output).ReportsSuccessfulImageBuild(repoName) assertOutput := assertions.NewLifecycleOutputAssertionManager(t, output) - assertOutput.IncludesLifecycleImageTag(lifecycle.Image()) + assertOutput.IncludesTagOrEphemeralLifecycle(origLifecycle) assertOutput.IncludesSeparatePhases() }) }) when("--publish", func() { it("uses the 5 phases", func() { + origLifecycle := lifecycle.Image() + buildArgs := []string{ repoName, "-p", filepath.Join("testdata", "mock_app"), @@ -845,7 +1580,7 @@ func testAcceptance( assertions.NewOutputAssertionManager(t, output).ReportsSuccessfulImageBuild(repoName) assertOutput := assertions.NewLifecycleOutputAssertionManager(t, output) - assertOutput.IncludesLifecycleImageTag(lifecycle.Image()) + assertOutput.IncludesTagOrEphemeralLifecycle(origLifecycle) 
assertOutput.IncludesSeparatePhases() }) }) @@ -873,7 +1608,7 @@ func testAcceptance( }) }) - when("default builder is set", func() { + when("builder is trusted (and set as default)", func() { it.Before(func() { pack.RunSuccessfully("config", "default-builder", builderName) pack.JustRunSuccessfully("config", "trusted-builders", "add", builderName) @@ -900,10 +1635,10 @@ func testAcceptance( assertImage.HasBaseImage(repoName, runImage) t.Log("sets the run image metadata") - assertImage.HasLabelWithData(repoName, "io.buildpacks.lifecycle.metadata", fmt.Sprintf(`"stack":{"runImage":{"image":"%s","mirrors":["%s"]}}}`, runImage, runImageMirror)) + assertImage.HasLabelContaining(repoName, "io.buildpacks.lifecycle.metadata", fmt.Sprintf(`"image":"pack-test/run","mirrors":["%s"]`, runImageMirror)) t.Log("sets the source metadata") - assertImage.HasLabelWithData(repoName, "io.buildpacks.project.metadata", (`{"source":{"type":"project","version":{"declared":"1.0.2"},"metadata":{"url":"https://github.com/buildpacks/pack"}}}`)) + assertImage.HasLabelContaining(repoName, "io.buildpacks.project.metadata", (`{"source":{"type":"project","version":{"declared":"1.0.2"},"metadata":{"url":"https://github.com/buildpacks/pack"}}}`)) t.Log("registry is empty") assertImage.NotExistsInRegistry(repo) @@ -922,6 +1657,12 @@ func testAcceptance( assertOutput = assertions.NewOutputAssertionManager(t, output) assertOutput.ReportsSuccessfulImageBuild(repoName) assertOutput.ReportsSelectingRunImageMirrorFromLocalConfig(localRunImageMirror) + if pack.SupportsFeature(invoke.FixesRunImageMetadata) { + t.Log(fmt.Sprintf("run-image mirror %s was NOT added into 'io.buildpacks.lifecycle.metadata' label", localRunImageMirror)) + assertImage.HasLabelNotContaining(repoName, "io.buildpacks.lifecycle.metadata", fmt.Sprintf(`"image":"%s"`, localRunImageMirror)) + t.Log(fmt.Sprintf("run-image %s was added into 'io.buildpacks.lifecycle.metadata' label", runImage)) + assertImage.HasLabelContaining(repoName, 
"io.buildpacks.lifecycle.metadata", fmt.Sprintf(`"image":"%s"`, runImage)) + } cachedLaunchLayer := "simple/layers:cached-launch-layer" assertLifecycleOutput := assertions.NewLifecycleOutputAssertionManager(t, output) @@ -955,26 +1696,24 @@ func testAcceptance( helloCommand string helloArgs []string helloArgsPrefix string + imageWorkdir string ) if imageManager.HostOS() == "windows" { webCommand = ".\\run" helloCommand = "cmd" helloArgs = []string{"/c", "echo hello world"} helloArgsPrefix = " " + imageWorkdir = "c:\\workspace" } else { webCommand = "./run" helloCommand = "echo" helloArgs = []string{"hello", "world"} helloArgsPrefix = "" + imageWorkdir = "/workspace" } formats := []compareFormat{ - { - extension: "txt", - compareFunc: assert.TrimmedEq, - outputArg: "human-readable", - }, { extension: "json", compareFunc: assert.EqualJSON, @@ -995,7 +1734,6 @@ func testAcceptance( t.Logf("inspecting image %s format", format.outputArg) output = pack.RunSuccessfully(inspectCmd, repoName, "--output", format.outputArg) - expectedOutput := pack.FixtureManager().TemplateFixture( fmt.Sprintf("inspect_image_local_output.%s", format.extension), map[string]interface{}{ @@ -1008,6 +1746,8 @@ func testAcceptance( "hello_command": helloCommand, "hello_args": helloArgs, "hello_args_prefix": helloArgsPrefix, + "image_workdir": imageWorkdir, + "rebasable": true, }, ) @@ -1053,10 +1793,10 @@ func testAcceptance( h.SkipIf(t, imageManager.HostOS() == "windows", "temporarily disabled on WCOW due to CI flakiness") var err error - tmpDir, err = ioutil.TempDir("", "archive-buildpacks-") + tmpDir, err = os.MkdirTemp("", "archive-buildpacks-") assert.Nil(err) - buildpackManager.PrepareBuildpacks(tmpDir, buildpacks.InternetCapable) + buildpackManager.PrepareBuildModules(tmpDir, buildpacks.BpInternetCapable) }) it.After(func() { @@ -1069,7 +1809,7 @@ func testAcceptance( output := pack.RunSuccessfully( "build", repoName, "-p", filepath.Join("testdata", "mock_app"), - "--buildpack", 
buildpacks.InternetCapable.FullPathIn(tmpDir), + "--buildpack", buildpacks.BpInternetCapable.FullPathIn(tmpDir), ) assertBuildpackOutput := assertions.NewTestBuildpackOutputAssertionManager(t, output) @@ -1082,7 +1822,7 @@ func testAcceptance( output := pack.RunSuccessfully( "build", repoName, "-p", filepath.Join("testdata", "mock_app"), - "--buildpack", buildpacks.InternetCapable.FullPathIn(tmpDir), + "--buildpack", buildpacks.BpInternetCapable.FullPathIn(tmpDir), "--network", "default", ) @@ -1096,7 +1836,7 @@ func testAcceptance( output := pack.RunSuccessfully( "build", repoName, "-p", filepath.Join("testdata", "mock_app"), - "--buildpack", buildpacks.InternetCapable.FullPathIn(tmpDir), + "--buildpack", buildpacks.BpInternetCapable.FullPathIn(tmpDir), "--network", "none", ) @@ -1116,6 +1856,7 @@ func testAcceptance( it.Before(func() { h.SkipIf(t, os.Getenv("DOCKER_HOST") != "", "cannot mount volume when DOCKER_HOST is set") + h.SkipIf(t, imageManager.HostOS() == "windows", "These tests are broken on Windows Containers on Windows when not using the creator; see https://github.com/buildpacks/pack/issues/2147") if imageManager.HostOS() == "windows" { volumeRoot = `c:\` @@ -1123,12 +1864,12 @@ func testAcceptance( } var err error - tmpDir, err = ioutil.TempDir("", "volume-buildpack-tests-") + tmpDir, err = os.MkdirTemp("", "volume-buildpack-tests-") assert.Nil(err) - buildpackManager.PrepareBuildpacks(tmpDir, buildpacks.ReadVolume, buildpacks.ReadWriteVolume) + buildpackManager.PrepareBuildModules(tmpDir, buildpacks.BpReadVolume, buildpacks.BpReadWriteVolume) - tmpVolumeSrc, err = ioutil.TempDir("", "volume-mount-source") + tmpVolumeSrc, err = os.MkdirTemp("", "volume-mount-source") assert.Nil(err) assert.Succeeds(os.Chmod(tmpVolumeSrc, 0777)) // Override umask @@ -1137,7 +1878,7 @@ func testAcceptance( tmpVolumeSrc, err = filepath.EvalSymlinks(tmpVolumeSrc) assert.Nil(err) - err = ioutil.WriteFile(filepath.Join(tmpVolumeSrc, "some-file"), []byte("some-content\n"), 
0777) + err = os.WriteFile(filepath.Join(tmpVolumeSrc, "some-file"), []byte("some-content\n"), 0777) assert.Nil(err) }) @@ -1154,7 +1895,7 @@ func testAcceptance( "build", repoName, "-p", filepath.Join("testdata", "mock_app"), "--volume", fmt.Sprintf("%s:%s", tmpVolumeSrc, volumeDest), - "--buildpack", buildpacks.ReadVolume.FullPathIn(tmpDir), + "--buildpack", buildpacks.BpReadVolume.FullPathIn(tmpDir), "--env", "TEST_FILE_PATH="+testFilePath, ) @@ -1171,7 +1912,7 @@ func testAcceptance( "build", repoName, "-p", filepath.Join("testdata", "mock_app"), "--volume", fmt.Sprintf("%s:%s", tmpVolumeSrc, volumeDest), - "--buildpack", buildpacks.ReadWriteVolume.FullPathIn(tmpDir), + "--buildpack", buildpacks.BpReadWriteVolume.FullPathIn(tmpDir), "--env", "DETECT_TEST_FILE_PATH="+testDetectFilePath, "--env", "BUILD_TEST_FILE_PATH="+testBuildFilePath, ) @@ -1191,7 +1932,7 @@ func testAcceptance( "build", repoName, "-p", filepath.Join("testdata", "mock_app"), "--volume", fmt.Sprintf("%s:%s:rw", tmpVolumeSrc, volumeDest), - "--buildpack", buildpacks.ReadWriteVolume.FullPathIn(tmpDir), + "--buildpack", buildpacks.BpReadWriteVolume.FullPathIn(tmpDir), "--env", "DETECT_TEST_FILE_PATH="+testDetectFilePath, "--env", "BUILD_TEST_FILE_PATH="+testBuildFilePath, ) @@ -1248,7 +1989,7 @@ func testAcceptance( it.Before(func() { var err error - tmpDir, err = ioutil.TempDir("", "archive-buildpack-tests-") + tmpDir, err = os.MkdirTemp("", "archive-buildpack-tests-") assert.Nil(err) }) @@ -1257,12 +1998,12 @@ func testAcceptance( }) it("adds the buildpack to the builder and runs it", func() { - buildpackManager.PrepareBuildpacks(tmpDir, buildpacks.ArchiveNotInBuilder) + buildpackManager.PrepareBuildModules(tmpDir, buildpacks.BpArchiveNotInBuilder) output := pack.RunSuccessfully( "build", repoName, "-p", filepath.Join("testdata", "mock_app"), - "--buildpack", buildpacks.ArchiveNotInBuilder.FullPathIn(tmpDir), + "--buildpack", buildpacks.BpArchiveNotInBuilder.FullPathIn(tmpDir), ) assertOutput 
:= assertions.NewOutputAssertionManager(t, output) @@ -1279,7 +2020,7 @@ func testAcceptance( it.Before(func() { var err error - tmpDir, err = ioutil.TempDir("", "folder-buildpack-tests-") + tmpDir, err = os.MkdirTemp("", "folder-buildpack-tests-") assert.Nil(err) }) @@ -1290,12 +2031,12 @@ func testAcceptance( it("adds the buildpacks to the builder and runs it", func() { h.SkipIf(t, runtime.GOOS == "windows", "buildpack directories not supported on windows") - buildpackManager.PrepareBuildpacks(tmpDir, buildpacks.FolderNotInBuilder) + buildpackManager.PrepareBuildModules(tmpDir, buildpacks.BpFolderNotInBuilder) output := pack.RunSuccessfully( "build", repoName, "-p", filepath.Join("testdata", "mock_app"), - "--buildpack", buildpacks.FolderNotInBuilder.FullPathIn(tmpDir), + "--buildpack", buildpacks.BpFolderNotInBuilder.FullPathIn(tmpDir), ) assertOutput := assertions.NewOutputAssertionManager(t, output) @@ -1307,6 +2048,43 @@ func testAcceptance( }) }) + when("the argument is meta-buildpack directory", func() { + var tmpDir string + + it.Before(func() { + var err error + tmpDir, err = os.MkdirTemp("", "folder-buildpack-tests-") + assert.Nil(err) + }) + + it.After(func() { + _ = os.RemoveAll(tmpDir) + }) + + it("adds the buildpacks to the builder and runs it", func() { + h.SkipIf(t, runtime.GOOS == "windows", "buildpack directories not supported on windows") + // This only works if pack is new, therefore skip if pack is old + h.SkipIf(t, !pack.SupportsFeature(invoke.MetaBuildpackFolder), "") + + buildpackManager.PrepareBuildModules(tmpDir, buildpacks.MetaBpFolder) + buildpackManager.PrepareBuildModules(tmpDir, buildpacks.MetaBpDependency) + + output := pack.RunSuccessfully( + "build", repoName, + "-p", filepath.Join("testdata", "mock_app"), + "--buildpack", buildpacks.MetaBpFolder.FullPathIn(tmpDir), + ) + + assertOutput := assertions.NewOutputAssertionManager(t, output) + assertOutput.ReportsAddingBuildpack("local/meta-bp", "local-meta-bp-version") + 
assertOutput.ReportsAddingBuildpack("local/meta-bp-dep", "local-meta-bp-version") + assertOutput.ReportsSuccessfulImageBuild(repoName) + + assertBuildpackOutput := assertions.NewTestBuildpackOutputAssertionManager(t, output) + assertBuildpackOutput.ReportsBuildStep("Local Meta-Buildpack Dependency") + }) + }) + when("the argument is a buildpackage image", func() { var ( tmpDir string @@ -1328,12 +2106,12 @@ func testAcceptance( packageImageName, packageTomlPath, buildpacks.WithRequiredBuildpacks( - buildpacks.FolderSimpleLayersParent, - buildpacks.FolderSimpleLayers, + buildpacks.BpFolderSimpleLayersParent, + buildpacks.BpFolderSimpleLayers, ), ) - buildpackManager.PrepareBuildpacks(tmpDir, packageImage) + buildpackManager.PrepareBuildModules(tmpDir, packageImage) output := pack.RunSuccessfully( "build", repoName, @@ -1352,6 +2130,45 @@ func testAcceptance( assertBuildpackOutput := assertions.NewTestBuildpackOutputAssertionManager(t, output) assertBuildpackOutput.ReportsBuildStep("Simple Layers Buildpack") }) + + when("buildpackage is in a registry", func() { + it("adds the buildpacks to the builder and runs them", func() { + h.SkipIf(t, !pack.SupportsFeature(invoke.PlatformRetries), "") + packageImageName = registryConfig.RepoName("buildpack-" + h.RandString(8)) + + packageTomlPath := generatePackageTomlWithOS(t, assert, pack, tmpDir, "package_for_build_cmd.toml", imageManager.HostOS()) + packageImage := buildpacks.NewPackageImage( + t, + pack, + packageImageName, + packageTomlPath, + buildpacks.WithRequiredBuildpacks( + buildpacks.BpFolderSimpleLayersParent, + buildpacks.BpFolderSimpleLayers, + ), + buildpacks.WithPublish(), + ) + + buildpackManager.PrepareBuildModules(tmpDir, packageImage) + + output := pack.RunSuccessfully( + "build", repoName, + "-p", filepath.Join("testdata", "mock_app"), + "--buildpack", packageImageName, + ) + + assertOutput := assertions.NewOutputAssertionManager(t, output) + assertOutput.ReportsAddingBuildpack( + "simple/layers/parent", + 
"simple-layers-parent-version", + ) + assertOutput.ReportsAddingBuildpack("simple/layers", "simple-layers-version") + assertOutput.ReportsSuccessfulImageBuild(repoName) + + assertBuildpackOutput := assertions.NewTestBuildpackOutputAssertionManager(t, output) + assertBuildpackOutput.ReportsBuildStep("Simple Layers Buildpack") + }) + }) }) when("the argument is a buildpackage file", func() { @@ -1359,7 +2176,7 @@ func testAcceptance( it.Before(func() { var err error - tmpDir, err = ioutil.TempDir("", "package-file") + tmpDir, err = os.MkdirTemp("", "package-file") assert.Nil(err) }) @@ -1380,12 +2197,12 @@ func testAcceptance( packageFileLocation, packageTomlPath, buildpacks.WithRequiredBuildpacks( - buildpacks.FolderSimpleLayersParent, - buildpacks.FolderSimpleLayers, + buildpacks.BpFolderSimpleLayersParent, + buildpacks.BpFolderSimpleLayers, ), ) - buildpackManager.PrepareBuildpacks(tmpDir, packageFile) + buildpackManager.PrepareBuildModules(tmpDir, packageFile) output := pack.RunSuccessfully( "build", repoName, @@ -1410,24 +2227,51 @@ func testAcceptance( var otherStackBuilderTgz string it.Before(func() { + // The Platform API is new if pack is new AND the lifecycle is new + // Therefore skip if pack is old OR the lifecycle is old + h.SkipIf(t, + pack.SupportsFeature(invoke.StackValidation) || + api.MustParse(lifecycle.LatestPlatformAPIVersion()).LessThan("0.12"), "") otherStackBuilderTgz = h.CreateTGZ(t, filepath.Join(bpDir, "other-stack-buildpack"), "./", 0755) }) it.After(func() { + h.SkipIf(t, + pack.SupportsFeature(invoke.StackValidation) || + api.MustParse(lifecycle.LatestPlatformAPIVersion()).LessThan("0.12"), "") assert.Succeeds(os.Remove(otherStackBuilderTgz)) }) - it("errors", func() { - output, err := pack.Run( + it("succeeds", func() { + _, err := pack.Run( "build", repoName, "-p", filepath.Join("testdata", "mock_app"), "--buildpack", otherStackBuilderTgz, ) + assert.Nil(err) + }) - assert.NotNil(err) - assert.Contains(output, "other/stack/bp") - 
assert.Contains(output, "other-stack-version") - assert.Contains(output, "does not support stack 'pack.test.stack'") + when("platform API < 0.12", func() { + it.Before(func() { + // The Platform API is old if pack is old OR the lifecycle is old + // Therefore skip if pack is new AND the lifecycle is new + h.SkipIf(t, + !pack.SupportsFeature(invoke.StackValidation) && + api.MustParse(lifecycle.LatestPlatformAPIVersion()).AtLeast("0.12"), "") + }) + + it("errors", func() { + output, err := pack.Run( + "build", repoName, + "-p", filepath.Join("testdata", "mock_app"), + "--buildpack", otherStackBuilderTgz, + ) + + assert.NotNil(err) + assert.Contains(output, "other/stack/bp") + assert.Contains(output, "other-stack-version") + assert.Contains(output, "does not support stack 'pack.test.stack'") + }) }) }) }) @@ -1436,7 +2280,7 @@ func testAcceptance( var envPath string it.Before(func() { - envfile, err := ioutil.TempFile("", "envfile") + envfile, err := os.CreateTemp("", "envfile") assert.Nil(err) defer envfile.Close() @@ -1542,6 +2386,10 @@ func testAcceptance( t.Log("uses the run image as the base image") assertImage.HasBaseImage(repoName, runImageName) + if pack.SupportsFeature(invoke.FixesRunImageMetadata) { + t.Log(fmt.Sprintf("run-image %s was added into 'io.buildpacks.lifecycle.metadata' label", runImageName)) + assertImage.HasLabelContaining(repoName, "io.buildpacks.lifecycle.metadata", fmt.Sprintf(`"image":"%s"`, runImageName)) + } }) }) @@ -1559,20 +2407,45 @@ func testAcceptance( imageManager.CleanupImages(runImageName) }) - it("fails with a message", func() { - output, err := pack.Run( - "build", repoName, - "-p", filepath.Join("testdata", "mock_app"), - "--run-image", runImageName, - ) - assert.NotNil(err) + when("should validate stack", func() { + it.Before(func() { + h.SkipIf(t, pack.SupportsFeature(invoke.StackWarning), "stack is validated in prior versions") + }) + it("fails with a message", func() { + + output, err := pack.Run( + "build", repoName, + 
"-p", filepath.Join("testdata", "mock_app"), + "--run-image", runImageName, + ) + assert.NotNil(err) + + assertOutput := assertions.NewOutputAssertionManager(t, output) + assertOutput.ReportsRunImageStackNotMatchingBuilder( + "other.stack.id", + "pack.test.stack", + ) + }) + }) - assertOutput := assertions.NewOutputAssertionManager(t, output) - assertOutput.ReportsRunImageStackNotMatchingBuilder( - "other.stack.id", - "pack.test.stack", - ) + when("should not validate stack", func() { + it.Before(func() { + h.SkipIf(t, !pack.SupportsFeature(invoke.StackWarning), "stack is no longer validated") + }) + it("succeeds with a warning", func() { + + output, err := pack.Run( + "build", repoName, + "-p", filepath.Join("testdata", "mock_app"), + "--run-image", runImageName, + ) + assert.Nil(err) + + assertOutput := assertions.NewOutputAssertionManager(t, output) + assertOutput.ReportsDeprecatedUseOfStack() + }) }) + }) }) @@ -1604,24 +2477,22 @@ func testAcceptance( helloCommand string helloArgs []string helloArgsPrefix string + imageWorkdir string ) if imageManager.HostOS() == "windows" { webCommand = ".\\run" helloCommand = "cmd" helloArgs = []string{"/c", "echo hello world"} helloArgsPrefix = " " + imageWorkdir = "c:\\workspace" } else { webCommand = "./run" helloCommand = "echo" helloArgs = []string{"hello", "world"} helloArgsPrefix = "" + imageWorkdir = "/workspace" } formats := []compareFormat{ - { - extension: "txt", - compareFunc: assert.TrimmedEq, - outputArg: "human-readable", - }, { extension: "json", compareFunc: assert.EqualJSON, @@ -1655,6 +2526,8 @@ func testAcceptance( "hello_command": helloCommand, "hello_args": helloArgs, "hello_args_prefix": helloArgsPrefix, + "image_workdir": imageWorkdir, + "rebasable": true, }, ) @@ -1759,6 +2632,74 @@ func testAcceptance( }) }) + when("--cache with options for build cache as image", func() { + var cacheImageName, cacheFlags string + it.Before(func() { + cacheImageName = fmt.Sprintf("%s-cache", repoName) + cacheFlags = 
fmt.Sprintf("type=build;format=image;name=%s", cacheImageName) + }) + + it("creates image and cache image on the registry", func() { + buildArgs := []string{ + repoName, + "-p", filepath.Join("testdata", "mock_app"), + "--publish", + "--cache", + cacheFlags, + } + if imageManager.HostOS() != "windows" { + buildArgs = append(buildArgs, "--network", "host") + } + + output := pack.RunSuccessfully("build", buildArgs...) + assertions.NewOutputAssertionManager(t, output).ReportsSuccessfulImageBuild(repoName) + + cacheImageRef, err := name.ParseReference(cacheImageName, name.WeakValidation) + assert.Nil(err) + + t.Log("checking that registry has contents") + assertImage.CanBePulledFromRegistry(repoName) + if imageManager.HostOS() == "windows" { + // Cache images are automatically Linux container images, and therefore can't be pulled + // and inspected correctly on WCOW systems + // https://github.com/buildpacks/lifecycle/issues/529 + imageManager.PullImage(cacheImageRef.Name(), registryConfig.RegistryAuth()) + } else { + assertImage.CanBePulledFromRegistry(cacheImageRef.Name()) + } + + defer imageManager.CleanupImages(cacheImageRef.Name()) + }) + }) + + when("--cache with options for build cache as bind", func() { + var bindCacheDir, cacheFlags string + it.Before(func() { + h.SkipIf(t, !pack.SupportsFeature(invoke.Cache), "") + cacheBindName := strings.ReplaceAll(strings.ReplaceAll(fmt.Sprintf("%s-bind", repoName), string(filepath.Separator), "-"), ":", "-") + var err error + bindCacheDir, err = os.MkdirTemp("", cacheBindName) + assert.Nil(err) + cacheFlags = fmt.Sprintf("type=build;format=bind;source=%s", bindCacheDir) + }) + + it("creates image and cache image on the registry", func() { + buildArgs := []string{ + repoName, + "-p", filepath.Join("testdata", "mock_app"), + "--cache", + cacheFlags, + } + + output := pack.RunSuccessfully("build", buildArgs...) 
+ assertions.NewOutputAssertionManager(t, output).ReportsSuccessfulImageBuild(repoName) + + t.Log("checking that bind mount has cache contents") + assert.FileExists(fmt.Sprintf("%s/committed", bindCacheDir)) + defer os.RemoveAll(bindCacheDir) + }) + }) + when("ctrl+c", func() { it("stops the execution", func() { var buf = new(bytes.Buffer) @@ -1784,10 +2725,10 @@ func testAcceptance( h.SkipIf(t, runtime.GOOS == "windows", "buildpack directories not supported on windows") var err error - tempAppDir, err = ioutil.TempDir("", "descriptor-app") + tempAppDir, err = os.MkdirTemp("", "descriptor-app") assert.Nil(err) - tempWorkingDir, err = ioutil.TempDir("", "descriptor-app") + tempWorkingDir, err = os.MkdirTemp("", "descriptor-app") assert.Nil(err) origWorkingDir, err = os.Getwd() @@ -1806,14 +2747,14 @@ func testAcceptance( err = os.Mkdir(filepath.Join(tempAppDir, "media"), 0755) assert.Nil(err) - err = ioutil.WriteFile(filepath.Join(tempAppDir, "media", "mountain.jpg"), []byte("fake image bytes"), 0755) + err = os.WriteFile(filepath.Join(tempAppDir, "media", "mountain.jpg"), []byte("fake image bytes"), 0755) assert.Nil(err) - err = ioutil.WriteFile(filepath.Join(tempAppDir, "media", "person.png"), []byte("fake image bytes"), 0755) + err = os.WriteFile(filepath.Join(tempAppDir, "media", "person.png"), []byte("fake image bytes"), 0755) assert.Nil(err) - err = ioutil.WriteFile(filepath.Join(tempAppDir, "cookie.jar"), []byte("chocolate chip"), 0755) + err = os.WriteFile(filepath.Join(tempAppDir, "cookie.jar"), []byte("chocolate chip"), 0755) assert.Nil(err) - err = ioutil.WriteFile(filepath.Join(tempAppDir, "test.sh"), []byte("echo test"), 0755) + err = os.WriteFile(filepath.Join(tempAppDir, "test.sh"), []byte("echo test"), 0755) assert.Nil(err) projectToml := ` @@ -1828,7 +2769,7 @@ exclude = [ "*.sh", "media/person.png", "descriptor-buildpack" ] uri = "descriptor-buildpack" ` excludeDescriptorPath := filepath.Join(tempAppDir, "project.toml") - err = 
ioutil.WriteFile(excludeDescriptorPath, []byte(projectToml), 0755) + err = os.WriteFile(excludeDescriptorPath, []byte(projectToml), 0755) assert.Nil(err) // set working dir to be outside of the app we are building @@ -1860,7 +2801,7 @@ uri = "descriptor-buildpack" buildpackTgz = h.CreateTGZ(t, filepath.Join(bpDir, "descriptor-buildpack"), "./", 0755) var err error - tempAppDir, err = ioutil.TempDir("", "descriptor-app") + tempAppDir, err = os.MkdirTemp("", "descriptor-app") assert.Nil(err) // Create test directories and files: @@ -1880,32 +2821,32 @@ uri = "descriptor-buildpack" err = os.Mkdir(filepath.Join(tempAppDir, "secrets"), 0755) assert.Nil(err) - err = ioutil.WriteFile(filepath.Join(tempAppDir, "secrets", "api_keys.json"), []byte("{}"), 0755) + err = os.WriteFile(filepath.Join(tempAppDir, "secrets", "api_keys.json"), []byte("{}"), 0755) assert.Nil(err) - err = ioutil.WriteFile(filepath.Join(tempAppDir, "secrets", "user_token"), []byte("token"), 0755) + err = os.WriteFile(filepath.Join(tempAppDir, "secrets", "user_token"), []byte("token"), 0755) assert.Nil(err) err = os.Mkdir(filepath.Join(tempAppDir, "nested"), 0755) assert.Nil(err) - err = ioutil.WriteFile(filepath.Join(tempAppDir, "nested", "nested-cookie.jar"), []byte("chocolate chip"), 0755) + err = os.WriteFile(filepath.Join(tempAppDir, "nested", "nested-cookie.jar"), []byte("chocolate chip"), 0755) assert.Nil(err) - err = ioutil.WriteFile(filepath.Join(tempAppDir, "other-cookie.jar"), []byte("chocolate chip"), 0755) + err = os.WriteFile(filepath.Join(tempAppDir, "other-cookie.jar"), []byte("chocolate chip"), 0755) assert.Nil(err) - err = ioutil.WriteFile(filepath.Join(tempAppDir, "nested-cookie.jar"), []byte("chocolate chip"), 0755) + err = os.WriteFile(filepath.Join(tempAppDir, "nested-cookie.jar"), []byte("chocolate chip"), 0755) assert.Nil(err) err = os.Mkdir(filepath.Join(tempAppDir, "media"), 0755) assert.Nil(err) - err = ioutil.WriteFile(filepath.Join(tempAppDir, "media", "mountain.jpg"), 
[]byte("fake image bytes"), 0755) + err = os.WriteFile(filepath.Join(tempAppDir, "media", "mountain.jpg"), []byte("fake image bytes"), 0755) assert.Nil(err) - err = ioutil.WriteFile(filepath.Join(tempAppDir, "media", "person.png"), []byte("fake image bytes"), 0755) + err = os.WriteFile(filepath.Join(tempAppDir, "media", "person.png"), []byte("fake image bytes"), 0755) assert.Nil(err) - err = ioutil.WriteFile(filepath.Join(tempAppDir, "cookie.jar"), []byte("chocolate chip"), 0755) + err = os.WriteFile(filepath.Join(tempAppDir, "cookie.jar"), []byte("chocolate chip"), 0755) assert.Nil(err) - err = ioutil.WriteFile(filepath.Join(tempAppDir, "test.sh"), []byte("echo test"), 0755) + err = os.WriteFile(filepath.Join(tempAppDir, "test.sh"), []byte("echo test"), 0755) assert.Nil(err) }) @@ -1923,7 +2864,7 @@ type = "MIT" exclude = [ "*.sh", "secrets/", "media/metadata", "/other-cookie.jar" ,"/nested-cookie.jar"] ` excludeDescriptorPath := filepath.Join(tempAppDir, "exclude.toml") - err := ioutil.WriteFile(excludeDescriptorPath, []byte(projectToml), 0755) + err := os.WriteFile(excludeDescriptorPath, []byte(projectToml), 0755) assert.Nil(err) output := pack.RunSuccessfully( @@ -1954,7 +2895,7 @@ type = "MIT" include = [ "*.jar", "media/mountain.jpg", "/media/person.png", ] ` includeDescriptorPath := filepath.Join(tempAppDir, "include.toml") - err := ioutil.WriteFile(includeDescriptorPath, []byte(projectToml), 0755) + err := os.WriteFile(includeDescriptorPath, []byte(projectToml), 0755) assert.Nil(err) output := pack.RunSuccessfully( @@ -1974,16 +2915,156 @@ include = [ "*.jar", "media/mountain.jpg", "/media/person.png", ] }) }) }) - }) - }) - when("inspecting builder", func() { - when("inspecting a nested builder", func() { - it.Before(func() { - // create our nested builder - h.SkipIf(t, imageManager.HostOS() == "windows", "These tests are not yet compatible with Windows-based containers") + when("--creation-time", func() { + when("provided as 'now'", func() { + it("image 
has create time of the current time", func() { + expectedTime := time.Now() + pack.RunSuccessfully( + "build", repoName, + "-p", filepath.Join("testdata", "mock_app"), + "--creation-time", "now", + ) + assertImage.HasCreateTime(repoName, expectedTime) + }) + }) - // create a task, handled by a 'task manager' which executes our pack commands during tests. + when("provided as unix timestamp", func() { + it("image has create time of the time that was provided", func() { + pack.RunSuccessfully( + "build", repoName, + "-p", filepath.Join("testdata", "mock_app"), + "--creation-time", "1566172801", + ) + expectedTime, err := time.Parse("2006-01-02T03:04:05Z", "2019-08-19T00:00:01Z") + h.AssertNil(t, err) + assertImage.HasCreateTime(repoName, expectedTime) + }) + }) + + when("not provided", func() { + it("image has create time of Jan 1, 1980", func() { + pack.RunSuccessfully( + "build", repoName, + "-p", filepath.Join("testdata", "mock_app"), + ) + expectedTime, err := time.Parse("2006-01-02T03:04:05Z", "1980-01-01T00:00:01Z") + h.AssertNil(t, err) + assertImage.HasCreateTime(repoName, expectedTime) + }) + }) + }) + + when("--platform", func() { + it.Before(func() { + h.SkipIf(t, !pack.SupportsFeature(invoke.PlatformOption), "") + }) + + it("uses the builder with the desired platform", func() { + output, _ := pack.Run( + "build", repoName, + "-p", filepath.Join("testdata", "mock_app"), + "--platform", "linux/not-exist-arch", + ) + h.AssertContainsMatch(t, output, "Pulling image '.*test/builder.*' with platform 'linux/not-exist-arch") + }) + }) + }) + + when("build --buildpack ", func() { + var ( + tmpDir string + flattenedPackageName string + simplePackageConfigFixtureName = "package.toml" + ) + + generateAggregatePackageToml := func(buildpackURI, nestedPackageName, operatingSystem string) string { + t.Helper() + packageTomlFile, err := os.CreateTemp(tmpDir, "package_aggregate-*.toml") + assert.Nil(err) + + pack.FixtureManager().TemplateFixtureToFile( + 
"package_aggregate.toml", + packageTomlFile, + map[string]interface{}{ + "BuildpackURI": buildpackURI, + "PackageName": nestedPackageName, + "OS": operatingSystem, + }, + ) + + assert.Nil(packageTomlFile.Close()) + return packageTomlFile.Name() + } + + it.Before(func() { + h.SkipIf(t, !pack.SupportsFeature(invoke.BuildpackFlatten), "") + h.SkipIf(t, imageManager.HostOS() == "windows", "buildpack directories not supported on windows") + + var err error + tmpDir, err = os.MkdirTemp("", "buildpack-package-flattened-tests") + assert.Nil(err) + + buildpackManager = buildpacks.NewBuildModuleManager(t, assert) + buildpackManager.PrepareBuildModules(tmpDir, buildpacks.BpSimpleLayersParent, buildpacks.BpSimpleLayers) + + // set up a flattened buildpack + packageTomlPath := generatePackageTomlWithOS(t, assert, pack, tmpDir, simplePackageConfigFixtureName, imageManager.HostOS()) + nestedPackageName := "test/flattened-package-" + h.RandString(10) + nestedPackage := buildpacks.NewPackageImage( + t, + pack, + nestedPackageName, + packageTomlPath, + buildpacks.WithRequiredBuildpacks(buildpacks.BpSimpleLayers), + ) + buildpackManager.PrepareBuildModules(tmpDir, nestedPackage) + assertImage.ExistsLocally(nestedPackageName) + + aggregatePackageToml := generateAggregatePackageToml("simple-layers-parent-buildpack.tgz", nestedPackageName, imageManager.HostOS()) + flattenedPackageName = "test/package-" + h.RandString(10) + + _ = pack.RunSuccessfully( + "buildpack", "package", flattenedPackageName, + "-c", aggregatePackageToml, + "--flatten", + ) + + assertImage.ExistsLocally(flattenedPackageName) + assertImage.HasLengthLayers(flattenedPackageName, 1) + }) + + it.After(func() { + assert.Nil(os.RemoveAll(tmpDir)) + imageManager.CleanupImages(flattenedPackageName) + }) + + when("--flatten", func() { + it("does not write duplicate tar files when creating the ephemeral builder", func() { + output := pack.RunSuccessfully( + "build", repoName, + "-p", filepath.Join("testdata", "mock_app"), + 
"--buildpack", fmt.Sprintf("docker://%s", flattenedPackageName), + "--builder", builderName, + ) + // buildpack returning an empty tar file is non-deterministic, + // but we expect one of them to throw the warning + h.AssertContainsMatch(t, output, "Buildpack '(simple/layers@simple-layers-version|simple/layers/parent@simple-layers-parent-version)' is a component of a flattened buildpack that will be added elsewhere, skipping...") + + // simple/layers BP exists on the builder and in the flattened buildpack + h.AssertContainsMatch(t, output, "Buildpack 'simple/layers@simple-layers-version' already exists on builder with same contents, skipping...") + }) + }) + }) + }) + + when("inspecting builder", func() { + when("inspecting a nested builder", func() { + it.Before(func() { + // create our nested builder + h.SkipIf(t, imageManager.HostOS() == "windows", "These tests are not yet compatible with Windows-based containers") + + // create a task, handled by a 'task manager' which executes our pack commands during tests. 
// looks like this is used to de-dup tasks key := taskKey( "create-complex-builder", @@ -2341,8 +3422,9 @@ include = [ "*.jar", "media/mountain.jpg", "/media/person.png", ] imageManager.CleanupImages(origID, repoName, runBefore) ref, err := name.ParseReference(repoName, name.WeakValidation) assert.Nil(err) - buildCacheVolume := cache.NewVolumeCache(ref, "build", dockerCli) - launchCacheVolume := cache.NewVolumeCache(ref, "launch", dockerCli) + logger := logging.NewSimpleLogger(&bytes.Buffer{}) + buildCacheVolume, _ := cache.NewVolumeCache(ref, cache.CacheInfo{}, "build", dockerCli, logger) + launchCacheVolume, _ := cache.NewVolumeCache(ref, cache.CacheInfo{}, "launch", dockerCli, logger) assert.Succeeds(buildCacheVolume.Clear(context.TODO())) assert.Succeeds(launchCacheVolume.Clear(context.TODO())) }) @@ -2361,11 +3443,15 @@ include = [ "*.jar", "media/mountain.jpg", "/media/person.png", ] }) it("uses provided run image", func() { - output := pack.RunSuccessfully( - "rebase", repoName, + args := []string{ + repoName, "--run-image", runAfter, "--pull-policy", "never", - ) + } + if pack.SupportsFeature(invoke.ForceRebase) { + args = append(args, "--force") + } + output := pack.RunSuccessfully("rebase", args...) 
assert.Contains(output, fmt.Sprintf("Successfully rebased image '%s'", repoName)) assertImage.RunsWithOutput( @@ -2380,6 +3466,14 @@ include = [ "*.jar", "media/mountain.jpg", "/media/person.png", ] var localRunImageMirror string it.Before(func() { + imageManager.CleanupImages(repoName) + pack.RunSuccessfully( + "build", repoName, + "-p", filepath.Join("testdata", "mock_app"), + "--builder", builderName, + "--pull-policy", "never", + ) + localRunImageMirror = registryConfig.RepoName("run-after/" + h.RandString(10)) buildRunImage(localRunImageMirror, "local-mirror-after-1", "local-mirror-after-2") pack.JustRunSuccessfully("config", "run-image-mirrors", "add", runImage, "-m", localRunImageMirror) @@ -2390,7 +3484,11 @@ include = [ "*.jar", "media/mountain.jpg", "/media/person.png", ] }) it("prefers the local mirror", func() { - output := pack.RunSuccessfully("rebase", repoName, "--pull-policy", "never") + args := []string{repoName, "--pull-policy", "never"} + if pack.SupportsFeature(invoke.ForceRebase) { + args = append(args, "--force") + } + output := pack.RunSuccessfully("rebase", args...) 
assertOutput := assertions.NewOutputAssertionManager(t, output) assertOutput.ReportsSelectingRunImageMirrorFromLocalConfig(localRunImageMirror) @@ -2406,9 +3504,16 @@ include = [ "*.jar", "media/mountain.jpg", "/media/person.png", ] when("image metadata has a mirror", func() { it.Before(func() { // clean up existing mirror first to avoid leaking images - imageManager.CleanupImages(runImageMirror) - + imageManager.CleanupImages(runImageMirror, repoName) buildRunImage(runImageMirror, "mirror-after-1", "mirror-after-2") + + pack.RunSuccessfully( + "build", repoName, + "-p", filepath.Join("testdata", "mock_app"), + "--builder", builderName, + "--pull-policy", "never", + ) + }) it("selects the best mirror", func() { @@ -2445,7 +3550,11 @@ include = [ "*.jar", "media/mountain.jpg", "/media/person.png", ] }) it("uses provided run image", func() { - output := pack.RunSuccessfully("rebase", repoName, "--publish", "--run-image", runAfter) + args := []string{repoName, "--publish", "--run-image", runAfter} + if pack.SupportsFeature(invoke.ForceRebase) { + args = append(args, "--force") + } + output := pack.RunSuccessfully("rebase", args...) 
assertions.NewOutputAssertionManager(t, output).ReportsSuccessfulRebase(repoName) assertImage.CanBePulledFromRegistry(repoName) @@ -2458,37 +3567,268 @@ include = [ "*.jar", "media/mountain.jpg", "/media/person.png", ] }) }) }) + + when("multi-platform", func() { + var ( + tmpDir string + multiArchBuildpackPackage string + builderTomlPath string + remoteRunImage string + remoteBuildImage string + err error + ) + + it.Before(func() { + h.SkipIf(t, !pack.SupportsFeature(invoke.MultiPlatformBuildersAndBuildPackages), "multi-platform builders and buildpack packages are available since 0.34.0") + + tmpDir, err = os.MkdirTemp("", "multi-platform-builder-create-tests") + assert.Nil(err) + + // used to avoid authentication issues with the local registry + os.Setenv("DOCKER_CONFIG", registryConfig.DockerConfigDir) + + // create a multi-platform buildpack and push it to a registry + multiArchBuildpackPackage = registryConfig.RepoName("simple-multi-platform-buildpack" + h.RandString(8)) + sourceDir := filepath.Join("testdata", "mock_buildpacks") + path := filepath.Join(tmpDir, "simple-layers-buildpack") + err = buildpacks.BpFolderSimpleLayers.Prepare(sourceDir, tmpDir) + h.AssertNil(t, err) + + output := pack.RunSuccessfully( + "buildpack", "package", multiArchBuildpackPackage, + "--path", path, + "--publish", + "--target", "linux/amd64", + "--target", "linux/arm64", + "--target", "windows/amd64", + ) + assertions.NewOutputAssertionManager(t, output).ReportsPackagePublished(multiArchBuildpackPackage) + assertions.NewOutputAssertionManager(t, output).ReportsSuccessfulIndexPushed(multiArchBuildpackPackage) + h.AssertRemoteImageIndex(t, multiArchBuildpackPackage, types.OCIImageIndex, 3) + + // runImage and buildImage are saved in the daemon, for this test we want them to be available in a registry + remoteRunImage = registryConfig.RepoName(runImage + h.RandString(8)) + remoteBuildImage = registryConfig.RepoName(buildImage + h.RandString(8)) + + imageManager.TagImage(runImage, 
remoteRunImage) + imageManager.TagImage(buildImage, remoteBuildImage) + + h.AssertNil(t, h.PushImage(dockerCli, remoteRunImage, registryConfig)) + h.AssertNil(t, h.PushImage(dockerCli, remoteBuildImage, registryConfig)) + }) + + it.After(func() { + imageManager.CleanupImages(remoteBuildImage) + imageManager.CleanupImages(remoteRunImage) + os.RemoveAll(tmpDir) + }) + + generateMultiPlatformBuilderToml := func(template, buildpackURI, buildImage, runImage string) string { + t.Helper() + buildpackToml, err := os.CreateTemp(tmpDir, "buildpack-*.toml") + assert.Nil(err) + + pack.FixtureManager().TemplateFixtureToFile( + template, + buildpackToml, + map[string]interface{}{ + "BuildpackURI": buildpackURI, + "BuildImage": buildImage, + "RunImage": runImage, + }, + ) + assert.Nil(buildpackToml.Close()) + return buildpackToml.Name() + } + + when("builder.toml has no targets but the user provides --target", func() { + when("--publish", func() { + it.Before(func() { + builderName = registryConfig.RepoName("remote-multi-platform-builder" + h.RandString(8)) + + // We need to configure our builder.toml with image references that points to our ephemeral registry + builderTomlPath = generateMultiPlatformBuilderToml("builder_multi_platform-no-targets.toml", multiArchBuildpackPackage, remoteBuildImage, remoteRunImage) + }) + + it("publishes builder images for each requested target to the registry and creates an image index", func() { + output := pack.RunSuccessfully( + "builder", "create", builderName, + "--config", builderTomlPath, + "--publish", + "--target", "linux/amd64", + "--target", "linux/arm64", + "--target", "windows/amd64", + ) + + defer imageManager.CleanupImages(builderName) + assertions.NewOutputAssertionManager(t, output).ReportsBuilderCreated(builderName) + + assertImage.CanBePulledFromRegistry(builderName) + + assertions.NewOutputAssertionManager(t, output).ReportsSuccessfulIndexPushed(builderName) + h.AssertRemoteImageIndex(t, builderName, types.OCIImageIndex, 3) + 
}) + }) + + when("--daemon", func() { + it.Before(func() { + builderName = registryConfig.RepoName("local-multi-platform-builder" + h.RandString(8)) + + // We need to configure our builder.toml with image references that points to our ephemeral registry + builderTomlPath = generateMultiPlatformBuilderToml("builder_multi_platform-no-targets.toml", multiArchBuildpackPackage, buildImage, runImage) + }) + + it("publishes builder image to the daemon for the given target", func() { + platform := "linux/amd64" + if imageManager.HostOS() == "windows" { + platform = "windows/amd64" + } + + output := pack.RunSuccessfully( + "builder", "create", builderName, + "--config", builderTomlPath, + "--target", platform, + ) + + defer imageManager.CleanupImages(builderName) + assertions.NewOutputAssertionManager(t, output).ReportsBuilderCreated(builderName) + }) + }) + }) + + when("builder.toml has targets", func() { + when("--publish", func() { + it.Before(func() { + builderName = registryConfig.RepoName("remote-multi-platform-builder" + h.RandString(8)) + + // We need to configure our builder.toml with image references that points to our ephemeral registry + builderTomlPath = generateMultiPlatformBuilderToml("builder_multi_platform.toml", multiArchBuildpackPackage, remoteBuildImage, remoteRunImage) + }) + + it("publishes builder images for each configured target to the registry and creates an image index", func() { + output := pack.RunSuccessfully( + "builder", "create", builderName, + "--config", builderTomlPath, + "--publish", + ) + + defer imageManager.CleanupImages(builderName) + assertions.NewOutputAssertionManager(t, output).ReportsBuilderCreated(builderName) + + assertImage.CanBePulledFromRegistry(builderName) + + assertions.NewOutputAssertionManager(t, output).ReportsSuccessfulIndexPushed(builderName) + h.AssertRemoteImageIndex(t, builderName, types.OCIImageIndex, 2) + }) + }) + + when("--daemon", func() { + it.Before(func() { + builderName = 
registryConfig.RepoName("local-multi-platform-builder" + h.RandString(8)) + + // We need to configure our builder.toml with image references that points to our ephemeral registry + builderTomlPath = generateMultiPlatformBuilderToml("builder_multi_platform.toml", multiArchBuildpackPackage, buildImage, runImage) + }) + + it("publishes builder image to the daemon for the given target", func() { + platform := "linux/amd64" + if imageManager.HostOS() == "windows" { + platform = "windows/amd64" + } + + output := pack.RunSuccessfully( + "builder", "create", builderName, + "--config", builderTomlPath, + "--target", platform, + ) + + defer imageManager.CleanupImages(builderName) + assertions.NewOutputAssertionManager(t, output).ReportsBuilderCreated(builderName) + }) + }) + }) + }) + }) + + when("builder create", func() { + when("--flatten=", func() { + it("should flatten together all specified buildpacks", func() { + h.SkipIf(t, !createBuilderPack.SupportsFeature(invoke.FlattenBuilderCreationV2), "pack version <= 0.33.0 fails with this test") + h.SkipIf(t, imageManager.HostOS() == "windows", "These tests are not yet compatible with Windows-based containers") + + // create a task, handled by a 'task manager' which executes our pack commands during tests. 
+ // looks like this is used to de-dup tasks + key := taskKey( + "create-complex-flattened-builder", + append( + []string{runImageMirror, createBuilderPackConfig.Path(), lifecycle.Identifier()}, + createBuilderPackConfig.FixturePaths()..., + )..., + ) + + builderName, err := suiteManager.RunTaskOnceString(key, func() (string, error) { + return createFlattenBuilder(t, + assert, + buildpackManager, + lifecycle, + createBuilderPack, + runImageMirror) + }) + assert.Nil(err) + + // register task to be run to 'clean up' a task + suiteManager.RegisterCleanUp("clean-"+key, func() error { + imageManager.CleanupImages(builderName) + return nil + }) + + assertImage.ExistsLocally(builderName) + + // 3 layers for runtime OS + // 1 layer setting cnb, platform, layers folders + // 1 layer for lifecycle binaries + // 1 layer for order.toml + // 1 layer for run.toml + // 1 layer for stack.toml + // 1 layer status file changed + // Base Layers = 9 + + // 1 layer for 3 flattened builpacks + // 3 layers for single buildpacks not flattened + assertImage.HasLengthLayers(builderName, 13) + }) + }) }) }) } -func buildpacksDir(bpAPIVersion string) string { - return filepath.Join("testdata", "mock_buildpacks", bpAPIVersion) +func buildModulesDir() string { + return filepath.Join("testdata", "mock_buildpacks") } func createComplexBuilder(t *testing.T, assert h.AssertionManager, pack *invoke.PackInvoker, lifecycle config.LifecycleAsset, - buildpackManager buildpacks.BuildpackManager, + buildpackManager buildpacks.BuildModuleManager, runImageMirror string, ) (string, error) { t.Log("creating complex builder image...") // CREATE TEMP WORKING DIR - tmpDir, err := ioutil.TempDir("", "create-complex-test-builder") + tmpDir, err := os.MkdirTemp("", "create-complex-test-builder") if err != nil { return "", err } defer os.RemoveAll(tmpDir) // ARCHIVE BUILDPACKS - builderBuildpacks := []buildpacks.TestBuildpack{ - buildpacks.Noop, - buildpacks.Noop2, - buildpacks.OtherStack, - buildpacks.ReadEnv, + 
builderBuildpacks := []buildpacks.TestBuildModule{ + buildpacks.BpNoop, + buildpacks.BpNoop2, + buildpacks.BpOtherStack, + buildpacks.BpReadEnv, } templateMapping := map[string]interface{}{ @@ -2509,7 +3849,7 @@ func createComplexBuilder(t *testing.T, fixtureManager := pack.FixtureManager() - nestedLevelOneConfigFile, err := ioutil.TempFile(tmpDir, "nested-level-1-package.toml") + nestedLevelOneConfigFile, err := os.CreateTemp(tmpDir, "nested-level-1-package.toml") assert.Nil(err) fixtureManager.TemplateFixtureToFile( "nested-level-1-buildpack_package.toml", @@ -2519,7 +3859,7 @@ func createComplexBuilder(t *testing.T, err = nestedLevelOneConfigFile.Close() assert.Nil(err) - nestedLevelTwoConfigFile, err := ioutil.TempFile(tmpDir, "nested-level-2-package.toml") + nestedLevelTwoConfigFile, err := os.CreateTemp(tmpDir, "nested-level-2-package.toml") assert.Nil(err) fixtureManager.TemplateFixtureToFile( "nested-level-2-buildpack_package.toml", @@ -2536,20 +3876,20 @@ func createComplexBuilder(t *testing.T, packageImageName, nestedLevelOneConfigFile.Name(), buildpacks.WithRequiredBuildpacks( - buildpacks.NestedLevelOne, + buildpacks.BpNestedLevelOne, buildpacks.NewPackageImage( t, pack, nestedLevelTwoBuildpackName, nestedLevelTwoConfigFile.Name(), buildpacks.WithRequiredBuildpacks( - buildpacks.NestedLevelTwo, + buildpacks.BpNestedLevelTwo, buildpacks.NewPackageImage( t, pack, simpleLayersBuildpackName, fixtureManager.FixtureLocation("simple-layers-buildpack_package.toml"), - buildpacks.WithRequiredBuildpacks(buildpacks.SimpleLayers), + buildpacks.WithRequiredBuildpacks(buildpacks.BpSimpleLayers), ), ), ), @@ -2561,7 +3901,7 @@ func createComplexBuilder(t *testing.T, pack, simpleLayersBuildpackDifferentShaName, fixtureManager.FixtureLocation("simple-layers-buildpack-different-sha_package.toml"), - buildpacks.WithRequiredBuildpacks(buildpacks.SimpleLayersDifferentSha), + buildpacks.WithRequiredBuildpacks(buildpacks.BpSimpleLayersDifferentSha), ) defer 
imageManager.CleanupImages(packageImageName, nestedLevelTwoBuildpackName, simpleLayersBuildpackName, simpleLayersBuildpackDifferentShaName) @@ -2572,7 +3912,7 @@ func createComplexBuilder(t *testing.T, simpleLayersDifferentShaBuildpack, ) - buildpackManager.PrepareBuildpacks(tmpDir, builderBuildpacks...) + buildpackManager.PrepareBuildModules(tmpDir, builderBuildpacks...) // ADD lifecycle if lifecycle.HasLocation() { @@ -2586,7 +3926,7 @@ func createComplexBuilder(t *testing.T, } // RENDER builder.toml - builderConfigFile, err := ioutil.TempFile(tmpDir, "nested_builder.toml") + builderConfigFile, err := os.CreateTemp(tmpDir, "nested_builder.toml") if err != nil { return "", err } @@ -2619,13 +3959,13 @@ func createBuilder( assert h.AssertionManager, pack *invoke.PackInvoker, lifecycle config.LifecycleAsset, - buildpackManager buildpacks.BuildpackManager, + buildpackManager buildpacks.BuildModuleManager, runImageMirror string, ) (string, error) { t.Log("creating builder image...") // CREATE TEMP WORKING DIR - tmpDir, err := ioutil.TempDir("", "create-test-builder") + tmpDir, err := os.MkdirTemp("", "create-test-builder") assert.Nil(err) defer os.RemoveAll(tmpDir) @@ -2634,11 +3974,11 @@ func createBuilder( } // ARCHIVE BUILDPACKS - builderBuildpacks := []buildpacks.TestBuildpack{ - buildpacks.Noop, - buildpacks.Noop2, - buildpacks.OtherStack, - buildpacks.ReadEnv, + builderBuildpacks := []buildpacks.TestBuildModule{ + buildpacks.BpNoop, + buildpacks.BpNoop2, + buildpacks.BpOtherStack, + buildpacks.BpReadEnv, } packageTomlPath := generatePackageTomlWithOS(t, assert, pack, tmpDir, "package.toml", imageManager.HostOS()) @@ -2649,7 +3989,7 @@ func createBuilder( pack, packageImageName, packageTomlPath, - buildpacks.WithRequiredBuildpacks(buildpacks.SimpleLayers), + buildpacks.WithRequiredBuildpacks(buildpacks.BpSimpleLayers), ) defer imageManager.CleanupImages(packageImageName) @@ -2659,7 +3999,7 @@ func createBuilder( templateMapping["package_image_name"] = 
packageImageName templateMapping["package_id"] = "simple/layers" - buildpackManager.PrepareBuildpacks(tmpDir, builderBuildpacks...) + buildpackManager.PrepareBuildModules(tmpDir, builderBuildpacks...) // ADD lifecycle var lifecycleURI string @@ -2677,7 +4017,84 @@ func createBuilder( // RENDER builder.toml configFileName := "builder.toml" - builderConfigFile, err := ioutil.TempFile(tmpDir, "builder.toml") + builderConfigFile, err := os.CreateTemp(tmpDir, "builder.toml") + assert.Nil(err) + + pack.FixtureManager().TemplateFixtureToFile( + configFileName, + builderConfigFile, + templateMapping, + ) + + err = builderConfigFile.Close() + assert.Nil(err) + + // NAME BUILDER + bldr := registryConfig.RepoName("test/builder-" + h.RandString(10)) + + // CREATE BUILDER + output := pack.RunSuccessfully( + "builder", "create", bldr, + "-c", builderConfigFile.Name(), + "--no-color", + ) + + assert.Contains(output, fmt.Sprintf("Successfully created builder image '%s'", bldr)) + assert.Succeeds(h.PushImage(dockerCli, bldr, registryConfig)) + + return bldr, nil +} + +func createBuilderWithExtensions( + t *testing.T, + assert h.AssertionManager, + pack *invoke.PackInvoker, + lifecycle config.LifecycleAsset, + buildpackManager buildpacks.BuildModuleManager, + runImageMirror string, +) (string, error) { + t.Log("creating builder image with extensions...") + + // CREATE TEMP WORKING DIR + tmpDir, err := os.MkdirTemp("", "create-test-builder-extensions") + assert.Nil(err) + defer os.RemoveAll(tmpDir) + + templateMapping := map[string]interface{}{ + "run_image_mirror": runImageMirror, + } + + // BUILDPACKS + builderBuildpacks := []buildpacks.TestBuildModule{ + buildpacks.BpReadEnv, // archive buildpack + buildpacks.BpFolderSimpleLayers, // folder buildpack + } + buildpackManager.PrepareBuildModules(tmpDir, builderBuildpacks...) 
+ + // EXTENSIONS + builderExtensions := []buildpacks.TestBuildModule{ + buildpacks.ExtReadEnv, // archive extension + buildpacks.ExtFolderSimpleLayers, // folder extension + } + buildpackManager.PrepareBuildModules(tmpDir, builderExtensions...) + + // ADD lifecycle + var lifecycleURI string + var lifecycleVersion string + if lifecycle.HasLocation() { + lifecycleURI = lifecycle.EscapedPath() + t.Logf("adding lifecycle path '%s' to builder config", lifecycleURI) + templateMapping["lifecycle_uri"] = lifecycleURI + } else { + lifecycleVersion = lifecycle.Version() + t.Logf("adding lifecycle version '%s' to builder config", lifecycleVersion) + templateMapping["lifecycle_version"] = lifecycleVersion + } + + // RENDER builder.toml + configFileName := "builder_extensions.toml" + + builderConfigFile, err := os.CreateTemp(tmpDir, "builder.toml") assert.Nil(err) pack.FixtureManager().TemplateFixtureToFile( @@ -2692,6 +4109,9 @@ func createBuilder( // NAME BUILDER bldr := registryConfig.RepoName("test/builder-" + h.RandString(10)) + // SET EXPERIMENTAL + pack.JustRunSuccessfully("config", "experimental", "true") + // CREATE BUILDER output := pack.RunSuccessfully( "builder", "create", bldr, @@ -2715,7 +4135,7 @@ func generatePackageTomlWithOS( ) string { t.Helper() - packageTomlFile, err := ioutil.TempFile(tmpDir, "package-*.toml") + packageTomlFile, err := os.CreateTemp(tmpDir, "package-*.toml") assert.Nil(err) pack.FixtureManager().TemplateFixtureToFile( @@ -2731,7 +4151,7 @@ func generatePackageTomlWithOS( return packageTomlFile.Name() } -func createStack(t *testing.T, dockerCli client.CommonAPIClient, runImageMirror string) error { +func createStack(t *testing.T, dockerCli *client.Client, runImageMirror string) error { t.Helper() t.Log("creating stack images...") @@ -2752,19 +4172,170 @@ func createStack(t *testing.T, dockerCli client.CommonAPIClient, runImageMirror return nil } -func createStackImage(dockerCli client.CommonAPIClient, repoName string, dir string) error { 
+func createStackImage(dockerCli *client.Client, repoName string, dir string) error { defaultFilterFunc := func(file string) bool { return true } ctx := context.Background() buildContext := archive.ReadDirAsTar(dir, "/", 0, 0, -1, true, false, defaultFilterFunc) - return h.CheckImageBuildResult(dockerCli.ImageBuild(ctx, buildContext, dockertypes.ImageBuildOptions{ + return h.CheckImageBuildResult(dockerCli.ImageBuild(ctx, buildContext, client.ImageBuildOptions{ Tags: []string{repoName}, Remove: true, ForceRemove: true, })) } +func createFlattenBuilder( + t *testing.T, + assert h.AssertionManager, + buildpackManager buildpacks.BuildModuleManager, + lifecycle config.LifecycleAsset, + pack *invoke.PackInvoker, + runImageMirror string, +) (string, error) { + t.Helper() + t.Log("creating flattened builder image...") + + // CREATE TEMP WORKING DIR + tmpDir, err := os.MkdirTemp("", "create-complex-test-flattened-builder") + if err != nil { + return "", err + } + defer os.RemoveAll(tmpDir) + + // ARCHIVE BUILDPACKS + builderBuildpacks := []buildpacks.TestBuildModule{ + buildpacks.BpNoop, + buildpacks.BpNoop2, + buildpacks.BpOtherStack, + buildpacks.BpReadEnv, + } + + templateMapping := map[string]interface{}{ + "run_image_mirror": runImageMirror, + } + + packageImageName := registryConfig.RepoName("nested-level-1-buildpack-" + h.RandString(8)) + nestedLevelTwoBuildpackName := registryConfig.RepoName("nested-level-2-buildpack-" + h.RandString(8)) + simpleLayersBuildpackName := registryConfig.RepoName("simple-layers-buildpack-" + h.RandString(8)) + simpleLayersBuildpackDifferentShaName := registryConfig.RepoName("simple-layers-buildpack-different-name-" + h.RandString(8)) + + templateMapping["package_id"] = "simple/nested-level-1" + templateMapping["package_image_name"] = packageImageName + templateMapping["nested_level_1_buildpack"] = packageImageName + templateMapping["nested_level_2_buildpack"] = nestedLevelTwoBuildpackName + templateMapping["simple_layers_buildpack"] = 
simpleLayersBuildpackName + templateMapping["simple_layers_buildpack_different_sha"] = simpleLayersBuildpackDifferentShaName + + fixtureManager := pack.FixtureManager() + + nestedLevelOneConfigFile, err := os.CreateTemp(tmpDir, "nested-level-1-package.toml") + assert.Nil(err) + fixtureManager.TemplateFixtureToFile( + "nested-level-1-buildpack_package.toml", + nestedLevelOneConfigFile, + templateMapping, + ) + err = nestedLevelOneConfigFile.Close() + assert.Nil(err) + + nestedLevelTwoConfigFile, err := os.CreateTemp(tmpDir, "nested-level-2-package.toml") + assert.Nil(err) + fixtureManager.TemplateFixtureToFile( + "nested-level-2-buildpack_package.toml", + nestedLevelTwoConfigFile, + templateMapping, + ) + + err = nestedLevelTwoConfigFile.Close() + assert.Nil(err) + + packageImageBuildpack := buildpacks.NewPackageImage( + t, + pack, + packageImageName, + nestedLevelOneConfigFile.Name(), + buildpacks.WithRequiredBuildpacks( + buildpacks.BpNestedLevelOne, + buildpacks.NewPackageImage( + t, + pack, + nestedLevelTwoBuildpackName, + nestedLevelTwoConfigFile.Name(), + buildpacks.WithRequiredBuildpacks( + buildpacks.BpNestedLevelTwo, + buildpacks.NewPackageImage( + t, + pack, + simpleLayersBuildpackName, + fixtureManager.FixtureLocation("simple-layers-buildpack_package.toml"), + buildpacks.WithRequiredBuildpacks(buildpacks.BpSimpleLayers), + ), + ), + ), + ), + ) + + simpleLayersDifferentShaBuildpack := buildpacks.NewPackageImage( + t, + pack, + simpleLayersBuildpackDifferentShaName, + fixtureManager.FixtureLocation("simple-layers-buildpack-different-sha_package.toml"), + buildpacks.WithRequiredBuildpacks(buildpacks.BpSimpleLayersDifferentSha), + ) + + defer imageManager.CleanupImages(packageImageName, nestedLevelTwoBuildpackName, simpleLayersBuildpackName, simpleLayersBuildpackDifferentShaName) + + builderBuildpacks = append( + builderBuildpacks, + packageImageBuildpack, + simpleLayersDifferentShaBuildpack, + ) + + buildpackManager.PrepareBuildModules(tmpDir, 
builderBuildpacks...) + + // ADD lifecycle + if lifecycle.HasLocation() { + lifecycleURI := lifecycle.EscapedPath() + t.Logf("adding lifecycle path '%s' to builder config", lifecycleURI) + templateMapping["lifecycle_uri"] = lifecycleURI + } else { + lifecycleVersion := lifecycle.Version() + t.Logf("adding lifecycle version '%s' to builder config", lifecycleVersion) + templateMapping["lifecycle_version"] = lifecycleVersion + } + + // RENDER builder.toml + builderConfigFile, err := os.CreateTemp(tmpDir, "nested_builder.toml") + if err != nil { + return "", err + } + + pack.FixtureManager().TemplateFixtureToFile("nested_builder.toml", builderConfigFile, templateMapping) + + err = builderConfigFile.Close() + if err != nil { + return "", err + } + + // NAME BUILDER + bldr := registryConfig.RepoName("test/flatten-builder-" + h.RandString(10)) + + // CREATE BUILDER + output := pack.RunSuccessfully( + "builder", "create", bldr, + "-c", builderConfigFile.Name(), + "--no-color", + "--verbose", + "--flatten", "read/env@read-env-version,noop.buildpack@noop.buildpack.version,noop.buildpack@noop.buildpack.later-version", + ) + + assert.Contains(output, fmt.Sprintf("Successfully created builder image '%s'", bldr)) + assert.Succeeds(h.PushImage(dockerCli, bldr, registryConfig)) + + return bldr, nil +} + // taskKey creates a key from the prefix and all arguments to be unique func taskKey(prefix string, args ...string) string { hash := sha256.New() diff --git a/acceptance/assertions/image.go b/acceptance/assertions/image.go index dcbf0a66d7..269b06b721 100644 --- a/acceptance/assertions/image.go +++ b/acceptance/assertions/image.go @@ -1,11 +1,11 @@ //go:build acceptance -// +build acceptance package assertions import ( "fmt" "testing" + "time" "github.com/buildpacks/pack/acceptance/managers" h "github.com/buildpacks/pack/testhelpers" @@ -50,7 +50,16 @@ func (a ImageAssertionManager) HasBaseImage(image, base string) { } } -func (a ImageAssertionManager) HasLabelWithData(image, 
label, data string) { +func (a ImageAssertionManager) HasCreateTime(image string, expectedTime time.Time) { + a.testObject.Helper() + inspect, err := a.imageManager.InspectLocal(image) + a.assert.Nil(err) + actualTime, err := time.Parse("2006-01-02T15:04:05Z", inspect.Created) + a.assert.Nil(err) + a.assert.TrueWithMessage(actualTime.Sub(expectedTime) < 5*time.Second && expectedTime.Sub(actualTime) < 5*time.Second, fmt.Sprintf("expected image create time %s to match expected time %s", actualTime, expectedTime)) +} + +func (a ImageAssertionManager) HasLabelContaining(image, label, data string) { a.testObject.Helper() inspect, err := a.imageManager.InspectLocal(image) a.assert.Nil(err) @@ -59,6 +68,22 @@ func (a ImageAssertionManager) HasLabelWithData(image, label, data string) { a.assert.Contains(label, data) } +func (a ImageAssertionManager) HasLabelNotContaining(image, label, data string) { + a.testObject.Helper() + inspect, err := a.imageManager.InspectLocal(image) + a.assert.Nil(err) + label, ok := inspect.Config.Labels[label] + a.assert.TrueWithMessage(ok, fmt.Sprintf("expected label %s to exist", label)) + a.assert.NotContains(label, data) +} + +func (a ImageAssertionManager) HasLengthLayers(image string, length int) { + a.testObject.Helper() + inspect, err := a.imageManager.InspectLocal(image) + a.assert.Nil(err) + a.assert.TrueWithMessage(len(inspect.RootFS.Layers) == length, fmt.Sprintf("expected image to have %d layers, found %d", length, len(inspect.RootFS.Layers))) +} + func (a ImageAssertionManager) RunsWithOutput(image string, expectedOutputs ...string) { a.testObject.Helper() containerName := "test-" + h.RandString(10) diff --git a/acceptance/assertions/lifecycle_output.go b/acceptance/assertions/lifecycle_output.go index 7a6ffdfbcb..d3c28f0e25 100644 --- a/acceptance/assertions/lifecycle_output.go +++ b/acceptance/assertions/lifecycle_output.go @@ -1,11 +1,11 @@ //go:build acceptance -// +build acceptance package assertions import ( "fmt" "regexp" + 
"strings" "testing" h "github.com/buildpacks/pack/testhelpers" @@ -70,8 +70,27 @@ func (l LifecycleOutputAssertionManager) IncludesSeparatePhases() { l.assert.ContainsAll(l.output, "[detector]", "[analyzer]", "[builder]", "[exporter]") } -func (l LifecycleOutputAssertionManager) IncludesLifecycleImageTag(tag string) { +func (l LifecycleOutputAssertionManager) IncludesSeparatePhasesWithBuildExtension() { l.testObject.Helper() - l.assert.Contains(l.output, tag) + // Earlier pack versions print `[extender]`, later pack versions print `[extender (build)]`. + // Removing the `]` for the extend phase allows us to navigate compat suite complexity without undo headache. + // When previous pack is old enough, we can make the matcher more precise. + l.assert.ContainsAll(l.output, "[detector]", "[analyzer]", "[extender", "[exporter]") +} + +func (l LifecycleOutputAssertionManager) IncludesSeparatePhasesWithRunExtension() { + l.testObject.Helper() + + l.assert.ContainsAll(l.output, "[detector]", "[analyzer]", "[extender (run)]", "[exporter]") +} + +func (l LifecycleOutputAssertionManager) IncludesTagOrEphemeralLifecycle(tag string) { + l.testObject.Helper() + + if !strings.Contains(l.output, tag) { + if !strings.Contains(l.output, "pack.local/lifecycle") { + l.testObject.Fatalf("Unable to locate reference to lifecycle image within output") + } + } } diff --git a/acceptance/assertions/output.go b/acceptance/assertions/output.go index 863f5a3efa..50ea8246c8 100644 --- a/acceptance/assertions/output.go +++ b/acceptance/assertions/output.go @@ -1,5 +1,4 @@ //go:build acceptance -// +build acceptance package assertions @@ -32,6 +31,42 @@ func (o OutputAssertionManager) ReportsSuccessfulImageBuild(name string) { o.assert.ContainsF(o.output, "Successfully built image '%s'", name) } +func (o OutputAssertionManager) ReportsSuccessfulIndexLocallyCreated(name string) { + o.testObject.Helper() + + o.assert.ContainsF(o.output, "Successfully created manifest list '%s'", name) +} + +func (o 
OutputAssertionManager) ReportsSuccessfulIndexPushed(name string) { + o.testObject.Helper() + + o.assert.ContainsF(o.output, "Successfully pushed manifest list '%s' to registry", name) +} + +func (o OutputAssertionManager) ReportsSuccessfulManifestAddedToIndex(name string) { + o.testObject.Helper() + + o.assert.ContainsF(o.output, "Successfully added image '%s' to index", name) +} + +func (o OutputAssertionManager) ReportsSuccessfulIndexDeleted() { + o.testObject.Helper() + + o.assert.Contains(o.output, "Successfully deleted manifest list(s) from local storage") +} + +func (o OutputAssertionManager) ReportsSuccessfulIndexAnnotated(name, manifest string) { + o.testObject.Helper() + + o.assert.ContainsF(o.output, "Successfully annotated image '%s' in index '%s'", name, manifest) +} + +func (o OutputAssertionManager) ReportsSuccessfulRemoveManifestFromIndex(name string) { + o.testObject.Helper() + + o.assert.ContainsF(o.output, "Successfully removed image(s) from index: '%s'", name) +} + func (o OutputAssertionManager) ReportSuccessfulQuietBuild(name string) { o.testObject.Helper() o.testObject.Log("quiet mode") @@ -82,6 +117,12 @@ func (o OutputAssertionManager) ReportsRunImageStackNotMatchingBuilder(runImageS ) } +func (o OutputAssertionManager) ReportsDeprecatedUseOfStack() { + o.testObject.Helper() + + o.assert.Contains(o.output, "Warning: deprecated usage of stack") +} + func (o OutputAssertionManager) WithoutColors() { o.testObject.Helper() o.testObject.Log("has no color") @@ -137,6 +178,12 @@ func (o OutputAssertionManager) IncludesUsagePrompt() { o.assert.Contains(o.output, "Run 'pack --help' for usage.") } +func (o OutputAssertionManager) ReportsBuilderCreated(name string) { + o.testObject.Helper() + + o.assert.ContainsF(o.output, "Successfully created builder image '%s'", name) +} + func (o OutputAssertionManager) ReportsSettingDefaultBuilder(name string) { o.testObject.Helper() @@ -167,7 +214,7 @@ func (o OutputAssertionManager) 
IncludesTrustedBuildersHeading() { o.assert.Contains(o.output, "Trusted Builders:") } -const googleBuilder = "gcr.io/buildpacks/builder:v1" +const googleBuilder = "gcr.io/buildpacks/builder:google-22" func (o OutputAssertionManager) IncludesGoogleBuilder() { o.testObject.Helper() @@ -182,7 +229,7 @@ func (o OutputAssertionManager) IncludesPrefixedGoogleBuilder() { } var herokuBuilders = []string{ - "heroku/buildpacks:20", + "heroku/builder:24", } func (o OutputAssertionManager) IncludesHerokuBuilders() { @@ -200,9 +247,9 @@ func (o OutputAssertionManager) IncludesPrefixedHerokuBuilders() { } var paketoBuilders = []string{ - "paketobuildpacks/builder:base", - "paketobuildpacks/builder:full", - "paketobuildpacks/builder:tiny", + "paketobuildpacks/builder-jammy-base", + "paketobuildpacks/builder-jammy-full", + "paketobuildpacks/builder-jammy-tiny", } func (o OutputAssertionManager) IncludesPaketoBuilders() { diff --git a/acceptance/assertions/test_buildpack_output.go b/acceptance/assertions/test_buildpack_output.go index 10a13d3c1b..a44943a337 100644 --- a/acceptance/assertions/test_buildpack_output.go +++ b/acceptance/assertions/test_buildpack_output.go @@ -1,5 +1,4 @@ //go:build acceptance -// +build acceptance package assertions diff --git a/acceptance/buildpacks/archive_buildpack.go b/acceptance/buildpacks/archive_buildpack.go index adc3aebc64..2015479c64 100644 --- a/acceptance/buildpacks/archive_buildpack.go +++ b/acceptance/buildpacks/archive_buildpack.go @@ -1,5 +1,4 @@ //go:build acceptance -// +build acceptance package buildpacks @@ -7,7 +6,6 @@ import ( "archive/tar" "compress/gzip" "fmt" - "io/ioutil" "os" "path/filepath" @@ -23,38 +21,38 @@ const ( defaultMode = 0755 ) -type archiveBuildpack struct { +type archiveBuildModule struct { name string } -func (a archiveBuildpack) Prepare(sourceDir, destination string) error { +func (a archiveBuildModule) Prepare(sourceDir, destination string) error { location, err := a.createTgz(sourceDir) if err != nil { - 
return errors.Wrapf(err, "creating archive for buildpack %s", a) + return errors.Wrapf(err, "creating archive for build module %s", a) } err = os.Rename(location, filepath.Join(destination, a.FileName())) if err != nil { - return errors.Wrapf(err, "renaming temporary archive for buildpack %s", a) + return errors.Wrapf(err, "renaming temporary archive for build module %s", a) } return nil } -func (a archiveBuildpack) FileName() string { +func (a archiveBuildModule) FileName() string { return fmt.Sprintf("%s.tgz", a) } -func (a archiveBuildpack) String() string { +func (a archiveBuildModule) String() string { return a.name } -func (a archiveBuildpack) FullPathIn(parentFolder string) string { +func (a archiveBuildModule) FullPathIn(parentFolder string) string { return filepath.Join(parentFolder, a.FileName()) } -func (a archiveBuildpack) createTgz(sourceDir string) (string, error) { - tempFile, err := ioutil.TempFile("", "*.tgz") +func (a archiveBuildModule) createTgz(sourceDir string) (string, error) { + tempFile, err := os.CreateTemp("", "*.tgz") if err != nil { return "", errors.Wrap(err, "creating temporary archive") } @@ -86,17 +84,18 @@ func (a archiveBuildpack) createTgz(sourceDir string) (string, error) { } var ( - SimpleLayersParent = &archiveBuildpack{name: "simple-layers-parent-buildpack"} - SimpleLayers = &archiveBuildpack{name: "simple-layers-buildpack"} - SimpleLayersDifferentSha = &archiveBuildpack{name: "simple-layers-buildpack-different-sha"} - InternetCapable = &archiveBuildpack{name: "internet-capable-buildpack"} - ReadVolume = &archiveBuildpack{name: "read-volume-buildpack"} - ReadWriteVolume = &archiveBuildpack{name: "read-write-volume-buildpack"} - ArchiveNotInBuilder = &archiveBuildpack{name: "not-in-builder-buildpack"} - Noop = &archiveBuildpack{name: "noop-buildpack"} - Noop2 = &archiveBuildpack{name: "noop-buildpack-2"} - OtherStack = &archiveBuildpack{name: "other-stack-buildpack"} - ReadEnv = &archiveBuildpack{name: "read-env-buildpack"} - 
NestedLevelOne = &archiveBuildpack{name: "nested-level-1-buildpack"} - NestedLevelTwo = &archiveBuildpack{name: "nested-level-2-buildpack"} + BpSimpleLayersParent = &archiveBuildModule{name: "simple-layers-parent-buildpack"} + BpSimpleLayers = &archiveBuildModule{name: "simple-layers-buildpack"} + BpSimpleLayersDifferentSha = &archiveBuildModule{name: "simple-layers-buildpack-different-sha"} + BpInternetCapable = &archiveBuildModule{name: "internet-capable-buildpack"} + BpReadVolume = &archiveBuildModule{name: "read-volume-buildpack"} + BpReadWriteVolume = &archiveBuildModule{name: "read-write-volume-buildpack"} + BpArchiveNotInBuilder = &archiveBuildModule{name: "not-in-builder-buildpack"} + BpNoop = &archiveBuildModule{name: "noop-buildpack"} + BpNoop2 = &archiveBuildModule{name: "noop-buildpack-2"} + BpOtherStack = &archiveBuildModule{name: "other-stack-buildpack"} + BpReadEnv = &archiveBuildModule{name: "read-env-buildpack"} + BpNestedLevelOne = &archiveBuildModule{name: "nested-level-1-buildpack"} + BpNestedLevelTwo = &archiveBuildModule{name: "nested-level-2-buildpack"} + ExtReadEnv = &archiveBuildModule{name: "read-env-extension"} ) diff --git a/acceptance/buildpacks/folder_buildpack.go b/acceptance/buildpacks/folder_buildpack.go index 5211fed99a..5c739da158 100644 --- a/acceptance/buildpacks/folder_buildpack.go +++ b/acceptance/buildpacks/folder_buildpack.go @@ -1,5 +1,4 @@ //go:build acceptance -// +build acceptance package buildpacks @@ -11,37 +10,41 @@ import ( h "github.com/buildpacks/pack/testhelpers" ) -type folderBuildpack struct { +type folderBuildModule struct { name string } -func (f folderBuildpack) Prepare(sourceDir, destination string) error { +func (f folderBuildModule) Prepare(sourceDir, destination string) error { sourceBuildpack := filepath.Join(sourceDir, f.name) info, err := os.Stat(sourceBuildpack) if err != nil { - return fmt.Errorf("retrieving buildpack folder info for folder: %s: %w", sourceBuildpack, err) + return 
fmt.Errorf("retrieving folder info for folder: %s: %w", sourceBuildpack, err) } destinationBuildpack := filepath.Join(destination, f.name) err = os.Mkdir(filepath.Join(destinationBuildpack), info.Mode()) if err != nil { - return fmt.Errorf("creating temp buildpack folder in: %s: %w", destinationBuildpack, err) + return fmt.Errorf("creating temp folder in: %s: %w", destinationBuildpack, err) } err = h.RecursiveCopyE(filepath.Join(sourceDir, f.name), destinationBuildpack) if err != nil { - return fmt.Errorf("copying folder buildpack %s: %w", f.name, err) + return fmt.Errorf("copying folder build module %s: %w", f.name, err) } return nil } -func (f folderBuildpack) FullPathIn(parentFolder string) string { +func (f folderBuildModule) FullPathIn(parentFolder string) string { return filepath.Join(parentFolder, f.name) } var ( - FolderNotInBuilder = folderBuildpack{name: "not-in-builder-buildpack"} - FolderSimpleLayersParent = folderBuildpack{name: "simple-layers-parent-buildpack"} - FolderSimpleLayers = folderBuildpack{name: "simple-layers-buildpack"} + BpFolderNotInBuilder = folderBuildModule{name: "not-in-builder-buildpack"} + BpFolderSimpleLayersParent = folderBuildModule{name: "simple-layers-parent-buildpack"} + BpFolderSimpleLayers = folderBuildModule{name: "simple-layers-buildpack"} + ExtFolderSimpleLayers = folderBuildModule{name: "simple-layers-extension"} + MetaBpFolder = folderBuildModule{name: "meta-buildpack"} + MetaBpDependency = folderBuildModule{name: "meta-buildpack-dependency"} + MultiPlatformFolderBP = folderBuildModule{name: "multi-platform-buildpack"} ) diff --git a/acceptance/buildpacks/manager.go b/acceptance/buildpacks/manager.go index eafd80a88e..63e309c699 100644 --- a/acceptance/buildpacks/manager.go +++ b/acceptance/buildpacks/manager.go @@ -1,5 +1,4 @@ //go:build acceptance -// +build acceptance package buildpacks @@ -7,30 +6,22 @@ import ( "path/filepath" "testing" - "github.com/buildpacks/pack/internal/builder" - 
"github.com/buildpacks/pack/testhelpers" ) -type BuildpackManager struct { +type BuildModuleManager struct { testObject *testing.T assert testhelpers.AssertionManager sourceDir string } -type BuildpackManagerModifier func(b *BuildpackManager) - -func WithBuildpackAPIVersion(apiVersion string) func(b *BuildpackManager) { - return func(b *BuildpackManager) { - b.sourceDir = filepath.Join("testdata", "mock_buildpacks", apiVersion) - } -} +type BuildModuleManagerModifier func(b *BuildModuleManager) -func NewBuildpackManager(t *testing.T, assert testhelpers.AssertionManager, modifiers ...BuildpackManagerModifier) BuildpackManager { - m := BuildpackManager{ +func NewBuildModuleManager(t *testing.T, assert testhelpers.AssertionManager, modifiers ...BuildModuleManagerModifier) BuildModuleManager { + m := BuildModuleManager{ testObject: t, assert: assert, - sourceDir: filepath.Join("testdata", "mock_buildpacks", builder.DefaultBuildpackAPIVersion), + sourceDir: filepath.Join("testdata", "mock_buildpacks"), } for _, mod := range modifiers { @@ -40,26 +31,26 @@ func NewBuildpackManager(t *testing.T, assert testhelpers.AssertionManager, modi return m } -type TestBuildpack interface { +type TestBuildModule interface { Prepare(source, destination string) error } -func (b BuildpackManager) PrepareBuildpacks(destination string, buildpacks ...TestBuildpack) { +func (b BuildModuleManager) PrepareBuildModules(destination string, modules ...TestBuildModule) { b.testObject.Helper() - for _, buildpack := range buildpacks { - err := buildpack.Prepare(b.sourceDir, destination) + for _, module := range modules { + err := module.Prepare(b.sourceDir, destination) b.assert.Nil(err) } } type Modifiable interface { SetPublish() - SetBuildpacks([]TestBuildpack) + SetBuildpacks([]TestBuildModule) } type PackageModifier func(p Modifiable) -func WithRequiredBuildpacks(buildpacks ...TestBuildpack) PackageModifier { +func WithRequiredBuildpacks(buildpacks ...TestBuildModule) PackageModifier { return 
func(p Modifiable) { p.SetBuildpacks(buildpacks) } diff --git a/acceptance/buildpacks/package_file_buildpack.go b/acceptance/buildpacks/package_file_buildpack.go index ce9d291123..34df47ff46 100644 --- a/acceptance/buildpacks/package_file_buildpack.go +++ b/acceptance/buildpacks/package_file_buildpack.go @@ -1,12 +1,10 @@ //go:build acceptance -// +build acceptance package buildpacks import ( "errors" "fmt" - "io/ioutil" "os" "path/filepath" "strings" @@ -21,10 +19,10 @@ type PackageFile struct { pack *invoke.PackInvoker destination string sourceConfigLocation string - buildpacks []TestBuildpack + buildpacks []TestBuildModule } -func (p *PackageFile) SetBuildpacks(buildpacks []TestBuildpack) { +func (p *PackageFile) SetBuildpacks(buildpacks []TestBuildModule) { p.buildpacks = buildpacks } @@ -54,7 +52,7 @@ func (p PackageFile) Prepare(sourceDir, _ string) error { p.testObject.Helper() p.testObject.Log("creating package file from:", sourceDir) - tmpDir, err := ioutil.TempDir("", "package-buildpacks") + tmpDir, err := os.MkdirTemp("", "package-buildpacks") if err != nil { return fmt.Errorf("creating temp dir for package buildpacks: %w", err) } diff --git a/acceptance/buildpacks/package_image_buildpack.go b/acceptance/buildpacks/package_image_buildpack.go index a924e9bc67..ea994bbd50 100644 --- a/acceptance/buildpacks/package_image_buildpack.go +++ b/acceptance/buildpacks/package_image_buildpack.go @@ -1,11 +1,9 @@ //go:build acceptance -// +build acceptance package buildpacks import ( "fmt" - "io/ioutil" "os" "path/filepath" "testing" @@ -22,11 +20,11 @@ type PackageImage struct { pack *invoke.PackInvoker name string sourceConfigLocation string - buildpacks []TestBuildpack + buildpacks []TestBuildModule publish bool } -func (p *PackageImage) SetBuildpacks(buildpacks []TestBuildpack) { +func (p *PackageImage) SetBuildpacks(buildpacks []TestBuildModule) { p.buildpacks = buildpacks } @@ -58,7 +56,7 @@ func (p PackageImage) Prepare(sourceDir, _ string) error { 
p.testObject.Helper() p.testObject.Log("creating package image from:", sourceDir) - tmpDir, err := ioutil.TempDir("", "package-buildpacks") + tmpDir, err := os.MkdirTemp("", "package-buildpacks") if err != nil { return fmt.Errorf("creating temp dir for package buildpacks: %w", err) } diff --git a/acceptance/config/asset_manager.go b/acceptance/config/asset_manager.go index 7a6ea0c4aa..aafe788f96 100644 --- a/acceptance/config/asset_manager.go +++ b/acceptance/config/asset_manager.go @@ -1,11 +1,9 @@ //go:build acceptance -// +build acceptance package config import ( "fmt" - "io/ioutil" "os" "os/exec" "path/filepath" @@ -27,7 +25,7 @@ const ( var ( currentPackFixturesDir = filepath.Join("testdata", "pack_fixtures") previousPackFixturesOverridesDir = filepath.Join("testdata", "pack_previous_fixtures_overrides") - lifecycleTgzExp = regexp.MustCompile(`lifecycle-v\d+.\d+.\d+\+linux.x86-64.tgz`) + lifecycleTgzExp = regexp.MustCompile(`lifecycle-v\d+.\d+.\d+(-pre.\d+)?(-rc.\d+)?\+linux.x86-64.tgz`) ) type AssetManager struct { @@ -248,7 +246,7 @@ func (b assetManagerBuilder) ensurePreviousPackFixtures() string { sourceDir, err := b.githubAssetFetcher.FetchReleaseSource("buildpacks", "pack", version) b.assert.Nil(err) - sourceDirFiles, err := ioutil.ReadDir(sourceDir) + sourceDirFiles, err := os.ReadDir(sourceDir) b.assert.Nil(err) // GitHub source tarballs have a top-level directory whose name includes the current commit sha. 
innerDir := sourceDirFiles[0].Name() @@ -346,7 +344,7 @@ func (b assetManagerBuilder) downloadLifecycleRelative(relativeVersion int) stri func (b assetManagerBuilder) buildPack(compileVersion string) string { b.testObject.Helper() - packTmpDir, err := ioutil.TempDir("", "pack.acceptance.binary.") + packTmpDir, err := os.MkdirTemp("", "pack.acceptance.binary.") b.assert.Nil(err) packPath := filepath.Join(packTmpDir, acceptanceOS.PackBinaryName) @@ -355,9 +353,9 @@ func (b assetManagerBuilder) buildPack(compileVersion string) string { b.assert.Nil(err) cmd := exec.Command("go", "build", + // XXX the version setter is wrong here, there is no cmd.Version "-ldflags", fmt.Sprintf("-X 'github.com/buildpacks/pack/cmd.Version=%s'", compileVersion), "-o", packPath, - "./cmd/pack", ) if filepath.Base(cwd) == "acceptance" { cmd.Dir = filepath.Dir(cwd) diff --git a/acceptance/config/github_asset_fetcher.go b/acceptance/config/github_asset_fetcher.go index 3b490178e4..944d2f4534 100644 --- a/acceptance/config/github_asset_fetcher.go +++ b/acceptance/config/github_asset_fetcher.go @@ -1,5 +1,4 @@ //go:build acceptance -// +build acceptance package config @@ -10,7 +9,6 @@ import ( "encoding/json" "fmt" "io" - "io/ioutil" "net/http" "os" "path" @@ -330,7 +328,7 @@ func (f *GithubAssetFetcher) loadCacheManifest() (assetCache, error) { return assetCache{}, nil } - content, err := ioutil.ReadFile(filepath.Join(f.cacheDir, assetCacheManifest)) + content, err := os.ReadFile(filepath.Join(f.cacheDir, assetCacheManifest)) if err != nil { return nil, errors.Wrap(err, "reading cache manifest") } @@ -371,7 +369,7 @@ func (f *GithubAssetFetcher) writeCacheManifest(owner, repo string, op func(cach return errors.Wrap(err, "marshaling cache manifest content") } - return ioutil.WriteFile(filepath.Join(f.cacheDir, assetCacheManifest), content, 0644) + return os.WriteFile(filepath.Join(f.cacheDir, assetCacheManifest), content, 0644) } func (f *GithubAssetFetcher) downloadAndSave(assetURI, destPath 
string) error { diff --git a/acceptance/config/input_configuration_manager.go b/acceptance/config/input_configuration_manager.go index 1598f59fbf..58f318daf6 100644 --- a/acceptance/config/input_configuration_manager.go +++ b/acceptance/config/input_configuration_manager.go @@ -1,5 +1,4 @@ //go:build acceptance -// +build acceptance package config diff --git a/acceptance/config/lifecycle_asset.go b/acceptance/config/lifecycle_asset.go index 69edddadc0..79503b560c 100644 --- a/acceptance/config/lifecycle_asset.go +++ b/acceptance/config/lifecycle_asset.go @@ -1,5 +1,4 @@ //go:build acceptance -// +build acceptance package config @@ -70,6 +69,20 @@ func earliestVersion(versions []*api.Version) *api.Version { return earliest } +func latestVersion(versions []*api.Version) *api.Version { + var latest *api.Version + for _, version := range versions { + switch { + case version == nil: + continue + case latest == nil: + latest = version + case latest.Compare(version) < 0: + latest = version + } + } + return latest +} func (l *LifecycleAsset) EarliestBuildpackAPIVersion() string { return earliestVersion(l.descriptor.APIs.Buildpack.Supported).String() } @@ -78,6 +91,10 @@ func (l *LifecycleAsset) EarliestPlatformAPIVersion() string { return earliestVersion(l.descriptor.APIs.Platform.Supported).String() } +func (l *LifecycleAsset) LatestPlatformAPIVersion() string { + return latestVersion(l.descriptor.APIs.Platform.Supported).String() +} + func (l *LifecycleAsset) OutputForAPIs() (deprecatedBuildpackAPIs, supportedBuildpackAPIs, deprecatedPlatformAPIs, supportedPlatformAPIs string) { stringify := func(apiSet builder.APISet) string { versions := apiSet.AsStrings() @@ -181,7 +198,35 @@ func (l *LifecycleAsset) JSONOutputForAPIs(baseIndentationWidth int) ( type LifecycleFeature int -var lifecycleFeatureTests = map[LifecycleFeature]func(l *LifecycleAsset) bool{} +const ( + CreationTime = iota + BuildImageExtensions + RunImageExtensions +) + +type LifecycleAssetSupported func(l 
*LifecycleAsset) bool + +func supportsPlatformAPI(version string) LifecycleAssetSupported { + return func(i *LifecycleAsset) bool { + for _, platformAPI := range i.descriptor.APIs.Platform.Supported { + if platformAPI.AtLeast(version) { + return true + } + } + for _, platformAPI := range i.descriptor.APIs.Platform.Deprecated { + if platformAPI.AtLeast(version) { + return true + } + } + return false + } +} + +var lifecycleFeatureTests = map[LifecycleFeature]LifecycleAssetSupported{ + CreationTime: supportsPlatformAPI("0.9"), + BuildImageExtensions: supportsPlatformAPI("0.10"), + RunImageExtensions: supportsPlatformAPI("0.12"), +} func (l *LifecycleAsset) SupportsFeature(f LifecycleFeature) bool { return lifecycleFeatureTests[f](l) diff --git a/acceptance/config/pack_assets.go b/acceptance/config/pack_assets.go index 40586c397f..b8c960f7ca 100644 --- a/acceptance/config/pack_assets.go +++ b/acceptance/config/pack_assets.go @@ -1,5 +1,4 @@ //go:build acceptance -// +build acceptance package config diff --git a/acceptance/config/run_combination.go b/acceptance/config/run_combination.go index b1b7db4adf..811b5f1caf 100644 --- a/acceptance/config/run_combination.go +++ b/acceptance/config/run_combination.go @@ -1,5 +1,4 @@ //go:build acceptance -// +build acceptance package config diff --git a/acceptance/invoke/pack.go b/acceptance/invoke/pack.go index 23a638094a..3ea1c56112 100644 --- a/acceptance/invoke/pack.go +++ b/acceptance/invoke/pack.go @@ -1,11 +1,10 @@ //go:build acceptance -// +build acceptance package invoke import ( "bytes" - "io/ioutil" + "fmt" "os" "os/exec" "path/filepath" @@ -44,7 +43,7 @@ func NewPackInvoker( testObject.Helper() - home, err := ioutil.TempDir("", "buildpack.pack.home.") + home, err := os.MkdirTemp("", "buildpack.pack.home.") if err != nil { testObject.Fatalf("couldn't create home folder for pack: %s", err) } @@ -140,6 +139,10 @@ func (i *PackInvoker) StartWithWriter(combinedOutput *bytes.Buffer, name string, } } +func (i *PackInvoker) 
Home() string { + return i.home +} + type InterruptCmd struct { testObject *testing.T assert h.AssertionManager @@ -190,7 +193,7 @@ func (i *PackInvoker) EnableExperimental() { // - "" (e.g. "create-builder") // - "" (e.g. "--verbose") // - " " (e.g. "build --network") -// - "... " (e.g. "config trusted-builder--network") +// - "... " (e.g. "config trusted-builder --network") // // Any other form may return false. func (i *PackInvoker) Supports(command string) bool { @@ -210,15 +213,83 @@ func (i *PackInvoker) Supports(command string) bool { search = command } + re := regexp.MustCompile(fmt.Sprintf(`(\A|\s)%s(\s|\z)`, regexp.QuoteMeta(search))) output, err := i.baseCmd(cmdParts...).CombinedOutput() i.assert.Nil(err) - return strings.Contains(string(output), search) && !strings.Contains(string(output), "Unknown help topic") + // Anchor the search on whitespace rather than \b: a word boundary never + // matches before the leading "-" of flags such as "--creation-time", and + // QuoteMeta keeps regex metacharacters in the searched text literal. + return re.MatchString(string(output)) && !strings.Contains(string(output), "Unknown help topic") } type Feature int -var featureTests = map[Feature]func(i *PackInvoker) bool{} +const ( + CreationTime = iota + Cache + BuildImageExtensions + RunImageExtensions + StackValidation + ForceRebase + BuildpackFlatten + MetaBuildpackFolder + PlatformRetries + FlattenBuilderCreationV2 + FixesRunImageMetadata + ManifestCommands + PlatformOption + MultiPlatformBuildersAndBuildPackages + StackWarning +) + +var featureTests = map[Feature]func(i *PackInvoker) bool{ + CreationTime: func(i *PackInvoker) bool { + return i.Supports("build --creation-time") + }, + Cache: func(i *PackInvoker) bool { + return i.Supports("build --cache") + }, + BuildImageExtensions: func(i *PackInvoker) bool { + return i.laterThan("v0.27.0") + }, + RunImageExtensions: func(i *PackInvoker) bool { + return i.laterThan("v0.29.0") + }, + StackValidation: func(i *PackInvoker) bool { + return !i.atLeast("v0.30.0") + }, + 
ForceRebase: func(i *PackInvoker) bool { + return i.atLeast("v0.30.0") + }, + BuildpackFlatten: func(i *PackInvoker) bool { + return i.atLeast("v0.30.0") + }, + MetaBuildpackFolder: func(i *PackInvoker) bool { + return i.atLeast("v0.30.0") + }, + PlatformRetries: func(i *PackInvoker) bool { + return i.atLeast("v0.32.1") + }, + FlattenBuilderCreationV2: func(i *PackInvoker) bool { + return i.atLeast("v0.33.1") + }, + FixesRunImageMetadata: func(i *PackInvoker) bool { + return i.atLeast("v0.34.0") + }, + ManifestCommands: func(i *PackInvoker) bool { + return i.atLeast("v0.34.0") + }, + PlatformOption: func(i *PackInvoker) bool { + return i.atLeast("v0.34.0") + }, + MultiPlatformBuildersAndBuildPackages: func(i *PackInvoker) bool { + return i.atLeast("v0.34.0") + }, + StackWarning: func(i *PackInvoker) bool { + return i.atLeast("v0.37.0") + }, +} func (i *PackInvoker) SupportsFeature(f Feature) bool { return featureTests[f](i) @@ -249,7 +320,7 @@ func (i *PackInvoker) atLeast(version string) bool { func (i *PackInvoker) ConfigFileContents() string { i.testObject.Helper() - contents, err := ioutil.ReadFile(filepath.Join(i.home, "config.toml")) + contents, err := os.ReadFile(filepath.Join(i.home, "config.toml")) i.assert.Nil(err) return string(contents) diff --git a/acceptance/invoke/pack_fixtures.go b/acceptance/invoke/pack_fixtures.go index 367ebd3905..aefc06d461 100644 --- a/acceptance/invoke/pack_fixtures.go +++ b/acceptance/invoke/pack_fixtures.go @@ -1,5 +1,4 @@ //go:build acceptance -// +build acceptance package invoke @@ -7,7 +6,6 @@ import ( "bytes" "fmt" "io" - "io/ioutil" "os" "path/filepath" "strings" @@ -58,7 +56,7 @@ func (m PackFixtureManager) VersionedFixtureOrFallbackLocation(pattern, version, func (m PackFixtureManager) TemplateFixture(templateName string, templateData map[string]interface{}) string { m.testObject.Helper() - outputTemplate, err := ioutil.ReadFile(m.FixtureLocation(templateName)) + outputTemplate, err := 
os.ReadFile(m.FixtureLocation(templateName)) m.assert.Nil(err) return m.fillTemplate(outputTemplate, templateData) @@ -69,7 +67,7 @@ func (m PackFixtureManager) TemplateVersionedFixture( templateData map[string]interface{}, ) string { m.testObject.Helper() - outputTemplate, err := ioutil.ReadFile(m.VersionedFixtureOrFallbackLocation(versionedPattern, version, fallback)) + outputTemplate, err := os.ReadFile(m.VersionedFixtureOrFallbackLocation(versionedPattern, version, fallback)) m.assert.Nil(err) return m.fillTemplate(outputTemplate, templateData) diff --git a/acceptance/managers/image_manager.go b/acceptance/managers/image_manager.go index e3976ba139..64faad805d 100644 --- a/acceptance/managers/image_manager.go +++ b/acceptance/managers/image_manager.go @@ -1,5 +1,4 @@ //go:build acceptance -// +build acceptance package managers @@ -10,10 +9,10 @@ import ( "testing" "time" - dockertypes "github.com/docker/docker/api/types" - "github.com/docker/docker/api/types/container" - "github.com/docker/docker/client" - "github.com/docker/go-connections/nat" + "github.com/moby/moby/api/types/container" + "github.com/moby/moby/api/types/image" + "github.com/moby/moby/api/types/network" + "github.com/moby/moby/client" h "github.com/buildpacks/pack/testhelpers" ) @@ -23,10 +22,10 @@ var DefaultDuration = 10 * time.Second type ImageManager struct { testObject *testing.T assert h.AssertionManager - dockerCli client.CommonAPIClient + dockerCli *client.Client } -func NewImageManager(t *testing.T, dockerCli client.CommonAPIClient) ImageManager { +func NewImageManager(t *testing.T, dockerCli *client.Client) ImageManager { return ImageManager{ testObject: t, assert: h.NewAssertionManager(t), @@ -42,10 +41,10 @@ func (im ImageManager) CleanupImages(imageNames ...string) { } } -func (im ImageManager) InspectLocal(image string) (dockertypes.ImageInspect, error) { +func (im ImageManager) InspectLocal(imageName string) (image.InspectResponse, error) { im.testObject.Helper() - inspect, _, 
err := im.dockerCli.ImageInspectWithRaw(context.Background(), image) - return inspect, err + result, err := im.dockerCli.ImageInspect(context.Background(), imageName) + return result.InspectResponse, err } func (im ImageManager) GetImageID(image string) string { @@ -57,14 +56,17 @@ func (im ImageManager) GetImageID(image string) string { func (im ImageManager) HostOS() string { im.testObject.Helper() - daemonInfo, err := im.dockerCli.Info(context.Background()) + infoResult, err := im.dockerCli.Info(context.Background(), client.InfoOptions{}) im.assert.Nil(err) - return daemonInfo.OSType + return infoResult.Info.OSType } -func (im ImageManager) TagImage(image, ref string) { +func (im ImageManager) TagImage(imageName, ref string) { im.testObject.Helper() - err := im.dockerCli.ImageTag(context.Background(), image, ref) + _, err := im.dockerCli.ImageTag(context.Background(), client.ImageTagOptions{ + Source: imageName, + Target: ref, + }) im.assert.Nil(err) } @@ -78,19 +80,23 @@ func (im ImageManager) ExposePortOnImage(image, containerName string) TestContai im.testObject.Helper() ctx := context.Background() - ctr, err := im.dockerCli.ContainerCreate(ctx, &container.Config{ - Image: image, - ExposedPorts: map[nat.Port]struct{}{"8080/tcp": {}}, - Healthcheck: nil, - }, &container.HostConfig{ - PortBindings: nat.PortMap{ - "8080/tcp": []nat.PortBinding{{}}, + ctr, err := im.dockerCli.ContainerCreate(ctx, client.ContainerCreateOptions{ + Config: &container.Config{ + Image: image, + ExposedPorts: network.PortSet{network.MustParsePort("8080/tcp"): {}}, + Healthcheck: nil, }, - AutoRemove: true, - }, nil, nil, containerName) + HostConfig: &container.HostConfig{ + PortBindings: network.PortMap{ + network.MustParsePort("8080/tcp"): []network.PortBinding{{}}, + }, + AutoRemove: true, + }, + Name: containerName, + }) im.assert.Nil(err) - err = im.dockerCli.ContainerStart(ctx, ctr.ID, dockertypes.ContainerStartOptions{}) + _, err = im.dockerCli.ContainerStart(ctx, ctr.ID, 
client.ContainerStartOptions{}) im.assert.Nil(err) return TestContainer{ testObject: im.testObject, @@ -104,9 +110,12 @@ func (im ImageManager) ExposePortOnImage(image, containerName string) TestContai func (im ImageManager) CreateContainer(name string) TestContainer { im.testObject.Helper() containerName := "test-" + h.RandString(10) - ctr, err := im.dockerCli.ContainerCreate(context.Background(), &container.Config{ - Image: name, - }, nil, nil, nil, containerName) + ctr, err := im.dockerCli.ContainerCreate(context.Background(), client.ContainerCreateOptions{ + Config: &container.Config{ + Image: name, + }, + Name: containerName, + }) im.assert.Nil(err) return TestContainer{ @@ -120,7 +129,7 @@ func (im ImageManager) CreateContainer(name string) TestContainer { type TestContainer struct { testObject *testing.T - dockerCli client.CommonAPIClient + dockerCli *client.Client assert h.AssertionManager name string id string @@ -136,8 +145,8 @@ func (t TestContainer) RunWithOutput() string { func (t TestContainer) Cleanup() { t.testObject.Helper() - t.dockerCli.ContainerKill(context.Background(), t.name, "SIGKILL") - t.dockerCli.ContainerRemove(context.Background(), t.name, dockertypes.ContainerRemoveOptions{Force: true}) + t.dockerCli.ContainerKill(context.Background(), t.name, client.ContainerKillOptions{Signal: "SIGKILL"}) + t.dockerCli.ContainerRemove(context.Background(), t.name, client.ContainerRemoveOptions{Force: true}) } func (t TestContainer) WaitForResponse(duration time.Duration) string { @@ -164,8 +173,9 @@ func (t TestContainer) WaitForResponse(duration time.Duration) string { func (t TestContainer) hostPort() string { t.testObject.Helper() - i, err := t.dockerCli.ContainerInspect(context.Background(), t.name) + result, err := t.dockerCli.ContainerInspect(context.Background(), t.name, client.ContainerInspectOptions{}) t.assert.Nil(err) + i := result.Container for _, port := range i.NetworkSettings.Ports { for _, binding := range port { return 
binding.HostPort diff --git a/acceptance/os/variables.go b/acceptance/os/variables.go index db3d5a275d..dc41c690e3 100644 --- a/acceptance/os/variables.go +++ b/acceptance/os/variables.go @@ -1,5 +1,4 @@ //go:build acceptance && !windows -// +build acceptance,!windows package os diff --git a/acceptance/os/variables_darwin.go b/acceptance/os/variables_darwin.go index 85cea03964..2223ce60a7 100644 --- a/acceptance/os/variables_darwin.go +++ b/acceptance/os/variables_darwin.go @@ -1,5 +1,4 @@ //go:build acceptance && darwin && amd64 -// +build acceptance,darwin,amd64 package os diff --git a/acceptance/os/variables_darwin_arm64.go b/acceptance/os/variables_darwin_arm64.go index c7160dd05d..e3e6c0bc40 100644 --- a/acceptance/os/variables_darwin_arm64.go +++ b/acceptance/os/variables_darwin_arm64.go @@ -1,5 +1,4 @@ //go:build acceptance && darwin && arm64 -// +build acceptance,darwin,arm64 package os diff --git a/acceptance/os/variables_linux.go b/acceptance/os/variables_linux.go index 308a91ddf1..a2b8ad5f55 100644 --- a/acceptance/os/variables_linux.go +++ b/acceptance/os/variables_linux.go @@ -1,8 +1,7 @@ //go:build acceptance && linux -// +build acceptance,linux package os import "regexp" -var PackBinaryExp = regexp.MustCompile(`pack-v\d+.\d+.\d+-linux`) +var PackBinaryExp = regexp.MustCompile(`pack-v\d+\.\d+\.\d+-linux\.`) diff --git a/acceptance/os/variables_windows.go b/acceptance/os/variables_windows.go index 28079a957b..8b3f53b954 100644 --- a/acceptance/os/variables_windows.go +++ b/acceptance/os/variables_windows.go @@ -1,5 +1,4 @@ //go:build acceptance && windows -// +build acceptance,windows package os diff --git a/acceptance/suite_manager.go b/acceptance/suite_manager.go index a38bb43b4c..09a3e4b596 100644 --- a/acceptance/suite_manager.go +++ b/acceptance/suite_manager.go @@ -1,5 +1,4 @@ //go:build acceptance -// +build acceptance package acceptance diff --git a/acceptance/testdata/mock_buildpacks/0.2/descriptor-buildpack/bin/build 
b/acceptance/testdata/mock_buildpacks/descriptor-buildpack/bin/build similarity index 100% rename from acceptance/testdata/mock_buildpacks/0.2/descriptor-buildpack/bin/build rename to acceptance/testdata/mock_buildpacks/descriptor-buildpack/bin/build diff --git a/acceptance/testdata/mock_buildpacks/0.2/descriptor-buildpack/bin/build.bat b/acceptance/testdata/mock_buildpacks/descriptor-buildpack/bin/build.bat similarity index 100% rename from acceptance/testdata/mock_buildpacks/0.2/descriptor-buildpack/bin/build.bat rename to acceptance/testdata/mock_buildpacks/descriptor-buildpack/bin/build.bat diff --git a/acceptance/testdata/mock_buildpacks/0.2/descriptor-buildpack/bin/detect b/acceptance/testdata/mock_buildpacks/descriptor-buildpack/bin/detect similarity index 100% rename from acceptance/testdata/mock_buildpacks/0.2/descriptor-buildpack/bin/detect rename to acceptance/testdata/mock_buildpacks/descriptor-buildpack/bin/detect diff --git a/acceptance/testdata/mock_buildpacks/0.2/descriptor-buildpack/bin/detect.bat b/acceptance/testdata/mock_buildpacks/descriptor-buildpack/bin/detect.bat similarity index 100% rename from acceptance/testdata/mock_buildpacks/0.2/descriptor-buildpack/bin/detect.bat rename to acceptance/testdata/mock_buildpacks/descriptor-buildpack/bin/detect.bat diff --git a/acceptance/testdata/mock_buildpacks/0.2/descriptor-buildpack/buildpack.toml b/acceptance/testdata/mock_buildpacks/descriptor-buildpack/buildpack.toml similarity index 92% rename from acceptance/testdata/mock_buildpacks/0.2/descriptor-buildpack/buildpack.toml rename to acceptance/testdata/mock_buildpacks/descriptor-buildpack/buildpack.toml index d25a1e90e5..e60c1b61de 100644 --- a/acceptance/testdata/mock_buildpacks/0.2/descriptor-buildpack/buildpack.toml +++ b/acceptance/testdata/mock_buildpacks/descriptor-buildpack/buildpack.toml @@ -1,4 +1,4 @@ -api = "0.2" +api = "0.7" [buildpack] id = "descriptor/bp" diff --git 
a/acceptance/testdata/mock_buildpacks/0.2/internet-capable-buildpack/bin/build b/acceptance/testdata/mock_buildpacks/internet-capable-buildpack/bin/build similarity index 100% rename from acceptance/testdata/mock_buildpacks/0.2/internet-capable-buildpack/bin/build rename to acceptance/testdata/mock_buildpacks/internet-capable-buildpack/bin/build diff --git a/acceptance/testdata/mock_buildpacks/0.2/internet-capable-buildpack/bin/build.bat b/acceptance/testdata/mock_buildpacks/internet-capable-buildpack/bin/build.bat similarity index 100% rename from acceptance/testdata/mock_buildpacks/0.2/internet-capable-buildpack/bin/build.bat rename to acceptance/testdata/mock_buildpacks/internet-capable-buildpack/bin/build.bat diff --git a/acceptance/testdata/mock_buildpacks/0.2/internet-capable-buildpack/bin/detect b/acceptance/testdata/mock_buildpacks/internet-capable-buildpack/bin/detect similarity index 100% rename from acceptance/testdata/mock_buildpacks/0.2/internet-capable-buildpack/bin/detect rename to acceptance/testdata/mock_buildpacks/internet-capable-buildpack/bin/detect diff --git a/acceptance/testdata/mock_buildpacks/0.2/internet-capable-buildpack/bin/detect.bat b/acceptance/testdata/mock_buildpacks/internet-capable-buildpack/bin/detect.bat similarity index 100% rename from acceptance/testdata/mock_buildpacks/0.2/internet-capable-buildpack/bin/detect.bat rename to acceptance/testdata/mock_buildpacks/internet-capable-buildpack/bin/detect.bat diff --git a/acceptance/testdata/mock_buildpacks/0.2/internet-capable-buildpack/buildpack.toml b/acceptance/testdata/mock_buildpacks/internet-capable-buildpack/buildpack.toml similarity index 92% rename from acceptance/testdata/mock_buildpacks/0.2/internet-capable-buildpack/buildpack.toml rename to acceptance/testdata/mock_buildpacks/internet-capable-buildpack/buildpack.toml index f4a886a5a9..b3cd45efba 100644 --- a/acceptance/testdata/mock_buildpacks/0.2/internet-capable-buildpack/buildpack.toml +++ 
b/acceptance/testdata/mock_buildpacks/internet-capable-buildpack/buildpack.toml @@ -1,4 +1,4 @@ -api = "0.2" +api = "0.7" [buildpack] id = "internet/bp" diff --git a/acceptance/testdata/mock_buildpacks/meta-buildpack-dependency/bin/build b/acceptance/testdata/mock_buildpacks/meta-buildpack-dependency/bin/build new file mode 100755 index 0000000000..5cc97b99d9 --- /dev/null +++ b/acceptance/testdata/mock_buildpacks/meta-buildpack-dependency/bin/build @@ -0,0 +1,3 @@ +#!/usr/bin/env bash + +echo "---> Build: Local Meta-Buildpack Dependency" diff --git a/acceptance/testdata/mock_buildpacks/meta-buildpack-dependency/bin/build.bat b/acceptance/testdata/mock_buildpacks/meta-buildpack-dependency/bin/build.bat new file mode 100644 index 0000000000..3d7df65295 --- /dev/null +++ b/acceptance/testdata/mock_buildpacks/meta-buildpack-dependency/bin/build.bat @@ -0,0 +1,3 @@ +@echo off + +echo ---- Build: Local Meta-Buildpack Dependency diff --git a/acceptance/testdata/mock_buildpacks/0.2/not-in-builder-buildpack/bin/detect b/acceptance/testdata/mock_buildpacks/meta-buildpack-dependency/bin/detect similarity index 100% rename from acceptance/testdata/mock_buildpacks/0.2/not-in-builder-buildpack/bin/detect rename to acceptance/testdata/mock_buildpacks/meta-buildpack-dependency/bin/detect diff --git a/acceptance/testdata/mock_buildpacks/0.2/noop-buildpack/bin/detect.bat b/acceptance/testdata/mock_buildpacks/meta-buildpack-dependency/bin/detect.bat similarity index 100% rename from acceptance/testdata/mock_buildpacks/0.2/noop-buildpack/bin/detect.bat rename to acceptance/testdata/mock_buildpacks/meta-buildpack-dependency/bin/detect.bat diff --git a/acceptance/testdata/mock_buildpacks/meta-buildpack-dependency/buildpack.toml b/acceptance/testdata/mock_buildpacks/meta-buildpack-dependency/buildpack.toml new file mode 100644 index 0000000000..1792063062 --- /dev/null +++ b/acceptance/testdata/mock_buildpacks/meta-buildpack-dependency/buildpack.toml @@ -0,0 +1,9 @@ +api = "0.7" + 
+[buildpack] + id = "local/meta-bp-dep" + version = "local-meta-bp-version" + name = "Local Meta-Buildpack Dependency" + +[[stacks]] + id = "pack.test.stack" \ No newline at end of file diff --git a/acceptance/testdata/mock_buildpacks/meta-buildpack/buildpack.toml b/acceptance/testdata/mock_buildpacks/meta-buildpack/buildpack.toml new file mode 100644 index 0000000000..3bf66937b8 --- /dev/null +++ b/acceptance/testdata/mock_buildpacks/meta-buildpack/buildpack.toml @@ -0,0 +1,11 @@ +api = "0.7" + +[buildpack] + id = "local/meta-bp" + version = "local-meta-bp-version" + name = "Local Meta-Buildpack" + +[[order]] +[[order.group]] +id = "local/meta-bp-dep" +version = "local-meta-bp-version" \ No newline at end of file diff --git a/acceptance/testdata/mock_buildpacks/meta-buildpack/package.toml b/acceptance/testdata/mock_buildpacks/meta-buildpack/package.toml new file mode 100644 index 0000000000..ced1549d98 --- /dev/null +++ b/acceptance/testdata/mock_buildpacks/meta-buildpack/package.toml @@ -0,0 +1,5 @@ +[buildpack] +uri = "." 
+ +[[dependencies]] +uri = "../meta-buildpack-dependency" \ No newline at end of file diff --git a/acceptance/testdata/mock_buildpacks/multi-platform-buildpack/buildpack.toml b/acceptance/testdata/mock_buildpacks/multi-platform-buildpack/buildpack.toml new file mode 100644 index 0000000000..c3ec65adf2 --- /dev/null +++ b/acceptance/testdata/mock_buildpacks/multi-platform-buildpack/buildpack.toml @@ -0,0 +1,21 @@ +api = "0.10" + +[buildpack] + id = "simple/layers" + version = "simple-layers-version" + name = "Simple Layers Buildpack" + +[[targets]] +os = "linux" +arch = "amd64" + +[[targets]] +os = "linux" +arch = "arm64" + +[[targets]] +os = "windows" +arch = "amd64" + +[[stacks]] + id = "*" diff --git a/acceptance/testdata/mock_buildpacks/0.2/noop-buildpack/bin/build b/acceptance/testdata/mock_buildpacks/multi-platform-buildpack/linux/amd64/bin/build similarity index 100% rename from acceptance/testdata/mock_buildpacks/0.2/noop-buildpack/bin/build rename to acceptance/testdata/mock_buildpacks/multi-platform-buildpack/linux/amd64/bin/build diff --git a/acceptance/testdata/mock_buildpacks/0.2/noop-buildpack-2/bin/detect b/acceptance/testdata/mock_buildpacks/multi-platform-buildpack/linux/amd64/bin/detect similarity index 100% rename from acceptance/testdata/mock_buildpacks/0.2/noop-buildpack-2/bin/detect rename to acceptance/testdata/mock_buildpacks/multi-platform-buildpack/linux/amd64/bin/detect diff --git a/acceptance/testdata/mock_buildpacks/0.2/noop-buildpack/bin/build.bat b/acceptance/testdata/mock_buildpacks/multi-platform-buildpack/windows/amd64/bin/build.bat similarity index 100% rename from acceptance/testdata/mock_buildpacks/0.2/noop-buildpack/bin/build.bat rename to acceptance/testdata/mock_buildpacks/multi-platform-buildpack/windows/amd64/bin/build.bat diff --git a/acceptance/testdata/mock_buildpacks/0.2/not-in-builder-buildpack/bin/detect.bat b/acceptance/testdata/mock_buildpacks/multi-platform-buildpack/windows/amd64/bin/detect.bat similarity index 
100% rename from acceptance/testdata/mock_buildpacks/0.2/not-in-builder-buildpack/bin/detect.bat rename to acceptance/testdata/mock_buildpacks/multi-platform-buildpack/windows/amd64/bin/detect.bat diff --git a/acceptance/testdata/mock_buildpacks/0.2/nested-level-1-buildpack/buildpack.toml b/acceptance/testdata/mock_buildpacks/nested-level-1-buildpack/buildpack.toml similarity index 94% rename from acceptance/testdata/mock_buildpacks/0.2/nested-level-1-buildpack/buildpack.toml rename to acceptance/testdata/mock_buildpacks/nested-level-1-buildpack/buildpack.toml index 268e7f8cb4..a711b1eb1a 100644 --- a/acceptance/testdata/mock_buildpacks/0.2/nested-level-1-buildpack/buildpack.toml +++ b/acceptance/testdata/mock_buildpacks/nested-level-1-buildpack/buildpack.toml @@ -1,4 +1,4 @@ -api = "0.2" +api = "0.7" [buildpack] id = "simple/nested-level-1" diff --git a/acceptance/testdata/mock_buildpacks/0.2/nested-level-2-buildpack/buildpack.toml b/acceptance/testdata/mock_buildpacks/nested-level-2-buildpack/buildpack.toml similarity index 94% rename from acceptance/testdata/mock_buildpacks/0.2/nested-level-2-buildpack/buildpack.toml rename to acceptance/testdata/mock_buildpacks/nested-level-2-buildpack/buildpack.toml index 5e58244fd8..4531cb6be0 100644 --- a/acceptance/testdata/mock_buildpacks/0.2/nested-level-2-buildpack/buildpack.toml +++ b/acceptance/testdata/mock_buildpacks/nested-level-2-buildpack/buildpack.toml @@ -1,4 +1,4 @@ -api = "0.2" +api = "0.7" [buildpack] id = "simple/nested-level-2" diff --git a/acceptance/testdata/mock_buildpacks/0.2/noop-buildpack-2/bin/build b/acceptance/testdata/mock_buildpacks/noop-buildpack-2/bin/build similarity index 100% rename from acceptance/testdata/mock_buildpacks/0.2/noop-buildpack-2/bin/build rename to acceptance/testdata/mock_buildpacks/noop-buildpack-2/bin/build diff --git a/acceptance/testdata/mock_buildpacks/0.2/noop-buildpack/bin/detect b/acceptance/testdata/mock_buildpacks/noop-buildpack-2/bin/detect similarity index 100% 
rename from acceptance/testdata/mock_buildpacks/0.2/noop-buildpack/bin/detect rename to acceptance/testdata/mock_buildpacks/noop-buildpack-2/bin/detect diff --git a/acceptance/testdata/mock_buildpacks/0.2/noop-buildpack-2/buildpack.toml b/acceptance/testdata/mock_buildpacks/noop-buildpack-2/buildpack.toml similarity index 93% rename from acceptance/testdata/mock_buildpacks/0.2/noop-buildpack-2/buildpack.toml rename to acceptance/testdata/mock_buildpacks/noop-buildpack-2/buildpack.toml index 1fc577bbcc..1c0351887e 100644 --- a/acceptance/testdata/mock_buildpacks/0.2/noop-buildpack-2/buildpack.toml +++ b/acceptance/testdata/mock_buildpacks/noop-buildpack-2/buildpack.toml @@ -1,4 +1,4 @@ -api = "0.2" +api = "0.7" [buildpack] id = "noop.buildpack" diff --git a/acceptance/testdata/mock_buildpacks/noop-buildpack/bin/build b/acceptance/testdata/mock_buildpacks/noop-buildpack/bin/build new file mode 100755 index 0000000000..88c1b8049b --- /dev/null +++ b/acceptance/testdata/mock_buildpacks/noop-buildpack/bin/build @@ -0,0 +1,3 @@ +#!/usr/bin/env bash + +echo "---> Build: NOOP Buildpack" diff --git a/acceptance/testdata/mock_buildpacks/noop-buildpack/bin/build.bat b/acceptance/testdata/mock_buildpacks/noop-buildpack/bin/build.bat new file mode 100644 index 0000000000..39731e4422 --- /dev/null +++ b/acceptance/testdata/mock_buildpacks/noop-buildpack/bin/build.bat @@ -0,0 +1,3 @@ +@echo off + +echo ---- Build: NOOP Buildpack diff --git a/acceptance/testdata/mock_buildpacks/noop-buildpack/bin/detect b/acceptance/testdata/mock_buildpacks/noop-buildpack/bin/detect new file mode 100755 index 0000000000..d1813055aa --- /dev/null +++ b/acceptance/testdata/mock_buildpacks/noop-buildpack/bin/detect @@ -0,0 +1,3 @@ +#!/usr/bin/env bash + +## always detect \ No newline at end of file diff --git a/acceptance/testdata/mock_buildpacks/0.2/simple-layers-buildpack-different-sha/bin/detect.bat b/acceptance/testdata/mock_buildpacks/noop-buildpack/bin/detect.bat similarity index 100% rename 
from acceptance/testdata/mock_buildpacks/0.2/simple-layers-buildpack-different-sha/bin/detect.bat rename to acceptance/testdata/mock_buildpacks/noop-buildpack/bin/detect.bat diff --git a/acceptance/testdata/mock_buildpacks/0.2/noop-buildpack/buildpack.toml b/acceptance/testdata/mock_buildpacks/noop-buildpack/buildpack.toml similarity index 91% rename from acceptance/testdata/mock_buildpacks/0.2/noop-buildpack/buildpack.toml rename to acceptance/testdata/mock_buildpacks/noop-buildpack/buildpack.toml index f3460a484e..9c6a587124 100644 --- a/acceptance/testdata/mock_buildpacks/0.2/noop-buildpack/buildpack.toml +++ b/acceptance/testdata/mock_buildpacks/noop-buildpack/buildpack.toml @@ -1,4 +1,4 @@ -api = "0.2" +api = "0.7" [buildpack] id = "noop.buildpack" diff --git a/acceptance/testdata/mock_buildpacks/0.2/not-in-builder-buildpack/bin/build b/acceptance/testdata/mock_buildpacks/not-in-builder-buildpack/bin/build similarity index 100% rename from acceptance/testdata/mock_buildpacks/0.2/not-in-builder-buildpack/bin/build rename to acceptance/testdata/mock_buildpacks/not-in-builder-buildpack/bin/build diff --git a/acceptance/testdata/mock_buildpacks/0.2/not-in-builder-buildpack/bin/build.bat b/acceptance/testdata/mock_buildpacks/not-in-builder-buildpack/bin/build.bat similarity index 100% rename from acceptance/testdata/mock_buildpacks/0.2/not-in-builder-buildpack/bin/build.bat rename to acceptance/testdata/mock_buildpacks/not-in-builder-buildpack/bin/build.bat diff --git a/acceptance/testdata/mock_buildpacks/0.2/simple-layers-buildpack-different-sha/bin/detect b/acceptance/testdata/mock_buildpacks/not-in-builder-buildpack/bin/detect similarity index 100% rename from acceptance/testdata/mock_buildpacks/0.2/simple-layers-buildpack-different-sha/bin/detect rename to acceptance/testdata/mock_buildpacks/not-in-builder-buildpack/bin/detect diff --git a/acceptance/testdata/mock_buildpacks/0.2/simple-layers-buildpack/bin/detect.bat 
b/acceptance/testdata/mock_buildpacks/not-in-builder-buildpack/bin/detect.bat similarity index 100% rename from acceptance/testdata/mock_buildpacks/0.2/simple-layers-buildpack/bin/detect.bat rename to acceptance/testdata/mock_buildpacks/not-in-builder-buildpack/bin/detect.bat diff --git a/acceptance/testdata/mock_buildpacks/0.2/not-in-builder-buildpack/buildpack.toml b/acceptance/testdata/mock_buildpacks/not-in-builder-buildpack/buildpack.toml similarity index 91% rename from acceptance/testdata/mock_buildpacks/0.2/not-in-builder-buildpack/buildpack.toml rename to acceptance/testdata/mock_buildpacks/not-in-builder-buildpack/buildpack.toml index c0f012d555..32ec32b5de 100644 --- a/acceptance/testdata/mock_buildpacks/0.2/not-in-builder-buildpack/buildpack.toml +++ b/acceptance/testdata/mock_buildpacks/not-in-builder-buildpack/buildpack.toml @@ -1,4 +1,4 @@ -api = "0.2" +api = "0.7" [buildpack] id = "local/bp" diff --git a/acceptance/testdata/mock_buildpacks/other-stack-buildpack/bin/build b/acceptance/testdata/mock_buildpacks/other-stack-buildpack/bin/build new file mode 100755 index 0000000000..64c190326e --- /dev/null +++ b/acceptance/testdata/mock_buildpacks/other-stack-buildpack/bin/build @@ -0,0 +1,3 @@ +#!/usr/bin/env bash + +echo "---> Build: Other Stack Buildpack" diff --git a/acceptance/testdata/mock_buildpacks/other-stack-buildpack/bin/build.bat b/acceptance/testdata/mock_buildpacks/other-stack-buildpack/bin/build.bat new file mode 100644 index 0000000000..1b788214b7 --- /dev/null +++ b/acceptance/testdata/mock_buildpacks/other-stack-buildpack/bin/build.bat @@ -0,0 +1,3 @@ +@echo off + +echo ---- Build: Other Stack Buildpack diff --git a/acceptance/testdata/mock_buildpacks/other-stack-buildpack/bin/detect b/acceptance/testdata/mock_buildpacks/other-stack-buildpack/bin/detect new file mode 100755 index 0000000000..d1813055aa --- /dev/null +++ b/acceptance/testdata/mock_buildpacks/other-stack-buildpack/bin/detect @@ -0,0 +1,3 @@ +#!/usr/bin/env bash + +## 
always detect \ No newline at end of file diff --git a/acceptance/testdata/mock_buildpacks/other-stack-buildpack/bin/detect.bat b/acceptance/testdata/mock_buildpacks/other-stack-buildpack/bin/detect.bat new file mode 100644 index 0000000000..15823e73f1 --- /dev/null +++ b/acceptance/testdata/mock_buildpacks/other-stack-buildpack/bin/detect.bat @@ -0,0 +1,2 @@ +@echo off +:: always detect diff --git a/acceptance/testdata/mock_buildpacks/0.2/other-stack-buildpack/buildpack.toml b/acceptance/testdata/mock_buildpacks/other-stack-buildpack/buildpack.toml similarity index 91% rename from acceptance/testdata/mock_buildpacks/0.2/other-stack-buildpack/buildpack.toml rename to acceptance/testdata/mock_buildpacks/other-stack-buildpack/buildpack.toml index 115ca9547b..5cf0da993d 100644 --- a/acceptance/testdata/mock_buildpacks/0.2/other-stack-buildpack/buildpack.toml +++ b/acceptance/testdata/mock_buildpacks/other-stack-buildpack/buildpack.toml @@ -1,4 +1,4 @@ -api = "0.2" +api = "0.7" [buildpack] id = "other/stack/bp" diff --git a/acceptance/testdata/mock_buildpacks/0.2/read-env-buildpack/bin/build b/acceptance/testdata/mock_buildpacks/read-env-buildpack/bin/build similarity index 77% rename from acceptance/testdata/mock_buildpacks/0.2/read-env-buildpack/bin/build rename to acceptance/testdata/mock_buildpacks/read-env-buildpack/bin/build index fc20479ce9..c5145bf2bd 100755 --- a/acceptance/testdata/mock_buildpacks/0.2/read-env-buildpack/bin/build +++ b/acceptance/testdata/mock_buildpacks/read-env-buildpack/bin/build @@ -16,7 +16,8 @@ if [[ -f "$platform_dir/env/ENV1_CONTENTS" ]]; then contents=$(cat "$platform_dir/env/ENV1_CONTENTS") echo "$contents" > "$launch_dir/env1-launch-layer/env1-launch-dep" ln -snf "$launch_dir/env1-launch-layer" env1-launch-deps - echo "launch = true" > "$launch_dir/env1-launch-layer.toml" + echo "[types]" > "$launch_dir/env1-launch-layer.toml" + echo "launch = true" >> "$launch_dir/env1-launch-layer.toml" fi ## makes a launch layer @@ -26,7 +27,8 
@@ if [[ -f "$platform_dir/env/ENV2_CONTENTS" ]]; then contents=$(cat "$platform_dir/env/ENV2_CONTENTS") echo "$contents" > "$launch_dir/env2-launch-layer/env2-launch-dep" ln -snf "$launch_dir/env2-launch-layer" env2-launch-deps - echo "launch = true" > "$launch_dir/env2-launch-layer.toml" + echo "[types]" > "$launch_dir/env2-launch-layer.toml" + echo "launch = true" >> "$launch_dir/env2-launch-layer.toml" fi echo "---> Done" diff --git a/acceptance/testdata/mock_buildpacks/0.2/read-env-buildpack/bin/build.bat b/acceptance/testdata/mock_buildpacks/read-env-buildpack/bin/build.bat similarity index 76% rename from acceptance/testdata/mock_buildpacks/0.2/read-env-buildpack/bin/build.bat rename to acceptance/testdata/mock_buildpacks/read-env-buildpack/bin/build.bat index dd5e96d5cf..e505dc98ec 100644 --- a/acceptance/testdata/mock_buildpacks/0.2/read-env-buildpack/bin/build.bat +++ b/acceptance/testdata/mock_buildpacks/read-env-buildpack/bin/build.bat @@ -11,7 +11,8 @@ if exist %platform_dir%\env\ENV1_CONTENTS ( set /p contents=<%platform_dir%\env\ENV1_CONTENTS echo !contents!> %launch_dir%\env1-launch-layer\env1-launch-dep mklink /j env1-launch-deps %launch_dir%\env1-launch-layer - echo launch = true> %launch_dir%\env1-launch-layer.toml + echo [types] > %launch_dir%\env1-launch-layer.toml + echo launch = true >> %launch_dir%\env1-launch-layer.toml ) :: makes a launch layer @@ -21,7 +22,8 @@ if exist %platform_dir%\env\ENV2_CONTENTS ( set /p contents=<%platform_dir%\env\ENV2_CONTENTS echo !contents!> %launch_dir%\env2-launch-layer\env2-launch-dep mklink /j env2-launch-deps %launch_dir%\env2-launch-layer - echo launch = true> %launch_dir%\env2-launch-layer.toml + echo [types] > %launch_dir%\env2-launch-layer.toml + echo launch = true >> %launch_dir%\env2-launch-layer.toml ) echo --- Done diff --git a/acceptance/testdata/mock_buildpacks/0.2/read-env-buildpack/bin/detect b/acceptance/testdata/mock_buildpacks/read-env-buildpack/bin/detect similarity index 100% rename from 
acceptance/testdata/mock_buildpacks/0.2/read-env-buildpack/bin/detect rename to acceptance/testdata/mock_buildpacks/read-env-buildpack/bin/detect diff --git a/acceptance/testdata/mock_buildpacks/0.2/read-env-buildpack/bin/detect.bat b/acceptance/testdata/mock_buildpacks/read-env-buildpack/bin/detect.bat similarity index 100% rename from acceptance/testdata/mock_buildpacks/0.2/read-env-buildpack/bin/detect.bat rename to acceptance/testdata/mock_buildpacks/read-env-buildpack/bin/detect.bat diff --git a/acceptance/testdata/mock_buildpacks/0.2/read-env-buildpack/buildpack.toml b/acceptance/testdata/mock_buildpacks/read-env-buildpack/buildpack.toml similarity index 91% rename from acceptance/testdata/mock_buildpacks/0.2/read-env-buildpack/buildpack.toml rename to acceptance/testdata/mock_buildpacks/read-env-buildpack/buildpack.toml index 09a0ef2463..f6a1b5093f 100644 --- a/acceptance/testdata/mock_buildpacks/0.2/read-env-buildpack/buildpack.toml +++ b/acceptance/testdata/mock_buildpacks/read-env-buildpack/buildpack.toml @@ -1,4 +1,4 @@ -api = "0.2" +api = "0.7" [buildpack] id = "read/env" diff --git a/acceptance/testdata/mock_buildpacks/read-env-extension/bin/detect b/acceptance/testdata/mock_buildpacks/read-env-extension/bin/detect new file mode 100755 index 0000000000..855bb0631a --- /dev/null +++ b/acceptance/testdata/mock_buildpacks/read-env-extension/bin/detect @@ -0,0 +1,3 @@ +#!/usr/bin/env bash + +echo "---> Detect: Read Env Extension" diff --git a/acceptance/testdata/mock_buildpacks/read-env-extension/bin/generate b/acceptance/testdata/mock_buildpacks/read-env-extension/bin/generate new file mode 100755 index 0000000000..76e4ea0056 --- /dev/null +++ b/acceptance/testdata/mock_buildpacks/read-env-extension/bin/generate @@ -0,0 +1,40 @@ +#!/usr/bin/env bash + +echo "---> Generate: Read Env Extension" + +# 1. Get args +output_dir=$CNB_OUTPUT_DIR + +# 2. 
Generate build.Dockerfile +cat >> "${output_dir}/build.Dockerfile" <>"${output_dir}/run.Dockerfile" < /from-ext.txt + +ARG user_id +USER \${user_id} +EOL +fi + +if [[ -z "$EXT_RUN_SWITCH" ]]; then + echo "Skipping run image switch, not requested..." +else + echo "Generating run.Dockerfile for run image switch..." + cat >>"${output_dir}/run.Dockerfile" < "$launch_dir/launch-layer/launch-dep" ln -snf "$launch_dir/launch-layer" launch-deps -echo "launch = true" > "$launch_dir/launch-layer.toml" +echo "[types]" > "$launch_dir/launch-layer.toml" +echo "launch = true" >> "$launch_dir/launch-layer.toml" ## makes a cached launch layer if [[ ! -f "$launch_dir/cached-launch-layer.toml" ]]; then @@ -25,7 +26,8 @@ if [[ ! -f "$launch_dir/cached-launch-layer.toml" ]]; then mkdir "$launch_dir/cached-launch-layer" echo "Cached Dep Contents" > "$launch_dir/cached-launch-layer/cached-dep" ln -snf "$launch_dir/cached-launch-layer" cached-deps - echo "launch = true" > "$launch_dir/cached-launch-layer.toml" + echo "[types]" > "$launch_dir/cached-launch-layer.toml" + echo "launch = true" >> "$launch_dir/cached-launch-layer.toml" echo "cache = true" >> "$launch_dir/cached-launch-layer.toml" else echo "reusing cached launch layer" @@ -38,6 +40,7 @@ cat < "$launch_dir/launch.toml" type = "web" command = "./run" args = ["8080"] + default = true [[processes]] type = "hello" diff --git a/acceptance/testdata/mock_buildpacks/0.2/simple-layers-buildpack-different-sha/bin/build.bat b/acceptance/testdata/mock_buildpacks/simple-layers-buildpack-different-sha/bin/build.bat similarity index 82% rename from acceptance/testdata/mock_buildpacks/0.2/simple-layers-buildpack-different-sha/bin/build.bat rename to acceptance/testdata/mock_buildpacks/simple-layers-buildpack-different-sha/bin/build.bat index 01421d77fc..be3e8497dc 100644 --- a/acceptance/testdata/mock_buildpacks/0.2/simple-layers-buildpack-different-sha/bin/build.bat +++ 
b/acceptance/testdata/mock_buildpacks/simple-layers-buildpack-different-sha/bin/build.bat @@ -8,7 +8,8 @@ echo making launch layer %launch_dir%\launch-layer mkdir %launch_dir%\launch-layer echo Launch Dep Contents > "%launch_dir%\launch-layer\launch-dep mklink /j launch-deps %launch_dir%\launch-layer -echo launch = true > %launch_dir%\launch-layer.toml +echo [types] > %launch_dir%\launch-layer.toml +echo launch = true >> %launch_dir%\launch-layer.toml :: makes a cached launch layer if not exist %launch_dir%\cached-launch-layer.toml ( @@ -16,7 +17,8 @@ if not exist %launch_dir%\cached-launch-layer.toml ( mkdir %launch_dir%\cached-launch-layer echo Cached Dep Contents > %launch_dir%\cached-launch-layer\cached-dep mklink /j cached-deps %launch_dir%\cached-launch-layer - echo launch = true > %launch_dir%\cached-launch-layer.toml + echo [types] > %launch_dir%\cached-launch-layer.toml + echo launch = true >> %launch_dir%\cached-launch-layer.toml echo cache = true >> %launch_dir%\cached-launch-layer.toml ) else ( echo reusing cached launch layer %launch_dir%\cached-launch-layer @@ -29,6 +31,7 @@ echo [[processes]] echo type = "web" echo command = '.\run' echo args = ["8080"] +echo default = true echo. 
echo [[processes]] echo type = "hello" diff --git a/acceptance/testdata/mock_buildpacks/0.2/simple-layers-buildpack/bin/detect b/acceptance/testdata/mock_buildpacks/simple-layers-buildpack-different-sha/bin/detect similarity index 100% rename from acceptance/testdata/mock_buildpacks/0.2/simple-layers-buildpack/bin/detect rename to acceptance/testdata/mock_buildpacks/simple-layers-buildpack-different-sha/bin/detect diff --git a/acceptance/testdata/mock_buildpacks/simple-layers-buildpack-different-sha/bin/detect.bat b/acceptance/testdata/mock_buildpacks/simple-layers-buildpack-different-sha/bin/detect.bat new file mode 100644 index 0000000000..15823e73f1 --- /dev/null +++ b/acceptance/testdata/mock_buildpacks/simple-layers-buildpack-different-sha/bin/detect.bat @@ -0,0 +1,2 @@ +@echo off +:: always detect diff --git a/acceptance/testdata/mock_buildpacks/0.2/simple-layers-buildpack-different-sha/bin/extra_file.txt b/acceptance/testdata/mock_buildpacks/simple-layers-buildpack-different-sha/bin/extra_file.txt similarity index 100% rename from acceptance/testdata/mock_buildpacks/0.2/simple-layers-buildpack-different-sha/bin/extra_file.txt rename to acceptance/testdata/mock_buildpacks/simple-layers-buildpack-different-sha/bin/extra_file.txt diff --git a/acceptance/testdata/mock_buildpacks/0.2/simple-layers-buildpack/buildpack.toml b/acceptance/testdata/mock_buildpacks/simple-layers-buildpack-different-sha/buildpack.toml similarity index 92% rename from acceptance/testdata/mock_buildpacks/0.2/simple-layers-buildpack/buildpack.toml rename to acceptance/testdata/mock_buildpacks/simple-layers-buildpack-different-sha/buildpack.toml index 22b290e57e..b772b61ab8 100644 --- a/acceptance/testdata/mock_buildpacks/0.2/simple-layers-buildpack/buildpack.toml +++ b/acceptance/testdata/mock_buildpacks/simple-layers-buildpack-different-sha/buildpack.toml @@ -1,4 +1,4 @@ -api = "0.2" +api = "0.7" [buildpack] id = "simple/layers" diff --git 
a/acceptance/testdata/mock_buildpacks/0.2/simple-layers-buildpack/bin/build b/acceptance/testdata/mock_buildpacks/simple-layers-buildpack/bin/build similarity index 71% rename from acceptance/testdata/mock_buildpacks/0.2/simple-layers-buildpack/bin/build rename to acceptance/testdata/mock_buildpacks/simple-layers-buildpack/bin/build index 01568be0f2..e2dce349b2 100755 --- a/acceptance/testdata/mock_buildpacks/0.2/simple-layers-buildpack/bin/build +++ b/acceptance/testdata/mock_buildpacks/simple-layers-buildpack/bin/build @@ -17,7 +17,8 @@ echo "Color: Styled" mkdir "$launch_dir/launch-layer" echo "Launch Dep Contents" > "$launch_dir/launch-layer/launch-dep" ln -snf "$launch_dir/launch-layer" launch-deps -echo "launch = true" > "$launch_dir/launch-layer.toml" +echo "[types]" > "$launch_dir/launch-layer.toml" +echo "launch = true" >> "$launch_dir/launch-layer.toml" ## makes a cached launch layer if [[ ! -f "$launch_dir/cached-launch-layer.toml" ]]; then @@ -25,10 +26,14 @@ if [[ ! -f "$launch_dir/cached-launch-layer.toml" ]]; then mkdir "$launch_dir/cached-launch-layer" echo "Cached Dep Contents" > "$launch_dir/cached-launch-layer/cached-dep" ln -snf "$launch_dir/cached-launch-layer" cached-deps - echo "launch = true" > "$launch_dir/cached-launch-layer.toml" + echo "[types]" > "$launch_dir/cached-launch-layer.toml" + echo "launch = true" >> "$launch_dir/cached-launch-layer.toml" echo "cache = true" >> "$launch_dir/cached-launch-layer.toml" else echo "reusing cached launch layer" + echo "[types]" > "$launch_dir/cached-launch-layer.toml" + echo "launch = true" >> "$launch_dir/cached-launch-layer.toml" + echo "cache = true" >> "$launch_dir/cached-launch-layer.toml" ln -snf "$launch_dir/cached-launch-layer" cached-deps fi @@ -38,6 +43,7 @@ cat < "$launch_dir/launch.toml" type = "web" command = "./run" args = ["8080"] + default = true [[processes]] type = "hello" diff --git a/acceptance/testdata/mock_buildpacks/0.2/simple-layers-buildpack/bin/build.bat 
b/acceptance/testdata/mock_buildpacks/simple-layers-buildpack/bin/build.bat similarity index 72% rename from acceptance/testdata/mock_buildpacks/0.2/simple-layers-buildpack/bin/build.bat rename to acceptance/testdata/mock_buildpacks/simple-layers-buildpack/bin/build.bat index 2047dc714e..78fd79c8ff 100644 --- a/acceptance/testdata/mock_buildpacks/0.2/simple-layers-buildpack/bin/build.bat +++ b/acceptance/testdata/mock_buildpacks/simple-layers-buildpack/bin/build.bat @@ -8,7 +8,8 @@ echo making launch layer %launch_dir%\launch-layer mkdir %launch_dir%\launch-layer echo Launch Dep Contents > "%launch_dir%\launch-layer\launch-dep mklink /j launch-deps %launch_dir%\launch-layer -echo launch = true > %launch_dir%\launch-layer.toml +echo [types] > %launch_dir%\launch-layer.toml +echo launch = true >> %launch_dir%\launch-layer.toml :: makes a cached launch layer if not exist %launch_dir%\cached-launch-layer.toml ( @@ -16,10 +17,14 @@ if not exist %launch_dir%\cached-launch-layer.toml ( mkdir %launch_dir%\cached-launch-layer echo Cached Dep Contents > %launch_dir%\cached-launch-layer\cached-dep mklink /j cached-deps %launch_dir%\cached-launch-layer - echo launch = true > %launch_dir%\cached-launch-layer.toml + echo [types] > %launch_dir%\cached-launch-layer.toml + echo launch = true >> %launch_dir%\cached-launch-layer.toml echo cache = true >> %launch_dir%\cached-launch-layer.toml ) else ( echo reusing cached launch layer %launch_dir%\cached-launch-layer + echo [types] > %launch_dir%\cached-launch-layer.toml + echo launch = true >> %launch_dir%\cached-launch-layer.toml + echo cache = true >> %launch_dir%\cached-launch-layer.toml mklink /j cached-deps %launch_dir%\cached-launch-layer ) @@ -29,6 +34,7 @@ echo [[processes]] echo type = "web" echo command = '.\run' echo args = ["8080"] +echo default = true echo. 
echo [[processes]] echo type = "hello" diff --git a/acceptance/testdata/mock_buildpacks/simple-layers-buildpack/bin/detect b/acceptance/testdata/mock_buildpacks/simple-layers-buildpack/bin/detect new file mode 100755 index 0000000000..e4cffa69d9 --- /dev/null +++ b/acceptance/testdata/mock_buildpacks/simple-layers-buildpack/bin/detect @@ -0,0 +1,3 @@ +#!/usr/bin/env bash + +## always detect diff --git a/acceptance/testdata/mock_buildpacks/simple-layers-buildpack/bin/detect.bat b/acceptance/testdata/mock_buildpacks/simple-layers-buildpack/bin/detect.bat new file mode 100644 index 0000000000..15823e73f1 --- /dev/null +++ b/acceptance/testdata/mock_buildpacks/simple-layers-buildpack/bin/detect.bat @@ -0,0 +1,2 @@ +@echo off +:: always detect diff --git a/acceptance/testdata/mock_buildpacks/0.2/simple-layers-buildpack-different-sha/buildpack.toml b/acceptance/testdata/mock_buildpacks/simple-layers-buildpack/buildpack.toml similarity index 92% rename from acceptance/testdata/mock_buildpacks/0.2/simple-layers-buildpack-different-sha/buildpack.toml rename to acceptance/testdata/mock_buildpacks/simple-layers-buildpack/buildpack.toml index 22b290e57e..b772b61ab8 100644 --- a/acceptance/testdata/mock_buildpacks/0.2/simple-layers-buildpack-different-sha/buildpack.toml +++ b/acceptance/testdata/mock_buildpacks/simple-layers-buildpack/buildpack.toml @@ -1,4 +1,4 @@ -api = "0.2" +api = "0.7" [buildpack] id = "simple/layers" diff --git a/acceptance/testdata/mock_buildpacks/simple-layers-extension/bin/detect b/acceptance/testdata/mock_buildpacks/simple-layers-extension/bin/detect new file mode 100755 index 0000000000..7964c98bda --- /dev/null +++ b/acceptance/testdata/mock_buildpacks/simple-layers-extension/bin/detect @@ -0,0 +1,3 @@ +#!/usr/bin/env bash + +echo "---> Detect: Simple Layers Extension" diff --git a/acceptance/testdata/mock_buildpacks/simple-layers-extension/bin/generate b/acceptance/testdata/mock_buildpacks/simple-layers-extension/bin/generate new file mode 100755 
index 0000000000..2a6841888c --- /dev/null +++ b/acceptance/testdata/mock_buildpacks/simple-layers-extension/bin/generate @@ -0,0 +1,3 @@ +#!/usr/bin/env bash + +echo "---> Generate: Simple Layers Extension" diff --git a/acceptance/testdata/mock_buildpacks/simple-layers-extension/extension.toml b/acceptance/testdata/mock_buildpacks/simple-layers-extension/extension.toml new file mode 100644 index 0000000000..9b780cd5db --- /dev/null +++ b/acceptance/testdata/mock_buildpacks/simple-layers-extension/extension.toml @@ -0,0 +1,6 @@ +api = "0.7" + +[extension] + id = "simple/layers" + version = "simple-layers-version" + name = "Simple Layers Extension" diff --git a/acceptance/testdata/mock_buildpacks/0.2/simple-layers-parent-buildpack/buildpack.toml b/acceptance/testdata/mock_buildpacks/simple-layers-parent-buildpack/buildpack.toml similarity index 94% rename from acceptance/testdata/mock_buildpacks/0.2/simple-layers-parent-buildpack/buildpack.toml rename to acceptance/testdata/mock_buildpacks/simple-layers-parent-buildpack/buildpack.toml index 46f3a39ea9..a92d2c757e 100644 --- a/acceptance/testdata/mock_buildpacks/0.2/simple-layers-parent-buildpack/buildpack.toml +++ b/acceptance/testdata/mock_buildpacks/simple-layers-parent-buildpack/buildpack.toml @@ -1,4 +1,4 @@ -api = "0.2" +api = "0.7" [buildpack] id = "simple/layers/parent" diff --git a/acceptance/testdata/mock_buildpacks/system-fail-detect/bin/build b/acceptance/testdata/mock_buildpacks/system-fail-detect/bin/build new file mode 100755 index 0000000000..9e3da0d6b9 --- /dev/null +++ b/acceptance/testdata/mock_buildpacks/system-fail-detect/bin/build @@ -0,0 +1,6 @@ +#!/usr/bin/env bash + +echo "---> BUILD: System Fail Detect buildpack (should never run)" + +# This should never be reached +exit 1 \ No newline at end of file diff --git a/acceptance/testdata/mock_buildpacks/system-fail-detect/bin/detect b/acceptance/testdata/mock_buildpacks/system-fail-detect/bin/detect new file mode 100755 index 
0000000000..d2e6cd562d --- /dev/null +++ b/acceptance/testdata/mock_buildpacks/system-fail-detect/bin/detect @@ -0,0 +1,6 @@ +#!/usr/bin/env bash + +echo "---> DETECT: System Fail Detect buildpack (will fail)" + +# Always fail detection +exit 1 \ No newline at end of file diff --git a/acceptance/testdata/mock_buildpacks/system-fail-detect/buildpack.toml b/acceptance/testdata/mock_buildpacks/system-fail-detect/buildpack.toml new file mode 100644 index 0000000000..551699c515 --- /dev/null +++ b/acceptance/testdata/mock_buildpacks/system-fail-detect/buildpack.toml @@ -0,0 +1,9 @@ +api = "0.7" + +[buildpack] + id = "system/fail-detect" + version = "system-fail-detect-version" + name = "System Fail Detect Buildpack" + +[[stacks]] + id = "pack.test.stack" \ No newline at end of file diff --git a/acceptance/testdata/mock_buildpacks/system-post-buildpack/bin/build b/acceptance/testdata/mock_buildpacks/system-post-buildpack/bin/build new file mode 100755 index 0000000000..b654732656 --- /dev/null +++ b/acceptance/testdata/mock_buildpacks/system-post-buildpack/bin/build @@ -0,0 +1,20 @@ +#!/usr/bin/env bash + +echo "---> BUILD: System Post buildpack" + +set -o errexit +set -o pipefail + +layers_dir=$1 +platform_dir=$2 + +# Create a layer to verify it ran +mkdir -p "${layers_dir}/system-post" +cat > "${layers_dir}/system-post.toml" < "${layers_dir}/system-post/marker" + +exit 0 \ No newline at end of file diff --git a/acceptance/testdata/mock_buildpacks/system-post-buildpack/bin/detect b/acceptance/testdata/mock_buildpacks/system-post-buildpack/bin/detect new file mode 100755 index 0000000000..238c845c77 --- /dev/null +++ b/acceptance/testdata/mock_buildpacks/system-post-buildpack/bin/detect @@ -0,0 +1,6 @@ +#!/usr/bin/env bash + +echo "---> DETECT: System Post buildpack" + +# Always pass detection for testing +exit 0 \ No newline at end of file diff --git a/acceptance/testdata/mock_buildpacks/system-post-buildpack/buildpack.toml 
b/acceptance/testdata/mock_buildpacks/system-post-buildpack/buildpack.toml new file mode 100644 index 0000000000..54c3bfc8f3 --- /dev/null +++ b/acceptance/testdata/mock_buildpacks/system-post-buildpack/buildpack.toml @@ -0,0 +1,9 @@ +api = "0.7" + +[buildpack] + id = "system/post" + version = "system-post-version" + name = "System Post Buildpack" + +[[stacks]] + id = "pack.test.stack" \ No newline at end of file diff --git a/acceptance/testdata/mock_buildpacks/system-pre-buildpack/bin/build b/acceptance/testdata/mock_buildpacks/system-pre-buildpack/bin/build new file mode 100755 index 0000000000..ab3cfd0fa9 --- /dev/null +++ b/acceptance/testdata/mock_buildpacks/system-pre-buildpack/bin/build @@ -0,0 +1,20 @@ +#!/usr/bin/env bash + +echo "---> BUILD: System Pre buildpack" + +set -o errexit +set -o pipefail + +layers_dir=$1 +platform_dir=$2 + +# Create a layer to verify it ran +mkdir -p "${layers_dir}/system-pre" +cat > "${layers_dir}/system-pre.toml" < "${layers_dir}/system-pre/marker" + +exit 0 \ No newline at end of file diff --git a/acceptance/testdata/mock_buildpacks/system-pre-buildpack/bin/detect b/acceptance/testdata/mock_buildpacks/system-pre-buildpack/bin/detect new file mode 100755 index 0000000000..c3b990d3a2 --- /dev/null +++ b/acceptance/testdata/mock_buildpacks/system-pre-buildpack/bin/detect @@ -0,0 +1,6 @@ +#!/usr/bin/env bash + +echo "---> DETECT: System Pre buildpack" + +# Always pass detection for testing +exit 0 \ No newline at end of file diff --git a/acceptance/testdata/mock_buildpacks/system-pre-buildpack/buildpack.toml b/acceptance/testdata/mock_buildpacks/system-pre-buildpack/buildpack.toml new file mode 100644 index 0000000000..d54441155d --- /dev/null +++ b/acceptance/testdata/mock_buildpacks/system-pre-buildpack/buildpack.toml @@ -0,0 +1,9 @@ +api = "0.7" + +[buildpack] + id = "system/pre" + version = "system-pre-version" + name = "System Pre Buildpack" + +[[stacks]] + id = "pack.test.stack" \ No newline at end of file diff --git 
a/acceptance/testdata/mock_stack/windows/run/server.go b/acceptance/testdata/mock_stack/windows/run/server.go index 4eb1a8b5fc..36e54393f9 100644 --- a/acceptance/testdata/mock_stack/windows/run/server.go +++ b/acceptance/testdata/mock_stack/windows/run/server.go @@ -6,9 +6,9 @@ package main import ( "flag" - "io/ioutil" "log" "net/http" + "os" "path/filepath" "strings" ) @@ -28,7 +28,7 @@ func main() { } for _, path := range paths { - contents, err := ioutil.ReadFile(filepath.Clean(path)) + contents, err := os.ReadFile(filepath.Clean(path)) if err != nil { panic(err.Error()) } diff --git a/acceptance/testdata/pack_fixtures/builder_extensions.toml b/acceptance/testdata/pack_fixtures/builder_extensions.toml new file mode 100644 index 0000000000..42d967af76 --- /dev/null +++ b/acceptance/testdata/pack_fixtures/builder_extensions.toml @@ -0,0 +1,53 @@ +[[buildpacks]] + id = "read/env" + version = "read-env-version" + uri = "read-env-buildpack.tgz" + +[[buildpacks]] + id = "simple/layers" + version = "simple-layers-version" + uri = "simple-layers-buildpack" + +[[extensions]] + id = "read/env" + version = "read-env-version" + uri = "read-env-extension.tgz" + +[[extensions]] + id = "simple/layers" + version = "simple-layers-version" + uri = "simple-layers-extension" + +[[order]] +[[order.group]] + id = "read/env" + version = "read-env-version" + optional = true + +[[order.group]] + id = "simple/layers" + version = "simple-layers-version" + optional = true + +[[order-extensions]] +[[order-extensions.group]] + id = "read/env" + version = "read-env-version" + +[[order-extensions.group]] + id = "simple/layers" + version = "simple-layers-version" + +[stack] + id = "pack.test.stack" + build-image = "pack-test/build" + run-image = "pack-test/run" + run-image-mirrors = ["{{.run_image_mirror}}"] + +[lifecycle] +{{- if .lifecycle_uri}} + uri = "{{.lifecycle_uri}}" +{{- end}} +{{- if .lifecycle_version}} + version = "{{.lifecycle_version}}" +{{- end}} diff --git 
a/acceptance/testdata/pack_fixtures/builder_multi_platform-no-targets.toml b/acceptance/testdata/pack_fixtures/builder_multi_platform-no-targets.toml new file mode 100644 index 0000000000..8e6998332d --- /dev/null +++ b/acceptance/testdata/pack_fixtures/builder_multi_platform-no-targets.toml @@ -0,0 +1,19 @@ +[[buildpacks]] +id = "simple/layers" +version = "simple-layers-version" +uri = "{{ .BuildpackURI }}" + +[[order]] +[[order.group]] +id = "simple/layers" +version = "simple-layers-version" + +[build] +image = "{{ .BuildImage }}" + +[run] +[[run.images]] +image = "{{ .RunImage }}" + + + diff --git a/acceptance/testdata/pack_fixtures/builder_multi_platform.toml b/acceptance/testdata/pack_fixtures/builder_multi_platform.toml new file mode 100644 index 0000000000..cc95496e30 --- /dev/null +++ b/acceptance/testdata/pack_fixtures/builder_multi_platform.toml @@ -0,0 +1,28 @@ +[[buildpacks]] +id = "simple/layers" +version = "simple-layers-version" +uri = "{{ .BuildpackURI }}" + +[[order]] +[[order.group]] +id = "simple/layers" +version = "simple-layers-version" + +# Targets the buildpack will work with +[[targets]] +os = "linux" +arch = "amd64" + +[[targets]] +os = "windows" +arch = "amd64" + +[build] +image = "{{ .BuildImage }}" + +[run] +[[run.images]] +image = "{{ .RunImage }}" + + + diff --git a/acceptance/testdata/pack_fixtures/builder_with_failing_system_buildpack.toml b/acceptance/testdata/pack_fixtures/builder_with_failing_system_buildpack.toml new file mode 100644 index 0000000000..054975e7f4 --- /dev/null +++ b/acceptance/testdata/pack_fixtures/builder_with_failing_system_buildpack.toml @@ -0,0 +1,33 @@ +[[buildpacks]] + id = "simple-layers-buildpack" + uri = "file://{{.Fixtures}}/simple-layers-buildpack" + +[[buildpacks]] + id = "system/fail-detect" + uri = "file://{{.Fixtures}}/system-fail-detect" + +[[buildpacks]] + id = "system/post" + uri = "file://{{.Fixtures}}/system-post-buildpack" + +# System buildpacks configuration +[system] +[system.pre] 
+buildpacks = [ + { id = "system/fail-detect", version = "system-fail-detect-version", optional = false } +] + +[system.post] +buildpacks = [ + { id = "system/post", version = "system-post-version", optional = true } +] + +[[order]] +[[order.group]] + id = "simple-layers-buildpack" + version = "simple-layers-buildpack-version" + +[stack] + id = "pack.test.stack" + build-image = "pack-test/build" + run-image = "pack-test/run" \ No newline at end of file diff --git a/acceptance/testdata/pack_fixtures/builder_with_optional_failing_system_buildpack.toml b/acceptance/testdata/pack_fixtures/builder_with_optional_failing_system_buildpack.toml new file mode 100644 index 0000000000..ff9a5a2363 --- /dev/null +++ b/acceptance/testdata/pack_fixtures/builder_with_optional_failing_system_buildpack.toml @@ -0,0 +1,29 @@ +[[buildpacks]] + id = "simple-layers-buildpack" + uri = "file://{{.Fixtures}}/simple-layers-buildpack" + +[[buildpacks]] + id = "system/fail-detect" + uri = "file://{{.Fixtures}}/system-fail-detect" + +[[buildpacks]] + id = "system/pre" + uri = "file://{{.Fixtures}}/system-pre-buildpack" + +# System buildpacks configuration +[system] +[system.pre] +buildpacks = [ + { id = "system/fail-detect", version = "system-fail-detect-version", optional = true }, + { id = "system/pre", version = "system-pre-version", optional = false } +] + +[[order]] +[[order.group]] + id = "simple-layers-buildpack" + version = "simple-layers-buildpack-version" + +[stack] + id = "pack.test.stack" + build-image = "pack-test/build" + run-image = "pack-test/run" \ No newline at end of file diff --git a/acceptance/testdata/pack_fixtures/builder_with_system_buildpacks.toml b/acceptance/testdata/pack_fixtures/builder_with_system_buildpacks.toml new file mode 100644 index 0000000000..c41bf67fa6 --- /dev/null +++ b/acceptance/testdata/pack_fixtures/builder_with_system_buildpacks.toml @@ -0,0 +1,33 @@ +[[buildpacks]] + id = "simple-layers-buildpack" + uri = 
"file://{{.Fixtures}}/simple-layers-buildpack" + +[[buildpacks]] + id = "system/pre" + uri = "file://{{.Fixtures}}/system-pre-buildpack" + +[[buildpacks]] + id = "system/post" + uri = "file://{{.Fixtures}}/system-post-buildpack" + +# System buildpacks configuration +[system] +[system.pre] +buildpacks = [ + { id = "system/pre", version = "system-pre-version", optional = false } +] + +[system.post] +buildpacks = [ + { id = "system/post", version = "system-post-version", optional = true } +] + +[[order]] +[[order.group]] + id = "simple-layers-buildpack" + version = "simple-layers-buildpack-version" + +[stack] + id = "pack.test.stack" + build-image = "pack-test/build" + run-image = "pack-test/run" \ No newline at end of file diff --git a/acceptance/testdata/pack_fixtures/inspect_image_local_output.json b/acceptance/testdata/pack_fixtures/inspect_image_local_output.json index 381fb8438d..154c1ca122 100644 --- a/acceptance/testdata/pack_fixtures/inspect_image_local_output.json +++ b/acceptance/testdata/pack_fixtures/inspect_image_local_output.json @@ -3,6 +3,7 @@ "remote_info": null, "local_info": { "stack": "pack.test.stack", + "rebasable": {{.rebasable}}, "base_image": { "top_layer": "{{.base_image_top_layer}}", "reference": "{{.base_image_id}}" @@ -25,6 +26,7 @@ "version": "simple-layers-version" } ], + "extensions": null, "processes": [ { "type": "web", @@ -33,7 +35,8 @@ "default": true, "args": [ "8080" - ] + ], + "working-dir": "{{ ( StringsEscapeBackslash .image_workdir ) }}" }, { "type": "hello", @@ -42,8 +45,9 @@ "default": false, "args": [ {{ ( StringsJoin (StringsDoubleQuote .hello_args) "," ) }} - ] + ], + "working-dir": "{{ ( StringsEscapeBackslash .image_workdir ) }}" } ] } -} \ No newline at end of file +} diff --git a/acceptance/testdata/pack_fixtures/inspect_image_local_output.toml b/acceptance/testdata/pack_fixtures/inspect_image_local_output.toml index 24e19a76fa..6798c3115e 100644 --- a/acceptance/testdata/pack_fixtures/inspect_image_local_output.toml 
+++ b/acceptance/testdata/pack_fixtures/inspect_image_local_output.toml @@ -2,6 +2,7 @@ image_name = "{{.image_name}}" [local_info] stack = "pack.test.stack" +rebasable = {{.rebasable}} [local_info.base_image] top_layer = "{{.base_image_top_layer}}" @@ -27,6 +28,7 @@ stack = "pack.test.stack" command = "{{ ( StringsEscapeBackslash .web_command ) }}" default = true args = [ "8080" ] + working-dir = "{{ ( StringsEscapeBackslash .image_workdir ) }}" [[local_info.processes]] type = "hello" @@ -34,3 +36,4 @@ stack = "pack.test.stack" command = "{{.hello_command}}" default = false args = [ {{ ( StringsJoin (StringsDoubleQuote .hello_args) ",") }} ] + working-dir = "{{ ( StringsEscapeBackslash .image_workdir ) }}" diff --git a/acceptance/testdata/pack_fixtures/inspect_image_local_output.txt b/acceptance/testdata/pack_fixtures/inspect_image_local_output.txt deleted file mode 100644 index 5f5794ba40..0000000000 --- a/acceptance/testdata/pack_fixtures/inspect_image_local_output.txt +++ /dev/null @@ -1,26 +0,0 @@ -Inspecting image: '{{.image_name}}' - -REMOTE: -(not present) - -LOCAL: - -Stack: pack.test.stack - -Base Image: - Reference: {{.base_image_id}} - Top Layer: {{.base_image_top_layer}} - -Run Images: - {{.run_image_local_mirror}} (user-configured) - pack-test/run - {{.run_image_mirror}} - -Buildpacks: - ID VERSION HOMEPAGE - simple/layers simple-layers-version - - -Processes: - TYPE SHELL COMMAND ARGS - web (default) bash {{.web_command}} 8080 - hello {{.hello_command}} {{ .hello_args_prefix }}{{ StringsJoin .hello_args " "}} diff --git a/acceptance/testdata/pack_fixtures/inspect_image_local_output.yaml b/acceptance/testdata/pack_fixtures/inspect_image_local_output.yaml index b9ffb7b9bd..11bf97dca9 100644 --- a/acceptance/testdata/pack_fixtures/inspect_image_local_output.yaml +++ b/acceptance/testdata/pack_fixtures/inspect_image_local_output.yaml @@ -3,6 +3,7 @@ image_name: "{{.image_name}}" remote_info: local_info: stack: pack.test.stack + rebasable: {{.rebasable}} 
base_image: top_layer: "{{.base_image_top_layer}}" reference: "{{.base_image_id}}" @@ -14,6 +15,7 @@ local_info: buildpacks: - id: simple/layers version: simple-layers-version + extensions: [] processes: - type: web shell: bash @@ -21,8 +23,10 @@ local_info: default: true args: - '8080' + working-dir: "{{ ( StringsEscapeBackslash .image_workdir ) }}" - type: hello shell: '' command: "{{.hello_command}}" default: false - args: [ {{ ( StringsJoin (StringsDoubleQuote .hello_args) ",") }} ] \ No newline at end of file + args: [ {{ ( StringsJoin (StringsDoubleQuote .hello_args) ",") }} ] + working-dir: "{{ ( StringsEscapeBackslash .image_workdir ) }}" diff --git a/acceptance/testdata/pack_fixtures/inspect_image_published_output.json b/acceptance/testdata/pack_fixtures/inspect_image_published_output.json index e829870086..f03d82df9e 100644 --- a/acceptance/testdata/pack_fixtures/inspect_image_published_output.json +++ b/acceptance/testdata/pack_fixtures/inspect_image_published_output.json @@ -3,6 +3,7 @@ "local_info": null, "remote_info": { "stack": "pack.test.stack", + "rebasable": {{.rebasable}}, "base_image": { "top_layer": "{{.base_image_top_layer}}", "reference": "{{.base_image_ref}}" @@ -21,6 +22,7 @@ "version": "simple-layers-version" } ], + "extensions": null, "processes": [ { "type": "web", @@ -29,7 +31,8 @@ "default": true, "args": [ "8080" - ] + ], + "working-dir": "{{ ( StringsEscapeBackslash .image_workdir ) }}" }, { "type": "hello", @@ -38,8 +41,9 @@ "default": false, "args": [ {{ ( StringsJoin (StringsDoubleQuote .hello_args) "," ) }} - ] + ], + "working-dir": "{{ ( StringsEscapeBackslash .image_workdir ) }}" } ] } -} \ No newline at end of file +} diff --git a/acceptance/testdata/pack_fixtures/inspect_image_published_output.toml b/acceptance/testdata/pack_fixtures/inspect_image_published_output.toml index 4502400a0f..a216f51254 100644 --- a/acceptance/testdata/pack_fixtures/inspect_image_published_output.toml +++ 
b/acceptance/testdata/pack_fixtures/inspect_image_published_output.toml @@ -2,6 +2,7 @@ image_name = "{{.image_name}}" [remote_info] stack = "pack.test.stack" +rebasable = {{.rebasable}} [remote_info.base_image] top_layer = "{{.base_image_top_layer}}" @@ -23,6 +24,7 @@ stack = "pack.test.stack" command = "{{( StringsEscapeBackslash .web_command )}}" default = true args = [ "8080" ] + working-dir = "{{ ( StringsEscapeBackslash .image_workdir ) }}" [[remote_info.processes]] type = "hello" @@ -30,4 +32,5 @@ stack = "pack.test.stack" command = "{{.hello_command}}" default = false args = [ {{ ( StringsJoin (StringsDoubleQuote .hello_args) "," ) }} ] + working-dir = "{{ ( StringsEscapeBackslash .image_workdir ) }}" diff --git a/acceptance/testdata/pack_fixtures/inspect_image_published_output.txt b/acceptance/testdata/pack_fixtures/inspect_image_published_output.txt deleted file mode 100644 index f1f411e924..0000000000 --- a/acceptance/testdata/pack_fixtures/inspect_image_published_output.txt +++ /dev/null @@ -1,25 +0,0 @@ -Inspecting image: '{{.image_name}}' - -REMOTE: - -Stack: pack.test.stack - -Base Image: - Reference: {{.base_image_ref}} - Top Layer: {{.base_image_top_layer}} - -Run Images: - pack-test/run - {{.run_image_mirror}} - -Buildpacks: - ID VERSION HOMEPAGE - simple/layers simple-layers-version - - -Processes: - TYPE SHELL COMMAND ARGS - web (default) bash {{.web_command}} 8080 - hello {{.hello_command}} {{ .hello_args_prefix }}{{ StringsJoin .hello_args " "}} - -LOCAL: -(not present) diff --git a/acceptance/testdata/pack_fixtures/inspect_image_published_output.yaml b/acceptance/testdata/pack_fixtures/inspect_image_published_output.yaml index a2b6f79e48..b9b745bc2b 100644 --- a/acceptance/testdata/pack_fixtures/inspect_image_published_output.yaml +++ b/acceptance/testdata/pack_fixtures/inspect_image_published_output.yaml @@ -3,6 +3,7 @@ image_name: "{{.image_name}}" local_info: null remote_info: stack: pack.test.stack + rebasable: {{.rebasable}} base_image: 
top_layer: "{{.base_image_top_layer}}" reference: "{{.base_image_ref}}" @@ -12,6 +13,7 @@ remote_info: buildpacks: - id: simple/layers version: simple-layers-version + extensions: [] processes: - type: web shell: bash @@ -19,8 +21,10 @@ remote_info: default: true args: - '8080' + working-dir: "{{ ( StringsEscapeBackslash .image_workdir ) }}" - type: hello shell: '' command: "{{.hello_command}}" default: false args: [ {{ ( StringsJoin (StringsDoubleQuote .hello_args) "," ) }} ] + working-dir: "{{ ( StringsEscapeBackslash .image_workdir ) }}" diff --git a/acceptance/testdata/pack_fixtures/package_aggregate.toml b/acceptance/testdata/pack_fixtures/package_aggregate.toml index e6a5e2c980..0f8cc5d93a 100644 --- a/acceptance/testdata/pack_fixtures/package_aggregate.toml +++ b/acceptance/testdata/pack_fixtures/package_aggregate.toml @@ -5,4 +5,4 @@ uri = "{{ .BuildpackURI }}" image = "{{ .PackageName }}" [platform] -os = "{{ .OS }}" \ No newline at end of file +os = "{{ .OS }}" diff --git a/acceptance/testdata/pack_fixtures/package_multi_platform.toml b/acceptance/testdata/pack_fixtures/package_multi_platform.toml new file mode 100644 index 0000000000..02b4ff47a6 --- /dev/null +++ b/acceptance/testdata/pack_fixtures/package_multi_platform.toml @@ -0,0 +1,5 @@ +[buildpack] +uri = "{{ .BuildpackURI }}" + +[[dependencies]] +uri = "{{ .PackageName }}" diff --git a/acceptance/testdata/pack_fixtures/report_output.txt b/acceptance/testdata/pack_fixtures/report_output.txt index 0bd3d10121..a585e5d7d8 100644 --- a/acceptance/testdata/pack_fixtures/report_output.txt +++ b/acceptance/testdata/pack_fixtures/report_output.txt @@ -2,10 +2,11 @@ Pack: Version: {{ .Version }} OS/Arch: {{ .OS }}/{{ .Arch }} -Default Lifecycle Version: 0.13.3 +Default Lifecycle Version: 0.21.0 -Supported Platform APIs: 0.3, 0.4, 0.5, 0.6, 0.7, 0.8 +Supported Platform APIs: 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 0.10, 0.11, 0.12, 0.13, 0.14, 0.15 Config: default-builder-image = "{{ .DefaultBuilder }}" 
experimental = true + layout-repo-dir = "{{ .LayoutRepoDir }}" diff --git a/acceptance/testdata/pack_previous_fixtures_overrides/builder.toml b/acceptance/testdata/pack_previous_fixtures_overrides/builder.toml deleted file mode 100644 index afd2e7d036..0000000000 --- a/acceptance/testdata/pack_previous_fixtures_overrides/builder.toml +++ /dev/null @@ -1,43 +0,0 @@ -[[buildpacks]] - id = "read/env" - version = "read-env-version" - uri = "read-env-buildpack.tgz" - -[[buildpacks]] - # intentionally missing id/version as they are optional - uri = "noop-buildpack.tgz" - -[[buildpacks]] - # noop-buildpack-2 has the same id but a different version compared to noop-buildpack - uri = "noop-buildpack-2.tgz" - -{{- if .package_image_name}} -[[buildpacks]] - image = "{{.package_image_name}}" -{{- end}} - -[[order]] -{{- if .package_id}} -[[order.group]] - id = "{{.package_id}}" - # intentionlly missing version to test support -{{- end}} - -[[order.group]] - id = "read/env" - version = "read-env-version" - optional = true - -[stack] - id = "pack.test.stack" - build-image = "pack-test/build" - run-image = "pack-test/run" - run-image-mirrors = ["{{.run_image_mirror}}"] - -[lifecycle] -{{- if .lifecycle_uri}} - uri = "{{.lifecycle_uri}}" -{{- end}} -{{- if .lifecycle_version}} - version = "{{.lifecycle_version}}" -{{- end}} diff --git a/acceptance/testdata/pack_previous_fixtures_overrides/inspect_X.Y.Z_builder_output.txt b/acceptance/testdata/pack_previous_fixtures_overrides/inspect_X.Y.Z_builder_output.txt deleted file mode 100644 index fce877e4b9..0000000000 --- a/acceptance/testdata/pack_previous_fixtures_overrides/inspect_X.Y.Z_builder_output.txt +++ /dev/null @@ -1 +0,0 @@ -Files like these represent the expected output of calling `pack inspect-builder` on a builder created with pack vX.Y.Z diff --git a/acceptance/testdata/pack_previous_fixtures_overrides/inspect_builder_nested_depth_2_output.txt 
b/acceptance/testdata/pack_previous_fixtures_overrides/inspect_builder_nested_depth_2_output.txt deleted file mode 100644 index 32de2b333e..0000000000 --- a/acceptance/testdata/pack_previous_fixtures_overrides/inspect_builder_nested_depth_2_output.txt +++ /dev/null @@ -1,93 +0,0 @@ -Inspecting builder: '{{.builder_name}}' - -REMOTE: - -Created By: - Name: Pack CLI - Version: {{.pack_version}} - -Trusted: {{.trusted}} - -Stack: - ID: pack.test.stack - Mixins: - mixinA - netcat - mixin3 - build:mixinTwo - -Lifecycle: - Version: {{.lifecycle_version}} - Buildpack APIs: - Deprecated: {{ .deprecated_buildpack_apis }} - Supported: {{ .supported_buildpack_apis }} - Platform APIs: - Deprecated: {{ .deprecated_platform_apis }} - Supported: {{ .supported_platform_apis }} - -Run Images: - some-registry.com/pack-test/run1 (user-configured) - pack-test/run - {{.run_image_mirror}} - -Buildpacks: - ID NAME VERSION HOMEPAGE - noop.buildpack NOOP Buildpack noop.buildpack.later-version http://geocities.com/cool-bp - noop.buildpack NOOP Buildpack noop.buildpack.version - - read/env Read Env Buildpack read-env-version - - simple/layers Simple Layers Buildpack simple-layers-version - - simple/nested-level-1 Nested Level One Buildpack nested-l1-version - - simple/nested-level-2 Nested Level Two Buildpack nested-l2-version - - -Detection Order: - └ Group #1: - ├ simple/nested-level-1 - │ └ Group #1: - │ └ simple/nested-level-2@nested-l2-version - └ read/env@read-env-version (optional) - -LOCAL: - -Created By: - Name: Pack CLI - Version: {{.pack_version}} - -Trusted: {{.trusted}} - -Stack: - ID: pack.test.stack - Mixins: - mixinA - netcat - mixin3 - build:mixinTwo - -Lifecycle: - Version: {{.lifecycle_version}} - Buildpack APIs: - Deprecated: {{ .deprecated_buildpack_apis }} - Supported: {{ .supported_buildpack_apis }} - Platform APIs: - Deprecated: {{ .deprecated_platform_apis }} - Supported: {{ .supported_platform_apis }} - -Run Images: - some-registry.com/pack-test/run1 
(user-configured) - pack-test/run - {{.run_image_mirror}} - -Buildpacks: - ID NAME VERSION HOMEPAGE - noop.buildpack NOOP Buildpack noop.buildpack.later-version http://geocities.com/cool-bp - noop.buildpack NOOP Buildpack noop.buildpack.version - - read/env Read Env Buildpack read-env-version - - simple/layers Simple Layers Buildpack simple-layers-version - - simple/nested-level-1 Nested Level One Buildpack nested-l1-version - - simple/nested-level-2 Nested Level Two Buildpack nested-l2-version - - -Detection Order: - └ Group #1: - ├ simple/nested-level-1 - │ └ Group #1: - │ └ simple/nested-level-2@nested-l2-version - └ read/env@read-env-version (optional) diff --git a/acceptance/testdata/pack_previous_fixtures_overrides/inspect_builder_nested_output.txt b/acceptance/testdata/pack_previous_fixtures_overrides/inspect_builder_nested_output.txt deleted file mode 100644 index 5b6c2a5c3c..0000000000 --- a/acceptance/testdata/pack_previous_fixtures_overrides/inspect_builder_nested_output.txt +++ /dev/null @@ -1,97 +0,0 @@ -Inspecting builder: '{{.builder_name}}' - -REMOTE: - -Created By: - Name: Pack CLI - Version: {{.pack_version}} - -Trusted: {{.trusted}} - -Stack: - ID: pack.test.stack - Mixins: - mixinA - netcat - mixin3 - build:mixinTwo - -Lifecycle: - Version: {{.lifecycle_version}} - Buildpack APIs: - Deprecated: {{ .deprecated_buildpack_apis }} - Supported: {{ .supported_buildpack_apis }} - Platform APIs: - Deprecated: {{ .deprecated_platform_apis }} - Supported: {{ .supported_platform_apis }} - -Run Images: - some-registry.com/pack-test/run1 (user-configured) - pack-test/run - {{.run_image_mirror}} - -Buildpacks: - ID NAME VERSION HOMEPAGE - noop.buildpack NOOP Buildpack noop.buildpack.later-version http://geocities.com/cool-bp - noop.buildpack NOOP Buildpack noop.buildpack.version - - read/env Read Env Buildpack read-env-version - - simple/layers Simple Layers Buildpack simple-layers-version - - simple/nested-level-1 Nested Level One Buildpack nested-l1-version 
- - simple/nested-level-2 Nested Level Two Buildpack nested-l2-version - - -Detection Order: - └ Group #1: - ├ simple/nested-level-1 - │ └ Group #1: - │ └ simple/nested-level-2@nested-l2-version - │ └ Group #1: - │ └ simple/layers@simple-layers-version - └ read/env@read-env-version (optional) - -LOCAL: - -Created By: - Name: Pack CLI - Version: {{.pack_version}} - -Trusted: {{.trusted}} - -Stack: - ID: pack.test.stack - Mixins: - mixinA - netcat - mixin3 - build:mixinTwo - -Lifecycle: - Version: {{.lifecycle_version}} - Buildpack APIs: - Deprecated: {{ .deprecated_buildpack_apis }} - Supported: {{ .supported_buildpack_apis }} - Platform APIs: - Deprecated: {{ .deprecated_platform_apis }} - Supported: {{ .supported_platform_apis }} - -Run Images: - some-registry.com/pack-test/run1 (user-configured) - pack-test/run - {{.run_image_mirror}} - -Buildpacks: - ID NAME VERSION HOMEPAGE - noop.buildpack NOOP Buildpack noop.buildpack.later-version http://geocities.com/cool-bp - noop.buildpack NOOP Buildpack noop.buildpack.version - - read/env Read Env Buildpack read-env-version - - simple/layers Simple Layers Buildpack simple-layers-version - - simple/nested-level-1 Nested Level One Buildpack nested-l1-version - - simple/nested-level-2 Nested Level Two Buildpack nested-l2-version - - -Detection Order: - └ Group #1: - ├ simple/nested-level-1 - │ └ Group #1: - │ └ simple/nested-level-2@nested-l2-version - │ └ Group #1: - │ └ simple/layers@simple-layers-version - └ read/env@read-env-version (optional) diff --git a/acceptance/testdata/pack_previous_fixtures_overrides/inspect_builder_nested_output_json.txt b/acceptance/testdata/pack_previous_fixtures_overrides/inspect_builder_nested_output_json.txt deleted file mode 100644 index 1106dc3af4..0000000000 --- a/acceptance/testdata/pack_previous_fixtures_overrides/inspect_builder_nested_output_json.txt +++ /dev/null @@ -1,199 +0,0 @@ -{ - "builder_name": "{{.builder_name}}", - "trusted": false, - "default": false, - "remote_info": { - 
"created_by": { - "name": "Pack CLI", - "version": "{{.pack_version}}" - }, - "stack": { - "id": "pack.test.stack", - "mixins": [ - "mixinA", - "netcat", - "mixin3", - "build:mixinTwo" - ] - }, - "lifecycle": { - "version": "{{.lifecycle_version}}", - "buildpack_apis": { - "deprecated": {{.deprecated_buildpack_apis}}, - "supported": {{.supported_buildpack_apis}} - }, - "platform_apis": { - "deprecated": {{.deprecated_platform_apis}}, - "supported": {{.supported_platform_apis}} - } - }, - "run_images": [ - { - "name": "some-registry.com/pack-test/run1", - "user_configured": true - }, - { - "name": "pack-test/run" - }, - { - "name": "{{.run_image_mirror}}" - } - ], - "buildpacks": [ - { - "id": "noop.buildpack", - "name": "NOOP Buildpack", - "version": "noop.buildpack.later-version", - "homepage": "http://geocities.com/cool-bp" - }, - { - "id": "noop.buildpack", - "name": "NOOP Buildpack", - "version": "noop.buildpack.version" - }, - { - "id": "read/env", - "name": "Read Env Buildpack", - "version": "read-env-version" - }, - { - "id": "simple/layers", - "name": "Simple Layers Buildpack", - "version": "simple-layers-version" - }, - { - "id": "simple/nested-level-1", - "name": "Nested Level One Buildpack", - "version": "nested-l1-version" - }, - { - "id": "simple/nested-level-2", - "name": "Nested Level Two Buildpack", - "version": "nested-l2-version" - } - ], - "detection_order": [ - { - "buildpacks": [ - { - "id": "simple/nested-level-1", - "buildpacks": [ - { - "id": "simple/nested-level-2", - "version": "nested-l2-version", - "buildpacks": [ - { - "id": "simple/layers", - "version": "simple-layers-version" - } - ] - } - ] - }, - { - "id": "read/env", - "version": "read-env-version", - "optional": true - } - ] - } - ] - }, - "local_info": { - "created_by": { - "name": "Pack CLI", - "version": "{{.pack_version}}" - }, - "stack": { - "id": "pack.test.stack", - "mixins": [ - "mixinA", - "netcat", - "mixin3", - "build:mixinTwo" - ] - }, - "lifecycle": { - "version": 
"{{.lifecycle_version}}", - "buildpack_apis": { - "deprecated": {{.deprecated_buildpack_apis}}, - "supported": {{.supported_buildpack_apis}} - }, - "platform_apis": { - "deprecated": {{.deprecated_platform_apis}}, - "supported": {{.supported_platform_apis}} - } - }, - "run_images": [ - { - "name": "some-registry.com/pack-test/run1", - "user_configured": true - }, - { - "name": "pack-test/run" - }, - { - "name": "{{.run_image_mirror}}" - } - ], - "buildpacks": [ - { - "id": "noop.buildpack", - "name": "NOOP Buildpack", - "version": "noop.buildpack.later-version", - "homepage": "http://geocities.com/cool-bp" - }, - { - "id": "noop.buildpack", - "name": "NOOP Buildpack", - "version": "noop.buildpack.version" - }, - { - "id": "read/env", - "name": "Read Env Buildpack", - "version": "read-env-version" - }, - { - "id": "simple/layers", - "name": "Simple Layers Buildpack", - "version": "simple-layers-version" - }, - { - "id": "simple/nested-level-1", - "name": "Nested Level One Buildpack", - "version": "nested-l1-version" - }, - { - "id": "simple/nested-level-2", - "name": "Nested Level Two Buildpack", - "version": "nested-l2-version" - } - ], - "detection_order": [ - { - "buildpacks": [ - { - "id": "simple/nested-level-1", - "buildpacks": [ - { - "id": "simple/nested-level-2", - "version": "nested-l2-version", - "buildpacks": [ - { - "id": "simple/layers", - "version": "simple-layers-version" - } - ] - } - ] - }, - { - "id": "read/env", - "version": "read-env-version", - "optional": true - } - ] - } - ] - } -} diff --git a/acceptance/testdata/pack_previous_fixtures_overrides/inspect_builder_nested_output_toml.txt b/acceptance/testdata/pack_previous_fixtures_overrides/inspect_builder_nested_output_toml.txt deleted file mode 100644 index db3861486e..0000000000 --- a/acceptance/testdata/pack_previous_fixtures_overrides/inspect_builder_nested_output_toml.txt +++ /dev/null @@ -1,163 +0,0 @@ -builder_name = "{{.builder_name}}" -trusted = false -default = false - -[remote_info] 
- - [remote_info.created_by] - Name = "Pack CLI" - Version = "{{.pack_version}}" - - [remote_info.stack] - id = "pack.test.stack" - mixins = ["mixinA", "netcat", "mixin3", "build:mixinTwo"] - - [remote_info.lifecycle] - version = "{{.lifecycle_version}}" - - [remote_info.lifecycle.buildpack_apis] - deprecated = {{.deprecated_buildpack_apis}} - supported = {{.supported_buildpack_apis}} - - [remote_info.lifecycle.platform_apis] - deprecated = {{.deprecated_platform_apis}} - supported = {{.supported_platform_apis}} - - [[remote_info.run_images]] - name = "some-registry.com/pack-test/run1" - user_configured = true - - [[remote_info.run_images]] - name = "pack-test/run" - - [[remote_info.run_images]] - name = "{{.run_image_mirror}}" - - [[remote_info.buildpacks]] - id = "noop.buildpack" - name = "NOOP Buildpack" - version = "noop.buildpack.later-version" - homepage = "http://geocities.com/cool-bp" - - [[remote_info.buildpacks]] - id = "noop.buildpack" - name = "NOOP Buildpack" - version = "noop.buildpack.version" - - [[remote_info.buildpacks]] - id = "read/env" - name = "Read Env Buildpack" - version = "read-env-version" - - [[remote_info.buildpacks]] - id = "simple/layers" - name = "Simple Layers Buildpack" - version = "simple-layers-version" - - [[remote_info.buildpacks]] - id = "simple/nested-level-1" - name = "Nested Level One Buildpack" - version = "nested-l1-version" - - [[remote_info.buildpacks]] - id = "simple/nested-level-2" - name = "Nested Level Two Buildpack" - version = "nested-l2-version" - - [[remote_info.detection_order]] - - [[remote_info.detection_order.buildpacks]] - id = "simple/nested-level-1" - - [[remote_info.detection_order.buildpacks.buildpacks]] - id = "simple/nested-level-2" - version = "nested-l2-version" - - [[remote_info.detection_order.buildpacks.buildpacks.buildpacks]] - id = "simple/layers" - version = "simple-layers-version" - - [[remote_info.detection_order.buildpacks]] - id = "read/env" - version = "read-env-version" - optional = true 
- -[local_info] - - [local_info.created_by] - Name = "Pack CLI" - Version = "{{.pack_version}}" - - [local_info.stack] - id = "pack.test.stack" - mixins = ["mixinA", "netcat", "mixin3", "build:mixinTwo"] - - [local_info.lifecycle] - version = "{{.lifecycle_version}}" - - [local_info.lifecycle.buildpack_apis] - deprecated = {{.deprecated_buildpack_apis}} - supported = {{.supported_buildpack_apis}} - - [local_info.lifecycle.platform_apis] - deprecated = {{.deprecated_platform_apis}} - supported = {{.supported_platform_apis}} - - [[local_info.run_images]] - name = "some-registry.com/pack-test/run1" - user_configured = true - - [[local_info.run_images]] - name = "pack-test/run" - - [[local_info.run_images]] - name = "{{.run_image_mirror}}" - - [[local_info.buildpacks]] - id = "noop.buildpack" - name = "NOOP Buildpack" - version = "noop.buildpack.later-version" - homepage = "http://geocities.com/cool-bp" - - [[local_info.buildpacks]] - id = "noop.buildpack" - name = "NOOP Buildpack" - version = "noop.buildpack.version" - - [[local_info.buildpacks]] - id = "read/env" - name = "Read Env Buildpack" - version = "read-env-version" - - [[local_info.buildpacks]] - id = "simple/layers" - name = "Simple Layers Buildpack" - version = "simple-layers-version" - - [[local_info.buildpacks]] - id = "simple/nested-level-1" - name = "Nested Level One Buildpack" - version = "nested-l1-version" - - [[local_info.buildpacks]] - id = "simple/nested-level-2" - name = "Nested Level Two Buildpack" - version = "nested-l2-version" - - [[local_info.detection_order]] - - [[local_info.detection_order.buildpacks]] - id = "simple/nested-level-1" - - [[local_info.detection_order.buildpacks.buildpacks]] - id = "simple/nested-level-2" - version = "nested-l2-version" - - [[local_info.detection_order.buildpacks.buildpacks.buildpacks]] - id = "simple/layers" - version = "simple-layers-version" - - [[local_info.detection_order.buildpacks]] - id = "read/env" - version = "read-env-version" - optional = true 
diff --git a/acceptance/testdata/pack_previous_fixtures_overrides/inspect_builder_nested_output_yaml.txt b/acceptance/testdata/pack_previous_fixtures_overrides/inspect_builder_nested_output_yaml.txt deleted file mode 100644 index 5e0a3df8b1..0000000000 --- a/acceptance/testdata/pack_previous_fixtures_overrides/inspect_builder_nested_output_yaml.txt +++ /dev/null @@ -1,116 +0,0 @@ -sharedbuilderinfo: - builder_name: {{.builder_name}} - trusted: false - default: false -remote_info: - created_by: - name: Pack CLI - version: {{.pack_version}} - stack: - id: pack.test.stack - mixins: - - mixinA - - netcat - - mixin3 - - build:mixinTwo - lifecycle: - version: {{.lifecycle_version}} - buildpack_apis: - deprecated: {{.deprecated_buildpack_apis}} - supported: {{.supported_buildpack_apis}} - platform_apis: - deprecated: {{.deprecated_platform_apis}} - supported: {{.supported_platform_apis}} - run_images: - - name: some-registry.com/pack-test/run1 - user_configured: true - - name: pack-test/run - - name: {{.run_image_mirror}} - buildpacks: - - id: noop.buildpack - name: NOOP Buildpack - version: noop.buildpack.later-version - homepage: http://geocities.com/cool-bp - - id: noop.buildpack - name: NOOP Buildpack - version: noop.buildpack.version - - id: read/env - name: Read Env Buildpack - version: read-env-version - - id: simple/layers - name: Simple Layers Buildpack - version: simple-layers-version - - id: simple/nested-level-1 - name: Nested Level One Buildpack - version: nested-l1-version - - id: simple/nested-level-2 - name: Nested Level Two Buildpack - version: nested-l2-version - detection_order: - - buildpacks: - - id: simple/nested-level-1 - buildpacks: - - id: simple/nested-level-2 - version: nested-l2-version - buildpacks: - - id: simple/layers - version: simple-layers-version - - id: read/env - version: read-env-version - optional: true -local_info: - created_by: - name: Pack CLI - version: {{.pack_version}} - stack: - id: pack.test.stack - mixins: - - mixinA - - 
netcat - - mixin3 - - build:mixinTwo - lifecycle: - version: {{.lifecycle_version}} - buildpack_apis: - deprecated: {{.deprecated_buildpack_apis}} - supported: {{.supported_buildpack_apis}} - platform_apis: - deprecated: {{.deprecated_platform_apis}} - supported: {{.supported_platform_apis}} - run_images: - - name: some-registry.com/pack-test/run1 - user_configured: true - - name: pack-test/run - - name: {{.run_image_mirror}} - buildpacks: - - id: noop.buildpack - name: NOOP Buildpack - version: noop.buildpack.later-version - homepage: http://geocities.com/cool-bp - - id: noop.buildpack - name: NOOP Buildpack - version: noop.buildpack.version - - id: read/env - name: Read Env Buildpack - version: read-env-version - - id: simple/layers - name: Simple Layers Buildpack - version: simple-layers-version - - id: simple/nested-level-1 - name: Nested Level One Buildpack - version: nested-l1-version - - id: simple/nested-level-2 - name: Nested Level Two Buildpack - version: nested-l2-version - detection_order: - - buildpacks: - - id: simple/nested-level-1 - buildpacks: - - id: simple/nested-level-2 - version: nested-l2-version - buildpacks: - - id: simple/layers - version: simple-layers-version - - id: read/env - version: read-env-version - optional: true diff --git a/acceptance/testdata/pack_previous_fixtures_overrides/inspect_builder_output.txt b/acceptance/testdata/pack_previous_fixtures_overrides/inspect_builder_output.txt deleted file mode 100644 index 6113a26f14..0000000000 --- a/acceptance/testdata/pack_previous_fixtures_overrides/inspect_builder_output.txt +++ /dev/null @@ -1,85 +0,0 @@ -Inspecting builder: '{{.builder_name}}' - -REMOTE: - -Created By: - Name: Pack CLI - Version: {{.pack_version}} - -Trusted: {{.trusted}} - -Stack: - ID: pack.test.stack - Mixins: - mixinA - netcat - mixin3 - build:mixinTwo - -Lifecycle: - Version: {{.lifecycle_version}} - Buildpack APIs: - Deprecated: {{ .deprecated_buildpack_apis }} - Supported: {{ .supported_buildpack_apis }} - 
Platform APIs: - Deprecated: {{ .deprecated_platform_apis }} - Supported: {{ .supported_platform_apis }} - -Run Images: - some-registry.com/pack-test/run1 (user-configured) - pack-test/run - {{.run_image_mirror}} - -Buildpacks: - ID NAME VERSION HOMEPAGE - noop.buildpack NOOP Buildpack noop.buildpack.later-version http://geocities.com/cool-bp - noop.buildpack NOOP Buildpack noop.buildpack.version - - read/env Read Env Buildpack read-env-version - - simple/layers Simple Layers Buildpack simple-layers-version - - -Detection Order: - └ Group #1: - ├ simple/layers - └ read/env@read-env-version (optional) - -LOCAL: - -Created By: - Name: Pack CLI - Version: {{.pack_version}} - -Trusted: {{.trusted}} - -Stack: - ID: pack.test.stack - Mixins: - mixinA - netcat - mixin3 - build:mixinTwo - -Lifecycle: - Version: {{.lifecycle_version}} - Buildpack APIs: - Deprecated: {{ .deprecated_buildpack_apis }} - Supported: {{ .supported_buildpack_apis }} - Platform APIs: - Deprecated: {{ .deprecated_platform_apis }} - Supported: {{ .supported_platform_apis }} - -Run Images: - some-registry.com/pack-test/run1 (user-configured) - pack-test/run - {{.run_image_mirror}} - -Buildpacks: - ID NAME VERSION HOMEPAGE - noop.buildpack NOOP Buildpack noop.buildpack.later-version http://geocities.com/cool-bp - noop.buildpack NOOP Buildpack noop.buildpack.version - - read/env Read Env Buildpack read-env-version - - simple/layers Simple Layers Buildpack simple-layers-version - - -Detection Order: - └ Group #1: - ├ simple/layers - └ read/env@read-env-version (optional) diff --git a/acceptance/testdata/pack_previous_fixtures_overrides/nested-level-1-buildpack_package.toml b/acceptance/testdata/pack_previous_fixtures_overrides/nested-level-1-buildpack_package.toml deleted file mode 100644 index 0d96f04457..0000000000 --- a/acceptance/testdata/pack_previous_fixtures_overrides/nested-level-1-buildpack_package.toml +++ /dev/null @@ -1,8 +0,0 @@ -[buildpack] -uri = "nested-level-1-buildpack.tgz" - 
-[[dependencies]] - image = "{{.simple_layers_buildpack}}" - -[[dependencies]] - image = "{{.nested_level_2_buildpack}}" \ No newline at end of file diff --git a/acceptance/testdata/pack_previous_fixtures_overrides/nested-level-2-buildpack_package.toml b/acceptance/testdata/pack_previous_fixtures_overrides/nested-level-2-buildpack_package.toml deleted file mode 100644 index 74b73cb1d3..0000000000 --- a/acceptance/testdata/pack_previous_fixtures_overrides/nested-level-2-buildpack_package.toml +++ /dev/null @@ -1,5 +0,0 @@ -[buildpack] -uri = "nested-level-2-buildpack.tgz" - -[[dependencies]] -image = "{{.simple_layers_buildpack}}" \ No newline at end of file diff --git a/acceptance/testdata/pack_previous_fixtures_overrides/nested_builder.toml b/acceptance/testdata/pack_previous_fixtures_overrides/nested_builder.toml deleted file mode 100644 index 4ab9d225bf..0000000000 --- a/acceptance/testdata/pack_previous_fixtures_overrides/nested_builder.toml +++ /dev/null @@ -1,55 +0,0 @@ -[[buildpacks]] - id = "read/env" - version = "read-env-version" - uri = "read-env-buildpack.tgz" - -[[buildpacks]] - # intentionally missing id/version as they are optional - uri = "noop-buildpack.tgz" - -[[buildpacks]] - # noop-buildpack-2 has the same id but a different version compared to noop-buildpack - uri = "noop-buildpack-2.tgz" - - -{{- if .simple_layers_buildpack}} -[[buildpacks]] - image = "{{.simple_layers_buildpack}}" - version = "simple-layers-version" -{{- end}} - -{{- if .nested_level_2_buildpack}} -[[buildpacks]] - image = "{{.nested_level_2_buildpack}}" -{{- end}} - -{{- if .nested_level_1_buildpack}} -[[buildpacks]] - image = "{{.nested_level_1_buildpack}}" -{{- end}} - -[[order]] -{{- if .package_id}} -[[order.group]] - id = "{{.package_id}}" - # intentionlly missing version to test support -{{- end}} - -[[order.group]] - id = "read/env" - version = "read-env-version" - optional = true - -[stack] - id = "pack.test.stack" - build-image = "pack-test/build" - run-image = 
"pack-test/run" - run-image-mirrors = ["{{.run_image_mirror}}"] - -[lifecycle] -{{- if .lifecycle_uri}} - uri = "{{.lifecycle_uri}}" -{{- end}} -{{- if .lifecycle_version}} - version = "{{.lifecycle_version}}" -{{- end}} \ No newline at end of file diff --git a/acceptance/testdata/pack_previous_fixtures_overrides/simple-layers-buildpack-different-sha_package.toml b/acceptance/testdata/pack_previous_fixtures_overrides/simple-layers-buildpack-different-sha_package.toml deleted file mode 100644 index 4ce41cd5c7..0000000000 --- a/acceptance/testdata/pack_previous_fixtures_overrides/simple-layers-buildpack-different-sha_package.toml +++ /dev/null @@ -1,2 +0,0 @@ -[buildpack] -uri = "simple-layers-buildpack-different-sha.tgz" \ No newline at end of file diff --git a/acceptance/testdata/pack_previous_fixtures_overrides/simple-layers-buildpack_package.toml b/acceptance/testdata/pack_previous_fixtures_overrides/simple-layers-buildpack_package.toml deleted file mode 100644 index 28694e6601..0000000000 --- a/acceptance/testdata/pack_previous_fixtures_overrides/simple-layers-buildpack_package.toml +++ /dev/null @@ -1,2 +0,0 @@ -[buildpack] -uri = "simple-layers-buildpack.tgz" \ No newline at end of file diff --git a/benchmarks/build_test.go b/benchmarks/build_test.go index 025ce798a7..929f7f120e 100644 --- a/benchmarks/build_test.go +++ b/benchmarks/build_test.go @@ -1,15 +1,15 @@ //go:build benchmarks -// +build benchmarks package benchmarks import ( "bytes" "fmt" + "os" "path/filepath" "testing" - dockerCli "github.com/docker/docker/client" + dockerCli "github.com/moby/moby/client" "github.com/pkg/errors" "github.com/spf13/cobra" @@ -21,14 +21,18 @@ import ( ) var ( - baseImg = "some-org/" + h.RandString(10) - trustedImg = baseImg + "-trusted-" - builder = "cnbs/sample-builder:bionic" - mockAppPath = filepath.Join("..", "acceptance", "testdata", "mock_app") + baseImg string + trustedImg string + builder string + mockAppPath string + paketoBuilder string + 
additionalMockAppPath string + additionalBuildapck string ) func BenchmarkBuild(b *testing.B) { - dockerClient, err := dockerCli.NewClientWithOpts(dockerCli.FromEnv, dockerCli.WithVersion("1.38")) + setEnv() + dockerClient, err := dockerCli.New(dockerCli.FromEnv) if err != nil { b.Error(errors.Wrap(err, "creating docker client")) } @@ -59,6 +63,16 @@ func BenchmarkBuild(b *testing.B) { } }) + b.Run("with Additional Buildpack", func(b *testing.B) { + for i := 0; i < b.N; i++ { + // perform the operation we're analyzing + cmd.SetArgs([]string{fmt.Sprintf("%s%d", trustedImg, i), "-p", additionalMockAppPath, "-B", paketoBuilder, "--buildpack", additionalBuildapck}) + if err = cmd.Execute(); err != nil { + b.Error(errors.Wrapf(err, "running build #%d", i)) + } + } + }) + // Cleanup for i := 0; i < b.N; i++ { if err = h.DockerRmi(dockerClient, fmt.Sprintf("%s%d", baseImg, i)); err != nil { @@ -84,3 +98,25 @@ func createCmd(b *testing.B, docker *dockerCli.Client) *cobra.Command { } return commands.Build(logger, cfg.Config{}, packClient) } + +func setEnv() { + if baseImg = os.Getenv("baseImg"); baseImg == "" { + baseImg = "some-org/" + h.RandString(10) + } + trustedImg = baseImg + "-trusted-" + if builder = os.Getenv("builder"); builder == "" { + builder = "cnbs/sample-builder:bionic" + } + if mockAppPath = os.Getenv("mockAppPath"); mockAppPath == "" { + mockAppPath = filepath.Join("..", "acceptance", "testdata", "mock_app") + } + if paketoBuilder = os.Getenv("paketoBuilder"); paketoBuilder == "" { + paketoBuilder = "paketobuildpacks/builder-jammy-base" + } + if additionalMockAppPath = os.Getenv("additionalMockAppPath"); additionalMockAppPath == "" { + additionalMockAppPath = filepath.Join("..", "samples", "apps", "java-maven") + } + if additionalBuildapck = os.Getenv("additionalBuildapck"); additionalBuildapck == "" { + additionalBuildapck = "paketobuildpacks/java:latest" + } +} diff --git a/builder/buildpack_identifier.go b/builder/buildpack_identifier.go new file mode 
100644 index 0000000000..b32e111dc7 --- /dev/null +++ b/builder/buildpack_identifier.go @@ -0,0 +1,5 @@ +package builder + +type BpIdentifier interface { + Id() string +} diff --git a/builder/config_reader.go b/builder/config_reader.go index 39ffc4fba0..116d56bb33 100644 --- a/builder/config_reader.go +++ b/builder/config_reader.go @@ -4,6 +4,7 @@ import ( "fmt" "os" "path/filepath" + "strings" "github.com/BurntSushi/toml" "github.com/pkg/errors" @@ -15,25 +16,31 @@ import ( // Config is a builder configuration file type Config struct { - Description string `toml:"description"` - Buildpacks BuildpackCollection `toml:"buildpacks"` - Order dist.Order `toml:"order"` - Stack StackConfig `toml:"stack"` - Lifecycle LifecycleConfig `toml:"lifecycle"` + Description string `toml:"description"` + Buildpacks ModuleCollection `toml:"buildpacks"` + Extensions ModuleCollection `toml:"extensions"` + Order dist.Order `toml:"order"` + OrderExtensions dist.Order `toml:"order-extensions"` + Stack StackConfig `toml:"stack"` + Lifecycle LifecycleConfig `toml:"lifecycle"` + Run RunConfig `toml:"run"` + Build BuildConfig `toml:"build"` + Targets []dist.Target `toml:"targets"` + System dist.System `toml:"system"` } -// BuildpackCollection is a list of BuildpackConfigs -type BuildpackCollection []BuildpackConfig +// ModuleCollection is a list of ModuleConfigs +type ModuleCollection []ModuleConfig -// BuildpackConfig details the configuration of a Buildpack -type BuildpackConfig struct { - dist.BuildpackInfo +// ModuleConfig details the configuration of a Buildpack or Extension +type ModuleConfig struct { + dist.ModuleInfo dist.ImageOrURI } -func (c *BuildpackConfig) DisplayString() string { - if c.BuildpackInfo.FullName() != "" { - return c.BuildpackInfo.FullName() +func (c *ModuleConfig) DisplayString() string { + if c.FullName() != "" { + return c.FullName() } return c.ImageOrURI.DisplayString() @@ -53,6 +60,41 @@ type LifecycleConfig struct { Version string `toml:"version"` } +// 
RunConfig set of run image configuration +type RunConfig struct { + Images []RunImageConfig `toml:"images"` +} + +// RunImageConfig run image id and mirrors +type RunImageConfig struct { + Image string `toml:"image"` + Mirrors []string `toml:"mirrors,omitempty"` +} + +// BuildConfig build image configuration +type BuildConfig struct { + Image string `toml:"image"` + Env []BuildConfigEnv `toml:"env"` +} + +type Suffix string + +const ( + NONE Suffix = "" + DEFAULT Suffix = "default" + OVERRIDE Suffix = "override" + APPEND Suffix = "append" + PREPEND Suffix = "prepend" +) + +type BuildConfigEnv struct { + Name string `toml:"name"` + Value string `toml:"value"` + Suffix Suffix `toml:"suffix,omitempty"` + Delim string `toml:"delim,omitempty"` + ExecEnv []string `toml:"exec-env,omitempty"` +} + // ReadConfig reads a builder configuration from the file path provided and returns the // configuration along with any warnings encountered while parsing func ReadConfig(path string) (config Config, warnings []string, err error) { @@ -71,26 +113,57 @@ func ReadConfig(path string) (config Config, warnings []string, err error) { warnings = append(warnings, fmt.Sprintf("empty %s definition", style.Symbol("order"))) } + config.mergeStackWithImages() + return config, warnings, nil } // ValidateConfig validates the config func ValidateConfig(c Config) error { - if c.Stack.ID == "" { - return errors.New("stack.id is required") + if c.Build.Image == "" && c.Stack.BuildImage == "" { + return errors.New("build.image is required") + } else if c.Build.Image != "" && c.Stack.BuildImage != "" && c.Build.Image != c.Stack.BuildImage { + return errors.New("build.image and stack.build-image do not match") } - if c.Stack.BuildImage == "" { - return errors.New("stack.build-image is required") + if len(c.Run.Images) == 0 && (c.Stack.RunImage == "" || c.Stack.ID == "") { + return errors.New("run.images are required") } - if c.Stack.RunImage == "" { - return errors.New("stack.run-image is required") + 
for _, runImage := range c.Run.Images { + if runImage.Image == "" { + return errors.New("run.images.image is required") + } + } + + if c.Stack.RunImage != "" && c.Run.Images[0].Image != c.Stack.RunImage { + return errors.New("run.images and stack.run-image do not match") } return nil } +func (c *Config) mergeStackWithImages() { + // RFC-0096 + if c.Build.Image != "" { + c.Stack.BuildImage = c.Build.Image + } else if c.Build.Image == "" && c.Stack.BuildImage != "" { + c.Build.Image = c.Stack.BuildImage + } + + if len(c.Run.Images) != 0 { + // use the first run image as the "stack" + c.Stack.RunImage = c.Run.Images[0].Image + c.Stack.RunImageMirrors = c.Run.Images[0].Mirrors + } else if len(c.Run.Images) == 0 && c.Stack.RunImage != "" { + c.Run.Images = []RunImageConfig{{ + Image: c.Stack.RunImage, + Mirrors: c.Stack.RunImageMirrors, + }, + } + } +} + // parseConfig reads a builder configuration from file func parseConfig(file *os.File) (Config, error) { builderConfig := Config{} @@ -111,3 +184,92 @@ func parseConfig(file *os.File) (Config, error) { return builderConfig, nil } + +func ParseBuildConfigEnv(env []BuildConfigEnv, path string) (envMap map[string]string, warnings []string, err error) { + envMap = map[string]string{} + var appendOrPrependWithoutDelim = 0 + for _, v := range env { + if name := v.Name; name == "" || len(name) == 0 { + return nil, nil, errors.Wrapf(errors.Errorf("env name should not be empty"), "parse contents of '%s'", path) + } + if val := v.Value; val == "" || len(val) == 0 { + warnings = append(warnings, fmt.Sprintf("empty value for key/name %s", style.Symbol(v.Name))) + } + suffixName, delimName, err := getBuildConfigEnvFileName(v) + if err != nil { + return envMap, warnings, err + } + if val, e := envMap[suffixName]; e { + warnings = append(warnings, fmt.Sprintf(errors.Errorf("overriding env with name: %s and suffix: %s from %s to %s", style.Symbol(v.Name), style.Symbol(string(v.Suffix)), style.Symbol(val), 
style.Symbol(v.Value)).Error(), "parse contents of '%s'", path)) + } + if val, e := envMap[delimName]; e { + warnings = append(warnings, fmt.Sprintf(errors.Errorf("overriding env with name: %s and delim: %s from %s to %s", style.Symbol(v.Name), style.Symbol(v.Delim), style.Symbol(val), style.Symbol(v.Value)).Error(), "parse contents of '%s'", path)) + } + if delim := v.Delim; (delim != "" || len(delim) != 0) && (delimName != "" || len(delimName) != 0) { + envMap[delimName] = delim + } + envMap[suffixName] = v.Value + } + + for k := range envMap { + name, suffix, err := getFilePrefixSuffix(k) + if err != nil { + continue + } + if _, ok := envMap[name+".delim"]; (suffix == "append" || suffix == "prepend") && !ok { + warnings = append(warnings, fmt.Sprintf(errors.Errorf("env with name/key %s with suffix %s must to have a %s value", style.Symbol(name), style.Symbol(suffix), style.Symbol("delim")).Error(), "parse contents of '%s'", path)) + appendOrPrependWithoutDelim++ + } + } + if appendOrPrependWithoutDelim > 0 { + return envMap, warnings, errors.Errorf("error parsing [[build.env]] in file '%s'", path) + } + return envMap, warnings, err +} + +func getBuildConfigEnvFileName(env BuildConfigEnv) (suffixName, delimName string, err error) { + suffix, err := getActionType(env.Suffix) + if err != nil { + return suffixName, delimName, err + } + if suffix == "" { + suffixName = env.Name + } else { + suffixName = env.Name + suffix + } + if delim := env.Delim; delim != "" || len(delim) != 0 { + delimName = env.Name + ".delim" + } + return suffixName, delimName, err +} + +func getActionType(suffix Suffix) (suffixString string, err error) { + const delim = "." 
+ switch suffix { + case NONE: + return "", nil + case DEFAULT: + return delim + string(DEFAULT), nil + case OVERRIDE: + return delim + string(OVERRIDE), nil + case APPEND: + return delim + string(APPEND), nil + case PREPEND: + return delim + string(PREPEND), nil + default: + return suffixString, errors.Errorf("unknown action type %s", style.Symbol(string(suffix))) + } +} + +func getFilePrefixSuffix(filename string) (prefix, suffix string, err error) { + val := strings.Split(filename, ".") + if len(val) <= 1 { + return val[0], suffix, errors.Errorf("Suffix might be null") + } + if len(val) == 2 { + suffix = val[1] + } else { + suffix = strings.Join(val[1:], ".") + } + return val[0], suffix, err +} diff --git a/builder/config_reader_test.go b/builder/config_reader_test.go index f08447780d..b5f6a7fc20 100644 --- a/builder/config_reader_test.go +++ b/builder/config_reader_test.go @@ -1,7 +1,6 @@ package builder_test import ( - "io/ioutil" "os" "path/filepath" "testing" @@ -29,7 +28,7 @@ func testConfig(t *testing.T, when spec.G, it spec.S) { ) it.Before(func() { - tmpDir, err = ioutil.TempDir("", "config-test") + tmpDir, err = os.MkdirTemp("", "config-test") h.AssertNil(t, err) builderConfigPath = filepath.Join(tmpDir, "builder.toml") }) @@ -40,7 +39,7 @@ func testConfig(t *testing.T, when spec.G, it spec.S) { when("file is written properly", func() { it.Before(func() { - h.AssertNil(t, ioutil.WriteFile(builderConfigPath, []byte(` + h.AssertNil(t, os.WriteFile(builderConfigPath, []byte(` [[buildpacks]] id = "buildpack/1" version = "0.0.1" @@ -55,6 +54,15 @@ func testConfig(t *testing.T, when spec.G, it spec.S) { [[order]] [[order.group]] id = "buildpack/1" + exec-env = ["production"] + +[[build.env]] + name = "key1" + value = "value1" + suffix = "append" + delim = "%" + exec-env = ["test"] + `), 0666)) }) @@ -77,6 +85,15 @@ func testConfig(t *testing.T, when spec.G, it spec.S) { h.AssertEq(t, builderConfig.Buildpacks[2].ImageName, "") h.AssertEq(t, 
builderConfig.Order[0].Group[0].ID, "buildpack/1") + h.AssertTrue(t, len(builderConfig.Order[0].Group[0].ExecEnv) == 1) + h.AssertEq(t, builderConfig.Order[0].Group[0].ExecEnv[0], "production") + + h.AssertTrue(t, len(builderConfig.Build.Env) == 1) + h.AssertEq(t, builderConfig.Build.Env[0].Name, "key1") + h.AssertEq(t, builderConfig.Build.Env[0].Value, "value1") + h.AssertEq(t, string(builderConfig.Build.Env[0].Suffix), "append") + h.AssertTrue(t, len(builderConfig.Build.Env[0].ExecEnv) == 1) + h.AssertEq(t, builderConfig.Build.Env[0].ExecEnv[0], "test") }) }) @@ -90,7 +107,7 @@ func testConfig(t *testing.T, when spec.G, it spec.S) { when("detecting warnings", func() { when("'groups' field is used", func() { it.Before(func() { - h.AssertNil(t, ioutil.WriteFile(builderConfigPath, []byte(` + h.AssertNil(t, os.WriteFile(builderConfigPath, []byte(` [[buildpacks]] id = "some.buildpack" version = "some.buildpack.version" @@ -115,7 +132,7 @@ func testConfig(t *testing.T, when spec.G, it spec.S) { when("'order' is missing or empty", func() { it.Before(func() { - h.AssertNil(t, ioutil.WriteFile(builderConfigPath, []byte(` + h.AssertNil(t, os.WriteFile(builderConfigPath, []byte(` [[buildpacks]] id = "some.buildpack" version = "some.buildpack.version" @@ -132,7 +149,7 @@ func testConfig(t *testing.T, when spec.G, it spec.S) { when("unknown buildpack key is present", func() { it.Before(func() { - h.AssertNil(t, ioutil.WriteFile(builderConfigPath, []byte(` + h.AssertNil(t, os.WriteFile(builderConfigPath, []byte(` [[buildpacks]] url = "noop-buildpack.tgz" `), 0666)) @@ -146,7 +163,7 @@ url = "noop-buildpack.tgz" when("unknown array table is present", func() { it.Before(func() { - h.AssertNil(t, ioutil.WriteFile(builderConfigPath, []byte(` + h.AssertNil(t, os.WriteFile(builderConfigPath, []byte(` [[buidlpack]] uri = "noop-buildpack.tgz" `), 0666)) @@ -158,6 +175,39 @@ uri = "noop-buildpack.tgz" }) }) }) + + when("system buildpack is defined", func() { + it.Before(func() { + 
h.AssertNil(t, os.WriteFile(builderConfigPath, []byte(` +[[system.pre.buildpacks]] + id = "id-1" + version = "1.0" + optional = false + +[[system.post.buildpacks]] + id = "id-2" + version = "2.0" + optional = true +`), 0666)) + }) + + it("returns a builder config", func() { + builderConfig, _, err := builder.ReadConfig(builderConfigPath) + h.AssertNil(t, err) + h.AssertEq(t, len(builderConfig.System.Pre.Buildpacks), 1) + h.AssertEq(t, len(builderConfig.System.Post.Buildpacks), 1) + + // Verify system.pre.buildpacks + h.AssertEq(t, builderConfig.System.Pre.Buildpacks[0].ID, "id-1") + h.AssertEq(t, builderConfig.System.Pre.Buildpacks[0].Version, "1.0") + h.AssertEq(t, builderConfig.System.Pre.Buildpacks[0].Optional, false) + + // Verify system.post.buildpacks + h.AssertEq(t, builderConfig.System.Post.Buildpacks[0].ID, "id-2") + h.AssertEq(t, builderConfig.System.Post.Buildpacks[0].Version, "2.0") + h.AssertEq(t, builderConfig.System.Post.Buildpacks[0].Optional, true) + }) + }) }) when("#ValidateConfig()", func() { @@ -167,13 +217,13 @@ uri = "noop-buildpack.tgz" testBuildImage = "test-build-image" ) - it("returns error if no id", func() { + it("returns error if no stack id and no run images", func() { config := builder.Config{ Stack: builder.StackConfig{ BuildImage: testBuildImage, RunImage: testRunImage, }} - h.AssertError(t, builder.ValidateConfig(config), "stack.id is required") + h.AssertError(t, builder.ValidateConfig(config), "run.images are required") }) it("returns error if no build image", func() { @@ -182,7 +232,7 @@ uri = "noop-buildpack.tgz" ID: testID, RunImage: testRunImage, }} - h.AssertError(t, builder.ValidateConfig(config), "stack.build-image is required") + h.AssertError(t, builder.ValidateConfig(config), "build.image is required") }) it("returns error if no run image", func() { @@ -191,7 +241,150 @@ uri = "noop-buildpack.tgz" ID: testID, BuildImage: testBuildImage, }} - h.AssertError(t, builder.ValidateConfig(config), "stack.run-image is 
required") + h.AssertError(t, builder.ValidateConfig(config), "run.images are required") + }) + + it("returns error if no run images image", func() { + config := builder.Config{ + Build: builder.BuildConfig{ + Image: testBuildImage, + }, + Run: builder.RunConfig{ + Images: []builder.RunImageConfig{{ + Image: "", + }}, + }} + h.AssertError(t, builder.ValidateConfig(config), "run.images.image is required") + }) + + it("returns error if no stack or run image", func() { + config := builder.Config{ + Build: builder.BuildConfig{ + Image: testBuildImage, + }} + h.AssertError(t, builder.ValidateConfig(config), "run.images are required") + }) + + it("returns error if no stack and no build image", func() { + config := builder.Config{ + Run: builder.RunConfig{ + Images: []builder.RunImageConfig{{ + Image: testBuildImage, + }}, + }} + h.AssertError(t, builder.ValidateConfig(config), "build.image is required") + }) + + it("returns error if no stack, run, or build image", func() { + config := builder.Config{} + h.AssertError(t, builder.ValidateConfig(config), "build.image is required") + }) + }) + when("#ParseBuildConfigEnv()", func() { + it("should return an error when name is not defined", func() { + _, _, err := builder.ParseBuildConfigEnv([]builder.BuildConfigEnv{ + { + Name: "", + Value: "vaiue", + }, + }, "") + h.AssertNotNil(t, err) + }) + it("should warn when the value is nil or empty string", func() { + env, warn, err := builder.ParseBuildConfigEnv([]builder.BuildConfigEnv{ + { + Name: "key", + Value: "", + Suffix: "override", + }, + }, "") + + h.AssertNotNil(t, warn) + h.AssertNil(t, err) + h.AssertMapContains[string, string](t, env, h.NewKeyValue[string, string]("key.override", "")) + }) + it("should return an error when unknown suffix is specified", func() { + _, _, err := builder.ParseBuildConfigEnv([]builder.BuildConfigEnv{ + { + Name: "key", + Value: "", + Suffix: "invalid", + }, + }, "") + + h.AssertNotNil(t, err) + }) + it("should override and show a warning 
when suffix or delim is defined multiple times", func() { + env, warn, err := builder.ParseBuildConfigEnv([]builder.BuildConfigEnv{ + { + Name: "key1", + Value: "value1", + Suffix: "append", + Delim: "%", + }, + { + Name: "key1", + Value: "value2", + Suffix: "append", + Delim: ",", + }, + { + Name: "key1", + Value: "value3", + Suffix: "default", + Delim: ";", + }, + { + Name: "key1", + Value: "value4", + Suffix: "prepend", + Delim: ":", + }, + }, "") + + h.AssertNotNil(t, warn) + h.AssertNil(t, err) + h.AssertMapContains[string, string]( + t, + env, + h.NewKeyValue[string, string]("key1.append", "value2"), + h.NewKeyValue[string, string]("key1.default", "value3"), + h.NewKeyValue[string, string]("key1.prepend", "value4"), + h.NewKeyValue[string, string]("key1.delim", ":"), + ) + h.AssertMapNotContains[string, string]( + t, + env, + h.NewKeyValue[string, string]("key1.append", "value1"), + h.NewKeyValue[string, string]("key1.delim", "%"), + h.NewKeyValue[string, string]("key1.delim", ","), + h.NewKeyValue[string, string]("key1.delim", ";"), + ) + }) + it("should return an error when `suffix` is defined as `append` or `prepend` without a `delim`", func() { + _, warn, err := builder.ParseBuildConfigEnv([]builder.BuildConfigEnv{ + { + Name: "key", + Value: "value", + Suffix: "append", + }, + }, "") + + h.AssertNotNil(t, warn) + h.AssertNotNil(t, err) + }) + it("when suffix is NONE or omitted should default to `override`", func() { + env, warn, err := builder.ParseBuildConfigEnv([]builder.BuildConfigEnv{ + { + Name: "key", + Value: "value", + Suffix: "", + }, + }, "") + + h.AssertNotNil(t, warn) + h.AssertNil(t, err) + h.AssertMapContains[string, string](t, env, h.NewKeyValue[string, string]("key", "value")) }) }) } diff --git a/builder/detection_order.go b/builder/detection_order.go index 2063f02645..07ff458816 100644 --- a/builder/detection_order.go +++ b/builder/detection_order.go @@ -5,7 +5,7 @@ import ( ) type DetectionOrderEntry struct { - dist.BuildpackRef 
`yaml:",inline"` + dist.ModuleRef `yaml:",inline"` Cyclical bool `json:"cyclic,omitempty" yaml:"cyclic,omitempty" toml:"cyclic,omitempty"` GroupDetectionOrder DetectionOrder `json:"buildpacks,omitempty" yaml:"buildpacks,omitempty" toml:"buildpacks,omitempty"` } diff --git a/buildpackage/config_reader.go b/buildpackage/config_reader.go index 8dbbafa276..ea5af009fe 100644 --- a/buildpackage/config_reader.go +++ b/buildpackage/config_reader.go @@ -17,8 +17,13 @@ const defaultOS = "linux" // Config encapsulates the possible configuration options for buildpackage creation. type Config struct { Buildpack dist.BuildpackURI `toml:"buildpack"` + Extension dist.BuildpackURI `toml:"extension"` Dependencies []dist.ImageOrURI `toml:"dependencies"` - Platform dist.Platform `toml:"platform"` + // deprecated + Platform dist.Platform `toml:"platform"` + + // Define targets for composite buildpacks + Targets []dist.Target `toml:"targets"` } func DefaultConfig() Config { @@ -32,6 +37,17 @@ func DefaultConfig() Config { } } +func DefaultExtensionConfig() Config { + return Config{ + Extension: dist.BuildpackURI{ + URI: ".", + }, + Platform: dist.Platform{ + OS: defaultOS, + }, + } +} + // NewConfigReader returns an instance of ConfigReader. It does not take any parameters. 
func NewConfigReader() *ConfigReader { return &ConfigReader{} @@ -61,8 +77,11 @@ func (r *ConfigReader) Read(path string) (Config, error) { ) } - if packageConfig.Buildpack.URI == "" { - return packageConfig, errors.Errorf("missing %s configuration", style.Symbol("buildpack.uri")) + if packageConfig.Buildpack.URI == "" && packageConfig.Extension.URI == "" { + if packageConfig.Buildpack.URI == "" { + return packageConfig, errors.Errorf("missing %s configuration", style.Symbol("buildpack.uri")) + } + return packageConfig, errors.Errorf("missing %s configuration", style.Symbol("extension.uri")) } if packageConfig.Platform.OS == "" { @@ -102,6 +121,17 @@ func (r *ConfigReader) Read(path string) (Config, error) { return packageConfig, nil } +func (r *ConfigReader) ReadBuildpackDescriptor(path string) (dist.BuildpackDescriptor, error) { + buildpackCfg := dist.BuildpackDescriptor{} + + _, err := toml.DecodeFile(path, &buildpackCfg) + if err != nil { + return dist.BuildpackDescriptor{}, err + } + + return buildpackCfg, nil +} + func validateURI(uri, relativeBaseDir string) error { locatorType, err := buildpack.GetLocatorType(uri, relativeBaseDir, nil) if err != nil { diff --git a/buildpackage/config_reader_test.go b/buildpackage/config_reader_test.go index b7a29ba693..eee0bdaae0 100644 --- a/buildpackage/config_reader_test.go +++ b/buildpackage/config_reader_test.go @@ -1,7 +1,6 @@ package buildpackage_test import ( - "io/ioutil" "os" "path/filepath" "testing" @@ -11,6 +10,7 @@ import ( "github.com/sclevine/spec/report" "github.com/buildpacks/pack/buildpackage" + "github.com/buildpacks/pack/pkg/dist" h "github.com/buildpacks/pack/testhelpers" ) @@ -26,7 +26,7 @@ func testBuildpackageConfigReader(t *testing.T, when spec.G, it spec.S) { it.Before(func() { var err error - tmpDir, err = ioutil.TempDir("", "buildpackage-config-test") + tmpDir, err = os.MkdirTemp("", "buildpackage-config-test") h.AssertNil(t, err) }) @@ -34,10 +34,38 @@ func testBuildpackageConfigReader(t 
*testing.T, when spec.G, it spec.S) { os.RemoveAll(tmpDir) }) + it("returns default buildpack config", func() { + expected := buildpackage.Config{ + Buildpack: dist.BuildpackURI{ + URI: ".", + }, + Platform: dist.Platform{ + OS: "linux", + }, + } + actual := buildpackage.DefaultConfig() + + h.AssertEq(t, actual, expected) + }) + + it("returns default extension config", func() { + expected := buildpackage.Config{ + Extension: dist.BuildpackURI{ + URI: ".", + }, + Platform: dist.Platform{ + OS: "linux", + }, + } + actual := buildpackage.DefaultExtensionConfig() + + h.AssertEq(t, actual, expected) + }) + it("returns correct config when provided toml file is valid", func() { configFile := filepath.Join(tmpDir, "package.toml") - err := ioutil.WriteFile(configFile, []byte(validPackageToml), os.ModePerm) + err := os.WriteFile(configFile, []byte(validPackageToml), os.ModePerm) h.AssertNil(t, err) packageConfigReader := buildpackage.NewConfigReader() @@ -54,7 +82,7 @@ func testBuildpackageConfigReader(t *testing.T, when spec.G, it spec.S) { it("returns a config with 'linux' as default when platform is missing", func() { configFile := filepath.Join(tmpDir, "package.toml") - err := ioutil.WriteFile(configFile, []byte(validPackageWithoutPlatformToml), os.ModePerm) + err := os.WriteFile(configFile, []byte(validPackageWithoutPlatformToml), os.ModePerm) h.AssertNil(t, err) packageConfigReader := buildpackage.NewConfigReader() @@ -68,7 +96,7 @@ func testBuildpackageConfigReader(t *testing.T, when spec.G, it spec.S) { it("returns an error when toml decode fails", func() { configFile := filepath.Join(tmpDir, "package.toml") - err := ioutil.WriteFile(configFile, []byte(brokenPackageToml), os.ModePerm) + err := os.WriteFile(configFile, []byte(brokenPackageToml), os.ModePerm) h.AssertNil(t, err) packageConfigReader := buildpackage.NewConfigReader() @@ -82,7 +110,7 @@ func testBuildpackageConfigReader(t *testing.T, when spec.G, it spec.S) { it("returns an error when buildpack uri is 
invalid", func() { configFile := filepath.Join(tmpDir, "package.toml") - err := ioutil.WriteFile(configFile, []byte(invalidBPURIPackageToml), os.ModePerm) + err := os.WriteFile(configFile, []byte(invalidBPURIPackageToml), os.ModePerm) h.AssertNil(t, err) packageConfigReader := buildpackage.NewConfigReader() @@ -96,7 +124,7 @@ func testBuildpackageConfigReader(t *testing.T, when spec.G, it spec.S) { it("returns an error when platform os is invalid", func() { configFile := filepath.Join(tmpDir, "package.toml") - err := ioutil.WriteFile(configFile, []byte(invalidPlatformOSPackageToml), os.ModePerm) + err := os.WriteFile(configFile, []byte(invalidPlatformOSPackageToml), os.ModePerm) h.AssertNil(t, err) packageConfigReader := buildpackage.NewConfigReader() @@ -110,7 +138,7 @@ func testBuildpackageConfigReader(t *testing.T, when spec.G, it spec.S) { it("returns an error when dependency uri is invalid", func() { configFile := filepath.Join(tmpDir, "package.toml") - err := ioutil.WriteFile(configFile, []byte(invalidDepURIPackageToml), os.ModePerm) + err := os.WriteFile(configFile, []byte(invalidDepURIPackageToml), os.ModePerm) h.AssertNil(t, err) packageConfigReader := buildpackage.NewConfigReader() @@ -124,7 +152,7 @@ func testBuildpackageConfigReader(t *testing.T, when spec.G, it spec.S) { it("returns an error when unknown array table is present", func() { configFile := filepath.Join(tmpDir, "package.toml") - err := ioutil.WriteFile(configFile, []byte(invalidDepTablePackageToml), os.ModePerm) + err := os.WriteFile(configFile, []byte(invalidDepTablePackageToml), os.ModePerm) h.AssertNil(t, err) packageConfigReader := buildpackage.NewConfigReader() @@ -140,7 +168,7 @@ func testBuildpackageConfigReader(t *testing.T, when spec.G, it spec.S) { it("returns an error when unknown buildpack key is present", func() { configFile := filepath.Join(tmpDir, "package.toml") - err := ioutil.WriteFile(configFile, []byte(unknownBPKeyPackageToml), os.ModePerm) + err := 
os.WriteFile(configFile, []byte(unknownBPKeyPackageToml), os.ModePerm) h.AssertNil(t, err) packageConfigReader := buildpackage.NewConfigReader() @@ -155,7 +183,7 @@ func testBuildpackageConfigReader(t *testing.T, when spec.G, it spec.S) { it("returns an error when multiple unknown keys are present", func() { configFile := filepath.Join(tmpDir, "package.toml") - err := ioutil.WriteFile(configFile, []byte(multipleUnknownKeysPackageToml), os.ModePerm) + err := os.WriteFile(configFile, []byte(multipleUnknownKeysPackageToml), os.ModePerm) h.AssertNil(t, err) packageConfigReader := buildpackage.NewConfigReader() @@ -171,7 +199,7 @@ func testBuildpackageConfigReader(t *testing.T, when spec.G, it spec.S) { it("returns an error when both dependency options are configured", func() { configFile := filepath.Join(tmpDir, "package.toml") - err := ioutil.WriteFile(configFile, []byte(conflictingDependencyKeysPackageToml), os.ModePerm) + err := os.WriteFile(configFile, []byte(conflictingDependencyKeysPackageToml), os.ModePerm) h.AssertNil(t, err) packageConfigReader := buildpackage.NewConfigReader() @@ -184,7 +212,7 @@ func testBuildpackageConfigReader(t *testing.T, when spec.G, it spec.S) { it("returns an error no buildpack is configured", func() { configFile := filepath.Join(tmpDir, "package.toml") - err := ioutil.WriteFile(configFile, []byte(missingBuildpackPackageToml), os.ModePerm) + err := os.WriteFile(configFile, []byte(missingBuildpackPackageToml), os.ModePerm) h.AssertNil(t, err) packageConfigReader := buildpackage.NewConfigReader() @@ -194,6 +222,39 @@ func testBuildpackageConfigReader(t *testing.T, when spec.G, it spec.S) { h.AssertError(t, err, "missing 'buildpack.uri' configuration") }) }) + + when("#ReadBuildpackDescriptor", func() { + var tmpDir string + + it.Before(func() { + var err error + tmpDir, err = os.MkdirTemp("", "buildpack-descriptor-test") + h.AssertNil(t, err) + }) + + it.After(func() { + _ = os.RemoveAll(tmpDir) + }) + + it("returns exec-env when a 
composite buildpack toml file is provided", func() { + buildPackTomlFilePath := filepath.Join(tmpDir, "buildpack-1.toml") + + err := os.WriteFile(buildPackTomlFilePath, []byte(validCompositeBuildPackTomlWithExecEnv), os.ModePerm) + h.AssertNil(t, err) + + packageConfigReader := buildpackage.NewConfigReader() + + buildpackDescriptor, err := packageConfigReader.ReadBuildpackDescriptor(buildPackTomlFilePath) + h.AssertNil(t, err) + + h.AssertTrue(t, len(buildpackDescriptor.Order()) == 1) + h.AssertTrue(t, len(buildpackDescriptor.Order()[0].Group) == 2) + h.AssertTrue(t, len(buildpackDescriptor.Order()[0].Group[0].ExecEnv) == 1) + h.AssertTrue(t, len(buildpackDescriptor.Order()[0].Group[1].ExecEnv) == 1) + h.AssertEq(t, buildpackDescriptor.Order()[0].Group[0].ExecEnv[0], "production.1") + h.AssertEq(t, buildpackDescriptor.Order()[0].Group[1].ExecEnv[0], "production.2") + }) + }) } const validPackageToml = ` @@ -278,3 +339,24 @@ const missingBuildpackPackageToml = ` [[dependencies]] uri = "bp/b" ` + +const validCompositeBuildPackTomlWithExecEnv = ` +api = "0.15" + +[buildpack] +id = "samples/hello-universe" +version = "0.0.1" +name = "Hello Universe Buildpack" + +# Order used for detection +[[order]] +[[order.group]] +id = "samples/hello-world" +version = "0.0.1" +exec-env = ["production.1"] + +[[order.group]] +id = "samples/hello-moon" +version = "0.0.1" +exec-env = ["production.2"] +` diff --git a/cmd/cmd.go b/cmd/cmd.go index 9cf792c4bd..9b1c391366 100644 --- a/cmd/cmd.go +++ b/cmd/cmd.go @@ -23,8 +23,9 @@ type ConfigurableLogger interface { WantVerbose(f bool) } -//nolint:staticcheck // NewPackCommand generates a Pack command +// +//nolint:staticcheck func NewPackCommand(logger ConfigurableLogger) (*cobra.Command, error) { cobra.EnableCommandSorting = false cfg, cfgPath, err := initConfig() @@ -42,15 +43,16 @@ func NewPackCommand(logger ConfigurableLogger) (*cobra.Command, error) { Short: "CLI for building apps using Cloud Native Buildpacks", PersistentPreRun: 
func(cmd *cobra.Command, args []string) { if fs := cmd.Flags(); fs != nil { - if flag, err := fs.GetBool("no-color"); err == nil && flag { - color.Disable(flag) - } - - _, canDisplayColor := term.IsTerminal(logging.GetWriterForLevel(logger, logging.InfoLevel)) - if !canDisplayColor { - color.Disable(true) + if forceColor, err := fs.GetBool("force-color"); err == nil && !forceColor { + if flag, err := fs.GetBool("no-color"); err == nil && flag { + color.Disable(flag) + } + + _, canDisplayColor := term.IsTerminal(logging.GetWriterForLevel(logger, logging.InfoLevel)) + if !canDisplayColor { + color.Disable(true) + } } - if flag, err := fs.GetBool("quiet"); err == nil { logger.WantQuiet(flag) } @@ -65,6 +67,7 @@ func NewPackCommand(logger ConfigurableLogger) (*cobra.Command, error) { } rootCmd.PersistentFlags().Bool("no-color", false, "Disable color output") + rootCmd.PersistentFlags().Bool("force-color", false, "Force color output") rootCmd.PersistentFlags().Bool("timestamps", false, "Enable timestamps in output") rootCmd.PersistentFlags().BoolP("quiet", "q", false, "Show less output") rootCmd.PersistentFlags().BoolP("verbose", "v", false, "Show more output") @@ -75,6 +78,7 @@ func NewPackCommand(logger ConfigurableLogger) (*cobra.Command, error) { rootCmd.AddCommand(commands.Build(logger, cfg, packClient)) rootCmd.AddCommand(commands.NewBuilderCommand(logger, cfg, packClient)) rootCmd.AddCommand(commands.NewBuildpackCommand(logger, cfg, packClient, buildpackage.NewConfigReader())) + rootCmd.AddCommand(commands.NewExtensionCommand(logger, cfg, packClient, buildpackage.NewConfigReader())) rootCmd.AddCommand(commands.NewConfigCommand(logger, cfg, cfgPath, packClient)) rootCmd.AddCommand(commands.InspectImage(logger, imagewriter.NewFactory(), cfg, packClient)) rootCmd.AddCommand(commands.NewStackCommand(logger)) @@ -92,7 +96,6 @@ func NewPackCommand(logger ConfigurableLogger) (*cobra.Command, error) { rootCmd.AddCommand(commands.ListTrustedBuilders(logger, cfg)) 
rootCmd.AddCommand(commands.CreateBuilder(logger, cfg, packClient)) rootCmd.AddCommand(commands.PackageBuildpack(logger, cfg, packClient, buildpackage.NewConfigReader())) - rootCmd.AddCommand(commands.SuggestStacks(logger)) if cfg.Experimental { rootCmd.AddCommand(commands.AddBuildpackRegistry(logger, cfg, cfgPath)) @@ -101,6 +104,7 @@ func NewPackCommand(logger ConfigurableLogger) (*cobra.Command, error) { rootCmd.AddCommand(commands.SetDefaultRegistry(logger, cfg, cfgPath)) rootCmd.AddCommand(commands.RemoveRegistry(logger, cfg, cfgPath)) rootCmd.AddCommand(commands.YankBuildpack(logger, cfg, packClient)) + rootCmd.AddCommand(commands.NewManifestCommand(logger, packClient)) } packHome, err := config.PackHome() @@ -134,9 +138,19 @@ func initConfig() (config.Config, string, error) { } func initClient(logger logging.Logger, cfg config.Config) (*client.Client, error) { + if err := client.ProcessDockerContext(logger); err != nil { + return nil, err + } + dc, err := tryInitSSHDockerClient() if err != nil { return nil, err } - return client.NewClient(client.WithLogger(logger), client.WithExperimental(cfg.Experimental), client.WithRegistryMirrors(cfg.RegistryMirrors), client.WithDockerClient(dc)) + + // If we got a docker client from SSH, use it directly + if dc != nil { + return client.NewClient(client.WithLogger(logger), client.WithExperimental(cfg.Experimental), client.WithRegistryMirrors(cfg.RegistryMirrors), client.WithDockerClient(dc)) + } + + return client.NewClient(client.WithLogger(logger), client.WithExperimental(cfg.Experimental), client.WithRegistryMirrors(cfg.RegistryMirrors)) } diff --git a/cmd/docker_init.go b/cmd/docker_init.go index 51b33527c5..13b5b892ad 100644 --- a/cmd/docker_init.go +++ b/cmd/docker_init.go @@ -12,15 +12,14 @@ import ( "os" "strings" - dockerClient "github.com/docker/docker/client" + dockerClient "github.com/moby/moby/client" "golang.org/x/crypto/ssh" "golang.org/x/term" "github.com/buildpacks/pack/internal/sshdialer" - 
"github.com/buildpacks/pack/pkg/client" ) -func tryInitSSHDockerClient() (dockerClient.CommonAPIClient, error) { +func tryInitSSHDockerClient() (*dockerClient.Client, error) { dockerHost := os.Getenv("DOCKER_HOST") _url, err := url.Parse(dockerHost) isSSH := err == nil && _url.Scheme == "ssh" @@ -50,13 +49,12 @@ func tryInitSSHDockerClient() (dockerClient.CommonAPIClient, error) { } dockerClientOpts := []dockerClient.Opt{ - dockerClient.WithVersion(client.DockerAPIVersion), dockerClient.WithHTTPClient(httpClient), - dockerClient.WithHost("http://dummy/"), + dockerClient.WithHost("http://dummy"), dockerClient.WithDialContext(dialContext), } - return dockerClient.NewClientWithOpts(dockerClientOpts...) + return dockerClient.New(dockerClientOpts...) } // readSecret prompts for a secret and returns value input by user from stdin @@ -70,7 +68,7 @@ func readSecret(prompt string) (pw []byte, err error) { fmt.Fprint(os.Stderr, prompt) pw, err = term.ReadPassword(fd) fmt.Fprintln(os.Stderr) - return + return pw, err } var b [1]byte diff --git a/go.mod b/go.mod index d241ce929a..52246931df 100644 --- a/go.mod +++ b/go.mod @@ -1,93 +1,159 @@ module github.com/buildpacks/pack require ( - github.com/BurntSushi/toml v1.1.0 + github.com/BurntSushi/toml v1.5.0 github.com/Masterminds/semver v1.5.0 + github.com/Microsoft/go-winio v0.6.2 github.com/apex/log v1.9.0 - github.com/buildpacks/imgutil v0.0.0-20211203200417-76206845baac - github.com/buildpacks/lifecycle v0.13.5 - github.com/docker/cli v20.10.14+incompatible - github.com/docker/docker v20.10.14+incompatible - github.com/docker/go-connections v0.4.0 - github.com/dustin/go-humanize v1.0.0 - github.com/gdamore/tcell/v2 v2.5.0 - github.com/ghodss/yaml v1.0.0 + github.com/buildpacks/imgutil v0.0.0-20251202182233-51c1c8c186ea + github.com/buildpacks/lifecycle v0.21.0 + github.com/chainguard-dev/kaniko v1.25.7 + github.com/containerd/errdefs v1.0.0 + github.com/docker/cli v29.2.1+incompatible + github.com/docker/docker 
v28.5.2+incompatible + github.com/dustin/go-humanize v1.0.1 + github.com/gdamore/tcell/v2 v2.13.8 + github.com/go-git/go-git/v5 v5.16.5 github.com/golang/mock v1.6.0 - github.com/google/go-cmp v0.5.7 - github.com/google/go-containerregistry v0.8.0 + github.com/google/go-cmp v0.7.0 + github.com/google/go-containerregistry v0.20.7 github.com/google/go-github/v30 v30.1.0 github.com/hectane/go-acl v0.0.0-20190604041725-da78bae5fc95 github.com/heroku/color v0.0.6 github.com/mitchellh/ioprogress v0.0.0-20180201004757-6a23b12fa88e - github.com/onsi/gomega v1.19.0 - github.com/opencontainers/image-spec v1.0.2 - github.com/pelletier/go-toml v1.9.4 + github.com/moby/go-archive v0.2.0 + github.com/moby/moby/api v1.53.0 + github.com/moby/moby/client v0.2.2 + github.com/onsi/gomega v1.39.1 + github.com/opencontainers/go-digest v1.0.0 + github.com/opencontainers/image-spec v1.1.1 + github.com/pelletier/go-toml v1.9.5 github.com/pkg/errors v0.9.1 - github.com/rivo/tview v0.0.0-20210624165335-29d673af0ce2 - github.com/sabhiram/go-gitignore v0.0.0-20201211074657-223ce5d391b0 + github.com/rivo/tview v0.42.0 + github.com/sabhiram/go-gitignore v0.0.0-20210923224102-525f6e181f06 github.com/sclevine/spec v1.4.0 - github.com/spf13/cobra v1.4.0 - golang.org/x/crypto v0.0.0-20210817164053-32db794688a5 - golang.org/x/mod v0.5.1 - golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8 - golang.org/x/term v0.0.0-20210927222741-03fcf44c2211 - gopkg.in/natefinch/npipe.v2 v2.0.0-20160621034901-c1b8fa8bdcce - gopkg.in/src-d/go-git.v4 v4.13.1 - gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b + github.com/spf13/cobra v1.10.2 + golang.org/x/crypto v0.48.0 + golang.org/x/mod v0.33.0 + golang.org/x/oauth2 v0.35.0 + golang.org/x/sync v0.19.0 + golang.org/x/sys v0.41.0 + golang.org/x/term v0.40.0 + golang.org/x/text v0.34.0 + gopkg.in/yaml.v3 v3.0.1 ) require ( - github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 // indirect - github.com/Microsoft/go-winio v0.5.1 // indirect - 
github.com/Microsoft/hcsshim v0.8.23 // indirect - github.com/bits-and-blooms/bitset v1.2.0 // indirect - github.com/containerd/cgroups v1.0.1 // indirect - github.com/containerd/containerd v1.5.8 // indirect - github.com/containerd/stargz-snapshotter/estargz v0.10.1 // indirect - github.com/docker/distribution v2.7.1+incompatible // indirect - github.com/docker/docker-credential-helpers v0.6.4 // indirect - github.com/docker/go-units v0.4.0 // indirect - github.com/emirpasic/gods v1.12.0 // indirect - github.com/gdamore/encoding v1.0.0 // indirect + cyphar.com/go-pathrs v0.2.1 // indirect + dario.cat/mergo v1.0.2 // indirect + github.com/Azure/azure-sdk-for-go v68.0.0+incompatible // indirect + github.com/Azure/go-ansiterm v0.0.0-20250102033503-faa5f7b0171c // indirect + github.com/Azure/go-autorest v14.2.0+incompatible // indirect + github.com/Azure/go-autorest/autorest v0.11.30 // indirect + github.com/Azure/go-autorest/autorest/adal v0.9.24 // indirect + github.com/Azure/go-autorest/autorest/azure/auth v0.5.13 // indirect + github.com/Azure/go-autorest/autorest/azure/cli v0.4.7 // indirect + github.com/Azure/go-autorest/autorest/date v0.3.1 // indirect + github.com/Azure/go-autorest/logger v0.2.2 // indirect + github.com/Azure/go-autorest/tracing v0.6.1 // indirect + github.com/ProtonMail/go-crypto v1.3.0 // indirect + github.com/agext/levenshtein v1.2.3 // indirect + github.com/aws/aws-sdk-go-v2 v1.41.1 // indirect + github.com/aws/aws-sdk-go-v2/config v1.32.7 // indirect + github.com/aws/aws-sdk-go-v2/credentials v1.19.7 // indirect + github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.17 // indirect + github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.17 // indirect + github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.17 // indirect + github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4 // indirect + github.com/aws/aws-sdk-go-v2/service/ecr v1.51.2 // indirect + github.com/aws/aws-sdk-go-v2/service/ecrpublic v1.38.2 // indirect + 
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.4 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.17 // indirect + github.com/aws/aws-sdk-go-v2/service/signin v1.0.5 // indirect + github.com/aws/aws-sdk-go-v2/service/sso v1.30.9 // indirect + github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.13 // indirect + github.com/aws/aws-sdk-go-v2/service/sts v1.41.6 // indirect + github.com/aws/smithy-go v1.24.0 // indirect + github.com/awslabs/amazon-ecr-credential-helper/ecr-login v0.11.0 // indirect + github.com/beorn7/perks v1.0.1 // indirect + github.com/cespare/xxhash/v2 v2.3.0 // indirect + github.com/chrismellard/docker-credential-acr-env v0.0.0-20230304212654-82a0ddb27589 // indirect + github.com/cloudflare/circl v1.6.1 // indirect + github.com/containerd/errdefs/pkg v0.3.0 // indirect + github.com/containerd/log v0.1.0 // indirect + github.com/containerd/stargz-snapshotter/estargz v0.18.1 // indirect + github.com/containerd/typeurl/v2 v2.2.3 // indirect + github.com/cyphar/filepath-securejoin v0.6.0 // indirect + github.com/dimchansky/utfbom v1.1.1 // indirect + github.com/distribution/reference v0.6.0 // indirect + github.com/docker/distribution v2.8.3+incompatible // indirect + github.com/docker/docker-credential-helpers v0.9.4 // indirect + github.com/docker/go-connections v0.6.0 // indirect + github.com/docker/go-metrics v0.0.1 // indirect + github.com/docker/go-units v0.5.0 // indirect + github.com/emirpasic/gods v1.18.1 // indirect + github.com/felixge/httpsnoop v1.0.4 // indirect + github.com/gdamore/encoding v1.0.1 // indirect + github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect + github.com/go-git/go-billy/v5 v5.7.0 // indirect + github.com/go-logr/logr v1.4.3 // indirect + github.com/go-logr/stdr v1.2.2 // indirect + github.com/go-viper/mapstructure/v2 v2.4.0 // indirect github.com/gogo/protobuf v1.3.2 // indirect - github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // 
indirect - github.com/golang/protobuf v1.5.2 // indirect - github.com/google/go-querystring v1.0.0 // indirect - github.com/inconshreveable/mousetrap v1.0.0 // indirect + github.com/golang-jwt/jwt/v4 v4.5.2 // indirect + github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect + github.com/google/go-querystring v1.1.0 // indirect + github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 // indirect + github.com/google/uuid v1.6.0 // indirect + github.com/gorilla/mux v1.8.1 // indirect + github.com/inconshreveable/mousetrap v1.1.0 // indirect github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect - github.com/kevinburke/ssh_config v0.0.0-20190725054713-01f96b0aa0cd // indirect - github.com/klauspost/compress v1.13.6 // indirect - github.com/lucasb-eyer/go-colorful v1.2.0 // indirect - github.com/mattn/go-colorable v0.1.12 // indirect - github.com/mattn/go-isatty v0.0.14 // indirect - github.com/mattn/go-runewidth v0.0.13 // indirect + github.com/kevinburke/ssh_config v1.2.0 // indirect + github.com/klauspost/compress v1.18.2 // indirect + github.com/lucasb-eyer/go-colorful v1.3.0 // indirect + github.com/mattn/go-colorable v0.1.14 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect github.com/mitchellh/go-homedir v1.1.0 // indirect - github.com/moby/sys/mount v0.2.0 // indirect - github.com/moby/sys/mountinfo v0.4.1 // indirect - github.com/moby/term v0.0.0-20210619224110-3f7ff695adc6 // indirect + github.com/moby/buildkit v0.26.3 // indirect + github.com/moby/docker-image-spec v1.3.1 // indirect + github.com/moby/patternmatcher v0.6.0 // indirect + github.com/moby/sys/atomicwriter v0.1.0 // indirect + github.com/moby/sys/sequential v0.6.0 // indirect + github.com/moby/sys/user v0.4.0 // indirect + github.com/moby/sys/userns v0.1.0 // indirect + github.com/moby/term v0.5.2 // indirect github.com/morikuni/aec v1.0.0 // indirect - github.com/opencontainers/go-digest v1.0.0 // indirect - github.com/opencontainers/runc 
v1.0.2 // indirect - github.com/opencontainers/selinux v1.8.2 // indirect - github.com/rivo/uniseg v0.2.0 // indirect - github.com/sergi/go-diff v1.1.0 // indirect - github.com/sirupsen/logrus v1.8.1 // indirect - github.com/spf13/pflag v1.0.5 // indirect - github.com/src-d/gcfg v1.4.0 // indirect - github.com/vbatts/tar-split v0.11.2 // indirect - github.com/xanzy/ssh-agent v0.3.0 // indirect - go.opencensus.io v0.23.0 // indirect - golang.org/x/net v0.0.0-20220225172249-27dd8689420f // indirect - golang.org/x/sync v0.0.0-20210220032951-036812b2e83c // indirect - golang.org/x/sys v0.0.0-20220318055525-2edf467146b5 // indirect - golang.org/x/text v0.3.7 // indirect - google.golang.org/appengine v1.6.7 // indirect - google.golang.org/genproto v0.0.0-20211208223120-3a66f561d7aa // indirect - google.golang.org/grpc v1.43.0 // indirect - google.golang.org/protobuf v1.27.1 // indirect - gopkg.in/src-d/go-billy.v4 v4.3.2 // indirect + github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect + github.com/opencontainers/selinux v1.13.0 // indirect + github.com/pjbgf/sha1cd v0.3.2 // indirect + github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 // indirect + github.com/prometheus/client_golang v1.23.2 // indirect + github.com/prometheus/client_model v0.6.2 // indirect + github.com/prometheus/common v0.66.1 // indirect + github.com/prometheus/procfs v0.16.1 // indirect + github.com/rivo/uniseg v0.4.7 // indirect + github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 // indirect + github.com/sirupsen/logrus v1.9.4 // indirect + github.com/skeema/knownhosts v1.3.1 // indirect + github.com/spf13/pflag v1.0.10 // indirect + github.com/tonistiigi/go-csvvalue v0.0.0-20240814133006-030d3b2625d0 // indirect + github.com/vbatts/tar-split v0.12.2 // indirect + github.com/xanzy/ssh-agent v0.3.3 // indirect + github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f // indirect + github.com/xeipuuv/gojsonreference 
v0.0.0-20180127040603-bd5ef7bd5415 // indirect + github.com/xeipuuv/gojsonschema v1.2.0 // indirect + go.opentelemetry.io/auto/sdk v1.2.1 // indirect + go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 // indirect + go.opentelemetry.io/otel v1.39.0 // indirect + go.opentelemetry.io/otel/metric v1.39.0 // indirect + go.opentelemetry.io/otel/trace v1.39.0 // indirect + go.yaml.in/yaml/v2 v2.4.2 // indirect + go.yaml.in/yaml/v3 v3.0.4 // indirect + golang.org/x/net v0.50.0 // indirect + google.golang.org/protobuf v1.36.11 // indirect gopkg.in/warnings.v0 v0.1.2 // indirect - gopkg.in/yaml.v2 v2.4.0 // indirect ) -go 1.17 +replace github.com/BurntSushi/toml => github.com/BurntSushi/toml v1.3.2 + +go 1.25.5 diff --git a/go.sum b/go.sum index b5fb77c8d1..ba2d601e3f 100644 --- a/go.sum +++ b/go.sum @@ -1,885 +1,392 @@ -bazil.org/fuse v0.0.0-20160811212531-371fbbdaa898/go.mod h1:Xbm+BRKSBEpa4q4hTSxohYNQpsxXPbPry4JJWOB3LB8= -cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= -cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= -cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= -cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= -cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= -cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= -cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4= -cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= -cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= -cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= -cloud.google.com/go v0.57.0/go.mod 
h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= -cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= -cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= -cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= -cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk= -cloud.google.com/go v0.78.0/go.mod h1:QjdrLG0uq+YwhjoVOLsS1t7TW8fs36kLs4XO5R5ECHg= -cloud.google.com/go v0.79.0/go.mod h1:3bzgcEeQlzbuEAYu4mrWhKqWjmpprinYgKJLgKHnbb8= -cloud.google.com/go v0.81.0/go.mod h1:mk/AM35KwGk/Nm2YSeZbxXdrNK3KZOYHmLkOqC2V6E0= -cloud.google.com/go v0.83.0/go.mod h1:Z7MJUsANfY0pYPdw0lbnivPx4/vhy/e2FEkSkF7vAVY= -cloud.google.com/go v0.84.0/go.mod h1:RazrYuxIK6Kb7YrzzhPoLmCVzl7Sup4NrbKPg8KHSUM= -cloud.google.com/go v0.87.0/go.mod h1:TpDYlFy7vuLzZMMZ+B6iRiELaY7z/gJPaqbMx6mlWcY= -cloud.google.com/go v0.90.0/go.mod h1:kRX0mNRHe0e2rC6oNakvwQqzyDmg57xJ+SZU1eT2aDQ= -cloud.google.com/go v0.93.3/go.mod h1:8utlLll2EF5XMAV15woO4lSbWQlk8rer9aLOfLh7+YI= -cloud.google.com/go v0.94.1/go.mod h1:qAlAugsXlC+JWO+Bke5vCtc9ONxjQT3drlTTnAplMW4= -cloud.google.com/go v0.97.0/go.mod h1:GF7l59pYBVlXQIBLx3a761cZ41F9bBH3JUlihCt2Udc= -cloud.google.com/go v0.98.0/go.mod h1:ua6Ush4NALrHk5QXDWnjvZHN93OuF0HfuEPq9I1X0cM= -cloud.google.com/go v0.99.0/go.mod h1:w0Xx2nLzqWJPuozYQX+hFfCSI8WioryfRDzkoI/Y2ZA= -cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= -cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= -cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= -cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= -cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= -cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= -cloud.google.com/go/datastore v1.0.0/go.mod 
h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= -cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= -cloud.google.com/go/firestore v1.1.0/go.mod h1:ulACoGHTpvq5r8rxGJ4ddJZBZqakUQqClKRT5SZwBmk= -cloud.google.com/go/firestore v1.6.1/go.mod h1:asNXNOzBdyVQmEU+ggO8UPodTkEVFW5Qx+rwHnAz+EY= -cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= -cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= -cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= -cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= -cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= -cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= -cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= -cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= -cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= -dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= -github.com/Azure/azure-sdk-for-go v16.2.1+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= -github.com/Azure/go-ansiterm v0.0.0-20170929234023-d6e3b3328b78/go.mod h1:LmzpDX56iTiv29bbRTIsUNlaFfuhWRQBWjQdVyAevI8= -github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 h1:UQHMgLO+TxOElx5B5HZ4hJQsoJ/PvUvKRhJHDQXO8P8= -github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= -github.com/Azure/go-autorest v10.8.1+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24= +cyphar.com/go-pathrs v0.2.1 h1:9nx1vOgwVvX1mNBWDu93+vaceedpbsDqo+XuBGL40b8= +cyphar.com/go-pathrs v0.2.1/go.mod h1:y8f1EMG7r+hCuFf/rXsKqMJrJAUoADZGNh5/vZPKcGc= 
+dario.cat/mergo v1.0.2 h1:85+piFYR1tMbRrLcDwR18y4UKJ3aH1Tbzi24VRW1TK8= +dario.cat/mergo v1.0.2/go.mod h1:E/hbnu0NxMFBjpMIE34DRGLWqDy0g5FuKDhCb31ngxA= +github.com/AdaLogics/go-fuzz-headers v0.0.0-20240806141605-e8a1dd7889d6 h1:He8afgbRMd7mFxO99hRNu+6tazq8nFF9lIwo9JFroBk= +github.com/AdaLogics/go-fuzz-headers v0.0.0-20240806141605-e8a1dd7889d6/go.mod h1:8o94RPi1/7XTJvwPpRSzSUedZrtlirdB3r9Z20bi2f8= +github.com/Azure/azure-sdk-for-go v68.0.0+incompatible h1:fcYLmCpyNYRnvJbPerq7U0hS+6+I79yEDJBqVNcqUzU= +github.com/Azure/azure-sdk-for-go v68.0.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= +github.com/Azure/go-ansiterm v0.0.0-20250102033503-faa5f7b0171c h1:udKWzYgxTojEKWjV8V+WSxDXJ4NFATAsZjh8iIbsQIg= +github.com/Azure/go-ansiterm v0.0.0-20250102033503-faa5f7b0171c/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= +github.com/Azure/go-autorest v14.2.0+incompatible h1:V5VMDjClD3GiElqLWO7mz2MxNAK/vTfRHdAubSIPRgs= github.com/Azure/go-autorest v14.2.0+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24= -github.com/Azure/go-autorest/autorest v0.11.1/go.mod h1:JFgpikqFJ/MleTTxwepExTKnFUKKszPS8UavbQYUMuw= -github.com/Azure/go-autorest/autorest/adal v0.9.0/go.mod h1:/c022QCutn2P7uY+/oQWWNcK9YU+MH96NgK+jErpbcg= -github.com/Azure/go-autorest/autorest/adal v0.9.5/go.mod h1:B7KF7jKIeC9Mct5spmyCB/A8CG/sEz1vwIRGv/bbw7A= +github.com/Azure/go-autorest/autorest v0.11.28/go.mod h1:MrkzG3Y3AH668QyF9KRk5neJnGgmhQ6krbhR8Q5eMvA= +github.com/Azure/go-autorest/autorest v0.11.30 h1:iaZ1RGz/ALZtN5eq4Nr1SOFSlf2E4pDI3Tcsl+dZPVE= +github.com/Azure/go-autorest/autorest v0.11.30/go.mod h1:t1kpPIOpIVX7annvothKvb0stsrXa37i7b+xpmBW8Fs= +github.com/Azure/go-autorest/autorest/adal v0.9.18/go.mod h1:XVVeme+LZwABT8K5Lc3hA4nAe8LDBVle26gTrguhhPQ= +github.com/Azure/go-autorest/autorest/adal v0.9.22/go.mod h1:XuAbAEUv2Tta//+voMI038TrJBqjKam0me7qR+L8Cmk= +github.com/Azure/go-autorest/autorest/adal v0.9.24 h1:BHZfgGsGwdkHDyZdtQRQk1WeUdW0m2WPAwuHZwUi5i4= 
+github.com/Azure/go-autorest/autorest/adal v0.9.24/go.mod h1:7T1+g0PYFmACYW5LlG2fcoPiPlFHjClyRGL7dRlP5c8= +github.com/Azure/go-autorest/autorest/azure/auth v0.5.13 h1:Ov8avRZi2vmrE2JcXw+tu5K/yB41r7xK9GZDiBF7NdM= +github.com/Azure/go-autorest/autorest/azure/auth v0.5.13/go.mod h1:5BAVfWLWXihP47vYrPuBKKf4cS0bXI+KM9Qx6ETDJYo= +github.com/Azure/go-autorest/autorest/azure/cli v0.4.6/go.mod h1:piCfgPho7BiIDdEQ1+g4VmKyD5y+p/XtSNqE6Hc4QD0= +github.com/Azure/go-autorest/autorest/azure/cli v0.4.7 h1:Q9R3utmFg9K1B4OYtAZ7ZUUvIUdzQt7G2MN5Hi/d670= +github.com/Azure/go-autorest/autorest/azure/cli v0.4.7/go.mod h1:bVrAueELJ0CKLBpUHDIvD516TwmHmzqwCpvONWRsw3s= github.com/Azure/go-autorest/autorest/date v0.3.0/go.mod h1:BI0uouVdmngYNUzGWeSYnokU+TrmwEsOqdt8Y6sso74= -github.com/Azure/go-autorest/autorest/mocks v0.4.0/go.mod h1:LTp+uSrOhSkaKrUy935gNZuuIPPVsHlr9DSOxSayd+k= +github.com/Azure/go-autorest/autorest/date v0.3.1 h1:o9Z8Jyt+VJJTCZ/UORishuHOusBwolhjokt9s5k8I4w= +github.com/Azure/go-autorest/autorest/date v0.3.1/go.mod h1:Dz/RDmXlfiFFS/eW+b/xMUSFs1tboPVy6UjgADToWDM= github.com/Azure/go-autorest/autorest/mocks v0.4.1/go.mod h1:LTp+uSrOhSkaKrUy935gNZuuIPPVsHlr9DSOxSayd+k= -github.com/Azure/go-autorest/logger v0.2.0/go.mod h1:T9E3cAhj2VqvPOtCYAvby9aBXkZmbF5NWuPV8+WeEW8= +github.com/Azure/go-autorest/autorest/mocks v0.4.2 h1:PGN4EDXnuQbojHbU0UWoNvmu9AGVwYHG9/fkDYhtAfw= +github.com/Azure/go-autorest/autorest/mocks v0.4.2/go.mod h1:Vy7OitM9Kei0i1Oj+LvyAWMXJHeKH1MVlzFugfVrmyU= +github.com/Azure/go-autorest/logger v0.2.1/go.mod h1:T9E3cAhj2VqvPOtCYAvby9aBXkZmbF5NWuPV8+WeEW8= +github.com/Azure/go-autorest/logger v0.2.2 h1:hYqBsEBywrrOSW24kkOCXRcKfKhK76OzLTfF+MYDE2o= +github.com/Azure/go-autorest/logger v0.2.2/go.mod h1:I5fg9K52o+iuydlWfa9T5K6WFos9XYr9dYTFzpqgibw= github.com/Azure/go-autorest/tracing v0.6.0/go.mod h1:+vhtPC754Xsa23ID7GlGsrdKBpUA79WCAKPPZVC2DeU= -github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/BurntSushi/toml 
v1.0.0/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= -github.com/BurntSushi/toml v1.1.0 h1:ksErzDEI1khOiGPgpwuI7x2ebx/uXQNw7xJpn9Eq1+I= -github.com/BurntSushi/toml v1.1.0/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= -github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= -github.com/DataDog/datadog-go v3.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ= +github.com/Azure/go-autorest/tracing v0.6.1 h1:YUMSrC/CeD1ZnnXcNYU4a/fzsO35u2Fsful9L/2nyR0= +github.com/Azure/go-autorest/tracing v0.6.1/go.mod h1:/3EgjbsjraOqiicERAeu3m7/z0x1TzjQGAwDrJrXGkc= +github.com/BurntSushi/toml v1.3.2 h1:o7IhLm0Msx3BaB+n3Ag7L8EVlByGnpq14C4YWiu/gL8= +github.com/BurntSushi/toml v1.3.2/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= github.com/Masterminds/semver v1.5.0 h1:H65muMkzWKEuNDnfl9d70GUjFniHKHRbFPGBuZ3QEww= github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y= -github.com/Microsoft/go-winio v0.4.11/go.mod h1:VhR8bwka0BXejwEJY73c50VrPtXAaKcyvVC4A4RozmA= -github.com/Microsoft/go-winio v0.4.14/go.mod h1:qXqCSQ3Xa7+6tgxaGTIe4Kpcdsi+P8jBhyzoq1bpyYA= -github.com/Microsoft/go-winio v0.4.15-0.20190919025122-fc70bd9a86b5/go.mod h1:tTuCMEN+UleMWgg9dVx4Hu52b1bJo+59jBh3ajtinzw= -github.com/Microsoft/go-winio v0.4.16-0.20201130162521-d1ffc52c7331/go.mod h1:XB6nPKklQyQ7GC9LdcBEcBl8PF76WugXOPRXwdLnMv0= -github.com/Microsoft/go-winio v0.4.16/go.mod h1:XB6nPKklQyQ7GC9LdcBEcBl8PF76WugXOPRXwdLnMv0= -github.com/Microsoft/go-winio v0.4.17-0.20210211115548-6eac466e5fa3/go.mod h1:JPGBdM1cNvN/6ISo+n8V5iA4v8pBzdOpzfwIujj1a84= -github.com/Microsoft/go-winio v0.4.17-0.20210324224401-5516f17a5958/go.mod h1:JPGBdM1cNvN/6ISo+n8V5iA4v8pBzdOpzfwIujj1a84= -github.com/Microsoft/go-winio v0.4.17/go.mod h1:JPGBdM1cNvN/6ISo+n8V5iA4v8pBzdOpzfwIujj1a84= -github.com/Microsoft/go-winio v0.5.1 h1:aPJp2QD7OOrhO5tQXqQoGSJc+DjDtWTGLOmNyAm6FgY= -github.com/Microsoft/go-winio 
v0.5.1/go.mod h1:JPGBdM1cNvN/6ISo+n8V5iA4v8pBzdOpzfwIujj1a84= -github.com/Microsoft/hcsshim v0.8.6/go.mod h1:Op3hHsoHPAvb6lceZHDtd9OkTew38wNoXnJs8iY7rUg= -github.com/Microsoft/hcsshim v0.8.7-0.20190325164909-8abdbb8205e4/go.mod h1:Op3hHsoHPAvb6lceZHDtd9OkTew38wNoXnJs8iY7rUg= -github.com/Microsoft/hcsshim v0.8.7/go.mod h1:OHd7sQqRFrYd3RmSgbgji+ctCwkbq2wbEYNSzOYtcBQ= -github.com/Microsoft/hcsshim v0.8.9/go.mod h1:5692vkUqntj1idxauYlpoINNKeqCiG6Sg38RRsjT5y8= -github.com/Microsoft/hcsshim v0.8.14/go.mod h1:NtVKoYxQuTLx6gEq0L96c9Ju4JbRJ4nY2ow3VK6a9Lg= -github.com/Microsoft/hcsshim v0.8.15/go.mod h1:x38A4YbHbdxJtc0sF6oIz+RG0npwSCAvn69iY6URG00= -github.com/Microsoft/hcsshim v0.8.16/go.mod h1:o5/SZqmR7x9JNKsW3pu+nqHm0MF8vbA+VxGOoXdC600= -github.com/Microsoft/hcsshim v0.8.21/go.mod h1:+w2gRZ5ReXQhFOrvSQeNfhrYB/dg3oDwTOcER2fw4I4= -github.com/Microsoft/hcsshim v0.8.23 h1:47MSwtKGXet80aIn+7h4YI6fwPmwIghAnsx2aOUrG2M= -github.com/Microsoft/hcsshim v0.8.23/go.mod h1:4zegtUJth7lAvFyc6cH2gGQ5B3OFQim01nnU2M8jKDg= -github.com/Microsoft/hcsshim/test v0.0.0-20201218223536-d3e5debf77da/go.mod h1:5hlzMzRKMLyo42nCZ9oml8AdTlq/0cvIaBv6tK1RehU= -github.com/Microsoft/hcsshim/test v0.0.0-20210227013316-43a75bb4edd3/go.mod h1:mw7qgWloBUl75W/gVH3cQszUg1+gUITj7D6NY7ywVnY= -github.com/NYTimes/gziphandler v0.0.0-20170623195520-56545f4a5d46/go.mod h1:3wb06e3pkSAbeQ52E9H9iFoQsEEwGN64994WTCIhntQ= -github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= -github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= -github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= -github.com/Shopify/logrus-bugsnag v0.0.0-20171204204709-577dee27f20d/go.mod h1:HI8ITrYtUY+O+ZhtlqUnD8+KwNPOyugEhfP9fdUIaEQ= -github.com/alcortesm/tgz v0.0.0-20161220082320-9c5fe88206d7 h1:uSoVVbwJiQipAclBbw+8quDsfcvFjOpI5iCf4p/cqCs= -github.com/alcortesm/tgz v0.0.0-20161220082320-9c5fe88206d7/go.mod 
h1:6zEj6s6u/ghQa61ZWa/C2Aw3RkjiTBOix7dkqa1VLIs= +github.com/Masterminds/semver/v3 v3.4.0 h1:Zog+i5UMtVoCU8oKka5P7i9q9HgrJeGzI9SA1Xbatp0= +github.com/Masterminds/semver/v3 v3.4.0/go.mod h1:4V+yj/TJE1HU9XfppCwVMZq3I84lprf4nC11bSS5beM= +github.com/Microsoft/go-winio v0.5.2/go.mod h1:WpS1mjBmmwHBEWmogvA2mj8546UReBk4v8QkMxJ6pZY= +github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= +github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= +github.com/ProtonMail/go-crypto v1.3.0 h1:ILq8+Sf5If5DCpHQp4PbZdS1J7HDFRXz/+xKBiRGFrw= +github.com/ProtonMail/go-crypto v1.3.0/go.mod h1:9whxjD8Rbs29b4XWbB8irEcE8KHMqaR2e7GWU1R+/PE= +github.com/agext/levenshtein v1.2.3 h1:YB2fHEn0UJagG8T1rrWknE3ZQzWM06O8AMAatNn7lmo= +github.com/agext/levenshtein v1.2.3/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= -github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= -github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= -github.com/alexflint/go-filemutex v0.0.0-20171022225611-72bdc8eae2ae/go.mod h1:CgnQgUtFrFz9mxFNtED3jI5tLDjKlOM+oUF/sTk6ps0= -github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239 h1:kFOfPq6dUM1hTo4JG6LR5AXSUEsOjtdm0kw0FtQtMJA= -github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239/go.mod h1:2FmKhYUyUczH0OGQWaF5ceTx0UBShxjsH6f8oGKYe2c= -github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= +github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be h1:9AeTilPcZAjCFIImctFaOjnTIavg87rW78vTPkQqLI8= +github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be/go.mod 
h1:ySMOLuWl6zY27l47sB3qLNK6tF2fkHG55UZxx8oIVo4= github.com/apex/log v1.9.0 h1:FHtw/xuaM8AgmvDDTI9fiwoAL25Sq2cxojnZICUU8l0= github.com/apex/log v1.9.0/go.mod h1:m82fZlWIuiWzWP04XCTXmnX0xRkYYbCdYn8jbJeLBEA= github.com/apex/logs v1.0.0/go.mod h1:XzxuLZ5myVHDy9SAmYpamKKRNApGj54PfYLcFrXqDwo= github.com/aphistic/golf v0.0.0-20180712155816-02c07f170c5a/go.mod h1:3NqKYiepwy8kCu4PNA+aP7WUV72eXWJeP9/r3/K9aLE= github.com/aphistic/sweet v0.2.0/go.mod h1:fWDlIh/isSE9n6EPsRmC0det+whmX6dJid3stzu0Xys= -github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= -github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8= -github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= -github.com/armon/go-metrics v0.3.10/go.mod h1:4O98XIr/9W0sxpJ8UaYkvjk10Iff7SnFrb4QAOwNTFc= -github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= -github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio= github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs= -github.com/asaskevich/govalidator v0.0.0-20190424111038-f61b66f89f4a/go.mod h1:lB+ZfQJz7igIIfQNfa7Ml4HSf2uFQQRzpGGRXenZAgY= -github.com/aws/aws-sdk-go v1.15.11/go.mod h1:mFuSZ37Z9YOHbQEwBWztmVzqXrEkub65tZoCYDt7FT0= github.com/aws/aws-sdk-go v1.20.6/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= +github.com/aws/aws-sdk-go-v2 v1.41.1 h1:ABlyEARCDLN034NhxlRUSZr4l71mh+T5KAeGh6cerhU= +github.com/aws/aws-sdk-go-v2 v1.41.1/go.mod h1:MayyLB8y+buD9hZqkCW3kX1AKq07Y5pXxtgB+rRFhz0= +github.com/aws/aws-sdk-go-v2/config v1.32.7 h1:vxUyWGUwmkQ2g19n7JY/9YL8MfAIl7bTesIUykECXmY= +github.com/aws/aws-sdk-go-v2/config v1.32.7/go.mod 
h1:2/Qm5vKUU/r7Y+zUk/Ptt2MDAEKAfUtKc1+3U1Mo3oY= +github.com/aws/aws-sdk-go-v2/credentials v1.19.7 h1:tHK47VqqtJxOymRrNtUXN5SP/zUTvZKeLx4tH6PGQc8= +github.com/aws/aws-sdk-go-v2/credentials v1.19.7/go.mod h1:qOZk8sPDrxhf+4Wf4oT2urYJrYt3RejHSzgAquYeppw= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.17 h1:I0GyV8wiYrP8XpA70g1HBcQO1JlQxCMTW9npl5UbDHY= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.17/go.mod h1:tyw7BOl5bBe/oqvoIeECFJjMdzXoa/dfVz3QQ5lgHGA= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.17 h1:xOLELNKGp2vsiteLsvLPwxC+mYmO6OZ8PYgiuPJzF8U= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.17/go.mod h1:5M5CI3D12dNOtH3/mk6minaRwI2/37ifCURZISxA/IQ= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.17 h1:WWLqlh79iO48yLkj1v3ISRNiv+3KdQoZ6JWyfcsyQik= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.17/go.mod h1:EhG22vHRrvF8oXSTYStZhJc1aUgKtnJe+aOiFEV90cM= +github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4 h1:WKuaxf++XKWlHWu9ECbMlha8WOEGm0OUEZqm4K/Gcfk= +github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4/go.mod h1:ZWy7j6v1vWGmPReu0iSGvRiise4YI5SkR3OHKTZ6Wuc= +github.com/aws/aws-sdk-go-v2/service/ecr v1.51.2 h1:aq2N/9UkbEyljIQ7OFcudEgUsJzO8MYucmfsM/k/dmc= +github.com/aws/aws-sdk-go-v2/service/ecr v1.51.2/go.mod h1:1NVD1KuMjH2GqnPwMotPndQaT/MreKkWpjkF12d6oKU= +github.com/aws/aws-sdk-go-v2/service/ecrpublic v1.38.2 h1:9fe6w8bydUwNAhFVmjo+SRqAJjbBMOyILL/6hTTVkyA= +github.com/aws/aws-sdk-go-v2/service/ecrpublic v1.38.2/go.mod h1:x7gU4CAyAz4BsM9hlRkhHiYw2GIr1QCmN45uwQw9l/E= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.4 h1:0ryTNEdJbzUCEWkVXEXoqlXV72J5keC1GvILMOuD00E= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.4/go.mod h1:HQ4qwNZh32C3CBeO6iJLQlgtMzqeG17ziAA/3KDJFow= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.17 h1:RuNSMoozM8oXlgLG/n6WLaFGoea7/CddrCfIiSA+xdY= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.17/go.mod 
h1:F2xxQ9TZz5gDWsclCtPQscGpP0VUOc8RqgFM3vDENmU= +github.com/aws/aws-sdk-go-v2/service/signin v1.0.5 h1:VrhDvQib/i0lxvr3zqlUwLwJP4fpmpyD9wYG1vfSu+Y= +github.com/aws/aws-sdk-go-v2/service/signin v1.0.5/go.mod h1:k029+U8SY30/3/ras4G/Fnv/b88N4mAfliNn08Dem4M= +github.com/aws/aws-sdk-go-v2/service/sso v1.30.9 h1:v6EiMvhEYBoHABfbGB4alOYmCIrcgyPPiBE1wZAEbqk= +github.com/aws/aws-sdk-go-v2/service/sso v1.30.9/go.mod h1:yifAsgBxgJWn3ggx70A3urX2AN49Y5sJTD1UQFlfqBw= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.13 h1:gd84Omyu9JLriJVCbGApcLzVR3XtmC4ZDPcAI6Ftvds= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.13/go.mod h1:sTGThjphYE4Ohw8vJiRStAcu3rbjtXRsdNB0TvZ5wwo= +github.com/aws/aws-sdk-go-v2/service/sts v1.41.6 h1:5fFjR/ToSOzB2OQ/XqWpZBmNvmP/pJ1jOWYlFDJTjRQ= +github.com/aws/aws-sdk-go-v2/service/sts v1.41.6/go.mod h1:qgFDZQSD/Kys7nJnVqYlWKnh0SSdMjAi0uSwON4wgYQ= +github.com/aws/smithy-go v1.24.0 h1:LpilSUItNPFr1eY85RYgTIg5eIEPtvFbskaFcmmIUnk= +github.com/aws/smithy-go v1.24.0/go.mod h1:LEj2LM3rBRQJxPZTB4KuzZkaZYnZPnvgIhb4pu07mx0= +github.com/awslabs/amazon-ecr-credential-helper/ecr-login v0.11.0 h1:GOPttfOAf5qAgx7r6b+zCWZrvCsfKffkL4H6mSYx1kA= +github.com/awslabs/amazon-ecr-credential-helper/ecr-login v0.11.0/go.mod h1:a2HN6+p7k0JLDO8514sMr0l4cnrR52z4sWoZ/Uc82ho= github.com/aybabtme/rgbterm v0.0.0-20170906152045-cc83f3b3ce59/go.mod h1:q/89r3U2H7sSsE2t6Kca0lfwTK8JdoNGS/yzM/4iH5I= -github.com/beorn7/perks v0.0.0-20160804104726-4c0e84591b9a/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= +github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= -github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= 
-github.com/bitly/go-simplejson v0.5.0/go.mod h1:cXHtHw4XUPsvGaxgjIAn8PhEWG9NfngEKAMDJEczWVA= -github.com/bits-and-blooms/bitset v1.2.0 h1:Kn4yilvwNtMACtf1eYDlG8H77R07mZSPbMjLyS07ChA= -github.com/bits-and-blooms/bitset v1.2.0/go.mod h1:gIdJ4wp64HaoK2YrL1Q5/N7Y16edYb8uY+O0FJTyyDA= -github.com/bketelsen/crypt v0.0.4/go.mod h1:aI6NrJ0pMGgvZKL1iVgXLnfIFJtfV+bKCoqOes/6LfM= -github.com/blang/semver v3.1.0+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk= -github.com/blang/semver v3.5.1+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk= -github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869/go.mod h1:Ekp36dRnpXw/yCqJaO+ZrUyxD+3VXMFFr56k5XYrpB4= -github.com/bshuster-repo/logrus-logstash-hook v0.4.1/go.mod h1:zsTqEiSzDgAa/8GZR7E1qaXrhYNDKBYy5/dWPTIflbk= -github.com/buger/jsonparser v0.0.0-20180808090653-f4dd9f5a6b44/go.mod h1:bbYlZJ7hK1yFx9hf58LP0zeX7UjIGs20ufpu3evjr+s= -github.com/bugsnag/bugsnag-go v0.0.0-20141110184014-b1d153021fcd/go.mod h1:2oa8nejYd4cQ/b0hMIopN0lCRxU0bueqREvZLWFrtK8= -github.com/bugsnag/osext v0.0.0-20130617224835-0dd3f918b21b/go.mod h1:obH5gd0BsqsP2LwDJ9aOkm/6J86V6lyAXCoQWGw3K50= -github.com/bugsnag/panicwrap v0.0.0-20151223152923-e2c28503fcd0/go.mod h1:D/8v3kj0zr8ZAKg1AQ6crr+5VwKN5eIywRkfhyM/+dE= -github.com/buildpacks/imgutil v0.0.0-20211203200417-76206845baac h1:XrKr6axRUBHEQdyyo7uffYDwWurOdeyH8MpNRJuBdIw= -github.com/buildpacks/imgutil v0.0.0-20211203200417-76206845baac/go.mod h1:YZReWjuSxwyvuN92Vlcul+WgaCXylpecgFn7T3rNang= -github.com/buildpacks/lifecycle v0.13.5 h1:8sreGjrBu6CJDZqc6Opi+VJSqVcS1hiEqzJEhn7bqBY= -github.com/buildpacks/lifecycle v0.13.5/go.mod h1:+X6d6xoEhNitwvSbpTcnw/mvKNTYAx2ZeR9GDPbbsds= -github.com/cenkalti/backoff/v4 v4.1.1/go.mod h1:scbssz8iZGpm3xbr14ovlUdkxfGXNInqkPWOWmG2CLw= -github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= -github.com/census-instrumentation/opencensus-proto v0.3.0/go.mod 
h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= -github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= -github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= -github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= -github.com/checkpoint-restore/go-criu/v4 v4.1.0/go.mod h1:xUQBLp4RLc5zJtWY++yjOoMoB5lihDt7fai+75m+rGw= -github.com/checkpoint-restore/go-criu/v5 v5.0.0/go.mod h1:cfwC0EG7HMUenopBsUf9d89JlCLQIfgVcNsNN0t6T2M= -github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= -github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= -github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= -github.com/cilium/ebpf v0.0.0-20200110133405-4032b1d8aae3/go.mod h1:MA5e5Lr8slmEg9bt0VpxxWqJlO4iwu3FBdHUzV7wQVg= -github.com/cilium/ebpf v0.0.0-20200702112145-1c8d4c9ef775/go.mod h1:7cR51M8ViRLIdUjrmSXlK9pkrsDlLHbO8jiB8X8JnOc= -github.com/cilium/ebpf v0.2.0/go.mod h1:To2CFviqOWL/M0gIMsvSMlqe7em/l1ALkX1PyjrX2Qs= -github.com/cilium/ebpf v0.4.0/go.mod h1:4tRaxcgiL706VnOzHOdBlY8IEAIdxINsQBcU4xJJXRs= -github.com/cilium/ebpf v0.6.2/go.mod h1:4tRaxcgiL706VnOzHOdBlY8IEAIdxINsQBcU4xJJXRs= -github.com/circonus-labs/circonus-gometrics v2.3.1+incompatible/go.mod h1:nmEj6Dob7S7YxXgwXpfOuvO54S+tGdZdw9fuRZt25Ag= -github.com/circonus-labs/circonusllhist v0.1.3/go.mod h1:kMXHVDlOchFAehlya5ePtbp5jckzBHf4XRpQvBOLI+I= -github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= -github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= -github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= -github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= 
-github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI= -github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= -github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= -github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= -github.com/cncf/xds/go v0.0.0-20211001041855-01bcc9b48dfe/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= -github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= -github.com/cncf/xds/go v0.0.0-20211130200136-a8f946100490/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= -github.com/cockroachdb/datadriven v0.0.0-20190809214429-80d97fb3cbaa/go.mod h1:zn76sxSg3SzpJ0PPJaLDCu+Bu0Lg3sKTORVIj19EIF8= -github.com/containerd/aufs v0.0.0-20200908144142-dab0cbea06f4/go.mod h1:nukgQABAEopAHvB6j7cnP5zJ+/3aVcE7hCYqvIwAHyE= -github.com/containerd/aufs v0.0.0-20201003224125-76a6863f2989/go.mod h1:AkGGQs9NM2vtYHaUen+NljV0/baGCAPELGm2q9ZXpWU= -github.com/containerd/aufs v0.0.0-20210316121734-20793ff83c97/go.mod h1:kL5kd6KM5TzQjR79jljyi4olc1Vrx6XBlcyj3gNv2PU= -github.com/containerd/aufs v1.0.0/go.mod h1:kL5kd6KM5TzQjR79jljyi4olc1Vrx6XBlcyj3gNv2PU= -github.com/containerd/btrfs v0.0.0-20201111183144-404b9149801e/go.mod h1:jg2QkJcsabfHugurUvvPhS3E08Oxiuh5W/g1ybB4e0E= -github.com/containerd/btrfs v0.0.0-20210316141732-918d888fb676/go.mod h1:zMcX3qkXTAi9GI50+0HOeuV8LU2ryCE/V2vG/ZBiTss= -github.com/containerd/btrfs v1.0.0/go.mod h1:zMcX3qkXTAi9GI50+0HOeuV8LU2ryCE/V2vG/ZBiTss= -github.com/containerd/cgroups v0.0.0-20190717030353-c4b9ac5c7601/go.mod h1:X9rLEHIqSf/wfK8NsPqxJmeZgW4pcfzdXITDrUSJ6uI= -github.com/containerd/cgroups v0.0.0-20190919134610-bf292b21730f/go.mod h1:OApqhQ4XNSNC13gXIwDjhOQxjWa/NxkwZXJ1EvqT0ko= -github.com/containerd/cgroups 
v0.0.0-20200531161412-0dbf7f05ba59/go.mod h1:pA0z1pT8KYB3TCXK/ocprsh7MAkoW8bZVzPdih9snmM= -github.com/containerd/cgroups v0.0.0-20200710171044-318312a37340/go.mod h1:s5q4SojHctfxANBDvMeIaIovkq29IP48TKAxnhYRxvo= -github.com/containerd/cgroups v0.0.0-20200824123100-0b889c03f102/go.mod h1:s5q4SojHctfxANBDvMeIaIovkq29IP48TKAxnhYRxvo= -github.com/containerd/cgroups v0.0.0-20210114181951-8a68de567b68/go.mod h1:ZJeTFisyysqgcCdecO57Dj79RfL0LNeGiFUqLYQRYLE= -github.com/containerd/cgroups v1.0.1 h1:iJnMvco9XGvKUvNQkv88bE4uJXxRQH18efbKo9w5vHQ= -github.com/containerd/cgroups v1.0.1/go.mod h1:0SJrPIenamHDcZhEcJMNBB85rHcUsw4f25ZfBiPYRkU= -github.com/containerd/console v0.0.0-20180822173158-c12b1e7919c1/go.mod h1:Tj/on1eG8kiEhd0+fhSDzsPAFESxzBBvdyEgyryXffw= -github.com/containerd/console v0.0.0-20181022165439-0650fd9eeb50/go.mod h1:Tj/on1eG8kiEhd0+fhSDzsPAFESxzBBvdyEgyryXffw= -github.com/containerd/console v0.0.0-20191206165004-02ecf6a7291e/go.mod h1:8Pf4gM6VEbTNRIT26AyyU7hxdQU3MvAvxVI0sc00XBE= -github.com/containerd/console v1.0.1/go.mod h1:XUsP6YE/mKtz6bxc+I8UiKKTP04qjQL4qcS3XoQ5xkw= -github.com/containerd/console v1.0.2/go.mod h1:ytZPjGgY2oeTkAONYafi2kSj0aYggsf8acV1PGKCbzQ= -github.com/containerd/containerd v1.2.10/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA= -github.com/containerd/containerd v1.3.0-beta.2.0.20190828155532-0293cbd26c69/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA= -github.com/containerd/containerd v1.3.0/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA= -github.com/containerd/containerd v1.3.1-0.20191213020239-082f7e3aed57/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA= -github.com/containerd/containerd v1.3.2/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA= -github.com/containerd/containerd v1.4.0-beta.2.0.20200729163537-40b22ef07410/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA= -github.com/containerd/containerd v1.4.1/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA= -github.com/containerd/containerd 
v1.4.3/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA= -github.com/containerd/containerd v1.4.9/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA= -github.com/containerd/containerd v1.5.0-beta.1/go.mod h1:5HfvG1V2FsKesEGQ17k5/T7V960Tmcumvqn8Mc+pCYQ= -github.com/containerd/containerd v1.5.0-beta.3/go.mod h1:/wr9AVtEM7x9c+n0+stptlo/uBBoBORwEx6ardVcmKU= -github.com/containerd/containerd v1.5.0-beta.4/go.mod h1:GmdgZd2zA2GYIBZ0w09ZvgqEq8EfBp/m3lcVZIvPHhI= -github.com/containerd/containerd v1.5.0-rc.0/go.mod h1:V/IXoMqNGgBlabz3tHD2TWDoTJseu1FGOKuoA4nNb2s= -github.com/containerd/containerd v1.5.1/go.mod h1:0DOxVqwDy2iZvrZp2JUx/E+hS0UNTVn7dJnIOwtYR4g= -github.com/containerd/containerd v1.5.7/go.mod h1:gyvv6+ugqY25TiXxcZC3L5yOeYgEw0QMhscqVp1AR9c= -github.com/containerd/containerd v1.5.8 h1:NmkCC1/QxyZFBny8JogwLpOy2f+VEbO/f6bV2Mqtwuw= -github.com/containerd/containerd v1.5.8/go.mod h1:YdFSv5bTFLpG2HIYmfqDpSYYTDX+mc5qtSuYx1YUb/s= -github.com/containerd/continuity v0.0.0-20190426062206-aaeac12a7ffc/go.mod h1:GL3xCUCBDV3CZiTSEKksMWbLE66hEyuu9qyDOOqM47Y= -github.com/containerd/continuity v0.0.0-20190815185530-f2a389ac0a02/go.mod h1:GL3xCUCBDV3CZiTSEKksMWbLE66hEyuu9qyDOOqM47Y= -github.com/containerd/continuity v0.0.0-20191127005431-f65d91d395eb/go.mod h1:GL3xCUCBDV3CZiTSEKksMWbLE66hEyuu9qyDOOqM47Y= -github.com/containerd/continuity v0.0.0-20200710164510-efbc4488d8fe/go.mod h1:cECdGN1O8G9bgKTlLhuPJimka6Xb/Gg7vYzCTNVxhvo= -github.com/containerd/continuity v0.0.0-20201208142359-180525291bb7/go.mod h1:kR3BEg7bDFaEddKm54WSmrol1fKWDU1nKYkgrcgZT7Y= -github.com/containerd/continuity v0.0.0-20210208174643-50096c924a4e/go.mod h1:EXlVlkqNba9rJe3j7w3Xa924itAMLgZH4UD/Q4PExuQ= -github.com/containerd/continuity v0.1.0 h1:UFRRY5JemiAhPZrr/uE0n8fMTLcZsUvySPr1+D7pgr8= -github.com/containerd/continuity v0.1.0/go.mod h1:ICJu0PwR54nI0yPEnJ6jcS+J7CZAUXrLh8lPo2knzsM= -github.com/containerd/fifo v0.0.0-20180307165137-3d5202aec260/go.mod h1:ODA38xgv3Kuk8dQz2ZQXpnv/UZZUHUCL7pnLehbXgQI= 
-github.com/containerd/fifo v0.0.0-20190226154929-a9fb20d87448/go.mod h1:ODA38xgv3Kuk8dQz2ZQXpnv/UZZUHUCL7pnLehbXgQI= -github.com/containerd/fifo v0.0.0-20200410184934-f15a3290365b/go.mod h1:jPQ2IAeZRCYxpS/Cm1495vGFww6ecHmMk1YJH2Q5ln0= -github.com/containerd/fifo v0.0.0-20201026212402-0724c46b320c/go.mod h1:jPQ2IAeZRCYxpS/Cm1495vGFww6ecHmMk1YJH2Q5ln0= -github.com/containerd/fifo v0.0.0-20210316144830-115abcc95a1d/go.mod h1:ocF/ME1SX5b1AOlWi9r677YJmCPSwwWnQ9O123vzpE4= -github.com/containerd/fifo v1.0.0/go.mod h1:ocF/ME1SX5b1AOlWi9r677YJmCPSwwWnQ9O123vzpE4= -github.com/containerd/go-cni v1.0.1/go.mod h1:+vUpYxKvAF72G9i1WoDOiPGRtQpqsNW/ZHtSlv++smU= -github.com/containerd/go-cni v1.0.2/go.mod h1:nrNABBHzu0ZwCug9Ije8hL2xBCYh/pjfMb1aZGrrohk= -github.com/containerd/go-runc v0.0.0-20180907222934-5a6d9f37cfa3/go.mod h1:IV7qH3hrUgRmyYrtgEeGWJfWbgcHL9CSRruz2Vqcph0= -github.com/containerd/go-runc v0.0.0-20190911050354-e029b79d8cda/go.mod h1:IV7qH3hrUgRmyYrtgEeGWJfWbgcHL9CSRruz2Vqcph0= -github.com/containerd/go-runc v0.0.0-20200220073739-7016d3ce2328/go.mod h1:PpyHrqVs8FTi9vpyHwPwiNEGaACDxT/N/pLcvMSRA9g= -github.com/containerd/go-runc v0.0.0-20201020171139-16b287bc67d0/go.mod h1:cNU0ZbCgCQVZK4lgG3P+9tn9/PaJNmoDXPpoJhDR+Ok= -github.com/containerd/go-runc v1.0.0/go.mod h1:cNU0ZbCgCQVZK4lgG3P+9tn9/PaJNmoDXPpoJhDR+Ok= -github.com/containerd/imgcrypt v1.0.1/go.mod h1:mdd8cEPW7TPgNG4FpuP3sGBiQ7Yi/zak9TYCG3juvb0= -github.com/containerd/imgcrypt v1.0.4-0.20210301171431-0ae5c75f59ba/go.mod h1:6TNsg0ctmizkrOgXRNQjAPFWpMYRWuiB6dSF4Pfa5SA= -github.com/containerd/imgcrypt v1.1.1-0.20210312161619-7ed62a527887/go.mod h1:5AZJNI6sLHJljKuI9IHnw1pWqo/F0nGDOuR9zgTs7ow= -github.com/containerd/imgcrypt v1.1.1/go.mod h1:xpLnwiQmEUJPvQoAapeb2SNCxz7Xr6PJrXQb0Dpc4ms= -github.com/containerd/nri v0.0.0-20201007170849-eb1350a75164/go.mod h1:+2wGSDGFYfE5+So4M5syatU0N0f0LbWpuqyMi4/BE8c= -github.com/containerd/nri v0.0.0-20210316161719-dbaa18c31c14/go.mod h1:lmxnXF6oMkbqs39FiCt1s0R2HSMhcLel9vNL3m4AaeY= 
-github.com/containerd/nri v0.1.0/go.mod h1:lmxnXF6oMkbqs39FiCt1s0R2HSMhcLel9vNL3m4AaeY= -github.com/containerd/stargz-snapshotter/estargz v0.10.0/go.mod h1:aE5PCyhFMwR8sbrErO5eM2GcvkyXTTJremG883D4qF0= -github.com/containerd/stargz-snapshotter/estargz v0.10.1 h1:hd1EoVjI2Ax8Cr64tdYqnJ4i4pZU49FkEf5kU8KxQng= -github.com/containerd/stargz-snapshotter/estargz v0.10.1/go.mod h1:aE5PCyhFMwR8sbrErO5eM2GcvkyXTTJremG883D4qF0= -github.com/containerd/ttrpc v0.0.0-20190828154514-0e0f228740de/go.mod h1:PvCDdDGpgqzQIzDW1TphrGLssLDZp2GuS+X5DkEJB8o= -github.com/containerd/ttrpc v0.0.0-20190828172938-92c8520ef9f8/go.mod h1:PvCDdDGpgqzQIzDW1TphrGLssLDZp2GuS+X5DkEJB8o= -github.com/containerd/ttrpc v0.0.0-20191028202541-4f1b8fe65a5c/go.mod h1:LPm1u0xBw8r8NOKoOdNMeVHSawSsltak+Ihv+etqsE8= -github.com/containerd/ttrpc v1.0.1/go.mod h1:UAxOpgT9ziI0gJrmKvgcZivgxOp8iFPSk8httJEt98Y= -github.com/containerd/ttrpc v1.0.2/go.mod h1:UAxOpgT9ziI0gJrmKvgcZivgxOp8iFPSk8httJEt98Y= -github.com/containerd/ttrpc v1.1.0/go.mod h1:XX4ZTnoOId4HklF4edwc4DcqskFZuvXB1Evzy5KFQpQ= -github.com/containerd/typeurl v0.0.0-20180627222232-a93fcdb778cd/go.mod h1:Cm3kwCdlkCfMSHURc+r6fwoGH6/F1hH3S4sg0rLFWPc= -github.com/containerd/typeurl v0.0.0-20190911142611-5eb25027c9fd/go.mod h1:GeKYzf2pQcqv7tJ0AoCuuhtnqhva5LNU3U+OyKxxJpk= -github.com/containerd/typeurl v1.0.1/go.mod h1:TB1hUtrpaiO88KEK56ijojHS1+NeF0izUACaJW2mdXg= -github.com/containerd/typeurl v1.0.2/go.mod h1:9trJWW2sRlGub4wZJRTW83VtbOLS6hwcDZXTn6oPz9s= -github.com/containerd/zfs v0.0.0-20200918131355-0a33824f23a2/go.mod h1:8IgZOBdv8fAgXddBT4dBXJPtxyRsejFIpXoklgxgEjw= -github.com/containerd/zfs v0.0.0-20210301145711-11e8f1707f62/go.mod h1:A9zfAbMlQwE+/is6hi0Xw8ktpL+6glmqZYtevJgaB8Y= -github.com/containerd/zfs v0.0.0-20210315114300-dde8f0fda960/go.mod h1:m+m51S1DvAP6r3FcmYCp54bQ34pyOwTieQDNRIRHsFY= -github.com/containerd/zfs v0.0.0-20210324211415-d5c4544f0433/go.mod h1:m+m51S1DvAP6r3FcmYCp54bQ34pyOwTieQDNRIRHsFY= -github.com/containerd/zfs v1.0.0/go.mod 
h1:m+m51S1DvAP6r3FcmYCp54bQ34pyOwTieQDNRIRHsFY= -github.com/containernetworking/cni v0.7.1/go.mod h1:LGwApLUm2FpoOfxTDEeq8T9ipbpZ61X79hmU3w8FmsY= -github.com/containernetworking/cni v0.8.0/go.mod h1:LGwApLUm2FpoOfxTDEeq8T9ipbpZ61X79hmU3w8FmsY= -github.com/containernetworking/cni v0.8.1/go.mod h1:LGwApLUm2FpoOfxTDEeq8T9ipbpZ61X79hmU3w8FmsY= -github.com/containernetworking/plugins v0.8.6/go.mod h1:qnw5mN19D8fIwkqW7oHHYDHVlzhJpcY6TQxn/fUyDDM= -github.com/containernetworking/plugins v0.9.1/go.mod h1:xP/idU2ldlzN6m4p5LmGiwRDjeJr6FLK6vuiUwoH7P8= -github.com/containers/ocicrypt v1.0.1/go.mod h1:MeJDzk1RJHv89LjsH0Sp5KTY3ZYkjXO/C+bKAeWFIrc= -github.com/containers/ocicrypt v1.1.0/go.mod h1:b8AOe0YR67uU8OqfVNcznfFpAzu3rdgUV4GP9qXPfu4= -github.com/containers/ocicrypt v1.1.1/go.mod h1:Dm55fwWm1YZAjYRaJ94z2mfZikIyIN4B0oB3dj3jFxY= -github.com/coreos/bbolt v1.3.2/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk= -github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= -github.com/coreos/go-iptables v0.4.5/go.mod h1:/mVI274lEDI2ns62jHCDnCyBF9Iwsmekav8Dbxlm1MU= -github.com/coreos/go-iptables v0.5.0/go.mod h1:/mVI274lEDI2ns62jHCDnCyBF9Iwsmekav8Dbxlm1MU= -github.com/coreos/go-oidc v2.1.0+incompatible/go.mod h1:CgnwVTmzoESiwO9qyAFEMiHoZ1nMCKZlZ9V6mm3/LKc= -github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= -github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= -github.com/coreos/go-systemd v0.0.0-20161114122254-48702e0da86b/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= -github.com/coreos/go-systemd v0.0.0-20180511133405-39ca1b05acc7/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= -github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= -github.com/coreos/go-systemd/v22 v22.0.0/go.mod h1:xO0FLkIi5MaZafQlIrOotqXZ90ih+1atmu1JpKERPPk= -github.com/coreos/go-systemd/v22 v22.1.0/go.mod 
h1:xO0FLkIi5MaZafQlIrOotqXZ90ih+1atmu1JpKERPPk= -github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= -github.com/coreos/pkg v0.0.0-20160727233714-3ac0863d7acf/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= -github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= -github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= -github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= -github.com/cpuguy83/go-md2man/v2 v2.0.1/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= -github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY= -github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= -github.com/creack/pty v1.1.11 h1:07n33Z8lZxZ2qwegKbObQohDhXDQxiMMz1NOUGYlesw= -github.com/creack/pty v1.1.11/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= -github.com/cyphar/filepath-securejoin v0.2.2/go.mod h1:FpkQEhXnPnOthhzymB7CGsFk2G9VLXONKD9G7QGMM+4= -github.com/d2g/dhcp4 v0.0.0-20170904100407-a1d1b6c41b1c/go.mod h1:Ct2BUK8SB0YC1SMSibvLzxjeJLnrYEVLULFNiHY9YfQ= -github.com/d2g/dhcp4client v1.0.0/go.mod h1:j0hNfjhrt2SxUOw55nL0ATM/z4Yt3t2Kd1mW34z5W5s= -github.com/d2g/dhcp4server v0.0.0-20181031114812-7d4a0a7f59a5/go.mod h1:Eo87+Kg/IX2hfWJfwxMzLyuSZyxSoAug2nGa1G2QAi8= -github.com/d2g/hardwareaddr v0.0.0-20190221164911-e7d9fbe030e4/go.mod h1:bMl4RjIciD2oAxI7DmWRx6gbeqrkoLqv3MV0vzNad+I= -github.com/danieljoos/wincred v1.1.0/go.mod h1:XYlo+eRTsVA9aHGp7NGjFkPla4m+DCL7hqDjlFjiygg= +github.com/buildpacks/imgutil v0.0.0-20251202182233-51c1c8c186ea h1:91PTHjeL3uzjr2/jk1SJuFZp3ObodKawy79BKdio+VE= +github.com/buildpacks/imgutil v0.0.0-20251202182233-51c1c8c186ea/go.mod h1:yw2U9Ec8KUk7jWY97K0+e6GqrUAr05uTK6LwOEZyupw= +github.com/buildpacks/lifecycle v0.21.0 h1:s2okNv1I4rETBC4CRm4ly7DRr5eTqx1bpKXyf+ywVms= 
+github.com/buildpacks/lifecycle v0.21.0/go.mod h1:5Rb9kld2v1XYQC14fJrIGESqmuJvSYHVlUxAW5MAxNU= +github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= +github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/chainguard-dev/kaniko v1.25.7 h1:d5oWlbMlMA2qt612Gq7Zl+r/P66pA/REHyw5IkWoc4U= +github.com/chainguard-dev/kaniko v1.25.7/go.mod h1:4f2ZX2HUS7Ib4jfSseXljMYR3PBZIFUEvOKCjAkYHrY= +github.com/chrismellard/docker-credential-acr-env v0.0.0-20230304212654-82a0ddb27589 h1:krfRl01rzPzxSxyLyrChD+U+MzsBXbm0OwYYB67uF+4= +github.com/chrismellard/docker-credential-acr-env v0.0.0-20230304212654-82a0ddb27589/go.mod h1:OuDyvmLnMCwa2ep4Jkm6nyA0ocJuZlGyk2gGseVzERM= +github.com/cloudflare/circl v1.6.1 h1:zqIqSPIndyBh1bjLVVDHMPpVKqp8Su/V+6MeDzzQBQ0= +github.com/cloudflare/circl v1.6.1/go.mod h1:uddAzsPgqdMAYatqJ0lsjX1oECcQLIlRpzZh3pJrofs= +github.com/containerd/errdefs v1.0.0 h1:tg5yIfIlQIrxYtu9ajqY42W3lpS19XqdxRQeEwYG8PI= +github.com/containerd/errdefs v1.0.0/go.mod h1:+YBYIdtsnF4Iw6nWZhJcqGSg/dwvV7tyJ/kCkyJ2k+M= +github.com/containerd/errdefs/pkg v0.3.0 h1:9IKJ06FvyNlexW690DXuQNx2KA2cUJXx151Xdx3ZPPE= +github.com/containerd/errdefs/pkg v0.3.0/go.mod h1:NJw6s9HwNuRhnjJhM7pylWwMyAkmCQvQ4GpJHEqRLVk= +github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I= +github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo= +github.com/containerd/stargz-snapshotter/estargz v0.18.1 h1:cy2/lpgBXDA3cDKSyEfNOFMA/c10O1axL69EU7iirO8= +github.com/containerd/stargz-snapshotter/estargz v0.18.1/go.mod h1:ALIEqa7B6oVDsrF37GkGN20SuvG/pIMm7FwP7ZmRb0Q= +github.com/containerd/typeurl/v2 v2.2.3 h1:yNA/94zxWdvYACdYO8zofhrTVuQY73fFU1y++dYSw40= +github.com/containerd/typeurl/v2 v2.2.3/go.mod h1:95ljDnPfD3bAbDJRugOiShd/DlAAsxGtUBhJxIn7SCk= +github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g= +github.com/creack/pty v1.1.24 
h1:bJrF4RRfyJnbTJqzRLHzcGaZK1NeM5kTC9jGgovnR1s= +github.com/creack/pty v1.1.24/go.mod h1:08sCNb52WyoAwi2QDyzUCTgcvVFhUzewun7wtTfvcwE= +github.com/cyphar/filepath-securejoin v0.6.0 h1:BtGB77njd6SVO6VztOHfPxKitJvd/VPT+OFBFMOi1Is= +github.com/cyphar/filepath-securejoin v0.6.0/go.mod h1:A8hd4EnAeyujCJRrICiOWqjS1AX0a9kM5XL+NwKoYSc= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/denverdino/aliyungo v0.0.0-20190125010748-a747050bb1ba/go.mod h1:dV8lFg6daOBZbT6/BDGIz6Y3WFGn8juu6G+CQ6LHtl0= -github.com/dgrijalva/jwt-go v0.0.0-20170104182250-a601269ab70c/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= -github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= -github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no= -github.com/dnaeon/go-vcr v1.0.1/go.mod h1:aBB1+wY4s93YsC3HHjMBMrwTj2R9FHDzUr9KyGc8n1E= -github.com/docker/cli v20.10.10+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= -github.com/docker/cli v20.10.11+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= -github.com/docker/cli v20.10.12+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= -github.com/docker/cli v20.10.14+incompatible h1:dSBKJOVesDgHo7rbxlYjYsXe7gPzrTT+/cKQgpDAazg= -github.com/docker/cli v20.10.14+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= -github.com/docker/distribution v0.0.0-20190905152932-14b96e55d84c/go.mod h1:0+TTO4EOBfRPhZXAeF1Vu+W3hHZ8eLp8PgKVZlcvtFY= -github.com/docker/distribution v2.7.1-0.20190205005809-0d3efadf0154+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= -github.com/docker/distribution v2.7.1+incompatible h1:a5mlkVzth6W5A4fOsS3D2EO5BUmsJpcB+cRlLU7cSug= 
-github.com/docker/distribution v2.7.1+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= -github.com/docker/docker v20.10.10+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= -github.com/docker/docker v20.10.11+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= -github.com/docker/docker v20.10.12+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= -github.com/docker/docker v20.10.14+incompatible h1:+T9/PRYWNDo5SZl5qS1r9Mo/0Q8AwxKKPtu9S1yxM0w= -github.com/docker/docker v20.10.14+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= -github.com/docker/docker-credential-helpers v0.6.4 h1:axCks+yV+2MR3/kZhAmy07yC56WZ2Pwu/fKWtKuZB0o= -github.com/docker/docker-credential-helpers v0.6.4/go.mod h1:ofX3UI0Gz1TteYBjtgs07O36Pyasyp66D2uKT7H8W1c= -github.com/docker/go-connections v0.4.0 h1:El9xVISelRB7BuFusrZozjnkIM5YnzCViNKohAFqRJQ= -github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec= -github.com/docker/go-events v0.0.0-20170721190031-9461782956ad/go.mod h1:Uw6UezgYA44ePAFQYUehOuCzmy5zmg/+nl2ZfMWGkpA= -github.com/docker/go-events v0.0.0-20190806004212-e31b211e4f1c/go.mod h1:Uw6UezgYA44ePAFQYUehOuCzmy5zmg/+nl2ZfMWGkpA= -github.com/docker/go-metrics v0.0.0-20180209012529-399ea8c73916/go.mod h1:/u0gXw0Gay3ceNrsHubL3BtdOL2fHf93USgMTe0W5dI= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dimchansky/utfbom v1.1.1 h1:vV6w1AhK4VMnhBno/TPVCoK9U/LP0PkLCS9tbxHdi/U= +github.com/dimchansky/utfbom v1.1.1/go.mod h1:SxdoEBH5qIqFocHMyGOXVAybYJdr71b1Q/j0mACtrfE= +github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk= +github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E= +github.com/docker/cli 
v29.2.1+incompatible h1:n3Jt0QVCN65eiVBoUTZQM9mcQICCJt3akW4pKAbKdJg= +github.com/docker/cli v29.2.1+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= +github.com/docker/distribution v2.8.3+incompatible h1:AtKxIZ36LoNK51+Z6RpzLpddBirtxJnzDrHLEKxTAYk= +github.com/docker/distribution v2.8.3+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= +github.com/docker/docker v28.5.2+incompatible h1:DBX0Y0zAjZbSrm1uzOkdr1onVghKaftjlSWt4AFexzM= +github.com/docker/docker v28.5.2+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/docker-credential-helpers v0.9.4 h1:76ItO69/AP/V4yT9V4uuuItG0B1N8hvt0T0c0NN/DzI= +github.com/docker/docker-credential-helpers v0.9.4/go.mod h1:v1S+hepowrQXITkEfw6o4+BMbGot02wiKpzWhGUZK6c= +github.com/docker/go-connections v0.6.0 h1:LlMG9azAe1TqfR7sO+NJttz1gy6KO7VJBh+pMmjSD94= +github.com/docker/go-connections v0.6.0/go.mod h1:AahvXYshr6JgfUJGdDCs2b5EZG/vmaMAntpSFH5BFKE= +github.com/docker/go-metrics v0.0.1 h1:AgB/0SvBxihN0X8OR4SjsblXkbMvalQ8cjmtKQ2rQV8= github.com/docker/go-metrics v0.0.1/go.mod h1:cG1hvH2utMXtqgqqYE9plW6lDxS3/5ayHzueweSI3Vw= -github.com/docker/go-units v0.4.0 h1:3uh0PgVws3nIA0Q+MwDC8yjEPf9zjRfZZWXZYDct3Tw= -github.com/docker/go-units v0.4.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= -github.com/docker/libtrust v0.0.0-20150114040149-fa567046d9b1/go.mod h1:cyGadeNEkKy96OOhEzfZl+yxihPEzKnqJwvfuSUqbZE= -github.com/docker/spdystream v0.0.0-20160310174837-449fdfce4d96/go.mod h1:Qh8CwZgvJUkLughtfhJv5dyTYa91l1fOUCrgjqmcifM= -github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3ebgob9U8Nd0kOddGdZWjyMGR8Wziv+TBNwSE= -github.com/dustin/go-humanize v0.0.0-20171111073723-bb3d318650d4/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= -github.com/dustin/go-humanize v1.0.0 h1:VSnTsYCnlFHaM2/igO1h6X3HA71jcobQuxemgkq4zYo= -github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= -github.com/elazarl/goproxy 
v0.0.0-20180725130230-947c36da3153/go.mod h1:/Zj4wYkgs4iZTTu3o/KG3Itv/qCCa8VVMlb3i9OVuzc= -github.com/emicklei/go-restful v0.0.0-20170410110728-ff4f55a20633/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs= -github.com/emicklei/go-restful v2.9.5+incompatible/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs= -github.com/emirpasic/gods v1.12.0 h1:QAUIPSaCu4G+POclxeqb3F+WPpdKqFGlw36+yOzGlrg= -github.com/emirpasic/gods v1.12.0/go.mod h1:YfzfFFoVP/catgzJb4IKIqXjX78Ha8FMSDh3ymbK86o= -github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= -github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= -github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= -github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= -github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= -github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= -github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ= -github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0= -github.com/envoyproxy/go-control-plane v0.10.1/go.mod h1:AY7fTTXNdv/aJ2O5jwpxAPOWUZ7hQAEvzN5Pf27BkQQ= -github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= -github.com/envoyproxy/protoc-gen-validate v0.6.2/go.mod h1:2t7qjJNvHPx8IjnBOzl9E9/baC+qXE/TeeyBRzgJDws= -github.com/evanphx/json-patch v4.9.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= +github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4= +github.com/docker/go-units 
v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= +github.com/docker/libtrust v0.0.0-20160708172513-aabc10ec26b7 h1:UhxFibDNY/bfvqU5CAUmr9zpesgbU6SWc8/B4mflAE4= +github.com/docker/libtrust v0.0.0-20160708172513-aabc10ec26b7/go.mod h1:cyGadeNEkKy96OOhEzfZl+yxihPEzKnqJwvfuSUqbZE= +github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= +github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= +github.com/elazarl/goproxy v1.7.2 h1:Y2o6urb7Eule09PjlhQRGNsqRfPmYI3KKQLFpCAV3+o= +github.com/elazarl/goproxy v1.7.2/go.mod h1:82vkLNir0ALaW14Rc399OTTjyNREgmdL2cVoIbS6XaE= +github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc= +github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ= github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= -github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU= -github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= -github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568/go.mod h1:xEzjJPgXI435gkrCt3MPfRiAkVrwSbHsst4LCFVfpJc= -github.com/form3tech-oss/jwt-go v3.2.2+incompatible/go.mod h1:pbq4aXjuKjdthFRnoDwaVPLA+WlJuPGy+QneDUgJi2k= -github.com/frankban/quicktest v1.11.3/go.mod h1:wRf/ReqHper53s+kmmSZizM8NamnL3IM0I9ntUbOk+k= +github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= +github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= -github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= -github.com/fsnotify/fsnotify v1.5.1/go.mod h1:T3375wBYaZdLLcVNkcVbzGHY7f1l/uK5T5Ai1i3InKU= -github.com/fullsailor/pkcs7 v0.0.0-20190404230743-d7302db945fa/go.mod h1:KnogPXtdwXqoenmZCw6S+25EAm2MkxbG0deNDu4cbSA= -github.com/garyburd/redigo 
v0.0.0-20150301180006-535138d7bcd7/go.mod h1:NR3MbYisc3/PwhQ00EMzDiPmrwpPxAn5GI05/YaO1SY= -github.com/gdamore/encoding v1.0.0 h1:+7OoQ1Bc6eTm5niUzBa0Ctsh6JbMW6Ra+YNuAtDBdko= -github.com/gdamore/encoding v1.0.0/go.mod h1:alR0ol34c49FCSBLjhosxzcPHQbf2trDkoo5dl+VrEg= -github.com/gdamore/tcell/v2 v2.3.3/go.mod h1:cTTuF84Dlj/RqmaCIV5p4w8uG1zWdk0SF6oBpwHp4fU= -github.com/gdamore/tcell/v2 v2.5.0 h1:/LA5f/wqTP5mWT79czngibKVVx5wOgdFTIXPQ68fMO8= -github.com/gdamore/tcell/v2 v2.5.0/go.mod h1:wSkrPaXoiIWZqW/g7Px4xc79di6FTcpB8tvaKJ6uGBo= -github.com/ghodss/yaml v0.0.0-20150909031657-73d445a93680/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= -github.com/ghodss/yaml v1.0.0 h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk= -github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= -github.com/gliderlabs/ssh v0.2.2 h1:6zsha5zo/TWhRhwqCD3+EarCAgZ2yN28ipRnGPnwkI0= -github.com/gliderlabs/ssh v0.2.2/go.mod h1:U7qILu1NlMHj9FlMhZLlkCdDnU1DBEAqr0aevW3Awn0= -github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= -github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= -github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= -github.com/go-ini/ini v1.25.4/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8= +github.com/gdamore/encoding v1.0.1 h1:YzKZckdBL6jVt2Gc+5p82qhrGiqMdG/eNs6Wy0u3Uhw= +github.com/gdamore/encoding v1.0.1/go.mod h1:0Z0cMFinngz9kS1QfMjCP8TY7em3bZYeeklsSDPivEo= +github.com/gdamore/tcell/v2 v2.13.8 h1:Mys/Kl5wfC/GcC5Cx4C2BIQH9dbnhnkPgS9/wF3RlfU= +github.com/gdamore/tcell/v2 v2.13.8/go.mod h1:+Wfe208WDdB7INEtCsNrAN6O2m+wsTPk1RAovjaILlo= +github.com/gliderlabs/ssh v0.3.8 h1:a4YXD1V7xMF9g5nTkdfnja3Sxy1PVDCj1Zg4Wb8vY6c= +github.com/gliderlabs/ssh v0.3.8/go.mod h1:xYoytBv1sV0aL3CavoDuJIQNURXkkfPA/wxQ1pL1fAU= +github.com/go-git/gcfg 
v1.5.1-0.20230307220236-3a3c6141e376 h1:+zs/tPmkDkHx3U66DAb0lQFJrpS6731Oaa12ikc+DiI= +github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376/go.mod h1:an3vInlBmSxCcxctByoQdvwPiA7DTK7jaaFDBTtu0ic= +github.com/go-git/go-billy/v5 v5.7.0 h1:83lBUJhGWhYp0ngzCMSgllhUSuoHP1iEWYjsPl9nwqM= +github.com/go-git/go-billy/v5 v5.7.0/go.mod h1:/1IUejTKH8xipsAcdfcSAlUlo2J7lkYV8GTKxAT/L3E= +github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399 h1:eMje31YglSBqCdIqdhKBW8lokaMrL3uTkpGYlE2OOT4= +github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399/go.mod h1:1OCfN199q1Jm3HZlxleg+Dw/mwps2Wbk9frAWm+4FII= +github.com/go-git/go-git/v5 v5.16.5 h1:mdkuqblwr57kVfXri5TTH+nMFLNUxIj9Z7F5ykFbw5s= +github.com/go-git/go-git/v5 v5.16.5/go.mod h1:QOMLpNf1qxuSY4StA/ArOdfFR2TrKEjJiye2kel2m+M= github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= -github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= -github.com/go-logr/logr v0.1.0/go.mod h1:ixOQHD9gLJUVQQ2ZOR7zLEifBX6tGkNJF4QyIY7sIas= -github.com/go-logr/logr v0.2.0/go.mod h1:z6/tIYblkpsD+a4lm/fGIIU9mZ+XfAiaFtq7xTgseGU= -github.com/go-openapi/jsonpointer v0.19.2/go.mod h1:3akKfEdA7DF1sugOqz1dVQHBcuDBPKZGEoHC/NkiQRg= -github.com/go-openapi/jsonpointer v0.19.3/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= -github.com/go-openapi/jsonreference v0.19.2/go.mod h1:jMjeRr2HHw6nAVajTXJ4eiUwohSTlpa0o73RUL1owJc= -github.com/go-openapi/jsonreference v0.19.3/go.mod h1:rjx6GuL8TTa9VaixXglHmQmIL98+wF9xc8zWvFonSJ8= -github.com/go-openapi/spec v0.19.3/go.mod h1:FpwSN1ksY1eteniUU7X0N/BgJ7a4WvBFVA8Lj9mJglo= -github.com/go-openapi/swag v0.19.2/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk= -github.com/go-openapi/swag v0.19.5/go.mod 
h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk= +github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI= +github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= -github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE= -github.com/godbus/dbus v0.0.0-20151105175453-c7fdd8b5cd55/go.mod h1:/YcGZj5zSblfDWMMoOzV4fas9FZnQYTkDnsGvmh2Grw= -github.com/godbus/dbus v0.0.0-20180201030542-885f9cc04c9c/go.mod h1:/YcGZj5zSblfDWMMoOzV4fas9FZnQYTkDnsGvmh2Grw= -github.com/godbus/dbus v0.0.0-20190422162347-ade71ed3457e/go.mod h1:bBOAhwG1umN6/6ZUMtDFBMQR8jRg9O75tm9K00oMsK4= -github.com/godbus/dbus/v5 v5.0.3/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= -github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= -github.com/gogo/googleapis v1.2.0/go.mod h1:Njal3psf3qN6dwBtQfUmBZh2ybovJ0tlu3o/AC7HYjU= -github.com/gogo/googleapis v1.4.0/go.mod h1:5YRNX2z1oM5gXdAkurHa942MDgEJyk02w4OecKY87+c= +github.com/go-task/slim-sprig/v3 v3.0.0 h1:sUs3vkvUymDpBKi3qH1YSqBQk9+9D/8M2mN1vB6EwHI= +github.com/go-task/slim-sprig/v3 v3.0.0/go.mod h1:W848ghGpv3Qj3dhTPRyJypKRiqCdHZiAzKg9hl15HA8= +github.com/go-viper/mapstructure/v2 v2.4.0 h1:EBsztssimR/CONLSZZ04E8qAkxNYq4Qp9LvH92wZUgs= +github.com/go-viper/mapstructure/v2 v2.4.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= -github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4= -github.com/gogo/protobuf v1.2.2-0.20190723190241-65acae22fc9d/go.mod 
h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o= -github.com/gogo/protobuf v1.3.0/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o= -github.com/gogo/protobuf v1.3.1/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o= github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= -github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= -github.com/golang/groupcache v0.0.0-20160516000752-02826c3e7903/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= -github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= -github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= -github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= -github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= -github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= -github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= -github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= -github.com/golang/mock v1.5.0/go.mod 
h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8= +github.com/golang-jwt/jwt/v4 v4.0.0/go.mod h1:/xlHOz8bRuivTWchD4jCa+NbatV+wEUSzwAxVc6locg= +github.com/golang-jwt/jwt/v4 v4.2.0/go.mod h1:/xlHOz8bRuivTWchD4jCa+NbatV+wEUSzwAxVc6locg= +github.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= +github.com/golang-jwt/jwt/v4 v4.5.2 h1:YtQM7lnr8iZ+j5q71MGKkNw9Mn7AjHM68uc9g5fXeUI= +github.com/golang-jwt/jwt/v4 v4.5.2/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= +github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 h1:f+oWsMOmNPc8JmEHVZIycC7hBoQxHH9pNKQORJNozsQ= +github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8/go.mod h1:wcDNUvekVysuuOpQKo3191zZyTpiI6se1N1ULghS0sw= github.com/golang/mock v1.6.0 h1:ErTB+efbowRARo13NNdxyJji2egdxLGQhRaY+DUumQc= github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= -github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= -github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= -github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= -github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= -github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= -github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= -github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= -github.com/golang/protobuf 
v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= -github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= -github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= -github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= -github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM= -github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw= -github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= -github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= -github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= -github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= -github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= -github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= -github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.7 
h1:81/ik6ipDQS2aGcBfIN5dHDB36BwrStyeAQquSYCV4o= -github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= -github.com/google/go-containerregistry v0.7.0/go.mod h1:2zaoelrL0d08gGbpdP3LqyUuBmhWbpD6IOe2s9nLS2k= -github.com/google/go-containerregistry v0.8.0 h1:mtR24eN6rapCN+shds82qFEIWWmg64NPMuyCNT7/Ogc= -github.com/google/go-containerregistry v0.8.0/go.mod h1:wW5v71NHGnQyb4k+gSshjxidrC7lN33MdWEn+Mz9TsI= +github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= +github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +github.com/google/go-containerregistry v0.20.7 h1:24VGNpS0IwrOZ2ms2P1QE3Xa5X9p4phx0aUgzYzHW6I= +github.com/google/go-containerregistry v0.20.7/go.mod h1:Lx5LCZQjLH1QBaMPeGwsME9biPeo1lPx6lbGj/UmzgM= github.com/google/go-github/v30 v30.1.0 h1:VLDx+UolQICEOKu2m4uAoMti1SxuEBAl7RSEG16L+Oo= github.com/google/go-github/v30 v30.1.0/go.mod h1:n8jBpHl45a/rlBUtRJMOG4GhNADUQFEufcolZ95JfU8= -github.com/google/go-querystring v1.0.0 h1:Xkwi/a1rcvNg1PPYe5vI8GbeBY/jrVuDX5ASuANWTrk= github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck= +github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8= +github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= -github.com/google/gofuzz v1.1.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= -github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= -github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= -github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= -github.com/google/martian/v3 v3.2.1/go.mod h1:oBOf6HBosgwRXnUGWUB05QECsc6uvmMiJ3+6W4l/CUk= -github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod 
h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= -github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= -github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= -github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/pprof v0.0.0-20260115054156-294ebfa9ad83 h1:z2ogiKUYzX5Is6zr/vP9vJGqPwcdqsWjOt+V8J7+bTc= +github.com/google/pprof 
v0.0.0-20260115054156-294ebfa9ad83/go.mod h1:MxpfABSjhmINe3F1It9d+8exIHFvUqtLIRCdOGNXqiI= +github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 h1:El6M4kTTCOh6aBiKaUGG7oYTSPP8MxqL4YI3kZKwcP4= +github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510/go.mod h1:pupxD2MaaD3pAXIBCelhxNneeOaAeabZDe5s4K6zSpQ= github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/google/uuid v1.2.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= -github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= -github.com/googleapis/gax-go/v2 v2.1.0/go.mod h1:Q3nei7sK6ybPYH7twZdmQpAd1MKb7pfu6SK+H1/DsU0= -github.com/googleapis/gax-go/v2 v2.1.1/go.mod h1:hddJymUZASv3XPyGkUpKj8pPO47Rmb0eJc8R6ouapiM= -github.com/googleapis/gnostic v0.4.1/go.mod h1:LRhVm6pbyptWbWbuZ38d1eyptfvIytN3ir6b65WBswg= -github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= -github.com/gorilla/handlers v0.0.0-20150720190736-60c7bfde3e33/go.mod h1:Qkdc/uu4tH4g6mTK6auzZ766c4CA0Ng8+o/OAirnOIQ= -github.com/gorilla/mux v1.7.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= -github.com/gorilla/mux v1.8.0 h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI= -github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So= -github.com/gorilla/websocket v0.0.0-20170926233335-4201258b820c/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= -github.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= -github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= -github.com/gregjones/httpcache v0.0.0-20180305231024-9cad4c3443a7/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA= 
-github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= -github.com/grpc-ecosystem/go-grpc-middleware v1.0.1-0.20190118093823-f849b5445de4/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= -github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk= -github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= -github.com/grpc-ecosystem/grpc-gateway v1.9.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= -github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= -github.com/hashicorp/consul/api v1.1.0/go.mod h1:VmuI/Lkw1nC05EYQWNKwWGbkg+FbDBtguAZLlVdkD9Q= -github.com/hashicorp/consul/api v1.11.0/go.mod h1:XjsvQN+RJGWI2TWy1/kqaE16HrR2J/FWgkYjdZQsX9M= -github.com/hashicorp/consul/sdk v0.1.1/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8= -github.com/hashicorp/consul/sdk v0.8.0/go.mod h1:GBvyrGALthsZObzUGsfgHZQDXjg4lOjagTIwIR1vPms= -github.com/hashicorp/errwrap v0.0.0-20141028054710-7554cd9344ce/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= -github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= -github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= -github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= -github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= -github.com/hashicorp/go-hclog v0.12.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= -github.com/hashicorp/go-hclog v1.0.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= -github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= -github.com/hashicorp/go-immutable-radix v1.3.1/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= -github.com/hashicorp/go-msgpack v0.5.3/go.mod 
h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM= -github.com/hashicorp/go-multierror v0.0.0-20161216184304-ed905158d874/go.mod h1:JMRHfdO9jKNzS/+BTlxCjKNQHg/jZAft8U7LloJvN7I= -github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= -github.com/hashicorp/go-multierror v1.1.0/go.mod h1:spPvp8C1qA32ftKqdAHm4hHTbPw+vmowP0z+KUhOZdA= -github.com/hashicorp/go-retryablehttp v0.5.3/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs= -github.com/hashicorp/go-rootcerts v1.0.0/go.mod h1:K6zTfqpRlCUIjkwsN4Z+hiSfzSTQa6eBIzfwKfwNnHU= -github.com/hashicorp/go-rootcerts v1.0.2/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8= -github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU= -github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4= -github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= -github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= -github.com/hashicorp/go.net v0.0.1/go.mod h1:hjKkEWcCURg++eb33jQU7oqQcI9XDCnUzHA0oac0k90= -github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= -github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= -github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= -github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= -github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= -github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0mNTz8vQ= -github.com/hashicorp/mdns v1.0.1/go.mod h1:4gW7WsVCke5TE7EPeYliwHlRUyBtfCwuFwuMg2DmyNY= -github.com/hashicorp/mdns v1.0.4/go.mod h1:mtBihi+LeNXGtG8L9dX59gAEa12BDtBQSp4v/YAJqrc= -github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I= -github.com/hashicorp/memberlist 
v0.2.2/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOnAH9VT3Sh9MUE= -github.com/hashicorp/memberlist v0.3.0/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOnAH9VT3Sh9MUE= -github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc= -github.com/hashicorp/serf v0.9.5/go.mod h1:UWDWwZeL5cuWDJdl0C6wrvrUwEqtQ4ZKBKKENpqIUyk= -github.com/hashicorp/serf v0.9.6/go.mod h1:TXZNMjZQijwlDvp+r0b63xZ45H7JmCmgg4gpTwn9UV4= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY= +github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ= github.com/hectane/go-acl v0.0.0-20190604041725-da78bae5fc95 h1:S4qyfL2sEm5Budr4KVMyEniCy+PbS55651I/a+Kn/NQ= github.com/hectane/go-acl v0.0.0-20190604041725-da78bae5fc95/go.mod h1:QiyDdbZLaJ/mZP4Zwc9g2QsfaEA4o7XvvgZegSci5/E= github.com/heroku/color v0.0.6 h1:UTFFMrmMLFcL3OweqP1lAdp8i1y/9oHqkeHjQ/b/Ny0= github.com/heroku/color v0.0.6/go.mod h1:ZBvOcx7cTF2QKOv4LbmoBtNl5uB17qWxGuzZrsi1wLU= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= -github.com/iancoleman/strcase v0.2.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= -github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= -github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= -github.com/imdario/mergo v0.3.5/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= -github.com/imdario/mergo v0.3.8/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= -github.com/imdario/mergo v0.3.10/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= -github.com/imdario/mergo v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= -github.com/imdario/mergo v0.3.12/go.mod 
h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= -github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM= -github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= -github.com/j-keck/arping v0.0.0-20160618110441-2cf9dc699c56/go.mod h1:ymszkNOg6tORTn+6F6j+Jc8TOr5osrynvN6ivFWZ2GA= +github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= +github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A= github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo= -github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= -github.com/jmespath/go-jmespath v0.0.0-20160202185014-0b12d6b521d8/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= -github.com/jmespath/go-jmespath v0.0.0-20160803190731-bd40a432e4c7/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= -github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo= github.com/jpillora/backoff v0.0.0-20180909062703-3050d21c67d7/go.mod h1:2iMrUgbbvHEiQClaW2NsSzMyGHqN+rDFqY705q49KG0= github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= github.com/json-iterator/go v1.1.7/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= -github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= -github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= -github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= -github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= 
-github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= -github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= -github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= -github.com/kevinburke/ssh_config v0.0.0-20190725054713-01f96b0aa0cd h1:Coekwdh0v2wtGp9Gmz1Ze3eVRAWJMLokvN3QjdzCHLY= -github.com/kevinburke/ssh_config v0.0.0-20190725054713-01f96b0aa0cd/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM= -github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q= -github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00= +github.com/kevinburke/ssh_config v1.2.0 h1:x584FjTGwHzMwvHx18PXxbBVzfnxogHaAReU4gf13a4= +github.com/kevinburke/ssh_config v1.2.0/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= -github.com/klauspost/compress v1.11.3/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= -github.com/klauspost/compress v1.11.13/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= -github.com/klauspost/compress v1.13.6 h1:P76CopJELS0TiO2mebmnzgWaajssP/EszplttgQxcgc= -github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= +github.com/klauspost/compress v1.18.2 h1:iiPHWW0YrcFgpBYhsA6D1+fqHssJscY/Tm/y2Uqnapk= +github.com/klauspost/compress v1.18.2/go.mod h1:R0h/fSBs8DE4ENlcrlib3PsXS61voFxhIs2DeRhCvJ4= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= -github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= 
-github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= -github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= -github.com/kr/pretty v0.2.1 h1:Fmg33tUaq4/8ym9TJN1x7sLJnHVwhP33CNkpYV/7rwI= -github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= -github.com/kr/pty v1.1.5/go.mod h1:9r2w37qlBe7rQ6e1fg1S/9xpWHSnaqNdHD3WcMdbPDA= -github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= -github.com/lucasb-eyer/go-colorful v1.0.3/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0= -github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY= -github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0= -github.com/lyft/protoc-gen-star v0.5.3/go.mod h1:V0xaHgaf5oCCqmcxYcWiDfTiKsZsRc87/1qhoTACD8w= -github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= -github.com/magiconair/properties v1.8.5/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= -github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= -github.com/mailru/easyjson 
v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= -github.com/mailru/easyjson v0.7.0/go.mod h1:KAzv3t3aY1NaHWoQz1+4F1ccyAH66Jk7yos7ldAVICs= -github.com/marstr/guid v1.1.0/go.mod h1:74gB1z2wpxxInTG6yaqA7KrtM0NZ+RbrcqDvYHefzho= -github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= +github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= +github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= +github.com/lucasb-eyer/go-colorful v1.3.0 h1:2/yBRLdWBZKrf7gB40FoiKfAWYQ0lqNcbuQwVHXptag= +github.com/lucasb-eyer/go-colorful v1.3.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0= github.com/mattn/go-colorable v0.1.1/go.mod h1:FuOcm+DKB9mbwrcAfNl7/TZVBZ6rcnceauSikq3lYCQ= github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= -github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= -github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= -github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= -github.com/mattn/go-colorable v0.1.12 h1:jF+Du6AlPIjs2BiUiQlKOX0rt3SujHxPnksPKZbaA40= -github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= -github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= -github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= +github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE= +github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stgPZH1UqBm1s8= github.com/mattn/go-isatty v0.0.5/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= -github.com/mattn/go-isatty v0.0.10/go.mod 
h1:qgIWMr58cqv1PHHyhnkY9lrL7etaEgOFcMEpPG5Rm84= -github.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOAqxQCu2WE= -github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= -github.com/mattn/go-isatty v0.0.14 h1:yVuAays6BHfxijgZPzw+3Zlu5yQgKGP2/hcQbHb7S9Y= -github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= -github.com/mattn/go-runewidth v0.0.2/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= -github.com/mattn/go-runewidth v0.0.10/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRCM46jaSJTDAk= -github.com/mattn/go-runewidth v0.0.13 h1:lTGmDsbAYt5DmK6OnoV7EuIF1wEIFAcxld6ypU4OSgU= -github.com/mattn/go-runewidth v0.0.13/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= -github.com/mattn/go-shellwords v1.0.3/go.mod h1:3xCvwCdWdlDJUrvuMn7Wuy9eWs4pE8vqg+NOMyg4B2o= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= -github.com/matttproud/golang_protobuf_extensions v1.0.2-0.20181231171920-c182affec369/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4= github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE= -github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= -github.com/miekg/dns v1.1.26/go.mod h1:bPDLeHnStXmXAq1m/Ch/hvfNHr14JKNPMBo3VZKjuso= -github.com/miekg/dns v1.1.41/go.mod h1:p6aan82bvRIyn+zDIv9xYNUpwa73JcSh9BKwknJysuI= -github.com/miekg/pkcs11 v1.0.3/go.mod h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs= -github.com/mistifyio/go-zfs v2.1.2-0.20190413222219-f784269be439+incompatible/go.mod h1:8AuVvqP/mXw1px98n46wfvcGfQ4ci2FwoAjKYxuo3Z4= -github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc= -github.com/mitchellh/cli 
v1.1.0/go.mod h1:xcISNoH86gajksDmfB23e/pu+B+GeFRMYmoHXxx3xhI= -github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= -github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= -github.com/mitchellh/gox v0.4.0/go.mod h1:Sd9lOJ0+aimLBi73mGofS1ycjY8lL3uZM3JPS42BGNg= -github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0QubkSMEySY= github.com/mitchellh/ioprogress v0.0.0-20180201004757-6a23b12fa88e h1:Qa6dnn8DlasdXRnacluu8HzPts0S1I9zvvUPDbBnXFI= github.com/mitchellh/ioprogress v0.0.0-20180201004757-6a23b12fa88e/go.mod h1:waEya8ee1Ro/lgxpVhkJI4BVASzkm3UZqkx/cFJiYHM= -github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= -github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= -github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= -github.com/mitchellh/mapstructure v1.4.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= -github.com/mitchellh/osext v0.0.0-20151018003038-5e2d6d41470f/go.mod h1:OkQIRizQZAeMln+1tSwduZz7+Af5oFlKirV/MSYes2A= -github.com/moby/locker v1.0.1/go.mod h1:S7SDdo5zpBK84bzzVlKr2V0hz+7x9hWbYC/kq7oQppc= -github.com/moby/sys/mount v0.2.0 h1:WhCW5B355jtxndN5ovugJlMFJawbUODuW8fSnEH6SSM= -github.com/moby/sys/mount v0.2.0/go.mod h1:aAivFE2LB3W4bACsUXChRHQ0qKWsetY4Y9V7sxOougM= -github.com/moby/sys/mountinfo v0.4.0/go.mod h1:rEr8tzG/lsIZHBtN/JjGG+LMYx9eXgW2JI+6q0qou+A= -github.com/moby/sys/mountinfo v0.4.1 h1:1O+1cHA1aujwEwwVMa2Xm2l+gIpUHyd3+D+d7LZh1kM= -github.com/moby/sys/mountinfo v0.4.1/go.mod h1:rEr8tzG/lsIZHBtN/JjGG+LMYx9eXgW2JI+6q0qou+A= -github.com/moby/sys/symlink v0.1.0/go.mod 
h1:GGDODQmbFOjFsXvfLVn3+ZRxkch54RkSiGqsZeMYowQ= -github.com/moby/term v0.0.0-20200312100748-672ec06f55cd/go.mod h1:DdlQx2hp0Ss5/fLikoLlEeIYiATotOjgB//nb973jeo= -github.com/moby/term v0.0.0-20201216013528-df9cb8a40635/go.mod h1:FBS0z0QWA44HXygs7VXDUOGoN/1TV3RuWkLO04am3wc= -github.com/moby/term v0.0.0-20210619224110-3f7ff695adc6 h1:dcztxKSvZ4Id8iPpHERQBbIJfabdt4wUm5qy3wOL2Zc= -github.com/moby/term v0.0.0-20210619224110-3f7ff695adc6/go.mod h1:E2VnQOmVuvZB6UYnnDB0qG5Nq/1tD9acaOpo6xmt0Kw= +github.com/moby/buildkit v0.26.3 h1:D+ruZVAk/3ipRq5XRxBH9/DIFpRjSlTtMbghT5gQP9g= +github.com/moby/buildkit v0.26.3/go.mod h1:4T4wJzQS4kYWIfFRjsbJry4QoxDBjK+UGOEOs1izL7w= +github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0= +github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo= +github.com/moby/go-archive v0.2.0 h1:zg5QDUM2mi0JIM9fdQZWC7U8+2ZfixfTYoHL7rWUcP8= +github.com/moby/go-archive v0.2.0/go.mod h1:mNeivT14o8xU+5q1YnNrkQVpK+dnNe/K6fHqnTg4qPU= +github.com/moby/moby/api v1.53.0 h1:PihqG1ncw4W+8mZs69jlwGXdaYBeb5brF6BL7mPIS/w= +github.com/moby/moby/api v1.53.0/go.mod h1:8mb+ReTlisw4pS6BRzCMts5M49W5M7bKt1cJy/YbAqc= +github.com/moby/moby/client v0.2.2 h1:Pt4hRMCAIlyjL3cr8M5TrXCwKzguebPAc2do2ur7dEM= +github.com/moby/moby/client v0.2.2/go.mod h1:2EkIPVNCqR05CMIzL1mfA07t0HvVUUOl85pasRz/GmQ= +github.com/moby/patternmatcher v0.6.0 h1:GmP9lR19aU5GqSSFko+5pRqHi+Ohk1O69aFiKkVGiPk= +github.com/moby/patternmatcher v0.6.0/go.mod h1:hDPoyOpDY7OrrMDLaYoY3hf52gNCR/YOUYxkhApJIxc= +github.com/moby/sys/atomicwriter v0.1.0 h1:kw5D/EqkBwsBFi0ss9v1VG3wIkVhzGvLklJ+w3A14Sw= +github.com/moby/sys/atomicwriter v0.1.0/go.mod h1:Ul8oqv2ZMNHOceF643P6FKPXeCmYtlQMvpizfsSoaWs= +github.com/moby/sys/sequential v0.6.0 h1:qrx7XFUd/5DxtqcoH1h438hF5TmOvzC/lspjy7zgvCU= +github.com/moby/sys/sequential v0.6.0/go.mod h1:uyv8EUTrca5PnDsdMGXhZe6CCe8U/UiTWd+lL+7b/Ko= +github.com/moby/sys/user v0.4.0 h1:jhcMKit7SA80hivmFJcbB1vqmw//wU61Zdui2eQXuMs= 
+github.com/moby/sys/user v0.4.0/go.mod h1:bG+tYYYJgaMtRKgEmuueC0hJEAZWwtIbZTB+85uoHjs= +github.com/moby/sys/userns v0.1.0 h1:tVLXkFOxVu9A64/yh59slHVv9ahO9UIev4JZusOLG/g= +github.com/moby/sys/userns v0.1.0/go.mod h1:IHUYgu/kao6N8YZlp9Cf444ySSvCmDlmzUcYfDHOl28= +github.com/moby/term v0.5.2 h1:6qk3FJAFDs6i/q3W/pQ97SX192qKfZgGjCQqfCJkgzQ= +github.com/moby/term v0.5.2/go.mod h1:d3djjFCrjnB+fl8NJux+EJzu0msscUP+f8it8hPkFLc= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= -github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A= github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= -github.com/mrunalp/fileutils v0.5.0/go.mod h1:M1WthSahJixYnrXQl/DFQuteStB1weuxD2QJNHXfbSQ= -github.com/munnerz/goautoneg v0.0.0-20120707110453-a547fc61f48d/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= +github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA= github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= -github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f/go.mod h1:ZdcZmHo+o7JKHSa8/e818NopupXU1YMK5fe1lsApnBw= -github.com/ncw/swift v1.0.47/go.mod h1:23YIA4yWVnGwv2dQlN4bB7egfYX6YLn0Yo/S6zZO/ZM= -github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= 
-github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU= -github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U= -github.com/olekukonko/tablewriter v0.0.0-20170122224234-a0225b3f23b5/go.mod h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo= -github.com/onsi/ginkgo v0.0.0-20151202141238-7f8ab55aaf3b/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= -github.com/onsi/ginkgo v0.0.0-20170829012221-11459a886d9c/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/ginkgo v1.6.0 h1:Ix8l273rp3QzYgXSR+c8d1fTG7UPgYkOSELPhiY/YGw= github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= -github.com/onsi/ginkgo v1.10.1/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= -github.com/onsi/ginkgo v1.10.3/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= -github.com/onsi/ginkgo v1.11.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= -github.com/onsi/ginkgo v1.12.1 h1:mFwc4LvZ0xpSvDZ3E+k8Yte0hLOMxXUlP+yXtJqkYfQ= -github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= -github.com/onsi/ginkgo v1.16.4 h1:29JGrr5oVBm5ulCWet69zQkzWipVXIol6ygQUe/EzNc= -github.com/onsi/ginkgo v1.16.4/go.mod h1:dX+/inL/fNMqNlz0e9LfyB9TswhZpCVdJM/Z6Vvnwo0= -github.com/onsi/ginkgo/v2 v2.1.3 h1:e/3Cwtogj0HA+25nMP1jCMDIf8RtRYbGwGGuBIFztkc= -github.com/onsi/ginkgo/v2 v2.1.3/go.mod h1:vw5CSIxN1JObi/U8gcbwft7ZxR2dgaR70JSE3/PpL4c= -github.com/onsi/gomega v0.0.0-20151007035656-2152b45fa28a/go.mod h1:C1qb7wdrVGGVU+Z6iS04AVkA3Q65CEZX59MT0QO5uiA= -github.com/onsi/gomega v0.0.0-20170829124025-dcabb60a477c/go.mod h1:C1qb7wdrVGGVU+Z6iS04AVkA3Q65CEZX59MT0QO5uiA= +github.com/onsi/ginkgo/v2 v2.28.0 h1:Rrf+lVLmtlBIKv6KrIGJCjyY8N36vDVcutbGJkyqjJc= +github.com/onsi/ginkgo/v2 v2.28.0/go.mod h1:ArE1D/XhNXBXCBkKOLkbsb2c81dQHCRcF5zwn/ykDRo= github.com/onsi/gomega v1.5.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= -github.com/onsi/gomega v1.7.0/go.mod 
h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= -github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= -github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= -github.com/onsi/gomega v1.10.3/go.mod h1:V9xEwhxec5O8UDM77eCW8vLymOMltsqPVYWrpDsH8xc= -github.com/onsi/gomega v1.17.0/go.mod h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAlGdZY= -github.com/onsi/gomega v1.19.0 h1:4ieX6qQjPP/BfC3mpsAtIGGlxTWPeA3Inl/7DtXw1tw= -github.com/onsi/gomega v1.19.0/go.mod h1:LY+I3pBVzYsTBU1AnDwOSxaYi9WoWiqgwooUqq9yPro= -github.com/opencontainers/go-digest v0.0.0-20170106003457-a6d0ee40d420/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s= -github.com/opencontainers/go-digest v0.0.0-20180430190053-c9281466c8b2/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s= -github.com/opencontainers/go-digest v1.0.0-rc1/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s= -github.com/opencontainers/go-digest v1.0.0-rc1.0.20180430190053-c9281466c8b2/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s= +github.com/onsi/gomega v1.39.1 h1:1IJLAad4zjPn2PsnhH70V4DKRFlrCzGBNrNaru+Vf28= +github.com/onsi/gomega v1.39.1/go.mod h1:hL6yVALoTOxeWudERyfppUcZXjMwIMLnuSfruD2lcfg= github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= -github.com/opencontainers/image-spec v1.0.0/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0= -github.com/opencontainers/image-spec v1.0.1/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0= -github.com/opencontainers/image-spec v1.0.2-0.20210730191737-8e42a01fb1b7/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0= -github.com/opencontainers/image-spec v1.0.2-0.20211117181255-693428a734f5/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0= -github.com/opencontainers/image-spec v1.0.2 h1:9yCKha/T5XdGtO0q9Q9a6T5NUCsTn/DrBg0D7ufOcFM= -github.com/opencontainers/image-spec 
v1.0.2/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0= -github.com/opencontainers/runc v0.0.0-20190115041553-12f6a991201f/go.mod h1:qT5XzbpPznkRYVz/mWwUaVBUv2rmF59PVA73FjuZG0U= -github.com/opencontainers/runc v0.1.1/go.mod h1:qT5XzbpPznkRYVz/mWwUaVBUv2rmF59PVA73FjuZG0U= -github.com/opencontainers/runc v1.0.0-rc8.0.20190926000215-3e425f80a8c9/go.mod h1:qT5XzbpPznkRYVz/mWwUaVBUv2rmF59PVA73FjuZG0U= -github.com/opencontainers/runc v1.0.0-rc9/go.mod h1:qT5XzbpPznkRYVz/mWwUaVBUv2rmF59PVA73FjuZG0U= -github.com/opencontainers/runc v1.0.0-rc93/go.mod h1:3NOsor4w32B2tC0Zbl8Knk4Wg84SM2ImC1fxBuqJ/H0= -github.com/opencontainers/runc v1.0.2 h1:opHZMaswlyxz1OuGpBE53Dwe4/xF7EZTY0A2L/FpCOg= -github.com/opencontainers/runc v1.0.2/go.mod h1:aTaHFFwQXuA71CiyxOdFFIorAoemI04suvGRQFzWTD0= -github.com/opencontainers/runtime-spec v0.1.2-0.20190507144316-5b71a03e2700/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= -github.com/opencontainers/runtime-spec v1.0.1/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= -github.com/opencontainers/runtime-spec v1.0.2-0.20190207185410-29686dbc5559/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= -github.com/opencontainers/runtime-spec v1.0.2/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= -github.com/opencontainers/runtime-spec v1.0.3-0.20200929063507-e6143ca7d51d/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= -github.com/opencontainers/runtime-spec v1.0.3-0.20210326190908-1c3f411f0417/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= -github.com/opencontainers/runtime-tools v0.0.0-20181011054405-1d69bd0f9c39/go.mod h1:r3f7wjNzSs2extwzU3Y+6pKfobzPh+kKFJ3ofN+3nfs= -github.com/opencontainers/selinux v1.6.0/go.mod h1:VVGKuOLlE7v4PJyT6h7mNWvq1rzqiriPsEqVhc+svHE= -github.com/opencontainers/selinux v1.8.0/go.mod h1:RScLhm78qiWa2gbVCcGkC7tCGdgk3ogry1nUQF8Evvo= -github.com/opencontainers/selinux v1.8.2 h1:c4ca10UMgRcvZ6h0K4HtS15UaVSBEaE+iln2LVpAuGc= -github.com/opencontainers/selinux v1.8.2/go.mod 
h1:MUIHuUEvKB1wtJjQdOyYRgOnLD2xAPP8dBsCoU0KuF8= -github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= -github.com/pascaldekloe/goe v0.1.0/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= -github.com/pelletier/go-buffruneio v0.2.0/go.mod h1:JkE26KsDizTr40EUHkXVtNPvgGtbSNq5BcowyYOWdKo= -github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= -github.com/pelletier/go-toml v1.8.1/go.mod h1:T2/BmBdy8dvIRq1a/8aqjN41wvWlN4lrapLU/GW4pbc= -github.com/pelletier/go-toml v1.9.3/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= -github.com/pelletier/go-toml v1.9.4 h1:tjENF6MfZAg8e4ZmZTeWaWiT2vXtsoO6+iuOjFhECwM= -github.com/pelletier/go-toml v1.9.4/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= -github.com/peterbourgon/diskv v2.0.1+incompatible/go.mod h1:uqqh8zWWbv1HBMNONnaR/tNboyR3/BZd58JJSHlUSCU= +github.com/opencontainers/image-spec v1.1.1 h1:y0fUlFfIZhPF1W537XOLg0/fcx6zcHCJwooC2xJA040= +github.com/opencontainers/image-spec v1.1.1/go.mod h1:qpqAh3Dmcf36wStyyWU+kCeDgrGnAve2nCC8+7h8Q0M= +github.com/opencontainers/selinux v1.13.0 h1:Zza88GWezyT7RLql12URvoxsbLfjFx988+LGaWfbL84= +github.com/opencontainers/selinux v1.13.0/go.mod h1:XxWTed+A/s5NNq4GmYScVy+9jzXhGBVEOAyucdRUY8s= +github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8= +github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= +github.com/pjbgf/sha1cd v0.3.2 h1:a9wb0bp1oC2TGwStyn0Umc/IGKQnEgF0vVaZ8QF8eo4= +github.com/pjbgf/sha1cd v0.3.2/go.mod h1:zQWigSxVmsHEZow5qaLtPYxpcKMMQpa09ixqBxuCS6A= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pkg/errors v0.8.1-0.20171018195549-f15c970de5b7/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 
h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZI= -github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 h1:GFCKgmp0tecUJ0sJuv4pzYCqS9+RGSn52M3FUwPs+uo= +github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10/go.mod h1:t/avpk3KcrXxUnYOhZhMXJlSEyie6gQbtLq5NM3loB8= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= -github.com/posener/complete v1.2.3/go.mod h1:WZIdtGGp+qx0sLrYKtIRAruyNpv6hFCicSgv7Sy7s/s= -github.com/pquerna/cachecontrol v0.0.0-20171018203845-0dec1b30a021/go.mod h1:prYjPmNq4d1NPVmpShWobRqXY3q7Vp+80DqgxxUrUIA= -github.com/prometheus/client_golang v0.0.0-20180209125602-c332b6f63c06/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= -github.com/prometheus/client_golang v0.9.3/go.mod h1:/TN21ttK/J9q6uSwhBd54HahCDft0ttaMvbicHlPoso= github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= github.com/prometheus/client_golang v1.1.0/go.mod h1:I1FGZT9+L76gKKOs5djB6ezCbFQP1xR9D75/vuwEF3g= -github.com/prometheus/client_golang v1.4.0/go.mod h1:e9GMxYsXl05ICDXkRhurwBS4Q3OK1iX/F2sw+iXX5zU= -github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M= -github.com/prometheus/client_model v0.0.0-20171117100541-99fa1f4be8e5/go.mod 
h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_golang v1.23.2 h1:Je96obch5RDVy3FDMndoUsjAhG5Edi49h0RJWRi/o0o= +github.com/prometheus/client_golang v1.23.2/go.mod h1:Tb1a6LWHB3/SPIzCoaDXI4I8UHKeFTEQ1YCr+0Gyqmg= github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/prometheus/common v0.0.0-20180110214958-89604d197083/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro= -github.com/prometheus/common v0.0.0-20181113130724-41aa239b4cce/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro= -github.com/prometheus/common v0.4.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/client_model v0.6.2 h1:oBsgwpGs7iVziMvrGhE53c/GrLUsZdHnqNwqPLxwZyk= +github.com/prometheus/client_model v0.6.2/go.mod h1:y3m2F6Gdpfy6Ut/GBsUqTWZqCUvMVzSfMLjcu6wAwpE= github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= github.com/prometheus/common v0.6.0/go.mod h1:eBmuwkDJBwy6iBfxCBob6t6dR6ENT/y+J+Zk0j9GMYc= -github.com/prometheus/common v0.9.1/go.mod h1:yhUN8i9wzaXS3w1O07YhxHEBxD+W35wd8bs7vj7HSQ4= -github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo= -github.com/prometheus/procfs v0.0.0-20180125133057-cb4147076ac7/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/common v0.66.1 h1:h5E0h5/Y8niHc5DlaLlWLArTQI7tMrsfQjHV+d9ZoGs= +github.com/prometheus/common v0.66.1/go.mod h1:gcaUsgf3KfRSwHY4dIMXLPV0K/Wg1oZ8+SbZk/HH/dA= github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod 
h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= -github.com/prometheus/procfs v0.0.0-20190507164030-5867b95ac084/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= -github.com/prometheus/procfs v0.0.0-20190522114515-bc1a522cf7b1/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= github.com/prometheus/procfs v0.0.3/go.mod h1:4A/X28fw3Fc593LaREMrKMqOKvUAntwMDaekg4FpcdQ= -github.com/prometheus/procfs v0.0.5/go.mod h1:4A/X28fw3Fc593LaREMrKMqOKvUAntwMDaekg4FpcdQ= -github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A= -github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= -github.com/prometheus/procfs v0.2.0/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= -github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= -github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU= -github.com/rivo/tview v0.0.0-20210624165335-29d673af0ce2 h1:I5N0WNMgPSq5NKUFspB4jMJ6n2P0ipz5FlOlB4BXviQ= -github.com/rivo/tview v0.0.0-20210624165335-29d673af0ce2/go.mod h1:IxQujbYMAh4trWr0Dwa8jfciForjVmxyHpskZX6aydQ= -github.com/rivo/uniseg v0.1.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= -github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY= -github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= -github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg= +github.com/prometheus/procfs v0.16.1 h1:hZ15bTNuirocR6u0JZ6BAHHmwS1p8B4P6MRqxtzMyRg= +github.com/prometheus/procfs v0.16.1/go.mod h1:teAbpZRB1iIAJYREa1LsoWUXykVXA1KlTmWl8x/U+Is= +github.com/rivo/tview v0.42.0 h1:b/ftp+RxtDsHSaynXTbJb+/n/BxDEi+W3UfF5jILK6c= +github.com/rivo/tview v0.42.0/go.mod h1:cSfIYfhpSGCjp3r/ECJb+GKS7cGJnqV8vfjQPwoXyfY= +github.com/rivo/uniseg v0.4.7 
h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ= +github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= github.com/rogpeppe/fastuuid v1.1.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= -github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= -github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= -github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ= +github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= -github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= -github.com/sabhiram/go-gitignore v0.0.0-20201211074657-223ce5d391b0 h1:4Q/TASkyjpqyR5DL5+6c2FGSDpHM5bTMSspcXr7J6R8= -github.com/sabhiram/go-gitignore v0.0.0-20201211074657-223ce5d391b0/go.mod h1:b18R55ulyQ/h3RaWyloPyER7fWQVZvimKKhnI5OfrJQ= -github.com/safchain/ethtool v0.0.0-20190326074333-42ed695e3de8/go.mod h1:Z0q5wiBQGYcxhMZ6gUqHn6pYNLypFAvaL3UvgZLR0U4= -github.com/sagikazarmark/crypt v0.3.0/go.mod h1:uD/D+6UF4SrIR1uGEv7bBNkNqLGqUr43MRiaGWX1Nig= -github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0= +github.com/sabhiram/go-gitignore v0.0.0-20210923224102-525f6e181f06 h1:OkMGxebDjyw0ULyrTYWeN0UNCCkmCWfjPnIA2W6oviI= +github.com/sabhiram/go-gitignore v0.0.0-20210923224102-525f6e181f06/go.mod h1:+ePHsJ1keEjQtpvf9HHw0f4ZeJ0TLRsxhunSI2hYJSs= github.com/sclevine/spec v1.4.0 h1:z/Q9idDcay5m5irkZ28M7PtQM4aOISzOpj4bUPkDee8= github.com/sclevine/spec v1.4.0/go.mod h1:LvpgJaFyvQzRvc1kaDs0bulYwzC70PbiYjC4QnFHkOM= -github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= 
-github.com/seccomp/libseccomp-golang v0.9.1/go.mod h1:GbW5+tmTXfcxTToHLXlScSlAvWlF4P2Ca7zGrPiEpWo= github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= -github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0= -github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= -github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= -github.com/sirupsen/logrus v1.0.4-0.20170822132746-89742aefa4b2/go.mod h1:pMByvHTf9Beacp5x1UXfOR9xyW/9antXMhjMPG0dEzc= -github.com/sirupsen/logrus v1.0.6/go.mod h1:pMByvHTf9Beacp5x1UXfOR9xyW/9antXMhjMPG0dEzc= +github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 h1:n661drycOFuPLCN3Uc8sB6B/s6Z4t2xvBgU1htSHuq8= +github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4= github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= -github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q= -github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= -github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= -github.com/sirupsen/logrus v1.8.1 h1:dJKuHgqk1NNQlqoA6BTlM1Wf9DOH3NBjQyu0h9+AZZE= -github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= -github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= +github.com/sirupsen/logrus v1.9.4 h1:TsZE7l11zFCLZnZ+teH4Umoq5BhEIfIzfRDZ1Uzql2w= +github.com/sirupsen/logrus v1.9.4/go.mod h1:ftWc9WdOfJ0a92nsE2jF5u5ZwH8Bv2zdeOC42RjbV2g= +github.com/skeema/knownhosts v1.3.1 h1:X2osQ+RAjK76shCbvhHHHVl3ZlgDm8apHEHFqRjnBY8= +github.com/skeema/knownhosts v1.3.1/go.mod h1:r7KTdC8l4uxWRyK2TpQZ/1o5HaSzh06ePQNxPwTcfiY= 
github.com/smartystreets/assertions v1.0.0/go.mod h1:kHHU4qYBaI3q23Pp3VPrmWhuIUrLW/7eUrw0BU5VaoM= github.com/smartystreets/go-aws-auth v0.0.0-20180515143844-0c1422d1fdb9/go.mod h1:SnhjPscd9TpLiy1LpzGSKh3bXCfxxXuqd9xmQJy3slM= -github.com/smartystreets/goconvey v0.0.0-20190330032615-68dc04aab96a/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= -github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= github.com/smartystreets/gunit v1.0.0/go.mod h1:qwPWnhz6pn0NnRBP++URONOVyNkPyr4SauJk4cUOwJs= -github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM= -github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= -github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ= -github.com/spf13/afero v1.2.2/go.mod h1:9ZxEEn6pIJ8Rxe320qSDBk6AsU0r9pR7Q4OcevTdifk= -github.com/spf13/afero v1.3.3/go.mod h1:5KUK8ByomD5Ti5Artl0RtHeI5pTF7MIDuXL3yY520V4= -github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I= -github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= -github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= -github.com/spf13/cast v1.4.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= -github.com/spf13/cobra v0.0.2-0.20171109065643-2da4a54c5cee/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= -github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= -github.com/spf13/cobra v1.0.0/go.mod h1:/6GTrnGXV9HjY+aR4k0oJ5tcvakLuG6EuKReYlHNrgE= -github.com/spf13/cobra v1.2.1/go.mod h1:ExllRjgxM/piMAM+3tAZvg8fsklGAf3tPfi+i8t68Nk= -github.com/spf13/cobra v1.3.0/go.mod h1:BrRVncBjOJa/eUcVVm9CE+oC6as8k+VYr4NY7WCi9V4= -github.com/spf13/cobra v1.4.0 h1:y+wJpx64xcgO1V+RcnwW0LEHxTKRi2ZDPSBjWnrg88Q= -github.com/spf13/cobra v1.4.0/go.mod h1:Wo4iy3BUC+X2Fybo0PDqwJIv3dNRiZLHQymsfxlB84g= 
-github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo= -github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo= -github.com/spf13/pflag v0.0.0-20170130214245-9ff6c6923cff/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= -github.com/spf13/pflag v1.0.1-0.20171106142849-4c012f6dcd95/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= -github.com/spf13/pflag v1.0.1/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= -github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= -github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= -github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= -github.com/spf13/viper v1.4.0/go.mod h1:PTJ7Z/lr49W6bUbkmS1V3by4uWynFiR9p7+dSq/yZzE= -github.com/spf13/viper v1.8.1/go.mod h1:o0Pch8wJ9BVSWGQMbra6iw0oQ5oktSIBaujf1rJH9Ns= -github.com/spf13/viper v1.10.0/go.mod h1:SoyBPwAtKDzypXNDFKN5kzH7ppppbGZtls1UpIy5AsM= -github.com/src-d/gcfg v1.4.0 h1:xXbNR5AlLSA315x2UO+fTSSAXCDf+Ar38/6oyGbDKQ4= -github.com/src-d/gcfg v1.4.0/go.mod h1:p/UMsR43ujA89BJY9duynAwIpvqEujIH/jFlfL7jWoI= -github.com/stefanberger/go-pkcs11uri v0.0.0-20201008174630-78d3cae3a980/go.mod h1:AO3tvPzVZ/ayst6UlUKUv6rcPQInYe3IknH3jYhAKu8= -github.com/stretchr/objx v0.0.0-20180129172003-8a3f7159479f/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/spf13/cobra v1.10.2 h1:DMTTonx5m65Ic0GOoRY2c16WCbHxOOw6xxezuLaBpcU= +github.com/spf13/cobra v1.10.2/go.mod h1:7C1pvHqHw5A4vrJfjNwvOdzYu0Gml16OCs2GRiTUUS4= +github.com/spf13/pflag v1.0.9/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/pflag v1.0.10 h1:4EBh2KAYBwaONj6b2Ye1GiHfwjqyROoF4RwYO+vPwFk= +github.com/spf13/pflag v1.0.10/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.1/go.mod 
h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= -github.com/stretchr/testify v0.0.0-20180303142811-b89eecf5ca5d/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= -github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= -github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= -github.com/syndtr/gocapability v0.0.0-20170704070218-db04d3cc01c8/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww= -github.com/syndtr/gocapability v0.0.0-20180916011248-d98352740cb2/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww= -github.com/syndtr/gocapability v0.0.0-20200815063812-42c35b437635/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww= -github.com/tchap/go-patricia v2.2.6+incompatible/go.mod h1:bmLyhP68RS6kStMGxByiQ23RP/odRBOTVjwp2cDyi6I= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= 
+github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= github.com/tj/assert v0.0.0-20171129193455-018094318fb0/go.mod h1:mZ9/Rh9oLWpLLDRpvE+3b7gP/C2YyLFYxNmcLnPTMe0= github.com/tj/assert v0.0.3 h1:Df/BlaZ20mq6kuai7f5z2TvPFiwC3xaWJSDQNiIS3Rk= github.com/tj/assert v0.0.3/go.mod h1:Ne6X72Q+TB1AteidzQncjw9PabbMp4PBMZ1k+vd1Pvk= @@ -887,663 +394,166 @@ github.com/tj/go-buffer v1.1.0/go.mod h1:iyiJpfFcR2B9sXu7KvjbT9fpM4mOelRSDTbntVj github.com/tj/go-elastic v0.0.0-20171221160941-36157cbbebc2/go.mod h1:WjeM0Oo1eNAjXGDx2yma7uG2XoyRZTq1uv3M/o7imD0= github.com/tj/go-kinesis v0.0.0-20171128231115-08b17f58cb1b/go.mod h1:/yhzCV0xPfx6jb1bBgRFjl5lytqVqZXEaeqWP8lTEao= github.com/tj/go-spin v1.1.0/go.mod h1:Mg1mzmePZm4dva8Qz60H2lHwmJ2loum4VIrLgVnKwh4= -github.com/tmc/grpc-websocket-proxy v0.0.0-20170815181823-89b8d40f7ca8/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= -github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= -github.com/tv42/httpunix v0.0.0-20150427012821-b75d8614f926/go.mod h1:9ESjWnEqriFuLhtthL60Sar/7RFoluCcXsuvEwTV5KM= -github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc= -github.com/urfave/cli v0.0.0-20171014202726-7bc6a0acffa5/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA= -github.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA= -github.com/urfave/cli v1.22.1/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= -github.com/urfave/cli v1.22.2/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= -github.com/urfave/cli v1.22.4/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= -github.com/vbatts/tar-split v0.11.2 h1:Via6XqJr0hceW4wff3QRzD5gAk/tatMw/4ZA7cTlIME= -github.com/vbatts/tar-split v0.11.2/go.mod h1:vV3ZuO2yWSVsz+pfFzDG/upWH1JhjOiEaWq6kXyQ3VI= -github.com/vishvananda/netlink v0.0.0-20181108222139-023a6dafdcdf/go.mod h1:+SR5DhBJrl6ZM7CoCKvpw5BKroDKQ+PJqOg65H/2ktk= 
-github.com/vishvananda/netlink v1.1.0/go.mod h1:cTgwzPIzzgDAYoQrMm0EdrjRUBkTqKYppBueQtXaqoE= -github.com/vishvananda/netlink v1.1.1-0.20201029203352-d40f9887b852/go.mod h1:twkDnbuQxJYemMlGd4JFIcuhgX83tXhKS2B/PRMpOho= -github.com/vishvananda/netns v0.0.0-20180720170159-13995c7128cc/go.mod h1:ZjcWmFBXmLKZu9Nxj3WKYEafiSqer2rnvPr0en9UNpI= -github.com/vishvananda/netns v0.0.0-20191106174202-0a2b9b5464df/go.mod h1:JP3t17pCcGlemwknint6hfoeCVQrEMVwxRLRjXpq+BU= -github.com/vishvananda/netns v0.0.0-20200728191858-db3c7e526aae/go.mod h1:DD4vA1DwXk04H54A1oHXtwZmA0grkVMdPxx/VGLCah0= -github.com/willf/bitset v1.1.11-0.20200630133818-d5bec3311243/go.mod h1:RjeCKbqT1RxIR/KWY6phxZiaY1IyutSBfGjNPySAYV4= -github.com/willf/bitset v1.1.11/go.mod h1:83CECat5yLh5zVOf4P1ErAgKA5UDvKtgyUABdr3+MjI= -github.com/xanzy/ssh-agent v0.2.1/go.mod h1:mLlQY/MoOhWBj+gOGMQkOeiEvkx+8pJSI+0Bx9h2kr4= -github.com/xanzy/ssh-agent v0.3.0 h1:wUMzuKtKilRgBAD1sUb8gOwwRr2FGoBVumcjoOACClI= -github.com/xanzy/ssh-agent v0.3.0/go.mod h1:3s9xbODqPuuhK9JV1R321M/FlMZSBvE5aY6eAcqrDh0= +github.com/tonistiigi/go-csvvalue v0.0.0-20240814133006-030d3b2625d0 h1:2f304B10LaZdB8kkVEaoXvAMVan2tl9AiK4G0odjQtE= +github.com/tonistiigi/go-csvvalue v0.0.0-20240814133006-030d3b2625d0/go.mod h1:278M4p8WsNh3n4a1eqiFcV2FGk7wE5fwUpUom9mK9lE= +github.com/vbatts/tar-split v0.12.2 h1:w/Y6tjxpeiFMR47yzZPlPj/FcPLpXbTUi/9H7d3CPa4= +github.com/vbatts/tar-split v0.12.2/go.mod h1:eF6B6i6ftWQcDqEn3/iGFRFRo8cBIMSJVOpnNdfTMFA= +github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM= +github.com/xanzy/ssh-agent v0.3.3/go.mod h1:6dzNDKs0J9rVPHPhaGCukekBHKqfl+L3KghI1Bc68Uw= +github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f h1:J9EGpcZtP0E/raorCMxlFGSTBrsSlaDGf3jU/qvAE2c= github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= +github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0= 
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= -github.com/xeipuuv/gojsonschema v0.0.0-20180618132009-1d523034197f/go.mod h1:5yf86TLmAcydyeJq5YvxkGPE2fm/u4myDekKRoLuqhs= -github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU= -github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q= -github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74= +github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= -github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= -github.com/yvasiyarov/go-metrics v0.0.0-20140926110328-57bccd1ccd43/go.mod h1:aX5oPXxHm3bOH+xeAttToC8pqch2ScQN/JoXYupl6xs= -github.com/yvasiyarov/gorelic v0.0.0-20141212073537-a9bba5b9ab50/go.mod h1:NUSPSUX/bi6SeDMUh6brw0nXpxHnc96TguQh0+r/ssA= -github.com/yvasiyarov/newrelic_platform_go v0.0.0-20140908184405-b21fdbd4370f/go.mod h1:GlGEuHIJweS1mbCqG+7vt2nvWLzLLnRHbXz5JKd/Qbg= -go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= -go.etcd.io/bbolt v1.3.3/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= -go.etcd.io/bbolt v1.3.5/go.mod h1:G5EMThwa9y8QZGBClrRx5EY+Yw9kAhnjy3bSjsnlVTQ= -go.etcd.io/etcd v0.5.0-alpha.5.0.20200910180754-dd1b699fc489/go.mod h1:yVHk9ub3CSBatqGNg7GRmsnfLWtoW60w4eDYfh7vHDg= -go.etcd.io/etcd/api/v3 v3.5.0/go.mod 
h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs= -go.etcd.io/etcd/api/v3 v3.5.1/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs= -go.etcd.io/etcd/client/pkg/v3 v3.5.0/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g= -go.etcd.io/etcd/client/pkg/v3 v3.5.1/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g= -go.etcd.io/etcd/client/v2 v2.305.0/go.mod h1:h9puh54ZTgAKtEbut2oe9P4L/oqKCVB6xsXlzd7alYQ= -go.etcd.io/etcd/client/v2 v2.305.1/go.mod h1:pMEacxZW7o8pg4CrFE7pquyCJJzZvkvdD2RibOCCCGs= -go.mozilla.org/pkcs7 v0.0.0-20200128120323-432b2356ecb1/go.mod h1:SNgMg+EgDFwmvSmLRTNKC5fegJjB7v23qTQ0XLGUNHk= -go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= -go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= -go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= -go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= -go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= -go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= -go.opencensus.io v0.23.0 h1:gqCw0LfLxScz8irSi8exQc7fyQ0fKQU/qnC/X8+V/1M= -go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= -go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= -go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= -go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= -go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= -go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= -go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= -go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= -go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo= -golang.org/x/crypto v0.0.0-20171113213409-9f005a07e0d3/go.mod 
h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +go.opentelemetry.io/auto/sdk v1.2.1 h1:jXsnJ4Lmnqd11kwkBV2LgLoFMZKizbCi5fNZ/ipaZ64= +go.opentelemetry.io/auto/sdk v1.2.1/go.mod h1:KRTj+aOaElaLi+wW1kO/DZRXwkF4C5xPbEe3ZiIhN7Y= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 h1:F7Jx+6hwnZ41NSFTO5q4LYDtJRXBf2PD0rNBkeB/lus= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0/go.mod h1:UHB22Z8QsdRDrnAtX4PntOl36ajSxcdUMt1sF7Y6E7Q= +go.opentelemetry.io/otel v1.39.0 h1:8yPrr/S0ND9QEfTfdP9V+SiwT4E0G7Y5MO7p85nis48= +go.opentelemetry.io/otel v1.39.0/go.mod h1:kLlFTywNWrFyEdH0oj2xK0bFYZtHRYUdv1NklR/tgc8= +go.opentelemetry.io/otel/metric v1.39.0 h1:d1UzonvEZriVfpNKEVmHXbdf909uGTOQjA0HF0Ls5Q0= +go.opentelemetry.io/otel/metric v1.39.0/go.mod h1:jrZSWL33sD7bBxg1xjrqyDjnuzTUB0x1nBERXd7Ftcs= +go.opentelemetry.io/otel/sdk v1.39.0 h1:nMLYcjVsvdui1B/4FRkwjzoRVsMK8uL/cj0OyhKzt18= +go.opentelemetry.io/otel/sdk v1.39.0/go.mod h1:vDojkC4/jsTJsE+kh+LXYQlbL8CgrEcwmt1ENZszdJE= +go.opentelemetry.io/otel/sdk/metric v1.39.0 h1:cXMVVFVgsIf2YL6QkRF4Urbr/aMInf+2WKg+sEJTtB8= +go.opentelemetry.io/otel/sdk/metric v1.39.0/go.mod h1:xq9HEVH7qeX69/JnwEfp6fVq5wosJsY1mt4lLfYdVew= +go.opentelemetry.io/otel/trace v1.39.0 h1:2d2vfpEDmCJ5zVYz7ijaJdOF59xLomrvj7bjt6/qCJI= +go.opentelemetry.io/otel/trace v1.39.0/go.mod h1:88w4/PnZSazkGzz/w84VHpQafiU4EtqqlVdxWy+rNOA= +go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= +go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= +go.yaml.in/yaml/v2 v2.4.2 h1:DzmwEr2rDGHl7lsFgAHxmNz/1NlQ7xLIrlN2h5d1eGI= +go.yaml.in/yaml/v2 v2.4.2/go.mod h1:081UH+NErpNdqlCXm3TtEran0rJZGxAYx9hb/ELlsPU= +go.yaml.in/yaml/v3 v3.0.4 h1:tfq32ie2Jv2UxXFdLJdh3jXuOzWiL1fo0bu/FbuKpbc= +go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg= golang.org/x/crypto 
v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= -golang.org/x/crypto v0.0.0-20181009213950-7c1a557ab941/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= -golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= -golang.org/x/crypto v0.0.0-20190219172222-a4c6cb3142f2/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190426145343-a29dc8fdc734/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190611184440-5c40567a22f8/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190923035154-9ee001bba392/go.mod h1:/lpIB1dKB+9EgE3H3cr1v9wB50oz8l4C4h62xy7jSTY= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20200728195943-123391ffb6de/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20201002170205-7f63de1d35b0/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= -golang.org/x/crypto v0.0.0-20210817164053-32db794688a5 h1:HWj/xjIHfjYU5nVXpTM0s39J9CbLn7Cc5a7IC5rwsMQ= -golang.org/x/crypto 
v0.0.0-20210817164053-32db794688a5/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= -golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= -golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= -golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= -golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= -golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= -golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= -golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= -golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= -golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= -golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= -golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= -golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= -golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod 
h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= -golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= -golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= -golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= -golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= -golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= -golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= +golang.org/x/crypto v0.48.0 h1:/VRzVqiRSggnhY7gNRxPauEQ5Drw9haKdM0jqfcCFts= +golang.org/x/crypto v0.48.0/go.mod h1:r0kV5h3qnFPlQnBSrULhlsRfryS2pmewsg+XfMgkVos= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= 
golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= -golang.org/x/mod v0.5.1 h1:OJxoQ/rynoF0dcCdI7cLPktw/hR2cueqYfjm43oqK38= -golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= -golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.33.0 h1:tHFzIWbBifEmbwtGz65eaWyGiGZatSrT9prnU8DbVL8= +golang.org/x/mod v0.33.0/go.mod h1:swjeQEj+6r7fODbD2cqrnje9PnziFuw4bmLbBZFrQ5w= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20181011144130-49bb7cea24b1/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20181201002055-351d144fa1fc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net 
v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190522155817-f3200d17e092/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= -golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190619014844-b5b0513f8c1b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190827160401-ba9fcec4b297/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190923162816-aa69164e4478/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20191004110552-13f9640d40b9/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod 
h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= -golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= -golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= -golang.org/x/net v0.0.0-20201006153459-a7d1128ccaa0/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= 
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= -golang.org/x/net v0.0.0-20210410081132-afb366fc7cd1/go.mod h1:9tjilg8BloeKEkVJvy7fQ90B1CfIiPueXVOjqfkSzI8= -golang.org/x/net v0.0.0-20210428140749-89ef3d95e781/go.mod h1:OJAsFXCWl8Ukc7SiCT/9KSuxbyM7479/AVlXFRxuMCk= -golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20210813160813-60bc85c4be6d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20211111160137-58aab5ef257a/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20211203184738-4852103109b8/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20211216030914-fe4d6282115f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20220225172249-27dd8689420f h1:oA4XRj0qtSt8Yo1Zms0CUlsT3KG69V2UGQWPBxujDmc= -golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= +golang.org/x/net v0.50.0 h1:ucWh9eiCGyDR3vtzso0WMQinm2Dnt8cFMuQa9K33J60= +golang.org/x/net v0.50.0/go.mod h1:UgoSli3F/pBgdJBHCTc+tp3gmrU4XswgGRgtnwWTfyM= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod 
h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= -golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210402161424-2e8d93401602/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210805134026-6f1e6394065a/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210819190943-2bc19b11175f/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20211005180243-6b3c2da341f1/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8 h1:RerP+noqYHUQ8CMRcPlC2nvTa4dcBIjegkuWdcUDuqg= -golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8/go.mod 
h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.35.0 h1:Mv2mzuHuZuY2+bkyWXIHMfhNdJAdwW3FuWeCPYN5GVQ= +golang.org/x/oauth2 v0.35.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20210220032951-036812b2e83c h1:5KslGYwFpkhGh+Q16bwMP3cOontH8FOep7tGV86Y7SQ= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4= 
+golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190221075227-b4e8571b14e0/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190514135907-3a4b5fb9f71f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190522044717-8097e1b27ff5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190529164535-6a60838ec259/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190602015325-4c4f7f33c9ed/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190606203320-7fc4e5ec1444/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190616124812-15dcb6c0061f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190801041406-cbf593c0f2f3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190812073006-9eafafc0a87e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190922100055-0a153f010e69/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190924154521-2837fb4f24fe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191022100944-742c48ecaeb7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191115151921-52ab43148777/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 
-golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191210023423-ac6580df4449/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200120151820-655fe14d7479/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200124204421-9fbb57f87de9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200217220822-9197077df867/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys 
v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200622214017-ed371f2e16b4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200728102440-3e129f6d46b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200817155316-9781c653f443/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200831180312-196b9ba8737a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200909081042-eff7692f9009/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200916030750-2334cc1a136f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200922070232-aee5d888a860/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20201112073958-5cba982894dd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20201117170446-d9b008d0a637/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20201202213521-69691e467435/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210303074136-134d130e1a04/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210309074719-68d13333faf2/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210324051608-47abb6519492/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210403161142-5e06dd20ab57/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210426230700-d19ff857e887/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210514084401-e8d321eab015/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210603125802-9665404d3644/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= 
golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210816183151-1e6c022a8912/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210823070655-63515b42dcdf/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210908233432-aa78b53d3365/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20211110154304-99a53858aa08/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20211124211545-fe61309f8881/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20211205182925-97ca703d548d/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220318055525-2edf467146b5 h1:saXMvIOKvRFwbOMicHXr0B1uwoxq9dGmLe5ExMES6c4= -golang.org/x/sys v0.0.0-20220318055525-2edf467146b5/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.5.0/go.mod 
h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.41.0 h1:Ivj+2Cp/ylzLiEU89QhWblYnOE9zerudt9Ftecq2C6k= +golang.org/x/sys v0.41.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= -golang.org/x/term v0.0.0-20201210144234-2321bbc49cbf/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= -golang.org/x/term v0.0.0-20210220032956-6a3ed077a48d/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= -golang.org/x/term v0.0.0-20210927222741-03fcf44c2211 h1:JGgROgKl9N8DuW20oFS5gxc+lE67/N3FcwmBPMe7ArY= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= +golang.org/x/term v0.15.0/go.mod h1:BDl952bC7+uMoWR75FIrCDx79TPU9oHkTZ9yRbYOrX0= +golang.org/x/term v0.40.0 h1:36e4zGLqU4yhjlmxEaagx2KuYbJq3EwY8K943ZsHcvg= +golang.org/x/term v0.40.0/go.mod h1:w2P8uVp06p2iyKKuvXIm7N/y0UCRt3UfJTfZ7oOpglM= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.5/go.mod 
h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= -golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20200630173020-3af7569d3a1e/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba h1:O8mE0/t419eoIwhTFpKVkHiTs/Igowgfkj25AcZrtiE= -golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= +golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.34.0 h1:oL/Qq0Kdaqxa1KbNeMKwQq0reLCCaFtqu2eNuSeNHbk= +golang.org/x/text v0.34.0/go.mod h1:homfLqTYRFyVYemLBFl5GgL/DWEiH5wcsQ5gSh1yziA= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= 
-golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190614205625-5aca471b1d59/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190624222133-a101b041ded4/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190729092621-ff9f1409240a/go.mod h1:jcCCGcm9btYwXyDqrUWc6MKQKKGJCWEQ3AfLSRIbEuI= -golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20190907020128-2ca718005c18/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191112195655-aa38f8e97acc/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools 
v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= -golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= -golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= -golang.org/x/tools 
v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= -golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.1.4/go.mod 
h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.1.8/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU= +golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= +golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= +golang.org/x/tools v0.41.0 h1:a9b8iMweWG+S0OBnlU36rzLp20z1Rp10w+IY2czHTQc= +golang.org/x/tools v0.41.0/go.mod h1:XSY6eDqxVNiYgezAVqqCeihT4j1U2CCsqvH3WhQpnlg= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/api v0.0.0-20160322025152-9bf6e6e569ff/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0= -google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= -google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= -google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= -google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= -google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= -google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= -google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= -google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api 
v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= -google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= -google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= -google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= -google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg= -google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE= -google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= -google.golang.org/api v0.41.0/go.mod h1:RkxM5lITDfTzmyKFPt+wGrCJbVfniCr2ool8kTBzRTU= -google.golang.org/api v0.43.0/go.mod h1:nQsDGjRXMo4lvh5hP0TKqF244gqhGcr/YSIykhUk/94= -google.golang.org/api v0.44.0/go.mod h1:EBOGZqzyhtvMDoxwS97ctnh0zUmYY6CxqXsc1AvkYD8= -google.golang.org/api v0.47.0/go.mod h1:Wbvgpq1HddcWVtzsVLyfLp8lDg6AA241LmgIL59tHXo= -google.golang.org/api v0.48.0/go.mod h1:71Pr1vy+TAZRPkPs/xlCf5SsU8WjuAWv1Pfjbtukyy4= -google.golang.org/api v0.50.0/go.mod h1:4bNT5pAuq5ji4SRZm+5QIkjny9JAyVD/3gaSihNefaw= -google.golang.org/api v0.51.0/go.mod h1:t4HdrdoNgyN5cbEfm7Lum0lcLDLiise1F8qDKX00sOU= -google.golang.org/api v0.54.0/go.mod h1:7C4bFFOvVDGXjfDTAsgGwDgAxRDeQ4X8NvUedIt6z3k= -google.golang.org/api v0.55.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= -google.golang.org/api v0.56.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= -google.golang.org/api v0.57.0/go.mod h1:dVPlbZyBo2/OjBpmvNdpn2GRm6rPy75jyU7bmhdrMgI= -google.golang.org/api v0.59.0/go.mod h1:sT2boj7M9YJxZzgeZqXogmhfmRWDtPzT31xkieUbuZU= -google.golang.org/api v0.61.0/go.mod h1:xQRti5UdCmoCEqFxcz93fTl338AVqDgyaDRuOZ3hg9I= -google.golang.org/api v0.62.0/go.mod 
h1:dKmwPCydfsad4qCH08MSdgWjfHOyfpd4VtDGgRFdavw= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= -google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= -google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c= -google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/cloud v0.0.0-20151119220103-975617b05ea8/go.mod h1:0H1ncTHf11KCFhTc/+EFRbzSCOZx+VUbRMk55Yv5MYk= -google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= -google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190522204451-c2c4e71fbf69/go.mod h1:z3L6/3dTEVtUr6QSP8miRzeRqwQOioJ9I66odjN4I7s= -google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= -google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= -google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= -google.golang.org/genproto 
v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20200117163144-32f20d992d24/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= -google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= 
-google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= -google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= -google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= -google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201110150050-8816d57aaa9a/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210303154014-9728d6b83eeb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210402141018-6c239bbf2bb1/go.mod 
h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A= -google.golang.org/genproto v0.0.0-20210513213006-bf773b8c8384/go.mod h1:P3QM42oQyzQSnHPnZ/vqoCdDmzH28fzWByN9asMeM8A= -google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= -google.golang.org/genproto v0.0.0-20210604141403-392c879c8b08/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= -google.golang.org/genproto v0.0.0-20210608205507-b6d2f5bf0d7d/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= -google.golang.org/genproto v0.0.0-20210624195500-8bfb893ecb84/go.mod h1:SzzZ/N+nwJDaO1kznhnlzqS8ocJICar6hYhVyhi++24= -google.golang.org/genproto v0.0.0-20210713002101-d411969a0d9a/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= -google.golang.org/genproto v0.0.0-20210716133855-ce7ef5c701ea/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= -google.golang.org/genproto v0.0.0-20210728212813-7823e685a01f/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= -google.golang.org/genproto v0.0.0-20210805201207-89edb61ffb67/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= -google.golang.org/genproto v0.0.0-20210813162853-db860fec028c/go.mod h1:cFeNkxwySK631ADgubI+/XFU/xp8FD5KIVV4rj8UC5w= -google.golang.org/genproto v0.0.0-20210821163610-241b8fcbd6c8/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= -google.golang.org/genproto v0.0.0-20210828152312-66f60bf46e71/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= -google.golang.org/genproto v0.0.0-20210831024726-fe130286e0e2/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= -google.golang.org/genproto v0.0.0-20210903162649-d08c68adba83/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= -google.golang.org/genproto v0.0.0-20210909211513-a8c4777a87af/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= -google.golang.org/genproto v0.0.0-20210924002016-3dee208752a0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto 
v0.0.0-20211008145708-270636b82663/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20211028162531-8db9c33dc351/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20211111162719-482062a4217b/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20211118181313-81c1377c94b1/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20211129164237-f09f9a12af12/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20211203200212-54befc351ae9/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20211206160659-862468c7d6e0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20211208223120-3a66f561d7aa h1:I0YcKz0I7OAhddo7ya8kMnvprhcWM045PmkBdMO9zN0= -google.golang.org/genproto v0.0.0-20211208223120-3a66f561d7aa/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/grpc v0.0.0-20160317175043-d3ddb4469d5a/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= -google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= -google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= -google.golang.org/grpc v1.21.0/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= -google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= -google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= -google.golang.org/grpc v1.23.1/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= -google.golang.org/grpc v1.24.0/go.mod h1:XDChyiUovWa60DnaeDeZmSW86xtLtjtZbwvSiRnRtcA= -google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= -google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.27.0/go.mod 
h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= -google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= -google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= -google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= -google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= -google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= -google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= -google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= -google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= -google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= -google.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= -google.golang.org/grpc v1.37.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= -google.golang.org/grpc v1.37.1/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= -google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= -google.golang.org/grpc v1.39.0/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= -google.golang.org/grpc v1.39.1/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= -google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= -google.golang.org/grpc v1.40.1/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= -google.golang.org/grpc v1.42.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= -google.golang.org/grpc v1.43.0 h1:Eeu7bZtDZ2DpRCsLhUlcrLnvYaMK1Gz86a+hMVvELmM= -google.golang.org/grpc v1.43.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= 
-google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= -google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= -google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= -google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= -google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= -google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= -google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= -google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= -google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= -google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.27.1 h1:SnqbnDw1V7RiZcXPx5MEeqPv2s79L9i7BJUlG/+RurQ= -google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -gopkg.in/airbrake/gobrake.v2 v2.0.9/go.mod h1:/h5ZAUhDkGaJfjzjKLSjv6zCL6O0LLBxU4K+aSYdM/U= +google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE= +google.golang.org/protobuf v1.36.11/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= 
-gopkg.in/check.v1 v1.0.0-20141024133853-64131543e789/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= -gopkg.in/cheggaaa/pb.v1 v1.0.25/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw= -gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= -gopkg.in/gemnasium/logrus-airbrake-hook.v2 v2.1.2/go.mod h1:Xk6kEKp8OKb+X14hQBKWaSkCsqBpgog8nAV2xsGOxlo= -gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= -gopkg.in/ini.v1 v1.62.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= -gopkg.in/ini.v1 v1.66.2/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= -gopkg.in/natefinch/lumberjack.v2 v2.0.0/go.mod h1:l0ndWWf7gzL7RNwBG7wST/UCcT4T24xpD6X8LsfU/+k= -gopkg.in/natefinch/npipe.v2 v2.0.0-20160621034901-c1b8fa8bdcce h1:+JknDZhAj8YMt7GC73Ei8pv4MzjDUNPHgQWJdtMAaDU= -gopkg.in/natefinch/npipe.v2 v2.0.0-20160621034901-c1b8fa8bdcce/go.mod h1:5AcXVHNjg+BDxry382+8OKon8SEWiKktQR07RKPsv1c= -gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo= -gopkg.in/square/go-jose.v2 v2.2.2/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI= -gopkg.in/square/go-jose.v2 v2.3.1/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI= -gopkg.in/square/go-jose.v2 v2.5.1/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI= -gopkg.in/src-d/go-billy.v4 v4.3.2 h1:0SQA1pRztfTFx2miS8sA97XvooFeNOmvUenF4o0EcVg= -gopkg.in/src-d/go-billy.v4 v4.3.2/go.mod h1:nDjArDMp+XMs1aFAESLRjfGSgfvoYN0hDfzEk0GjC98= 
-gopkg.in/src-d/go-git-fixtures.v3 v3.5.0 h1:ivZFOIltbce2Mo8IjzUHAFoq/IylO9WHhNOAJK+LsJg= -gopkg.in/src-d/go-git-fixtures.v3 v3.5.0/go.mod h1:dLBcvytrw/TYZsNTWCnkNF2DSIlzWYqTe3rJR56Ac7g= -gopkg.in/src-d/go-git.v4 v4.13.1 h1:SRtFyV8Kxc0UP7aCHcijOMQGPxHSmMOPrzulQWolkYE= -gopkg.in/src-d/go-git.v4 v4.13.1/go.mod h1:nx5NYcxdKxq5fpltdHnPa2Exj4Sx0EclMWZQbYDu2z8= gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME= gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI= -gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74= gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20200605160147-a5ece683394c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b h1:h8qDotaEPuJATrMmW04NCwg7v22aHH28wwpauUhK9Oo= -gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gotest.tools v2.2.0+incompatible h1:VsBPFP1AI068pPrMxtb/S8Zkgf9xEmTLJjfM+P5UIEo= -gotest.tools v2.2.0+incompatible/go.mod 
h1:DsYFclhRJ6vuDpmuTbkuFWG+y2sxOXAzmJt81HFBacw= -gotest.tools/v3 v3.0.2/go.mod h1:3SzNCllyD9/Y+b5r9JIKQ474KzkZyqLqEfYqMsX94Bk= -gotest.tools/v3 v3.0.3 h1:4AuOwCGf4lLR9u3YOe2awrHygurzhO/HeQ6laiA6Sx0= -gotest.tools/v3 v3.0.3/go.mod h1:Z7Lb0S5l+klDB31fvDQX8ss/FlKDxtlFlw3Oa8Ymbl8= -honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= -honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= -honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= -k8s.io/api v0.20.1/go.mod h1:KqwcCVogGxQY3nBlRpwt+wpAMF/KjaCc7RpywacvqUo= -k8s.io/api v0.20.4/go.mod h1:++lNL1AJMkDymriNniQsWRkMDzRaX2Y/POTUi8yvqYQ= -k8s.io/api v0.20.6/go.mod h1:X9e8Qag6JV/bL5G6bU8sdVRltWKmdHsFUGS3eVndqE8= -k8s.io/apimachinery v0.20.1/go.mod h1:WlLqWAHZGg07AeltaI0MV5uk1Omp8xaN0JGLY6gkRpU= -k8s.io/apimachinery v0.20.4/go.mod h1:WlLqWAHZGg07AeltaI0MV5uk1Omp8xaN0JGLY6gkRpU= -k8s.io/apimachinery v0.20.6/go.mod h1:ejZXtW1Ra6V1O5H8xPBGz+T3+4gfkTCeExAHKU57MAc= -k8s.io/apiserver v0.20.1/go.mod h1:ro5QHeQkgMS7ZGpvf4tSMx6bBOgPfE+f52KwvXfScaU= -k8s.io/apiserver v0.20.4/go.mod h1:Mc80thBKOyy7tbvFtB4kJv1kbdD0eIH8k8vianJcbFM= -k8s.io/apiserver v0.20.6/go.mod h1:QIJXNt6i6JB+0YQRNcS0hdRHJlMhflFmsBDeSgT1r8Q= -k8s.io/client-go v0.20.1/go.mod h1:/zcHdt1TeWSd5HoUe6elJmHSQ6uLLgp4bIJHVEuy+/Y= -k8s.io/client-go v0.20.4/go.mod h1:LiMv25ND1gLUdBeYxBIwKpkSC5IsozMMmOOeSJboP+k= -k8s.io/client-go v0.20.6/go.mod h1:nNQMnOvEUEsOzRRFIIkdmYOjAZrC8bgq0ExboWSU1I0= -k8s.io/component-base v0.20.1/go.mod 
h1:guxkoJnNoh8LNrbtiQOlyp2Y2XFCZQmrcg2n/DeYNLk= -k8s.io/component-base v0.20.4/go.mod h1:t4p9EdiagbVCJKrQ1RsA5/V4rFQNDfRlevJajlGwgjI= -k8s.io/component-base v0.20.6/go.mod h1:6f1MPBAeI+mvuts3sIdtpjljHWBQ2cIy38oBIWMYnrM= -k8s.io/cri-api v0.17.3/go.mod h1:X1sbHmuXhwaHs9xxYffLqJogVsnI+f6cPRcgPel7ywM= -k8s.io/cri-api v0.20.1/go.mod h1:2JRbKt+BFLTjtrILYVqQK5jqhI+XNdF6UiGMgczeBCI= -k8s.io/cri-api v0.20.4/go.mod h1:2JRbKt+BFLTjtrILYVqQK5jqhI+XNdF6UiGMgczeBCI= -k8s.io/cri-api v0.20.6/go.mod h1:ew44AjNXwyn1s0U4xCKGodU7J1HzBeZ1MpGrpa5r8Yc= -k8s.io/gengo v0.0.0-20200413195148-3a45101e95ac/go.mod h1:ezvh/TsK7cY6rbqRK0oQQ8IAqLxYwwyPxAX1Pzy0ii0= -k8s.io/klog/v2 v2.0.0/go.mod h1:PBfzABfn139FHAV07az/IF9Wp1bkk3vpT2XSJ76fSDE= -k8s.io/klog/v2 v2.4.0/go.mod h1:Od+F08eJP+W3HUb4pSrPpgp9DGU4GzlpG/TmITuYh/Y= -k8s.io/kube-openapi v0.0.0-20201113171705-d219536bb9fd/go.mod h1:WOJ3KddDSol4tAGcJo0Tvi+dK12EcqSLqcWsryKMpfM= -k8s.io/kubernetes v1.13.0/go.mod h1:ocZa8+6APFNC2tX1DZASIbocyYT5jHzqFVsY5aoB7Jk= -k8s.io/utils v0.0.0-20201110183641-67b214c5f920/go.mod h1:jPW/WVKK9YHAvNhRxK0md/EJ228hCsBRufyofKtW8HA= -rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= -rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= -rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= -sigs.k8s.io/apiserver-network-proxy/konnectivity-client v0.0.14/go.mod h1:LEScyzhFmoF5pso/YSeBstl57mOzx9xlU9n85RGrDQg= -sigs.k8s.io/apiserver-network-proxy/konnectivity-client v0.0.15/go.mod h1:LEScyzhFmoF5pso/YSeBstl57mOzx9xlU9n85RGrDQg= -sigs.k8s.io/structured-merge-diff/v4 v4.0.2/go.mod h1:bJZC9H9iH24zzfZ/41RGcq60oK1F7G282QMXDPYydCw= -sigs.k8s.io/structured-merge-diff/v4 v4.0.3/go.mod h1:bJZC9H9iH24zzfZ/41RGcq60oK1F7G282QMXDPYydCw= -sigs.k8s.io/yaml v1.1.0/go.mod h1:UJmg0vDUVViEyp3mgSv9WPwZCDxu4rQW1olrI1uml+o= -sigs.k8s.io/yaml v1.2.0/go.mod h1:yfXDCHCao9+ENCvLSE62v9VSji2MKu5jeNfTrofGhJc= +gopkg.in/yaml.v3 v3.0.1 
h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gotest.tools/v3 v3.5.2 h1:7koQfIKdy+I8UTetycgUqXWSDwpgv193Ka+qRsmBY8Q= +gotest.tools/v3 v3.5.2/go.mod h1:LtdLGcnqToBH83WByAAi/wiwSFCArdFIUV/xxN4pcjA= +pgregory.net/rapid v1.2.0 h1:keKAYRcjm+e1F0oAuU5F5+YPAWcyxNNRK2wud503Gnk= +pgregory.net/rapid v1.2.0/go.mod h1:PY5XlDGj0+V1FCq0o192FdRhpKHGTRIWBgqjDBTrq04= diff --git a/golangci.yaml b/golangci.yaml index b44e01cd9c..b1a786c495 100644 --- a/golangci.yaml +++ b/golangci.yaml @@ -1,32 +1,51 @@ -run: - timeout: 6m - +version: "2" linters: - disable-all: true + default: none enable: - bodyclose - - deadcode - dogsled - gocritic - - goimports - - golint - - gosimple - govet - ineffassign - - maligned - misspell - nakedret + - revive - rowserrcheck - - scopelint - staticcheck - - structcheck - - stylecheck - - typecheck - unconvert - unused - - varcheck - whitespace - -linters-settings: - goimports: - local-prefixes: github.com/buildpacks/pack + settings: + revive: + rules: + - name: error-strings + disabled: true + exclusions: + generated: lax + presets: + - comments + - common-false-positives + - legacy + - std-error-handling + paths: + - third_party$ + - builtin$ + - examples$ +formatters: + enable: + - goimports + settings: + goimports: + local-prefixes: + - github.com/buildpacks/pack + exclusions: + generated: lax + paths: + - third_party$ + - builtin$ + - examples$ +issues: + default: info + rules: + - linters: + - staticcheck: info \ No newline at end of file diff --git a/internal/build/container_ops.go b/internal/build/container_ops.go index a5fbf41e80..c8fb0b5d89 100644 --- a/internal/build/container_ops.go +++ b/internal/build/container_ops.go @@ -5,34 +5,61 @@ import ( "context" "fmt" "io" - "io/ioutil" "os" "runtime" "github.com/BurntSushi/toml" - "github.com/buildpacks/lifecycle/platform" - "github.com/docker/docker/api/types" - dcontainer 
"github.com/docker/docker/api/types/container" - "github.com/docker/docker/client" - darchive "github.com/docker/docker/pkg/archive" + "github.com/buildpacks/lifecycle/platform/files" + dcontainer "github.com/moby/moby/api/types/container" + dockerClient "github.com/moby/moby/client" + + darchive "github.com/moby/go-archive" "github.com/pkg/errors" + cerrdefs "github.com/containerd/errdefs" + "github.com/buildpacks/pack/internal/builder" "github.com/buildpacks/pack/internal/container" "github.com/buildpacks/pack/internal/paths" "github.com/buildpacks/pack/pkg/archive" ) -type ContainerOperation func(ctrClient client.CommonAPIClient, ctx context.Context, containerID string, stdout, stderr io.Writer) error +type ContainerOperation func(ctrClient DockerClient, ctx context.Context, containerID string, stdout, stderr io.Writer) error // CopyOut copies container directories to a handler function. The handler is responsible for closing the Reader. func CopyOut(handler func(closer io.ReadCloser) error, srcs ...string) ContainerOperation { - return func(ctrClient client.CommonAPIClient, ctx context.Context, containerID string, stdout, stderr io.Writer) error { + return func(ctrClient DockerClient, ctx context.Context, containerID string, stdout, stderr io.Writer) error { for _, src := range srcs { - reader, _, err := ctrClient.CopyFromContainer(ctx, containerID, src) + result, err := ctrClient.CopyFromContainer(ctx, containerID, dockerClient.CopyFromContainerOptions{SourcePath: src}) if err != nil { return err } + reader := result.Content + + err = handler(reader) + if err != nil { + return err + } + } + + return nil + } +} + +// CopyOutMaybe copies container directories to a handler function. The handler is responsible for closing the Reader. +// CopyOutMaybe differs from CopyOut in that it will silently continue to the next source file if the file reader cannot be instantiated +// because the source file does not exist in the container. 
+func CopyOutMaybe(handler func(closer io.ReadCloser) error, srcs ...string) ContainerOperation { + return func(ctrClient DockerClient, ctx context.Context, containerID string, stdout, stderr io.Writer) error { + for _, src := range srcs { + result, err := ctrClient.CopyFromContainer(ctx, containerID, dockerClient.CopyFromContainerOptions{SourcePath: src}) + if err != nil { + if cerrdefs.IsNotFound(err) { + continue + } + return err + } + reader := result.Content err = handler(reader) if err != nil { @@ -56,10 +83,22 @@ func CopyOutTo(src, dest string) ContainerOperation { }, src) } +func CopyOutToMaybe(src, dest string) ContainerOperation { + return CopyOutMaybe(func(reader io.ReadCloser) error { + info := darchive.CopyInfo{ + Path: src, + IsDir: true, + } + + defer reader.Close() + return darchive.CopyTo(reader, info, dest) + }, src) +} + // CopyDir copies a local directory (src) to the destination on the container while filtering files and changing it's UID/GID. // if includeRoot is set the UID/GID will be set on the dst directory. 
func CopyDir(src, dst string, uid, gid int, os string, includeRoot bool, fileFilter func(string) bool) ContainerOperation { - return func(ctrClient client.CommonAPIClient, ctx context.Context, containerID string, stdout, stderr io.Writer) error { + return func(ctrClient DockerClient, ctx context.Context, containerID string, stdout, stderr io.Writer) error { tarPath := dst if os == "windows" { tarPath = paths.WindowsToSlash(dst) @@ -78,13 +117,16 @@ func CopyDir(src, dst string, uid, gid int, os string, includeRoot bool, fileFil } } -func copyDir(ctx context.Context, ctrClient client.CommonAPIClient, containerID string, appReader io.Reader) error { +func copyDir(ctx context.Context, ctrClient DockerClient, containerID string, appReader io.Reader) error { var clientErr, err error doneChan := make(chan interface{}) pr, pw := io.Pipe() go func() { - clientErr = ctrClient.CopyToContainer(ctx, containerID, "/", pr, types.CopyToContainerOptions{}) + _, clientErr = ctrClient.CopyToContainer(ctx, containerID, dockerClient.CopyToContainerOptions{ + DestinationPath: "/", + Content: pr, + }) close(doneChan) }() func() { @@ -105,11 +147,12 @@ func copyDir(ctx context.Context, ctrClient client.CommonAPIClient, containerID // for Windows containers and does not work. Instead, we perform the copy from inside a container // using xcopy. 
// See: https://github.com/moby/moby/issues/40771 -func copyDirWindows(ctx context.Context, ctrClient client.CommonAPIClient, containerID string, reader io.Reader, dst string, stdout, stderr io.Writer) error { - info, err := ctrClient.ContainerInspect(ctx, containerID) +func copyDirWindows(ctx context.Context, ctrClient DockerClient, containerID string, reader io.Reader, dst string, stdout, stderr io.Writer) error { + inspectResult, err := ctrClient.ContainerInspect(ctx, containerID, dockerClient.ContainerInspectOptions{}) if err != nil { return err } + info := inspectResult.Container baseName := paths.WindowsBasename(dst) @@ -118,8 +161,8 @@ func copyDirWindows(ctx context.Context, ctrClient client.CommonAPIClient, conta return err } - ctr, err := ctrClient.ContainerCreate(ctx, - &dcontainer.Config{ + ctr, err := ctrClient.ContainerCreate(ctx, dockerClient.ContainerCreateOptions{ + Config: &dcontainer.Config{ Image: info.Image, Cmd: []string{ "cmd", @@ -136,18 +179,20 @@ func copyDirWindows(ctx context.Context, ctrClient client.CommonAPIClient, conta WorkingDir: "/", User: windowsContainerAdmin, }, - &dcontainer.HostConfig{ + HostConfig: &dcontainer.HostConfig{ Binds: []string{fmt.Sprintf("%s:%s", mnt.Name, mnt.Destination)}, Isolation: dcontainer.IsolationProcess, }, - nil, nil, "", - ) + }) if err != nil { return errors.Wrapf(err, "creating prep container") } - defer ctrClient.ContainerRemove(context.Background(), ctr.ID, types.ContainerRemoveOptions{Force: true}) + defer ctrClient.ContainerRemove(context.Background(), ctr.ID, dockerClient.ContainerRemoveOptions{Force: true}) - err = ctrClient.CopyToContainer(ctx, ctr.ID, "/windows", reader, types.CopyToContainerOptions{}) + _, err = ctrClient.CopyToContainer(ctx, ctr.ID, dockerClient.CopyToContainerOptions{ + DestinationPath: "/windows", + Content: reader, + }) if err != nil { return errors.Wrap(err, "copy app to container") } @@ -157,76 +202,72 @@ func copyDirWindows(ctx context.Context, ctrClient 
client.CommonAPIClient, conta ctrClient, ctr.ID, container.DefaultHandler( - ioutil.Discard, // Suppress xcopy output + io.Discard, // Suppress xcopy output stderr, ), ) } -func findMount(info types.ContainerJSON, dst string) (types.MountPoint, error) { +func findMount(info dcontainer.InspectResponse, dst string) (dcontainer.MountPoint, error) { for _, m := range info.Mounts { if m.Destination == dst { return m, nil } } - return types.MountPoint{}, fmt.Errorf("no matching mount found for %s", dst) + return dcontainer.MountPoint{}, fmt.Errorf("no matching mount found for %s", dst) } -// WriteProjectMetadata -func WriteProjectMetadata(p string, metadata platform.ProjectMetadata, os string) ContainerOperation { - return func(ctrClient client.CommonAPIClient, ctx context.Context, containerID string, stdout, stderr io.Writer) error { - buf := &bytes.Buffer{} - err := toml.NewEncoder(buf).Encode(metadata) - if err != nil { - return errors.Wrap(err, "marshaling project metadata") - } +func writeToml(ctrClient DockerClient, ctx context.Context, data interface{}, dstPath string, containerID string, os string, stdout, stderr io.Writer) error { + buf := &bytes.Buffer{} + err := toml.NewEncoder(buf).Encode(data) + if err != nil { + return errors.Wrapf(err, "marshaling data to %s", dstPath) + } - tarBuilder := archive.TarBuilder{} + tarBuilder := archive.TarBuilder{} - tarPath := p - if os == "windows" { - tarPath = paths.WindowsToSlash(p) - } + tarPath := dstPath + if os == "windows" { + tarPath = paths.WindowsToSlash(dstPath) + } - tarBuilder.AddFile(tarPath, 0755, archive.NormalizedDateTime, buf.Bytes()) - reader := tarBuilder.Reader(archive.DefaultTarWriterFactory()) - defer reader.Close() + tarBuilder.AddFile(tarPath, 0755, archive.NormalizedDateTime, buf.Bytes()) + reader := tarBuilder.Reader(archive.DefaultTarWriterFactory()) + defer reader.Close() - if os == "windows" { - dirName := paths.WindowsDir(p) - return copyDirWindows(ctx, ctrClient, containerID, reader, 
dirName, stdout, stderr) - } + if os == "windows" { + dirName := paths.WindowsDir(dstPath) + return copyDirWindows(ctx, ctrClient, containerID, reader, dirName, stdout, stderr) + } - return ctrClient.CopyToContainer(ctx, containerID, "/", reader, types.CopyToContainerOptions{}) + _, err = ctrClient.CopyToContainer(ctx, containerID, dockerClient.CopyToContainerOptions{ + DestinationPath: "/", + Content: reader, + }) + return err +} + +// WriteProjectMetadata writes a `project-metadata.toml` based on the ProjectMetadata provided to the destination path. +func WriteProjectMetadata(dstPath string, metadata files.ProjectMetadata, os string) ContainerOperation { + return func(ctrClient DockerClient, ctx context.Context, containerID string, stdout, stderr io.Writer) error { + return writeToml(ctrClient, ctx, metadata, dstPath, containerID, os, stdout, stderr) } } // WriteStackToml writes a `stack.toml` based on the StackMetadata provided to the destination path. func WriteStackToml(dstPath string, stack builder.StackMetadata, os string) ContainerOperation { - return func(ctrClient client.CommonAPIClient, ctx context.Context, containerID string, stdout, stderr io.Writer) error { - buf := &bytes.Buffer{} - err := toml.NewEncoder(buf).Encode(stack) - if err != nil { - return errors.Wrap(err, "marshaling stack metadata") - } - - tarBuilder := archive.TarBuilder{} - - tarPath := dstPath - if os == "windows" { - tarPath = paths.WindowsToSlash(dstPath) - } - - tarBuilder.AddFile(tarPath, 0755, archive.NormalizedDateTime, buf.Bytes()) - reader := tarBuilder.Reader(archive.DefaultTarWriterFactory()) - defer reader.Close() - - if os == "windows" { - dirName := paths.WindowsDir(dstPath) - return copyDirWindows(ctx, ctrClient, containerID, reader, dirName, stdout, stderr) - } + return func(ctrClient DockerClient, ctx context.Context, containerID string, stdout, stderr io.Writer) error { + return writeToml(ctrClient, ctx, stack, dstPath, containerID, os, stdout, stderr) + } +} - 
return ctrClient.CopyToContainer(ctx, containerID, "/", reader, types.CopyToContainerOptions{}) +// WriteRunToml writes a `run.toml` based on the RunConfig provided to the destination path. +func WriteRunToml(dstPath string, runImages []builder.RunImageMetadata, os string) ContainerOperation { + runImageData := builder.RunImages{ + Images: runImages, + } + return func(ctrClient DockerClient, ctx context.Context, containerID string, stdout, stderr io.Writer) error { + return writeToml(ctrClient, ctx, runImageData, dstPath, containerID, os, stdout, stderr) } } @@ -253,15 +294,16 @@ func createReader(src, dst string, uid, gid int, includeRoot bool, fileFilter fu // Changing permissions on volumes through stopped containers does not work on Docker for Windows so we start the container and make change using icacls // See: https://github.com/moby/moby/issues/40771 func EnsureVolumeAccess(uid, gid int, os string, volumeNames ...string) ContainerOperation { - return func(ctrClient client.CommonAPIClient, ctx context.Context, containerID string, stdout, stderr io.Writer) error { + return func(ctrClient DockerClient, ctx context.Context, containerID string, stdout, stderr io.Writer) error { if os != "windows" { return nil } - containerInfo, err := ctrClient.ContainerInspect(ctx, containerID) + inspectResult, err := ctrClient.ContainerInspect(ctx, containerID, dockerClient.ContainerInspectOptions{}) if err != nil { return err } + containerInfo := inspectResult.Container cmd := "" binds := []string{} @@ -284,30 +326,29 @@ func EnsureVolumeAccess(uid, gid int, os string, volumeNames ...string) Containe cmd += fmt.Sprintf(`icacls %s /grant *%s:(OI)(CI)F /t /l /q`, containerPath, paths.WindowsPathSID(uid, gid)) } - ctr, err := ctrClient.ContainerCreate(ctx, - &dcontainer.Config{ + ctr, err := ctrClient.ContainerCreate(ctx, dockerClient.ContainerCreateOptions{ + Config: &dcontainer.Config{ Image: containerInfo.Image, Cmd: []string{"cmd", "/c", cmd}, WorkingDir: "/", User: 
windowsContainerAdmin, }, - &dcontainer.HostConfig{ + HostConfig: &dcontainer.HostConfig{ Binds: binds, Isolation: dcontainer.IsolationProcess, }, - nil, nil, "", - ) + }) if err != nil { return err } - defer ctrClient.ContainerRemove(context.Background(), ctr.ID, types.ContainerRemoveOptions{Force: true}) + defer ctrClient.ContainerRemove(context.Background(), ctr.ID, dockerClient.ContainerRemoveOptions{Force: true}) return container.RunWithHandler( ctx, ctrClient, ctr.ID, container.DefaultHandler( - ioutil.Discard, // Suppress icacls output + io.Discard, // Suppress icacls output stderr, ), ) diff --git a/internal/build/container_ops_test.go b/internal/build/container_ops_test.go index 7979f5fb76..62a6dc216d 100644 --- a/internal/build/container_ops_test.go +++ b/internal/build/container_ops_test.go @@ -5,21 +5,18 @@ import ( "context" "fmt" "io" - "io/ioutil" - "math/rand" + "io/fs" "os" "path/filepath" "runtime" "strings" "testing" - "time" - "github.com/buildpacks/lifecycle/platform" - "github.com/docker/docker/api/types" - dcontainer "github.com/docker/docker/api/types/container" - "github.com/docker/docker/api/types/mount" - "github.com/docker/docker/client" + "github.com/buildpacks/lifecycle/platform/files" "github.com/heroku/color" + dcontainer "github.com/moby/moby/api/types/container" + "github.com/moby/moby/api/types/mount" + "github.com/moby/moby/client" "github.com/sclevine/spec" "github.com/sclevine/spec/report" @@ -29,17 +26,17 @@ import ( h "github.com/buildpacks/pack/testhelpers" ) +var ctrClient *client.Client + // TestContainerOperations are integration tests for the container operations against a docker daemon func TestContainerOperations(t *testing.T) { - rand.Seed(time.Now().UTC().UnixNano()) - color.Disable(true) defer color.Disable(false) h.RequireDocker(t) var err error - ctrClient, err = client.NewClientWithOpts(client.FromEnv, client.WithVersion("1.38")) + ctrClient, err = client.New(client.FromEnv) h.AssertNil(t, err) spec.Run(t, 
"container-ops", testContainerOps, spec.Report(report.Terminal{}), spec.Sequential()) @@ -54,9 +51,9 @@ func testContainerOps(t *testing.T, when spec.G, it spec.S) { it.Before(func() { imageName = "container-ops.test-" + h.RandString(10) - info, err := ctrClient.Info(context.TODO()) + infoResult, err := ctrClient.Info(context.TODO(), client.InfoOptions{}) h.AssertNil(t, err) - osType = info.OSType + osType = infoResult.Info.OSType dockerfileContent := `FROM busybox` if osType == "windows" { @@ -89,7 +86,20 @@ func testContainerOps(t *testing.T, when spec.G, it spec.S) { h.AssertNil(t, err) defer cleanupContainer(ctx, ctr.ID) - copyDirOp := build.CopyDir(filepath.Join("testdata", "fake-app"), containerDir, 123, 456, osType, false, nil) + // chmod in case umask sets the wrong bits during a `git clone`. + dir := filepath.Join("testdata", "fake-app") + err = filepath.WalkDir(dir, func(path string, d fs.DirEntry, err error) error { + if err != nil { + return err + } + if d.IsDir() { + return nil + } + + return os.Chmod(path, 0644) + }) + h.AssertNil(t, err) + copyDirOp := build.CopyDir(dir, containerDir, 123, 456, osType, false, nil) var outBuf, errBuf bytes.Buffer err = copyDirOp(ctrClient, ctx, ctr.ID, &outBuf, &errBuf) @@ -160,17 +170,9 @@ lrwxrwxrwx 1 123 456 (.*) fake-app-symlink -> fake-app-file (.*) ... 
some-vol `) } else { - if runtime.GOOS == "windows" { - // Expected LCOW results - h.AssertContainsMatch(t, outBuf.String(), ` + h.AssertContainsMatch(t, outBuf.String(), ` drwxrwxrwx 2 123 456 (.*) some-vol `) - } else { - // Expected results - h.AssertContainsMatch(t, outBuf.String(), ` -drwsrwsrwt 2 123 456 (.*) some-vol -`) - } } }) }) @@ -267,30 +269,88 @@ drwsrwsrwt 2 123 456 (.*) some-vol defer cleanupContainer(ctx, ctr.ID) copyDirOp := build.CopyDir(filepath.Join("testdata", "fake-app"), containerDir, 123, 456, osType, false, nil) - err = copyDirOp(ctrClient, ctx, ctr.ID, ioutil.Discard, ioutil.Discard) + err = copyDirOp(ctrClient, ctx, ctr.ID, io.Discard, io.Discard) h.AssertNil(t, err) - tarDestination, err := ioutil.TempFile("", "pack.container.ops.test.") + tarDestination, err := os.CreateTemp("", "pack.container.ops.test.") h.AssertNil(t, err) defer os.RemoveAll(tarDestination.Name()) handler := func(reader io.ReadCloser) error { defer reader.Close() - contents, err := ioutil.ReadAll(reader) + contents, err := io.ReadAll(reader) h.AssertNil(t, err) - err = ioutil.WriteFile(tarDestination.Name(), contents, 0600) + err = os.WriteFile(tarDestination.Name(), contents, 0600) h.AssertNil(t, err) return nil } copyOutDirsOp := build.CopyOut(handler, containerDir) - err = copyOutDirsOp(ctrClient, ctx, ctr.ID, ioutil.Discard, ioutil.Discard) + err = copyOutDirsOp(ctrClient, ctx, ctr.ID, io.Discard, io.Discard) h.AssertNil(t, err) - err = container.RunWithHandler(ctx, ctrClient, ctr.ID, container.DefaultHandler(ioutil.Discard, ioutil.Discard)) + err = container.RunWithHandler(ctx, ctrClient, ctr.ID, container.DefaultHandler(io.Discard, io.Discard)) + h.AssertNil(t, err) + + separator := "/" + if osType == "windows" { + separator = `\` + } + + h.AssertTarball(t, tarDestination.Name()) + h.AssertTarHasFile(t, tarDestination.Name(), fmt.Sprintf("some-vol%sfake-app-file", separator)) + h.AssertTarHasFile(t, tarDestination.Name(), 
fmt.Sprintf("some-vol%sfake-app-symlink", separator)) + h.AssertTarHasFile(t, tarDestination.Name(), fmt.Sprintf("some-vol%sfile-to-ignore", separator)) + }) + }) + + when("#CopyOutMaybe", func() { + it("reads the contents of a container directory", func() { + h.SkipIf(t, osType == "windows", "copying directories out of windows containers not yet supported") + + containerDir := "/some-vol" + if osType == "windows" { + containerDir = `c:\some-vol` + } + + ctrCmd := []string{"ls", "-al", "/some-vol"} + if osType == "windows" { + ctrCmd = []string{"cmd", "/c", `dir /q /s c:\some-vol`} + } + + ctx := context.Background() + ctr, err := createContainer(ctx, imageName, containerDir, osType, ctrCmd...) + h.AssertNil(t, err) + defer cleanupContainer(ctx, ctr.ID) + + copyDirOp := build.CopyDir(filepath.Join("testdata", "fake-app"), containerDir, 123, 456, osType, false, nil) + err = copyDirOp(ctrClient, ctx, ctr.ID, io.Discard, io.Discard) + h.AssertNil(t, err) + + tarDestination, err := os.CreateTemp("", "pack.container.ops.test.") + h.AssertNil(t, err) + defer os.RemoveAll(tarDestination.Name()) + + handler := func(reader io.ReadCloser) error { + defer reader.Close() + + contents, err := io.ReadAll(reader) + h.AssertNil(t, err) + + err = os.WriteFile(tarDestination.Name(), contents, 0600) + h.AssertNil(t, err) + + return nil + } + + copyOutDirsOp := build.CopyOutMaybe(handler, containerDir) + err = copyOutDirsOp(ctrClient, ctx, ctr.ID, io.Discard, io.Discard) + h.AssertNil(t, err) + + err = container.RunWithHandler(ctx, ctrClient, ctr.ID, container.DefaultHandler(io.Discard, io.Discard)) h.AssertNil(t, err) separator := "/" @@ -391,6 +451,102 @@ drwsrwsrwt 2 123 456 (.*) some-vol }) }) + when("#WriteRunToml", func() { + it("writes file", func() { + containerDir := "/layers-vol" + containerPath := "/layers-vol/run.toml" + if osType == "windows" { + containerDir = `c:\layers-vol` + containerPath = `c:\layers-vol\run.toml` + } + + ctrCmd := []string{"ls", "-al", 
"/layers-vol/run.toml"} + if osType == "windows" { + ctrCmd = []string{"cmd", "/c", `dir /q /n c:\layers-vol\run.toml`} + } + ctx := context.Background() + ctr, err := createContainer(ctx, imageName, containerDir, osType, ctrCmd...) + h.AssertNil(t, err) + defer cleanupContainer(ctx, ctr.ID) + + writeOp := build.WriteRunToml(containerPath, []builder.RunImageMetadata{builder.RunImageMetadata{ + Image: "image-1", + Mirrors: []string{ + "mirror-1", + "mirror-2", + }, + }, + }, osType) + + var outBuf, errBuf bytes.Buffer + err = writeOp(ctrClient, ctx, ctr.ID, &outBuf, &errBuf) + h.AssertNil(t, err) + + err = container.RunWithHandler(ctx, ctrClient, ctr.ID, container.DefaultHandler(&outBuf, &errBuf)) + h.AssertNil(t, err) + + h.AssertEq(t, errBuf.String(), "") + if osType == "windows" { + h.AssertContains(t, outBuf.String(), `01/01/1980 12:00 AM 68 ... run.toml`) + } else { + h.AssertContains(t, outBuf.String(), `-rwxr-xr-x 1 root root 68 Jan 1 1980 /layers-vol/run.toml`) + } + }) + + it("has expected contents", func() { + containerDir := "/layers-vol" + containerPath := "/layers-vol/run.toml" + if osType == "windows" { + containerDir = `c:\layers-vol` + containerPath = `c:\layers-vol\run.toml` + } + + ctrCmd := []string{"cat", "/layers-vol/run.toml"} + if osType == "windows" { + ctrCmd = []string{"cmd", "/c", `type c:\layers-vol\run.toml`} + } + + ctx := context.Background() + ctr, err := createContainer(ctx, imageName, containerDir, osType, ctrCmd...) 
+ h.AssertNil(t, err) + defer cleanupContainer(ctx, ctr.ID) + + writeOp := build.WriteRunToml(containerPath, []builder.RunImageMetadata{ + { + Image: "image-1", + Mirrors: []string{ + "mirror-1", + "mirror-2", + }, + }, + { + Image: "image-2", + Mirrors: []string{ + "mirror-3", + "mirror-4", + }, + }, + }, osType) + + var outBuf, errBuf bytes.Buffer + err = writeOp(ctrClient, ctx, ctr.ID, &outBuf, &errBuf) + h.AssertNil(t, err) + + err = container.RunWithHandler(ctx, ctrClient, ctr.ID, container.DefaultHandler(&outBuf, &errBuf)) + h.AssertNil(t, err) + + h.AssertEq(t, errBuf.String(), "") + h.AssertContains(t, outBuf.String(), `[[images]] + image = "image-1" + mirrors = ["mirror-1", "mirror-2"] + +[[images]] + image = "image-2" + mirrors = ["mirror-3", "mirror-4"] +`) + }) + }) + when("#WriteProjectMetadata", func() { it("writes file", func() { containerDir := "/layers-vol" @@ -409,8 +565,8 @@ drwsrwsrwt 2 123 456 (.*) some-vol h.AssertNil(t, err) defer cleanupContainer(ctx, ctr.ID) - writeOp := build.WriteProjectMetadata(p, platform.ProjectMetadata{ - Source: &platform.ProjectSource{ + writeOp := build.WriteProjectMetadata(p, files.ProjectMetadata{ + Source: &files.ProjectSource{ Type: "project", Version: map[string]interface{}{ "declared": "1.0.2", @@ -454,8 +610,8 @@ drwsrwsrwt 2 123 456 (.*) some-vol h.AssertNil(t, err) defer cleanupContainer(ctx, ctr.ID) - writeOp := build.WriteProjectMetadata(p, platform.ProjectMetadata{ - Source: &platform.ProjectSource{ + writeOp := build.WriteProjectMetadata(p, files.ProjectMetadata{ + Source: &files.ProjectSource{ Type: "project", Version: map[string]interface{}{ "declared": "1.0.2", @@ -483,6 +639,7 @@ drwsrwsrwt 2 123 456 (.*) some-vol `) }) }) + when("#EnsureVolumeAccess", func() { it("changes owner of volume", func() { h.SkipIf(t, osType != "windows", "no-op for linux") @@ -500,10 +657,11 @@ drwsrwsrwt 2 123 456 (.*) some-vol h.AssertNil(t, err) defer cleanupContainer(ctx, ctr.ID) - inspect, err := 
ctrClient.ContainerInspect(ctx, ctr.ID) + inspectResult, err := ctrClient.ContainerInspect(ctx, ctr.ID, client.ContainerInspectOptions{}) if err != nil { return } + inspect := inspectResult.Container // use container's current volumes var ctrVolumes []string @@ -528,32 +686,33 @@ drwsrwsrwt 2 123 456 (.*) some-vol }) } -func createContainer(ctx context.Context, imageName, containerDir, osType string, cmd ...string) (dcontainer.ContainerCreateCreatedBody, error) { +func createContainer(ctx context.Context, imageName, containerDir, osType string, cmd ...string) (client.ContainerCreateResult, error) { isolationType := dcontainer.IsolationDefault if osType == "windows" { isolationType = dcontainer.IsolationProcess } - return ctrClient.ContainerCreate(ctx, - &dcontainer.Config{ + return ctrClient.ContainerCreate(ctx, client.ContainerCreateOptions{ + Config: &dcontainer.Config{ Image: imageName, Cmd: cmd, }, - &dcontainer.HostConfig{ + HostConfig: &dcontainer.HostConfig{ Binds: []string{fmt.Sprintf("%s:%s", fmt.Sprintf("tests-volume-%s", h.RandString(5)), filepath.ToSlash(containerDir))}, Isolation: isolationType, - }, nil, nil, "", - ) + }, + }) } func cleanupContainer(ctx context.Context, ctrID string) { - inspect, err := ctrClient.ContainerInspect(ctx, ctrID) + inspectResult, err := ctrClient.ContainerInspect(ctx, ctrID, client.ContainerInspectOptions{}) if err != nil { return } + inspect := inspectResult.Container // remove container - err = ctrClient.ContainerRemove(ctx, ctrID, types.ContainerRemoveOptions{}) + _, err = ctrClient.ContainerRemove(ctx, ctrID, client.ContainerRemoveOptions{}) if err != nil { return } @@ -561,7 +720,7 @@ func cleanupContainer(ctx context.Context, ctrID string) { // remove volumes for _, m := range inspect.Mounts { if m.Type == mount.TypeVolume { - err = ctrClient.VolumeRemove(ctx, m.Name, true) + _, err = ctrClient.VolumeRemove(ctx, m.Name, client.VolumeRemoveOptions{Force: true}) if err != nil { return } diff --git 
a/internal/build/docker.go b/internal/build/docker.go new file mode 100644 index 0000000000..b6c7ef2006 --- /dev/null +++ b/internal/build/docker.go @@ -0,0 +1,22 @@ +package build + +import ( + "context" + + dockerClient "github.com/moby/moby/client" +) + +type DockerClient interface { + ImageRemove(ctx context.Context, image string, options dockerClient.ImageRemoveOptions) (dockerClient.ImageRemoveResult, error) + VolumeRemove(ctx context.Context, volumeID string, options dockerClient.VolumeRemoveOptions) (dockerClient.VolumeRemoveResult, error) + ContainerWait(ctx context.Context, containerID string, options dockerClient.ContainerWaitOptions) dockerClient.ContainerWaitResult + ContainerAttach(ctx context.Context, container string, options dockerClient.ContainerAttachOptions) (dockerClient.ContainerAttachResult, error) + ContainerStart(ctx context.Context, container string, options dockerClient.ContainerStartOptions) (dockerClient.ContainerStartResult, error) + ContainerCreate(ctx context.Context, options dockerClient.ContainerCreateOptions) (dockerClient.ContainerCreateResult, error) + CopyFromContainer(ctx context.Context, containerID string, options dockerClient.CopyFromContainerOptions) (dockerClient.CopyFromContainerResult, error) + ContainerInspect(ctx context.Context, containerID string, options dockerClient.ContainerInspectOptions) (dockerClient.ContainerInspectResult, error) + ContainerRemove(ctx context.Context, container string, options dockerClient.ContainerRemoveOptions) (dockerClient.ContainerRemoveResult, error) + CopyToContainer(ctx context.Context, container string, options dockerClient.CopyToContainerOptions) (dockerClient.CopyToContainerResult, error) + NetworkCreate(ctx context.Context, name string, options dockerClient.NetworkCreateOptions) (dockerClient.NetworkCreateResult, error) + NetworkRemove(ctx context.Context, networkID string, options dockerClient.NetworkRemoveOptions) (dockerClient.NetworkRemoveResult, error) +} diff --git 
a/internal/build/fakes/cache.go b/internal/build/fakes/cache.go index 9115060715..c6df3a8e56 100644 --- a/internal/build/fakes/cache.go +++ b/internal/build/fakes/cache.go @@ -3,7 +3,7 @@ package fakes import ( "context" - "github.com/buildpacks/pack/internal/cache" + "github.com/buildpacks/pack/pkg/cache" ) type FakeCache struct { diff --git a/internal/build/fakes/fake_builder.go b/internal/build/fakes/fake_builder.go index 4af7b413b5..2472dd8a73 100644 --- a/internal/build/fakes/fake_builder.go +++ b/internal/build/fakes/fake_builder.go @@ -8,6 +8,7 @@ import ( "github.com/buildpacks/pack/internal/build" "github.com/buildpacks/pack/internal/builder" + "github.com/buildpacks/pack/pkg/dist" ) type FakeBuilder struct { @@ -16,6 +17,8 @@ type FakeBuilder struct { ReturnForGID int ReturnForLifecycleDescriptor builder.LifecycleDescriptor ReturnForStack builder.StackMetadata + ReturnForRunImages []builder.RunImageMetadata + ReturnForOrderExtensions dist.Order } func NewFakeBuilder(ops ...func(*FakeBuilder)) (*FakeBuilder, error) { @@ -64,6 +67,12 @@ func WithImage(image imgutil.Image) func(*FakeBuilder) { } } +func WithOrderExtensions(orderExt dist.Order) func(*FakeBuilder) { + return func(builder *FakeBuilder) { + builder.ReturnForOrderExtensions = orderExt + } +} + func WithUID(uid int) func(*FakeBuilder) { return func(builder *FakeBuilder) { builder.ReturnForUID = uid @@ -96,12 +105,36 @@ func (b *FakeBuilder) LifecycleDescriptor() builder.LifecycleDescriptor { return b.ReturnForLifecycleDescriptor } +func (b *FakeBuilder) OrderExtensions() dist.Order { + return b.ReturnForOrderExtensions +} + func (b *FakeBuilder) Stack() builder.StackMetadata { return b.ReturnForStack } +func (b *FakeBuilder) RunImages() []builder.RunImageMetadata { + return b.ReturnForRunImages +} + +func (b *FakeBuilder) System() dist.System { return dist.System{} } + func WithBuilder(builder *FakeBuilder) func(*build.LifecycleOptions) { return func(opts *build.LifecycleOptions) { opts.Builder = 
builder } } + +// WithEnableUsernsHost creates a LifecycleOptions option that enables userns=host +func WithEnableUsernsHost() func(*build.LifecycleOptions) { + return func(opts *build.LifecycleOptions) { + opts.EnableUsernsHost = true + } +} + +// WithExecutionEnvironment creates a LifecycleOptions option that sets the execution environment +func WithExecutionEnvironment(execEnv string) func(*build.LifecycleOptions) { + return func(opts *build.LifecycleOptions) { + opts.ExecutionEnvironment = execEnv + } +} diff --git a/internal/build/lifecycle_execution.go b/internal/build/lifecycle_execution.go index 251da2e2b2..f73e7f93ea 100644 --- a/internal/build/lifecycle_execution.go +++ b/internal/build/lifecycle_execution.go @@ -4,43 +4,51 @@ import ( "context" "fmt" "math/rand" + "os" + "path/filepath" "strconv" + "time" + "github.com/BurntSushi/toml" "github.com/buildpacks/lifecycle/api" "github.com/buildpacks/lifecycle/auth" - "github.com/docker/docker/client" - "github.com/google/go-containerregistry/pkg/authn" + "github.com/buildpacks/lifecycle/platform/files" "github.com/google/go-containerregistry/pkg/name" + "github.com/moby/moby/client" "github.com/pkg/errors" + "golang.org/x/sync/errgroup" "github.com/buildpacks/pack/internal/builder" - "github.com/buildpacks/pack/internal/cache" "github.com/buildpacks/pack/internal/paths" "github.com/buildpacks/pack/internal/style" + "github.com/buildpacks/pack/pkg/cache" "github.com/buildpacks/pack/pkg/logging" ) const ( defaultProcessType = "web" overrideGID = 0 + overrideUID = 0 + sourceDateEpochEnv = "SOURCE_DATE_EPOCH" ) type LifecycleExecution struct { logger logging.Logger - docker client.CommonAPIClient + docker DockerClient platformAPI *api.Version layersVolume string appVolume string os string mountPaths mountPaths opts LifecycleOptions + tmpDir string } -func NewLifecycleExecution(logger logging.Logger, docker client.CommonAPIClient, opts LifecycleOptions) (*LifecycleExecution, error) { - latestSupportedPlatformAPI, 
err := findLatestSupported(append( +func NewLifecycleExecution(logger logging.Logger, docker DockerClient, tmpDir string, opts LifecycleOptions) (*LifecycleExecution, error) { + latestSupportedPlatformAPI, err := FindLatestSupported(append( opts.Builder.LifecycleDescriptor().APIs.Platform.Deprecated, opts.Builder.LifecycleDescriptor().APIs.Platform.Supported..., - )) + ), opts.LifecycleApis) if err != nil { return nil, err } @@ -59,6 +67,7 @@ func NewLifecycleExecution(logger logging.Logger, docker client.CommonAPIClient, opts: opts, os: osType, mountPaths: mountPathsForOS(osType, opts.Workspace), + tmpDir: tmpDir, } if opts.Interactive { @@ -68,7 +77,43 @@ func NewLifecycleExecution(logger logging.Logger, docker client.CommonAPIClient, return exec, nil } -func findLatestSupported(apis []*api.Version) (*api.Version, error) { +// intersection of two sorted lists of api versions +func apiIntersection(apisA, apisB []*api.Version) []*api.Version { + bind := 0 + aind := 0 + apis := []*api.Version{} + for ; aind < len(apisA); aind++ { + for ; bind < len(apisB) && apisA[aind].Compare(apisB[bind]) > 0; bind++ { + } + if bind == len(apisB) { + break + } + if apisA[aind].Equal(apisB[bind]) { + apis = append(apis, apisA[aind]) + } + } + return apis +} + +// FindLatestSupported finds the latest Platform API version supported by both the builder and the lifecycle. +func FindLatestSupported(builderapis []*api.Version, lifecycleapis []string) (*api.Version, error) { + var apis []*api.Version + // if a custom lifecycle image was used we need to take an intersection of its supported apis with the builder's supported apis. + // generally no custom lifecycle is used, which will be indicated by the lifecycleapis list being empty in the struct. 
+ if len(lifecycleapis) > 0 { + lcapis := []*api.Version{} + for _, ver := range lifecycleapis { + v, err := api.NewVersion(ver) + if err != nil { + return nil, fmt.Errorf("unable to parse lifecycle api version %s (%v)", ver, err) + } + lcapis = append(lcapis, v) + } + apis = apiIntersection(lcapis, builderapis) + } else { + apis = builderapis + } + for i := len(SupportedPlatformAPIVersions) - 1; i >= 0; i-- { for _, version := range apis { if SupportedPlatformAPIVersions[i].Equal(version) { @@ -120,20 +165,37 @@ func (l *LifecycleExecution) PrevImageName() string { return l.opts.PreviousImage } +const maxNetworkRemoveRetries = 2 + func (l *LifecycleExecution) Run(ctx context.Context, phaseFactoryCreator PhaseFactoryCreator) error { phaseFactory := phaseFactoryCreator(l) + var buildCache Cache - if l.opts.CacheImage != "" { - cacheImage, err := name.ParseReference(l.opts.CacheImage, name.WeakValidation) + if l.opts.CacheImage != "" || (l.opts.Cache.Build.Format == cache.CacheImage) { + cacheImageName := l.opts.CacheImage + if cacheImageName == "" { + cacheImageName = l.opts.Cache.Build.Source + } + cacheImage, err := name.ParseReference(cacheImageName, name.WeakValidation) if err != nil { return fmt.Errorf("invalid cache image name: %s", err) } buildCache = cache.NewImageCache(cacheImage, l.docker) } else { - buildCache = cache.NewVolumeCache(l.opts.Image, "build", l.docker) + switch l.opts.Cache.Build.Format { + case cache.CacheVolume: + var err error + buildCache, err = cache.NewVolumeCache(l.opts.Image, l.opts.Cache.Build, "build", l.docker, l.logger) + if err != nil { + return err + } + l.logger.Debugf("Using build cache volume %s", style.Symbol(buildCache.Name())) + case cache.CacheBind: + buildCache = cache.NewBindCache(l.opts.Cache.Build, l.docker) + l.logger.Debugf("Using build cache dir %s", style.Symbol(buildCache.Name())) + } } - l.logger.Debugf("Using build cache volume %s", style.Symbol(buildCache.Name())) if l.opts.ClearCache { if err := 
buildCache.Clear(ctx); err != nil { return errors.Wrap(err, "clearing build cache") @@ -141,69 +203,174 @@ func (l *LifecycleExecution) Run(ctx context.Context, phaseFactoryCreator PhaseF l.logger.Debugf("Build cache %s cleared", style.Symbol(buildCache.Name())) } - launchCache := cache.NewVolumeCache(l.opts.Image, "launch", l.docker) + launchCache, err := cache.NewVolumeCache(l.opts.Image, l.opts.Cache.Launch, "launch", l.docker, l.logger) + if err != nil { + return err + } + + if l.opts.Network == "" { + // start an ephemeral bridge network + driver := "bridge" + if l.os == "windows" { + driver = "nat" + } + networkName := fmt.Sprintf("pack.local-network-%x", randString(10)) + result, err := l.docker.NetworkCreate(ctx, networkName, client.NetworkCreateOptions{ + Driver: driver, + }) + if err != nil { + return fmt.Errorf("failed to create ephemeral %s network: %w", driver, err) + } + defer func() { + for i := 0; i <= maxNetworkRemoveRetries; i++ { + time.Sleep(100 * time.Duration(i) * time.Millisecond) // wait if retrying + if _, err = l.docker.NetworkRemove(ctx, networkName, client.NetworkRemoveOptions{}); err != nil { + continue + } + break + } + }() + l.logger.Debugf("Created ephemeral bridge network %s with ID %s", networkName, result.ID) + for _, warning := range result.Warning { + l.logger.Warn(warning) + } + l.opts.Network = networkName + } if !l.opts.UseCreator { if l.platformAPI.LessThan("0.7") { l.logger.Info(style.Step("DETECTING")) - if err := l.Detect(ctx, l.opts.Network, l.opts.Volumes, phaseFactory); err != nil { + if err := l.Detect(ctx, phaseFactory); err != nil { return err } l.logger.Info(style.Step("ANALYZING")) - if err := l.Analyze(ctx, l.opts.Image.String(), l.opts.Network, l.opts.Publish, l.opts.DockerHost, l.opts.ClearCache, l.opts.RunImage, l.opts.AdditionalTags, buildCache, phaseFactory); err != nil { + if err := l.Analyze(ctx, buildCache, launchCache, phaseFactory); err != nil { return err } } else { 
l.logger.Info(style.Step("ANALYZING")) - if err := l.Analyze(ctx, l.opts.Image.String(), l.opts.Network, l.opts.Publish, l.opts.DockerHost, l.opts.ClearCache, l.opts.RunImage, l.opts.AdditionalTags, buildCache, phaseFactory); err != nil { + if err := l.Analyze(ctx, buildCache, launchCache, phaseFactory); err != nil { return err } l.logger.Info(style.Step("DETECTING")) - if err := l.Detect(ctx, l.opts.Network, l.opts.Volumes, phaseFactory); err != nil { + if err := l.Detect(ctx, phaseFactory); err != nil { return err } } + + var kanikoCache Cache + if l.PlatformAPI().AtLeast("0.12") { + // lifecycle 0.17.0 (introduces support for Platform API 0.12) and above will ensure that + // this volume is owned by the CNB user, + // and hence the restorer (after dropping privileges) will be able to write to it. + kanikoCache, err = cache.NewVolumeCache(l.opts.Image, l.opts.Cache.Kaniko, "kaniko", l.docker, l.logger) + if err != nil { + return err + } + } else { + switch { + case buildCache.Type() == cache.Volume: + // Re-use the build cache as the kaniko cache. Earlier versions of the lifecycle (0.16.x and below) + // already ensure this volume is owned by the CNB user. + kanikoCache = buildCache + case l.hasExtensionsForBuild(): + // We need a usable kaniko cache, so error in this case. + return fmt.Errorf("build cache must be volume cache when building with extensions") + default: + // The kaniko cache is unused, so it doesn't matter that it's not usable. + kanikoCache, err = cache.NewVolumeCache(l.opts.Image, l.opts.Cache.Kaniko, "kaniko", l.docker, l.logger) + if err != nil { + return err + } + } + } + + var ( + ephemeralRunImage string + err error + ) + if l.runImageChanged() || l.hasExtensionsForRun() { + // Pull the run image by name in case we fail to pull it by identifier later. 
+ if ephemeralRunImage, err = l.opts.FetchRunImageWithLifecycleLayer(l.runImageNameAfterExtensions()); err != nil { + return err + } + } + l.logger.Info(style.Step("RESTORING")) - if l.opts.ClearCache { + if l.opts.ClearCache && l.PlatformAPI().LessThan("0.10") { l.logger.Info("Skipping 'restore' due to clearing cache") - } else if err := l.Restore(ctx, l.opts.Network, buildCache, phaseFactory); err != nil { + } else if err := l.Restore(ctx, buildCache, kanikoCache, phaseFactory); err != nil { return err } - l.logger.Info(style.Step("BUILDING")) + if l.runImageChanged() || l.hasExtensionsForRun() { + if newEphemeralRunImage, err := l.opts.FetchRunImageWithLifecycleLayer(l.runImageIdentifierAfterExtensions()); err == nil { + // If the run image was switched by extensions, the run image reference as written by the __restorer__ will be a digest reference + // that is pullable from a registry. + // However, if the run image is only extended (not switched), the run image reference as written by the __analyzer__ may be an image identifier + // (in the daemon case), and will not be pullable. 
+ ephemeralRunImage = newEphemeralRunImage + } + } + + group, _ := errgroup.WithContext(context.TODO()) + if l.platformAPI.AtLeast("0.10") && l.hasExtensionsForBuild() { + group.Go(func() error { + l.logger.Info(style.Step("EXTENDING (BUILD)")) + return l.ExtendBuild(ctx, kanikoCache, phaseFactory, l.extensionsAreExperimental()) + }) + } else { + group.Go(func() error { + l.logger.Info(style.Step("BUILDING")) + return l.Build(ctx, phaseFactory) + }) + } + + if l.platformAPI.AtLeast("0.12") && l.hasExtensionsForRun() { + group.Go(func() error { + l.logger.Info(style.Step("EXTENDING (RUN)")) + return l.ExtendRun(ctx, kanikoCache, phaseFactory, ephemeralRunImage, l.extensionsAreExperimental()) + }) + } - if err := l.Build(ctx, l.opts.Network, l.opts.Volumes, phaseFactory); err != nil { + if err := group.Wait(); err != nil { return err } l.logger.Info(style.Step("EXPORTING")) - return l.Export(ctx, l.opts.Image.String(), l.opts.RunImage, l.opts.Publish, l.opts.DockerHost, l.opts.Network, buildCache, launchCache, l.opts.AdditionalTags, phaseFactory) + return l.Export(ctx, buildCache, launchCache, kanikoCache, phaseFactory) } - return l.Create(ctx, l.opts.Publish, l.opts.DockerHost, l.opts.ClearCache, l.opts.RunImage, l.opts.Image.String(), l.opts.Network, buildCache, launchCache, l.opts.AdditionalTags, l.opts.Volumes, phaseFactory) + if l.platformAPI.AtLeast("0.10") && l.hasExtensions() && !l.opts.UseCreatorWithExtensions { + return errors.New("builder has an order for extensions which is not supported when using the creator; re-run without '--trust-builder' or re-tag builder to avoid trusting it") + } + return l.Create(ctx, buildCache, launchCache, phaseFactory) } func (l *LifecycleExecution) Cleanup() error { var reterr error - if err := l.docker.VolumeRemove(context.Background(), l.layersVolume, true); err != nil { + if _, err := l.docker.VolumeRemove(context.Background(), l.layersVolume, client.VolumeRemoveOptions{Force: true}); err != nil { reterr = 
errors.Wrapf(err, "failed to clean up layers volume %s", l.layersVolume) } - if err := l.docker.VolumeRemove(context.Background(), l.appVolume, true); err != nil { + if _, err := l.docker.VolumeRemove(context.Background(), l.appVolume, client.VolumeRemoveOptions{Force: true}); err != nil { reterr = errors.Wrapf(err, "failed to clean up app volume %s", l.appVolume) } + if err := os.RemoveAll(l.tmpDir); err != nil { + reterr = errors.Wrapf(err, "failed to clean up working directory %s", l.tmpDir) + } return reterr } -func (l *LifecycleExecution) Create(ctx context.Context, publish bool, dockerHost string, clearCache bool, runImage, repoName, networkMode string, buildCache, launchCache Cache, additionalTags, volumes []string, phaseFactory PhaseFactory) error { +func (l *LifecycleExecution) Create(ctx context.Context, buildCache, launchCache Cache, phaseFactory PhaseFactory) error { flags := addTags([]string{ "-app", l.mountPaths.appDir(), "-cache-dir", l.mountPaths.cacheDir(), - "-run-image", runImage, - }, additionalTags) + "-run-image", l.opts.RunImage, + }, l.opts.AdditionalTags) - if clearCache { + if l.opts.ClearCache { flags = append(flags, "-skip-restore") } @@ -211,6 +378,16 @@ func (l *LifecycleExecution) Create(ctx context.Context, publish bool, dockerHos flags = append(flags, "-gid", strconv.Itoa(l.opts.GID)) } + if l.opts.UID >= overrideUID { + flags = append(flags, "-uid", strconv.Itoa(l.opts.UID)) + } + + if l.platformAPI.AtLeast("0.13") { + for _, reg := range l.opts.InsecureRegistries { + flags = append(flags, "-insecure-registry", reg) + } + } + if l.opts.PreviousImage != "" { if l.opts.Image == nil { return errors.New("image can't be nil") @@ -225,7 +402,7 @@ func (l *LifecycleExecution) Create(ctx context.Context, publish bool, dockerHos if err != nil { return fmt.Errorf("invalid previous image name: %s", err) } - if publish { + if l.opts.Publish { if image.Context().RegistryStr() != prevImage.Context().RegistryStr() { return fmt.Errorf(`when 
--publish is used, must be in the same image registry as image registry = %s @@ -241,32 +418,49 @@ func (l *LifecycleExecution) Create(ctx context.Context, publish bool, dockerHos flags = append(flags, "-process-type", processType) } - var cacheOpts PhaseConfigProviderOperation + var cacheBindOp PhaseConfigProviderOperation switch buildCache.Type() { case cache.Image: flags = append(flags, "-cache-image", buildCache.Name()) - cacheOpts = WithBinds(volumes...) - case cache.Volume: - cacheOpts = WithBinds(append(volumes, fmt.Sprintf("%s:%s", buildCache.Name(), l.mountPaths.cacheDir()))...) + cacheBindOp = WithBinds(l.opts.Volumes...) + case cache.Volume, cache.Bind: + cacheBindOp = WithBinds(append(l.opts.Volumes, fmt.Sprintf("%s:%s", buildCache.Name(), l.mountPaths.cacheDir()))...) + } + + withEnv := NullOp() + if l.opts.CreationTime != nil && l.platformAPI.AtLeast("0.9") { + withEnv = WithEnv(fmt.Sprintf("%s=%s", sourceDateEpochEnv, strconv.Itoa(int(l.opts.CreationTime.Unix())))) } opts := []PhaseConfigProviderOperation{ WithFlags(l.withLogLevel(flags...)...), - WithArgs(repoName), - WithNetwork(networkMode), - cacheOpts, + WithArgs(l.opts.Image.String()), + WithNetwork(l.opts.Network), + cacheBindOp, WithContainerOperations(WriteProjectMetadata(l.mountPaths.projectPath(), l.opts.ProjectMetadata, l.os)), WithContainerOperations(CopyDir(l.opts.AppPath, l.mountPaths.appDir(), l.opts.Builder.UID(), l.opts.Builder.GID(), l.os, true, l.opts.FileFilter)), If(l.opts.SBOMDestinationDir != "", WithPostContainerRunOperations( EnsureVolumeAccess(l.opts.Builder.UID(), l.opts.Builder.GID(), l.os, l.layersVolume, l.appVolume), CopyOutTo(l.mountPaths.sbomDir(), l.opts.SBOMDestinationDir))), + If(l.opts.ReportDestinationDir != "", WithPostContainerRunOperations( + EnsureVolumeAccess(l.opts.Builder.UID(), l.opts.Builder.GID(), l.os, l.layersVolume, l.appVolume), + CopyOutTo(l.mountPaths.reportPath(), l.opts.ReportDestinationDir))), If(l.opts.Interactive, 
WithPostContainerRunOperations( EnsureVolumeAccess(l.opts.Builder.UID(), l.opts.Builder.GID(), l.os, l.layersVolume, l.appVolume), CopyOut(l.opts.Termui.ReadLayers, l.mountPaths.layersDir(), l.mountPaths.appDir()))), + withEnv, + } + + if l.opts.Layout { + var err error + opts, err = l.appendLayoutOperations(opts) + if err != nil { + return err + } } - if publish { - authConfig, err := auth.BuildEnvVar(authn.DefaultKeychain, repoName) + if l.opts.Publish || l.opts.Layout { + authConfig, err := auth.BuildEnvVar(l.opts.Keychain, l.opts.Image.String(), l.opts.RunImage, l.opts.CacheImage, l.opts.PreviousImage) if err != nil { return err } @@ -274,7 +468,7 @@ func (l *LifecycleExecution) Create(ctx context.Context, publish bool, dockerHos opts = append(opts, WithRoot(), WithRegistryAccess(authConfig)) } else { opts = append(opts, - WithDaemonAccess(dockerHost), + WithDaemonAccess(l.opts.DockerHost), WithFlags("-daemon", "-launch-cache", l.mountPaths.launchCacheDir()), WithBinds(fmt.Sprintf("%s:%s", launchCache.Name(), l.mountPaths.launchCacheDir())), ) @@ -285,8 +479,14 @@ func (l *LifecycleExecution) Create(ctx context.Context, publish bool, dockerHos return create.Run(ctx) } -func (l *LifecycleExecution) Detect(ctx context.Context, networkMode string, volumes []string, phaseFactory PhaseFactory) error { +func (l *LifecycleExecution) Detect(ctx context.Context, phaseFactory PhaseFactory) error { flags := []string{"-app", l.mountPaths.appDir()} + + envOp := NullOp() + if l.platformAPI.AtLeast("0.10") && l.hasExtensions() { + envOp = If(l.extensionsAreExperimental(), WithEnv("CNB_EXPERIMENTAL_MODE=warn")) + } + configProvider := NewPhaseConfigProvider( "detector", l, @@ -294,13 +494,18 @@ func (l *LifecycleExecution) Detect(ctx context.Context, networkMode string, vol WithArgs( l.withLogLevel()..., ), - WithNetwork(networkMode), - WithBinds(volumes...), + WithNetwork(l.opts.Network), + WithBinds(l.opts.Volumes...), WithContainerOperations( 
EnsureVolumeAccess(l.opts.Builder.UID(), l.opts.Builder.GID(), l.os, l.layersVolume, l.appVolume), CopyDir(l.opts.AppPath, l.mountPaths.appDir(), l.opts.Builder.UID(), l.opts.Builder.GID(), l.os, true, l.opts.FileFilter), ), WithFlags(flags...), + If(l.hasExtensions(), WithPostContainerRunOperations( + CopyOutToMaybe(filepath.Join(l.mountPaths.layersDir(), "analyzed.toml"), l.tmpDir))), + If(l.hasExtensions(), WithPostContainerRunOperations( + CopyOutToMaybe(filepath.Join(l.mountPaths.layersDir(), "generated"), l.tmpDir))), + envOp, ) detect := phaseFactory.New(configProvider) @@ -308,19 +513,86 @@ func (l *LifecycleExecution) Detect(ctx context.Context, networkMode string, vol return detect.Run(ctx) } -func (l *LifecycleExecution) Restore(ctx context.Context, networkMode string, buildCache Cache, phaseFactory PhaseFactory) error { - flagsOpt := NullOp() - cacheOpt := NullOp() +func (l *LifecycleExecution) extensionsAreExperimental() bool { + return l.PlatformAPI().AtLeast("0.10") && l.platformAPI.LessThan("0.13") +} + +func (l *LifecycleExecution) Restore(ctx context.Context, buildCache Cache, kanikoCache Cache, phaseFactory PhaseFactory) error { + // build up flags and ops + var flags []string + if l.opts.ClearCache { + flags = append(flags, "-skip-layers") + } + var registryImages []string + + // for cache + cacheBindOp := NullOp() switch buildCache.Type() { case cache.Image: - flagsOpt = WithFlags("-cache-image", buildCache.Name()) + flags = append(flags, "-cache-image", buildCache.Name()) + registryImages = append(registryImages, buildCache.Name()) case cache.Volume: - cacheOpt = WithBinds(fmt.Sprintf("%s:%s", buildCache.Name(), l.mountPaths.cacheDir())) + flags = append(flags, "-cache-dir", l.mountPaths.cacheDir()) + cacheBindOp = WithBinds(fmt.Sprintf("%s:%s", buildCache.Name(), l.mountPaths.cacheDir())) } + + // for gid if l.opts.GID >= overrideGID { - flagsOpt = WithFlags("-gid", strconv.Itoa(l.opts.GID)) + flags = append(flags, "-gid", 
strconv.Itoa(l.opts.GID)) + } + + if l.opts.UID >= overrideUID { + flags = append(flags, "-uid", strconv.Itoa(l.opts.UID)) + } + + if l.platformAPI.AtLeast("0.13") { + for _, reg := range l.opts.InsecureRegistries { + flags = append(flags, "-insecure-registry", reg) + } } + // for kaniko + kanikoCacheBindOp := NullOp() + if (l.platformAPI.AtLeast("0.10") && l.hasExtensionsForBuild()) || + l.platformAPI.AtLeast("0.12") { + if l.hasExtensionsForBuild() { + flags = append(flags, "-build-image", l.opts.BuilderImage) + registryImages = append(registryImages, l.opts.BuilderImage) + } + if l.runImageChanged() || l.hasExtensionsForRun() { + registryImages = append(registryImages, l.runImageNameAfterExtensions()) + } + if l.hasExtensionsForBuild() || l.hasExtensionsForRun() { + kanikoCacheBindOp = WithBinds(fmt.Sprintf("%s:%s", kanikoCache.Name(), l.mountPaths.kanikoCacheDir())) + } + } + + // for auths + registryOp := NullOp() + if len(registryImages) > 0 { + authConfig, err := auth.BuildEnvVar(l.opts.Keychain, registryImages...) + if err != nil { + return err + } + registryOp = WithRegistryAccess(authConfig) + } + + // for export to OCI layout + layoutOp := NullOp() + layoutBindOp := NullOp() + if l.opts.Layout && l.platformAPI.AtLeast("0.12") { + layoutOp = withLayoutOperation() + layoutBindOp = WithBinds(l.opts.Volumes...) + } + + dockerOp := NullOp() + if !l.opts.Publish && !l.opts.Layout && l.platformAPI.AtLeast("0.12") { + dockerOp = WithDaemonAccess(l.opts.DockerHost) + flags = append(flags, "-daemon") + } + + flagsOp := WithFlags(flags...) 
+ configProvider := NewPhaseConfigProvider( "restorer", l, @@ -329,13 +601,18 @@ func (l *LifecycleExecution) Restore(ctx context.Context, networkMode string, bu WithEnv(fmt.Sprintf("%s=%d", builder.EnvUID, l.opts.Builder.UID()), fmt.Sprintf("%s=%d", builder.EnvGID, l.opts.Builder.GID())), WithRoot(), // remove after platform API 0.2 is no longer supported WithArgs( - l.withLogLevel( - "-cache-dir", l.mountPaths.cacheDir(), - )..., + l.withLogLevel()..., ), - WithNetwork(networkMode), - flagsOpt, - cacheOpt, + WithNetwork(l.opts.Network), + cacheBindOp, + dockerOp, + flagsOp, + kanikoCacheBindOp, + registryOp, + layoutOp, + layoutBindOp, + If(l.hasExtensions(), WithPostContainerRunOperations( + CopyOutToMaybe(filepath.Join(l.mountPaths.layersDir(), "analyzed.toml"), l.tmpDir))), ) restore := phaseFactory.New(configProvider) @@ -343,62 +620,67 @@ func (l *LifecycleExecution) Restore(ctx context.Context, networkMode string, bu return restore.Run(ctx) } -func (l *LifecycleExecution) Analyze(ctx context.Context, repoName, networkMode string, publish bool, dockerHost string, clearCache bool, runImage string, additionalTags []string, cache Cache, phaseFactory PhaseFactory) error { - analyze, err := l.newAnalyze(repoName, networkMode, publish, dockerHost, clearCache, runImage, additionalTags, cache, phaseFactory) - if err != nil { - return err - } - defer analyze.Cleanup() - return analyze.Run(ctx) -} - -func (l *LifecycleExecution) newAnalyze(repoName, networkMode string, publish bool, dockerHost string, clearCache bool, runImage string, additionalTags []string, buildCache Cache, phaseFactory PhaseFactory) (RunnerCleaner, error) { - args := []string{ - repoName, - } +func (l *LifecycleExecution) Analyze(ctx context.Context, buildCache, launchCache Cache, phaseFactory PhaseFactory) error { + var flags []string + args := []string{l.opts.Image.String()} platformAPILessThan07 := l.platformAPI.LessThan("0.7") - if platformAPILessThan07 { - if clearCache { + + cacheBindOp := 
NullOp() + if l.opts.ClearCache { + if platformAPILessThan07 || l.platformAPI.AtLeast("0.9") { args = prependArg("-skip-layers", args) - } else { - args = append([]string{"-cache-dir", l.mountPaths.cacheDir()}, args...) + } + } else { + switch buildCache.Type() { + case cache.Image: + flags = append(flags, "-cache-image", buildCache.Name()) + case cache.Volume: + if platformAPILessThan07 { + args = append([]string{"-cache-dir", l.mountPaths.cacheDir()}, args...) + cacheBindOp = WithBinds(fmt.Sprintf("%s:%s", buildCache.Name(), l.mountPaths.cacheDir())) + } } } - cacheOpt := NullOp() - flagsOpt := NullOp() - switch buildCache.Type() { - case cache.Image: - if !clearCache { - flagsOpt = WithFlags("-cache-image", buildCache.Name()) - } - case cache.Volume: - if platformAPILessThan07 { - cacheOpt = WithBinds(fmt.Sprintf("%s:%s", buildCache.Name(), l.mountPaths.cacheDir())) + launchCacheBindOp := NullOp() + if l.platformAPI.AtLeast("0.9") { + if !l.opts.Publish { + args = append([]string{"-launch-cache", l.mountPaths.launchCacheDir()}, args...) 
+ launchCacheBindOp = WithBinds(fmt.Sprintf("%s:%s", launchCache.Name(), l.mountPaths.launchCacheDir())) } } if l.opts.GID >= overrideGID { - flagsOpt = WithFlags("-gid", strconv.Itoa(l.opts.GID)) + flags = append(flags, "-gid", strconv.Itoa(l.opts.GID)) + } + + if l.opts.UID >= overrideUID { + flags = append(flags, "-uid", strconv.Itoa(l.opts.UID)) + } + + if l.platformAPI.AtLeast("0.13") { + for _, reg := range l.opts.InsecureRegistries { + flags = append(flags, "-insecure-registry", reg) + } } if l.opts.PreviousImage != "" { if l.opts.Image == nil { - return nil, errors.New("image can't be nil") + return errors.New("image can't be nil") } image, err := name.ParseReference(l.opts.Image.Name(), name.WeakValidation) if err != nil { - return nil, fmt.Errorf("invalid image name: %s", err) + return fmt.Errorf("invalid image name: %s", err) } prevImage, err := name.ParseReference(l.opts.PreviousImage, name.WeakValidation) if err != nil { - return nil, fmt.Errorf("invalid previous image name: %s", err) + return fmt.Errorf("invalid previous image name: %s", err) } - if publish { + if l.opts.Publish { if image.Context().RegistryStr() != prevImage.Context().RegistryStr() { - return nil, fmt.Errorf(`when --publish is used, must be in the same image registry as + return fmt.Errorf(`when --publish is used, must be in the same image registry as image registry = %s previous-image registry = %s`, image.Context().RegistryStr(), prevImage.Context().RegistryStr()) } @@ -409,22 +691,37 @@ func (l *LifecycleExecution) newAnalyze(repoName, networkMode string, publish bo args = append([]string{"-previous-image", l.opts.PreviousImage}, args...) } } - stackOpts := NullOp() + + stackOp := NullOp() + runOp := NullOp() if !platformAPILessThan07 { - for _, tag := range additionalTags { + for _, tag := range l.opts.AdditionalTags { args = append([]string{"-tag", tag}, args...) } - if runImage != "" { - args = append([]string{"-run-image", runImage}, args...) 
+ if l.opts.RunImage != "" { + args = append([]string{"-run-image", l.opts.RunImage}, args...) } - args = append([]string{"-stack", l.mountPaths.stackPath()}, args...) - stackOpts = WithContainerOperations(WriteStackToml(l.mountPaths.stackPath(), l.opts.Builder.Stack(), l.os)) + if l.platformAPI.LessThan("0.12") { + args = append([]string{"-stack", l.mountPaths.stackPath()}, args...) + stackOp = WithContainerOperations(WriteStackToml(l.mountPaths.stackPath(), l.opts.Builder.Stack(), l.os)) + } else { + args = append([]string{"-run", l.mountPaths.runPath()}, args...) + runOp = WithContainerOperations(WriteRunToml(l.mountPaths.runPath(), l.opts.Builder.RunImages(), l.os)) + } + } + + layoutOp := NullOp() + if l.opts.Layout && l.platformAPI.AtLeast("0.12") { + layoutOp = withLayoutOperation() } - if publish { - authConfig, err := auth.BuildEnvVar(authn.DefaultKeychain, repoName) + flagsOp := WithFlags(flags...) + + var analyze RunnerCleaner + if l.opts.Publish || l.opts.Layout { + authConfig, err := auth.BuildEnvVar(l.opts.Keychain, l.opts.Image.String(), l.opts.RunImage, l.opts.CacheImage, l.opts.PreviousImage) if err != nil { - return nil, err + return err } configProvider := NewPhaseConfigProvider( @@ -436,52 +733,52 @@ func (l *LifecycleExecution) newAnalyze(repoName, networkMode string, publish bo WithRegistryAccess(authConfig), WithRoot(), WithArgs(l.withLogLevel(args...)...), - WithNetwork(networkMode), - flagsOpt, - cacheOpt, - stackOpts, + WithNetwork(l.opts.Network), + flagsOp, + cacheBindOp, + stackOp, + runOp, + layoutOp, ) - return phaseFactory.New(configProvider), nil - } + analyze = phaseFactory.New(configProvider) + } else { + configProvider := NewPhaseConfigProvider( + "analyzer", + l, + WithLogPrefix("analyzer"), + WithImage(l.opts.LifecycleImage), + WithEnv( + fmt.Sprintf("%s=%d", builder.EnvUID, l.opts.Builder.UID()), + fmt.Sprintf("%s=%d", builder.EnvGID, l.opts.Builder.GID()), + ), + WithDaemonAccess(l.opts.DockerHost), + launchCacheBindOp, + 
WithFlags(l.withLogLevel("-daemon")...), + WithArgs(args...), + flagsOp, + WithNetwork(l.opts.Network), + cacheBindOp, + stackOp, + runOp, + ) - // TODO: when platform API 0.2 is no longer supported we can delete this code: https://github.com/buildpacks/pack/issues/629. - configProvider := NewPhaseConfigProvider( - "analyzer", - l, - WithLogPrefix("analyzer"), - WithImage(l.opts.LifecycleImage), - WithEnv( - fmt.Sprintf("%s=%d", builder.EnvUID, l.opts.Builder.UID()), - fmt.Sprintf("%s=%d", builder.EnvGID, l.opts.Builder.GID()), - ), - WithDaemonAccess(dockerHost), - WithArgs( - l.withLogLevel( - prependArg( - "-daemon", - args, - )..., - )..., - ), - flagsOpt, - WithNetwork(networkMode), - cacheOpt, - stackOpts, - ) + analyze = phaseFactory.New(configProvider) + } - return phaseFactory.New(configProvider), nil + defer analyze.Cleanup() + return analyze.Run(ctx) } -func (l *LifecycleExecution) Build(ctx context.Context, networkMode string, volumes []string, phaseFactory PhaseFactory) error { +func (l *LifecycleExecution) Build(ctx context.Context, phaseFactory PhaseFactory) error { flags := []string{"-app", l.mountPaths.appDir()} configProvider := NewPhaseConfigProvider( "builder", l, WithLogPrefix("builder"), WithArgs(l.withLogLevel()...), - WithNetwork(networkMode), - WithBinds(volumes...), + WithNetwork(l.opts.Network), + WithBinds(l.opts.Volumes...), WithFlags(flags...), ) @@ -490,6 +787,49 @@ func (l *LifecycleExecution) Build(ctx context.Context, networkMode string, volu return build.Run(ctx) } +func (l *LifecycleExecution) ExtendBuild(ctx context.Context, kanikoCache Cache, phaseFactory PhaseFactory, experimental bool) error { + flags := []string{"-app", l.mountPaths.appDir()} + + configProvider := NewPhaseConfigProvider( + "extender", + l, + WithLogPrefix("extender (build)"), + WithArgs(l.withLogLevel()...), + WithBinds(l.opts.Volumes...), + If(experimental, WithEnv("CNB_EXPERIMENTAL_MODE=warn")), + WithFlags(flags...), + WithNetwork(l.opts.Network), + 
WithRoot(), + WithBinds(fmt.Sprintf("%s:%s", kanikoCache.Name(), l.mountPaths.kanikoCacheDir())), + ) + + extend := phaseFactory.New(configProvider) + defer extend.Cleanup() + return extend.Run(ctx) +} + +func (l *LifecycleExecution) ExtendRun(ctx context.Context, kanikoCache Cache, phaseFactory PhaseFactory, runImageName string, experimental bool) error { + flags := []string{"-app", l.mountPaths.appDir(), "-kind", "run"} + + configProvider := NewPhaseConfigProvider( + "extender", + l, + WithLogPrefix("extender (run)"), + WithArgs(l.withLogLevel()...), + WithBinds(l.opts.Volumes...), + If(experimental, WithEnv("CNB_EXPERIMENTAL_MODE=warn")), + WithFlags(flags...), + WithNetwork(l.opts.Network), + WithRoot(), + WithImage(runImageName), + WithBinds(fmt.Sprintf("%s:%s", kanikoCache.Name(), l.mountPaths.kanikoCacheDir())), + ) + + extend := phaseFactory.New(configProvider) + defer extend.Cleanup() + return extend.Run(ctx) +} + func determineDefaultProcessType(platformAPI *api.Version, providedValue string) string { shouldSetForceDefault := platformAPI.Compare(api.MustParse("0.4")) >= 0 && platformAPI.Compare(api.MustParse("0.6")) < 0 @@ -500,17 +840,26 @@ func determineDefaultProcessType(platformAPI *api.Version, providedValue string) return providedValue } -func (l *LifecycleExecution) newExport(repoName, runImage string, publish bool, dockerHost, networkMode string, buildCache, launchCache Cache, additionalTags []string, phaseFactory PhaseFactory) (RunnerCleaner, error) { +func (l *LifecycleExecution) Export(ctx context.Context, buildCache, launchCache, kanikoCache Cache, phaseFactory PhaseFactory) error { flags := []string{ "-app", l.mountPaths.appDir(), "-cache-dir", l.mountPaths.cacheDir(), - "-stack", l.mountPaths.stackPath(), + } + + expEnv := NullOp() + kanikoCacheBindOp := NullOp() + if l.platformAPI.LessThan("0.12") { + flags = append(flags, "-stack", l.mountPaths.stackPath()) + } else { + flags = append(flags, "-run", l.mountPaths.runPath()) + if 
l.hasExtensionsForRun() { + expEnv = If(l.extensionsAreExperimental(), WithEnv("CNB_EXPERIMENTAL_MODE=warn")) + kanikoCacheBindOp = WithBinds(fmt.Sprintf("%s:%s", kanikoCache.Name(), l.mountPaths.kanikoCacheDir())) + } } if l.platformAPI.LessThan("0.7") { - flags = append(flags, - "-run-image", runImage, - ) + flags = append(flags, "-run-image", l.opts.RunImage) } processType := determineDefaultProcessType(l.platformAPI, l.opts.DefaultProcessType) if processType != "" { @@ -520,12 +869,27 @@ func (l *LifecycleExecution) newExport(repoName, runImage string, publish bool, flags = append(flags, "-gid", strconv.Itoa(l.opts.GID)) } - cacheOpt := NullOp() + if l.opts.UID >= overrideUID { + flags = append(flags, "-uid", strconv.Itoa(l.opts.UID)) + } + + if l.platformAPI.AtLeast("0.13") { + for _, reg := range l.opts.InsecureRegistries { + flags = append(flags, "-insecure-registry", reg) + } + } + + cacheBindOp := NullOp() switch buildCache.Type() { case cache.Image: flags = append(flags, "-cache-image", buildCache.Name()) case cache.Volume: - cacheOpt = WithBinds(fmt.Sprintf("%s:%s", buildCache.Name(), l.mountPaths.cacheDir())) + cacheBindOp = WithBinds(fmt.Sprintf("%s:%s", buildCache.Name(), l.mountPaths.cacheDir())) + } + + epochEnv := NullOp() + if l.opts.CreationTime != nil && l.platformAPI.AtLeast("0.9") { + epochEnv = WithEnv(fmt.Sprintf("%s=%s", sourceDateEpochEnv, strconv.Itoa(int(l.opts.CreationTime.Unix())))) } opts := []PhaseConfigProviderOperation{ @@ -538,24 +902,41 @@ func (l *LifecycleExecution) newExport(repoName, runImage string, publish bool, WithFlags( l.withLogLevel(flags...)..., ), - WithArgs(append([]string{repoName}, additionalTags...)...), + WithArgs(append([]string{l.opts.Image.String()}, l.opts.AdditionalTags...)...), WithRoot(), - WithNetwork(networkMode), - cacheOpt, + WithNetwork(l.opts.Network), + cacheBindOp, + kanikoCacheBindOp, WithContainerOperations(WriteStackToml(l.mountPaths.stackPath(), l.opts.Builder.Stack(), l.os)), + 
WithContainerOperations(WriteRunToml(l.mountPaths.runPath(), l.opts.Builder.RunImages(), l.os)), WithContainerOperations(WriteProjectMetadata(l.mountPaths.projectPath(), l.opts.ProjectMetadata, l.os)), If(l.opts.SBOMDestinationDir != "", WithPostContainerRunOperations( EnsureVolumeAccess(l.opts.Builder.UID(), l.opts.Builder.GID(), l.os, l.layersVolume, l.appVolume), CopyOutTo(l.mountPaths.sbomDir(), l.opts.SBOMDestinationDir))), + If(l.opts.ReportDestinationDir != "", WithPostContainerRunOperations( + EnsureVolumeAccess(l.opts.Builder.UID(), l.opts.Builder.GID(), l.os, l.layersVolume, l.appVolume), + CopyOutTo(l.mountPaths.reportPath(), l.opts.ReportDestinationDir))), If(l.opts.Interactive, WithPostContainerRunOperations( EnsureVolumeAccess(l.opts.Builder.UID(), l.opts.Builder.GID(), l.os, l.layersVolume, l.appVolume), CopyOut(l.opts.Termui.ReadLayers, l.mountPaths.layersDir(), l.mountPaths.appDir()))), + epochEnv, + expEnv, } - if publish { - authConfig, err := auth.BuildEnvVar(authn.DefaultKeychain, repoName, runImage) + if l.opts.Layout && l.platformAPI.AtLeast("0.12") { + var err error + opts, err = l.appendLayoutOperations(opts) if err != nil { - return nil, err + return err + } + opts = append(opts, WithBinds(l.opts.Volumes...)) + } + + var export RunnerCleaner + if l.opts.Publish || l.opts.Layout { + authConfig, err := auth.BuildEnvVar(l.opts.Keychain, l.opts.Image.String(), l.opts.RunImage, l.opts.CacheImage, l.opts.PreviousImage) + if err != nil { + return err } opts = append( @@ -563,23 +944,17 @@ func (l *LifecycleExecution) newExport(repoName, runImage string, publish bool, WithRegistryAccess(authConfig), WithRoot(), ) + export = phaseFactory.New(NewPhaseConfigProvider("exporter", l, opts...)) } else { opts = append( opts, - WithDaemonAccess(dockerHost), + WithDaemonAccess(l.opts.DockerHost), WithFlags("-daemon", "-launch-cache", l.mountPaths.launchCacheDir()), WithBinds(fmt.Sprintf("%s:%s", launchCache.Name(), l.mountPaths.launchCacheDir())), ) + 
export = phaseFactory.New(NewPhaseConfigProvider("exporter", l, opts...)) } - return phaseFactory.New(NewPhaseConfigProvider("exporter", l, opts...)), nil -} - -func (l *LifecycleExecution) Export(ctx context.Context, repoName, runImage string, publish bool, dockerHost, networkMode string, buildCache, launchCache Cache, additionalTags []string, phaseFactory PhaseFactory) error { - export, err := l.newExport(repoName, runImage, publish, dockerHost, networkMode, buildCache, launchCache, additionalTags, phaseFactory) - if err != nil { - return err - } defer export.Cleanup() return export.Run(ctx) } @@ -591,6 +966,101 @@ func (l *LifecycleExecution) withLogLevel(args ...string) []string { return args } +func (l *LifecycleExecution) hasExtensions() bool { + return len(l.opts.Builder.OrderExtensions()) > 0 +} + +func (l *LifecycleExecution) hasExtensionsForBuild() bool { + if !l.hasExtensions() { + return false + } + generatedDir := filepath.Join(l.tmpDir, "generated") + fis, err := os.ReadDir(filepath.Join(generatedDir, "build")) + if err == nil && len(fis) > 0 { + // on older platforms, we need to find a file such as /generated/build//Dockerfile + // on newer platforms, /generated/build doesn't exist + return true + } + // on newer platforms, we need to find a file such as /generated//build.Dockerfile + fis, err = os.ReadDir(generatedDir) + if err != nil { + l.logger.Warnf("failed to read generated directory, assuming no build image extensions: %s", err) + return false + } + for _, fi := range fis { + if _, err := os.Stat(filepath.Join(generatedDir, fi.Name(), "build.Dockerfile")); err == nil { + return true + } + } + return false +} + +func (l *LifecycleExecution) hasExtensionsForRun() bool { + if !l.hasExtensions() { + return false + } + var amd files.Analyzed + if _, err := toml.DecodeFile(filepath.Join(l.tmpDir, "analyzed.toml"), &amd); err != nil { + l.logger.Warnf("failed to parse analyzed.toml file, assuming no run image extensions: %s", err) + return false + } 
+ if amd.RunImage == nil { + // this shouldn't be reachable + l.logger.Warnf("found no run image in analyzed.toml file, assuming no run image extensions...") + return false + } + return amd.RunImage.Extend +} + +func (l *LifecycleExecution) runImageIdentifierAfterExtensions() string { + if !l.hasExtensions() { + return l.opts.RunImage + } + var amd files.Analyzed + if _, err := toml.DecodeFile(filepath.Join(l.tmpDir, "analyzed.toml"), &amd); err != nil { + l.logger.Warnf("failed to parse analyzed.toml file, assuming run image identifier did not change: %s", err) + return l.opts.RunImage + } + if amd.RunImage == nil || amd.RunImage.Reference == "" { + // this shouldn't be reachable + l.logger.Warnf("found no run image in analyzed.toml file, assuming run image identifier did not change...") + return l.opts.RunImage + } + return amd.RunImage.Reference +} + +func (l *LifecycleExecution) runImageNameAfterExtensions() string { + if !l.hasExtensions() { + return l.opts.RunImage + } + var amd files.Analyzed + if _, err := toml.DecodeFile(filepath.Join(l.tmpDir, "analyzed.toml"), &amd); err != nil { + l.logger.Warnf("failed to parse analyzed.toml file, assuming run image name did not change: %s", err) + return l.opts.RunImage + } + if amd.RunImage == nil || amd.RunImage.Image == "" { + // this shouldn't be reachable + l.logger.Warnf("found no run image in analyzed.toml file, assuming run image name did not change...") + return l.opts.RunImage + } + return amd.RunImage.Image +} + +func (l *LifecycleExecution) runImageChanged() bool { + currentRunImage := l.runImageNameAfterExtensions() + return currentRunImage != "" && currentRunImage != l.opts.RunImage +} + +func (l *LifecycleExecution) appendLayoutOperations(opts []PhaseConfigProviderOperation) ([]PhaseConfigProviderOperation, error) { + opts = append(opts, withLayoutOperation()) + return opts, nil +} + +func withLayoutOperation() PhaseConfigProviderOperation { + layoutDir := filepath.Join(paths.RootDir, "layout-repo") + 
return WithEnv("CNB_USE_LAYOUT=true", "CNB_LAYOUT_DIR="+layoutDir, "CNB_EXPERIMENTAL_MODE=warn") +} + func prependArg(arg string, args []string) []string { return append([]string{arg}, args...) } diff --git a/internal/build/lifecycle_execution_test.go b/internal/build/lifecycle_execution_test.go index d1ad3f8f9a..95ae5931e8 100644 --- a/internal/build/lifecycle_execution_test.go +++ b/internal/build/lifecycle_execution_test.go @@ -5,35 +5,37 @@ import ( "context" "fmt" "io" - "io/ioutil" - "math/rand" "os" "path/filepath" + "strconv" "testing" "time" - "github.com/buildpacks/pack/internal/cache" - - "github.com/google/go-containerregistry/pkg/name" - + "github.com/BurntSushi/toml" "github.com/apex/log" + ifakes "github.com/buildpacks/imgutil/fakes" "github.com/buildpacks/lifecycle/api" - "github.com/docker/docker/api/types/container" - "github.com/docker/docker/client" + "github.com/buildpacks/lifecycle/platform/files" + "github.com/google/go-containerregistry/pkg/authn" + "github.com/google/go-containerregistry/pkg/name" "github.com/heroku/color" + "github.com/moby/moby/api/types/container" + "github.com/moby/moby/api/types/network" + "github.com/moby/moby/client" "github.com/sclevine/spec" "github.com/sclevine/spec/report" "github.com/buildpacks/pack/internal/build" "github.com/buildpacks/pack/internal/build/fakes" + "github.com/buildpacks/pack/internal/paths" + "github.com/buildpacks/pack/pkg/cache" + "github.com/buildpacks/pack/pkg/dist" "github.com/buildpacks/pack/pkg/logging" h "github.com/buildpacks/pack/testhelpers" ) // TestLifecycleExecution are unit tests that test each possible phase to ensure they are executed with the proper parameters func TestLifecycleExecution(t *testing.T) { - rand.Seed(time.Now().UTC().UnixNano()) - color.Disable(true) defer color.Disable(false) @@ -41,26 +43,150 @@ func TestLifecycleExecution(t *testing.T) { } func testLifecycleExecution(t *testing.T, when spec.G, it spec.S) { - // Avoid contaminating tests with existing docker 
configuration. - // GGCR resolves the default keychain by inspecting DOCKER_CONFIG - this is used by the Analyze step - // when constructing the auth config (see `auth.BuildEnvVar` in phases.go). - var dockerConfigDir string + var ( + dockerConfigDir string + tmpDir string + + // lifecycle options + providedClearCache bool + providedPublish bool + providedUseCreator bool + providedLayout bool + providedDockerHost string + providedNetworkMode = "some-network-mode" + providedRunImage = "some-run-image" + providedTargetImage = "some-target-image" + providedAdditionalTags = []string{"some-additional-tag1", "some-additional-tag2"} + providedVolumes = []string{"some-mount-source:/some-mount-target"} + + // builder options + providedBuilderImage = "some-registry.com/some-namespace/some-builder-name" + withOS = "linux" + platformAPI = build.SupportedPlatformAPIVersions[0] // TODO: update the tests to target the latest api by default and make earlier apis special cases + providedUID = 2222 + providedGID = 3333 + providedOrderExt dist.Order + + lifecycle *build.LifecycleExecution + fakeBuildCache = newFakeVolumeCache() + fakeLaunchCache *fakes.FakeCache + fakeKanikoCache *fakes.FakeCache + fakePhase *fakes.FakePhase + fakePhaseFactory *fakes.FakePhaseFactory + fakeFetcher fakeImageFetcher + configProvider *build.PhaseConfigProvider + + extensionsForBuild, extensionsForRun bool + extensionsRunImageName string + extensionsRunImageIdentifier string + useCreatorWithExtensions bool + ) + + var configureDefaultTestLifecycle = func(opts *build.LifecycleOptions) { + opts.AdditionalTags = providedAdditionalTags + opts.BuilderImage = providedBuilderImage + opts.ClearCache = providedClearCache + opts.DockerHost = providedDockerHost + opts.Network = providedNetworkMode + opts.Publish = providedPublish + opts.RunImage = providedRunImage + opts.UseCreator = providedUseCreator + opts.Volumes = providedVolumes + opts.Layout = providedLayout + opts.Keychain = authn.DefaultKeychain + 
opts.UseCreatorWithExtensions = useCreatorWithExtensions + + targetImageRef, err := name.ParseReference(providedTargetImage) + h.AssertNil(t, err) + opts.Image = targetImageRef + } + + var lifecycleOps = []func(*build.LifecycleOptions){configureDefaultTestLifecycle} + it.Before(func() { + // Avoid contaminating tests with existing docker configuration. + // GGCR resolves the default keychain by inspecting DOCKER_CONFIG - this is used by the Analyze step + // when constructing the auth config (see `auth.BuildEnvVar` in phases.go). var err error - dockerConfigDir, err = ioutil.TempDir("", "empty-docker-config-dir") + dockerConfigDir, err = os.MkdirTemp("", "empty-docker-config-dir") h.AssertNil(t, err) - h.AssertNil(t, os.Setenv("DOCKER_CONFIG", dockerConfigDir)) + + image := ifakes.NewImage("some-image", "", nil) + h.AssertNil(t, image.SetOS(withOS)) + + fakeBuilder, err := fakes.NewFakeBuilder( + fakes.WithSupportedPlatformAPIs([]*api.Version{platformAPI}), + fakes.WithUID(providedUID), + fakes.WithGID(providedGID), + fakes.WithOrderExtensions(providedOrderExt), + fakes.WithImage(image), + ) + h.AssertNil(t, err) + fakeFetcher = fakeImageFetcher{ + callCount: 0, + calledWithArgAtCall: make(map[int]string), + } + withFakeFetchRunImageFunc := func(opts *build.LifecycleOptions) { + opts.FetchRunImageWithLifecycleLayer = newFakeFetchRunImageFunc(&fakeFetcher) + } + lifecycleOps = append(lifecycleOps, fakes.WithBuilder(fakeBuilder), withFakeFetchRunImageFunc) + + tmpDir, err = os.MkdirTemp("", "pack.unit") + h.AssertNil(t, err) + lifecycle = newTestLifecycleExec(t, true, tmpDir, lifecycleOps...) 
+ + // construct fixtures for extensions + if extensionsForBuild { + if platformAPI.LessThan("0.13") { + err = os.MkdirAll(filepath.Join(tmpDir, "generated", "build", "some-buildpack-id"), 0755) + h.AssertNil(t, err) + } else { + err = os.MkdirAll(filepath.Join(tmpDir, "generated", "some-buildpack-id"), 0755) + h.AssertNil(t, err) + _, err = os.Create(filepath.Join(tmpDir, "generated", "some-buildpack-id", "build.Dockerfile")) + h.AssertNil(t, err) + } + } + amd := files.Analyzed{RunImage: &files.RunImage{ + Extend: false, + Image: "", + }} + if extensionsForRun { + amd.RunImage.Extend = true + } + if extensionsRunImageName != "" { + amd.RunImage.Image = extensionsRunImageName + } + if extensionsRunImageIdentifier != "" { + amd.RunImage.Reference = extensionsRunImageIdentifier + } + f, err := os.Create(filepath.Join(tmpDir, "analyzed.toml")) + h.AssertNil(t, err) + toml.NewEncoder(f).Encode(amd) + h.AssertNil(t, f.Close()) + + fakeLaunchCache = fakes.NewFakeCache() + fakeLaunchCache.ReturnForType = cache.Volume + fakeLaunchCache.ReturnForName = "some-launch-cache" + + fakeKanikoCache = fakes.NewFakeCache() + fakeKanikoCache.ReturnForType = cache.Volume + fakeKanikoCache.ReturnForName = "some-kaniko-cache" + + fakePhase = &fakes.FakePhase{} + fakePhaseFactory = fakes.NewFakePhaseFactory(fakes.WhichReturnsForNew(fakePhase)) }) it.After(func() { h.AssertNil(t, os.Unsetenv("DOCKER_CONFIG")) h.AssertNil(t, os.RemoveAll(dockerConfigDir)) + _ = os.RemoveAll(tmpDir) }) when("#NewLifecycleExecution", func() { when("lifecycle supports multiple platform APIs", func() { - it("select the latest supported version", func() { + it("selects the latest supported version", func() { fakeBuilder, err := fakes.NewFakeBuilder(fakes.WithSupportedPlatformAPIs([]*api.Version{ api.MustParse("0.2"), api.MustParse("0.3"), @@ -72,20 +198,20 @@ func testLifecycleExecution(t *testing.T, when spec.G, it spec.S) { })) h.AssertNil(t, err) - lifecycleExec := newTestLifecycleExec(t, false, 
fakes.WithBuilder(fakeBuilder)) + lifecycleExec := newTestLifecycleExec(t, false, "some-temp-dir", fakes.WithBuilder(fakeBuilder)) h.AssertEq(t, lifecycleExec.PlatformAPI().String(), "0.8") }) }) when("supported platform API is deprecated", func() { - it("select the deprecated version", func() { + it("selects the deprecated version", func() { fakeBuilder, err := fakes.NewFakeBuilder( fakes.WithDeprecatedPlatformAPIs([]*api.Version{api.MustParse("0.4")}), fakes.WithSupportedPlatformAPIs([]*api.Version{api.MustParse("1.2")}), ) h.AssertNil(t, err) - lifecycleExec := newTestLifecycleExec(t, false, fakes.WithBuilder(fakeBuilder)) + lifecycleExec := newTestLifecycleExec(t, false, "some-temp-dir", fakes.WithBuilder(fakeBuilder)) h.AssertEq(t, lifecycleExec.PlatformAPI().String(), "0.4") }) }) @@ -97,21 +223,61 @@ func testLifecycleExecution(t *testing.T, when spec.G, it spec.S) { ) h.AssertNil(t, err) - _, err = newTestLifecycleExecErr(t, false, fakes.WithBuilder(fakeBuilder)) + _, err = newTestLifecycleExecErr(t, false, "some-temp-dir", fakes.WithBuilder(fakeBuilder)) h.AssertError(t, err, "unable to find a supported Platform API version") }) }) }) + when("FindLatestSupported", func() { + it("chooses a shared version", func() { + version, err := build.FindLatestSupported([]*api.Version{api.MustParse("0.6"), api.MustParse("0.7"), api.MustParse("0.8")}, []string{"0.7"}) + h.AssertNil(t, err) + h.AssertEq(t, version, api.MustParse("0.7")) + }) + + it("chooses a shared version, highest builder supported version", func() { + version, err := build.FindLatestSupported([]*api.Version{api.MustParse("0.4"), api.MustParse("0.5"), api.MustParse("0.7")}, []string{"0.7", "0.8"}) + h.AssertNil(t, err) + h.AssertEq(t, version, api.MustParse("0.7")) + }) + + it("chooses a shared version, lowest builder supported version", func() { + version, err := build.FindLatestSupported([]*api.Version{api.MustParse("0.4"), api.MustParse("0.5"), api.MustParse("0.7")}, []string{"0.1", "0.2", "0.4"}) + 
h.AssertNil(t, err) + h.AssertEq(t, version, api.MustParse("0.4")) + }) + + it("Interprets empty lifecycle versions list as lack of constraints", func() { + version, err := build.FindLatestSupported([]*api.Version{api.MustParse("0.6"), api.MustParse("0.7")}, []string{}) + h.AssertNil(t, err) + h.AssertEq(t, version, api.MustParse("0.7")) + }) + + it("errors with no shared version, builder has no versions supported for some reason", func() { + _, err := build.FindLatestSupported([]*api.Version{}, []string{"0.7"}) + h.AssertNotNil(t, err) + }) + + it("errors with no shared version, builder less than lifecycle", func() { + _, err := build.FindLatestSupported([]*api.Version{api.MustParse("0.4"), api.MustParse("0.5")}, []string{"0.7", "0.8"}) + h.AssertNotNil(t, err) + }) + + it("errors with no shared version, builder greater than lifecycle", func() { + _, err := build.FindLatestSupported([]*api.Version{api.MustParse("0.8"), api.MustParse("0.9")}, []string{"0.6", "0.7"}) + h.AssertNotNil(t, err) + }) + }) + when("Run", func() { var ( - imageName name.Tag - fakeBuilder *fakes.FakeBuilder - outBuf bytes.Buffer - logger *logging.LogWithWriters - docker *client.Client - fakePhaseFactory *fakes.FakePhaseFactory - fakeTermui *fakes.FakeTermui + imageName name.Tag + fakeBuilder *fakes.FakeBuilder + outBuf bytes.Buffer + logger *logging.LogWithWriters + docker *fakeDockerClient + fakeTermui *fakes.FakeTermui ) it.Before(func() { @@ -124,7 +290,7 @@ func testLifecycleExecution(t *testing.T, when spec.G, it spec.S) { fakeBuilder, err = fakes.NewFakeBuilder(fakes.WithSupportedPlatformAPIs([]*api.Version{api.MustParse("0.3")})) h.AssertNil(t, err) logger = logging.NewLogWithWriters(&outBuf, &outBuf) - docker, err = client.NewClientWithOpts(client.FromEnv, client.WithVersion("1.38")) + docker = &fakeDockerClient{} h.AssertNil(t, err) fakePhaseFactory = fakes.NewFakePhaseFactory() }) @@ -142,7 +308,7 @@ func testLifecycleExecution(t *testing.T, when spec.G, it spec.S) { Termui: 
fakeTermui, } - lifecycle, err := build.NewLifecycleExecution(logger, docker, opts) + lifecycle, err := build.NewLifecycleExecution(logger, docker, "some-temp-dir", opts) h.AssertNil(t, err) h.AssertEq(t, filepath.Base(lifecycle.AppDir()), "workspace") @@ -159,6 +325,7 @@ func testLifecycleExecution(t *testing.T, when spec.G, it spec.S) { } } }) + when("Run with workspace dir", func() { it("succeeds", func() { opts := build.LifecycleOptions{ @@ -173,7 +340,7 @@ func testLifecycleExecution(t *testing.T, when spec.G, it spec.S) { Termui: fakeTermui, } - lifecycle, err := build.NewLifecycleExecution(logger, docker, opts) + lifecycle, err := build.NewLifecycleExecution(logger, docker, "some-temp-dir", opts) h.AssertNil(t, err) err = lifecycle.Run(context.Background(), func(execution *build.LifecycleExecution) build.PhaseFactory { @@ -191,7 +358,62 @@ func testLifecycleExecution(t *testing.T, when spec.G, it spec.S) { } }) }) + + when("there are extensions", func() { + providedUseCreator = true + providedOrderExt = dist.Order{dist.OrderEntry{Group: []dist.ModuleRef{ /* don't care */ }}} + + when("platform < 0.10", func() { + platformAPI = api.MustParse("0.9") + + it("succeeds", func() { + err := lifecycle.Run(context.Background(), func(execution *build.LifecycleExecution) build.PhaseFactory { + return fakePhaseFactory + }) + h.AssertNil(t, err) + + h.AssertEq(t, len(fakePhaseFactory.NewCalledWithProvider), 1) + + for _, entry := range fakePhaseFactory.NewCalledWithProvider { + if entry.Name() == "creator" { + h.AssertSliceContains(t, entry.ContainerConfig().Cmd, providedTargetImage) + } + } + }) + }) + + when("platform >= 0.10", func() { + platformAPI = api.MustParse("0.10") + + it("errors", func() { + err := lifecycle.Run(context.Background(), func(execution *build.LifecycleExecution) build.PhaseFactory { + return fakePhaseFactory + }) + h.AssertNotNil(t, err) + }) + + when("use creator with extensions supported by the lifecycle", func() { + useCreatorWithExtensions = 
true + + it("allows the build to proceed (but the creator will error if extensions are detected)", func() { + err := lifecycle.Run(context.Background(), func(execution *build.LifecycleExecution) build.PhaseFactory { + return fakePhaseFactory + }) + h.AssertNil(t, err) + + h.AssertEq(t, len(fakePhaseFactory.NewCalledWithProvider), 1) + + for _, entry := range fakePhaseFactory.NewCalledWithProvider { + if entry.Name() == "creator" { + h.AssertSliceContains(t, entry.ContainerConfig().Cmd, providedTargetImage) + } + } + }) + }) + }) + }) }) + when("Run without using creator", func() { when("platform < 0.7", func() { it("calls the phases with the right order", func() { @@ -206,7 +428,7 @@ func testLifecycleExecution(t *testing.T, when spec.G, it spec.S) { Termui: fakeTermui, } - lifecycle, err := build.NewLifecycleExecution(logger, docker, opts) + lifecycle, err := build.NewLifecycleExecution(logger, docker, "some-temp-dir", opts) h.AssertNil(t, err) err = lifecycle.Run(context.Background(), func(execution *build.LifecycleExecution) build.PhaseFactory { @@ -240,7 +462,7 @@ func testLifecycleExecution(t *testing.T, when spec.G, it spec.S) { Termui: fakeTermui, } - lifecycle, err := build.NewLifecycleExecution(logger, docker, opts) + lifecycle, err := build.NewLifecycleExecution(logger, docker, "some-temp-dir", opts) h.AssertNil(t, err) err = lifecycle.Run(context.Background(), func(execution *build.LifecycleExecution) build.PhaseFactory { @@ -270,7 +492,7 @@ func testLifecycleExecution(t *testing.T, when spec.G, it spec.S) { Termui: fakeTermui, } - lifecycle, err := build.NewLifecycleExecution(logger, docker, opts) + lifecycle, err := build.NewLifecycleExecution(logger, docker, "some-temp-dir", opts) h.AssertNil(t, err) err = lifecycle.Run(context.Background(), func(execution *build.LifecycleExecution) build.PhaseFactory { @@ -289,6 +511,7 @@ func testLifecycleExecution(t *testing.T, when spec.G, it spec.S) { } } }) + when("Run with workspace dir", func() { 
it("succeeds", func() { opts := build.LifecycleOptions{ @@ -303,7 +526,7 @@ func testLifecycleExecution(t *testing.T, when spec.G, it spec.S) { Termui: fakeTermui, } - lifecycle, err := build.NewLifecycleExecution(logger, docker, opts) + lifecycle, err := build.NewLifecycleExecution(logger, docker, "some-temp-dir", opts) h.AssertNil(t, err) h.AssertEq(t, filepath.Base(lifecycle.AppDir()), "app") @@ -325,11 +548,282 @@ func testLifecycleExecution(t *testing.T, when spec.G, it spec.S) { h.AssertEq(t, appCount, 3) }) }) + + when("--clear-cache", func() { + providedUseCreator = false + providedClearCache = true + lifecycleOps = append(lifecycleOps, func(options *build.LifecycleOptions) { // allow buildCache.Clear to succeed without requiring the docker daemon to be running + options.Cache.Build.Format = cache.CacheBind + }) + + when("platform < 0.10", func() { + platformAPI = api.MustParse("0.9") + + it("does not run restore", func() { + err := lifecycle.Run(context.Background(), func(execution *build.LifecycleExecution) build.PhaseFactory { + return fakePhaseFactory + }) + h.AssertNil(t, err) + + h.AssertEq(t, len(fakePhaseFactory.NewCalledWithProvider), 4) + }) + }) + + when("platform >= 0.10", func() { + platformAPI = api.MustParse("0.10") + + it("runs restore", func() { + err := lifecycle.Run(context.Background(), func(execution *build.LifecycleExecution) build.PhaseFactory { + return fakePhaseFactory + }) + h.AssertNil(t, err) + + h.AssertEq(t, len(fakePhaseFactory.NewCalledWithProvider), 5) + }) + }) + }) + + when("extensions", func() { + providedUseCreator = false + providedOrderExt = dist.Order{dist.OrderEntry{Group: []dist.ModuleRef{ /* don't care */ }}} + + when("for build", func() { + when("present in /generated/", func() { + extensionsForBuild = true + + when("platform >= 0.13", func() { + platformAPI = api.MustParse("0.13") + + it("runs the extender (build)", func() { + err := lifecycle.Run(context.Background(), func(execution *build.LifecycleExecution) 
build.PhaseFactory { + return fakePhaseFactory + }) + h.AssertNil(t, err) + + h.AssertEq(t, len(fakePhaseFactory.NewCalledWithProvider), 5) + + var found bool + for _, entry := range fakePhaseFactory.NewCalledWithProvider { + if entry.Name() == "extender" { + found = true + } + } + h.AssertEq(t, found, true) + }) + }) + }) + + when("present in /generated/build", func() { + extensionsForBuild = true + + when("platform < 0.10", func() { + platformAPI = api.MustParse("0.9") + + it("runs the builder", func() { + err := lifecycle.Run(context.Background(), func(execution *build.LifecycleExecution) build.PhaseFactory { + return fakePhaseFactory + }) + h.AssertNil(t, err) + + h.AssertEq(t, len(fakePhaseFactory.NewCalledWithProvider), 5) + + var found bool + for _, entry := range fakePhaseFactory.NewCalledWithProvider { + if entry.Name() == "builder" { + found = true + } + } + h.AssertEq(t, found, true) + }) + }) + + when("platform 0.10 to 0.12", func() { + platformAPI = api.MustParse("0.10") + + it("runs the extender (build)", func() { + err := lifecycle.Run(context.Background(), func(execution *build.LifecycleExecution) build.PhaseFactory { + return fakePhaseFactory + }) + h.AssertNil(t, err) + + h.AssertEq(t, len(fakePhaseFactory.NewCalledWithProvider), 5) + + var found bool + for _, entry := range fakePhaseFactory.NewCalledWithProvider { + if entry.Name() == "extender" { + found = true + } + } + h.AssertEq(t, found, true) + }) + }) + }) + + when("not present in /generated/build", func() { + platformAPI = api.MustParse("0.10") + + it("runs the builder", func() { + err := lifecycle.Run(context.Background(), func(execution *build.LifecycleExecution) build.PhaseFactory { + return fakePhaseFactory + }) + h.AssertNil(t, err) + + h.AssertEq(t, len(fakePhaseFactory.NewCalledWithProvider), 5) + + var found bool + for _, entry := range fakePhaseFactory.NewCalledWithProvider { + if entry.Name() == "builder" { + found = true + } + } + h.AssertEq(t, found, true) + }) + }) + }) + + 
when("for run", func() { + when("analyzed.toml run image", func() { + when("matches provided run image", func() { + it("does nothing", func() { + err := lifecycle.Run(context.Background(), func(execution *build.LifecycleExecution) build.PhaseFactory { + return fakePhaseFactory + }) + h.AssertNil(t, err) + h.AssertEq(t, fakeFetcher.callCount, 0) + }) + }) + + when("does not match provided run image", func() { + extensionsRunImageName = "some-new-run-image" + extensionsRunImageIdentifier = "some-new-run-image-identifier" + + it("pulls the new run image", func() { + err := lifecycle.Run(context.Background(), func(execution *build.LifecycleExecution) build.PhaseFactory { + return fakePhaseFactory + }) + h.AssertNil(t, err) + h.AssertEq(t, fakeFetcher.callCount, 2) + h.AssertEq(t, fakeFetcher.calledWithArgAtCall[0], "some-new-run-image") + h.AssertEq(t, fakeFetcher.calledWithArgAtCall[1], "some-new-run-image-identifier") + }) + }) + }) + + when("analyzed.toml run image extend", func() { + when("true", func() { + extensionsForRun = true + + when("platform >= 0.12", func() { + platformAPI = api.MustParse("0.12") + + it("runs the extender (run)", func() { + err := lifecycle.Run(context.Background(), func(execution *build.LifecycleExecution) build.PhaseFactory { + return fakePhaseFactory + }) + h.AssertNil(t, err) + + h.AssertEq(t, len(fakePhaseFactory.NewCalledWithProvider), 6) + + var found bool + for _, entry := range fakePhaseFactory.NewCalledWithProvider { + if entry.Name() == "extender" { + found = true + } + } + h.AssertEq(t, found, true) + }) + }) + + when("platform < 0.12", func() { + platformAPI = api.MustParse("0.11") + + it("doesn't run the extender", func() { + err := lifecycle.Run(context.Background(), func(execution *build.LifecycleExecution) build.PhaseFactory { + return fakePhaseFactory + }) + h.AssertNil(t, err) + + h.AssertEq(t, len(fakePhaseFactory.NewCalledWithProvider), 5) + + var found bool + for _, entry := range 
fakePhaseFactory.NewCalledWithProvider { + if entry.Name() == "extender" { + found = true + } + } + h.AssertEq(t, found, false) + }) + }) + }) + + when("false", func() { + platformAPI = api.MustParse("0.12") + + it("doesn't run the extender", func() { + err := lifecycle.Run(context.Background(), func(execution *build.LifecycleExecution) build.PhaseFactory { + return fakePhaseFactory + }) + h.AssertNil(t, err) + + h.AssertEq(t, len(fakePhaseFactory.NewCalledWithProvider), 5) + + var found bool + for _, entry := range fakePhaseFactory.NewCalledWithProvider { + if entry.Name() == "extender" { + found = true + } + } + h.AssertEq(t, found, false) + }) + }) + }) + }) + }) + }) + + when("network is not provided", func() { + it("creates an ephemeral bridge network", func() { + beforeNetworks := func() int { + networks, err := docker.NetworkList(context.Background(), client.NetworkListOptions{}) + h.AssertNil(t, err) + return len(networks.Items) + }() + + opts := build.LifecycleOptions{ + Image: imageName, + Builder: fakeBuilder, + Termui: fakeTermui, + } + + lifecycle, err := build.NewLifecycleExecution(logger, docker, "some-temp-dir", opts) + h.AssertNil(t, err) + + err = lifecycle.Run(context.Background(), func(execution *build.LifecycleExecution) build.PhaseFactory { + return fakePhaseFactory + }) + h.AssertNil(t, err) + + for _, entry := range fakePhaseFactory.NewCalledWithProvider { + h.AssertContains(t, string(entry.HostConfig().NetworkMode), "pack.local-network-") + h.AssertEq(t, entry.HostConfig().NetworkMode.IsDefault(), false) + h.AssertEq(t, entry.HostConfig().NetworkMode.IsHost(), false) + h.AssertEq(t, entry.HostConfig().NetworkMode.IsNone(), false) + h.AssertEq(t, entry.HostConfig().NetworkMode.IsPrivate(), true) + h.AssertEq(t, entry.HostConfig().NetworkMode.IsUserDefined(), true) + } + + afterNetworks := func() int { + networks, err := docker.NetworkList(context.Background(), client.NetworkListOptions{}) + h.AssertNil(t, err) + return len(networks.Items) + 
}() + h.AssertEq(t, beforeNetworks, afterNetworks) + }) }) when("Error cases", func() { - when("passed invalid cache-image", func() { - it("fails", func() { + when("passed invalid", func() { + it("fails for cache-image", func() { opts := build.LifecycleOptions{ Publish: false, ClearCache: false, @@ -342,96 +836,81 @@ func testLifecycleExecution(t *testing.T, when spec.G, it spec.S) { Termui: fakeTermui, } - lifecycle, err := build.NewLifecycleExecution(logger, docker, opts) + lifecycle, err := build.NewLifecycleExecution(logger, docker, "some-temp-dir", opts) + h.AssertNil(t, err) + + err = lifecycle.Run(context.Background(), func(execution *build.LifecycleExecution) build.PhaseFactory { + return fakePhaseFactory + }) + + h.AssertError(t, err, fmt.Sprintf("invalid cache image name: %s", "could not parse reference: %%")) + }) + + it("fails for cache flags", func() { + opts := build.LifecycleOptions{ + Publish: false, + ClearCache: false, + RunImage: "test", + Image: imageName, + Builder: fakeBuilder, + TrustBuilder: false, + UseCreator: false, + Cache: cache.CacheOpts{ + Build: cache.CacheInfo{ + Format: cache.CacheImage, + Source: "%%%", + }, + }, + Termui: fakeTermui, + } + + lifecycle, err := build.NewLifecycleExecution(logger, docker, "some-temp-dir", opts) h.AssertNil(t, err) err = lifecycle.Run(context.Background(), func(execution *build.LifecycleExecution) build.PhaseFactory { return fakePhaseFactory }) - h.AssertError(t, err, fmt.Sprintf("invalid cache image name: %s", "could not parse reference: %%!(NOVERB)")) + h.AssertError(t, err, fmt.Sprintf("invalid cache image name: %s", "could not parse reference: %%")) }) }) }) }) when("#Create", func() { - var ( - fakeBuildCache *fakes.FakeCache - fakeLaunchCache *fakes.FakeCache - ) it.Before(func() { - fakeBuildCache = fakes.NewFakeCache() - fakeBuildCache.ReturnForType = cache.Volume - fakeBuildCache.ReturnForName = "some-cache" - - fakeLaunchCache = fakes.NewFakeCache() - fakeLaunchCache.ReturnForType = 
cache.Volume - fakeLaunchCache.ReturnForName = "some-launch-cache" - }) + err := lifecycle.Create(context.Background(), fakeBuildCache, fakeLaunchCache, fakePhaseFactory) + h.AssertNil(t, err) - it("creates a phase and then run it", func() { - lifecycle := newTestLifecycleExec(t, false) - fakePhase := &fakes.FakePhase{} - fakePhaseFactory := fakes.NewFakePhaseFactory(fakes.WhichReturnsForNew(fakePhase)) + lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 + h.AssertNotEq(t, lastCallIndex, -1) - err := lifecycle.Create(context.Background(), false, "", false, "test", "test", "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{}, fakePhaseFactory) - h.AssertNil(t, err) + configProvider = fakePhaseFactory.NewCalledWithProvider[lastCallIndex] + h.AssertEq(t, configProvider.Name(), "creator") + }) + it("creates a phase and then runs it", func() { h.AssertEq(t, fakePhase.CleanupCallCount, 1) h.AssertEq(t, fakePhase.RunCallCount, 1) }) it("configures the phase with the expected arguments", func() { - verboseLifecycle := newTestLifecycleExec(t, true) - fakePhaseFactory := fakes.NewFakePhaseFactory() - expectedRepoName := "some-repo-name" - expectedRunImage := "some-run-image" - - err := verboseLifecycle.Create(context.Background(), false, "", false, expectedRunImage, expectedRepoName, "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{}, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertEq(t, configProvider.Name(), "creator") h.AssertIncludeAllExpectedPatterns(t, configProvider.ContainerConfig().Cmd, []string{"-log-level", "debug"}, - []string{"-run-image", expectedRunImage}, - []string{expectedRepoName}, + []string{"-run-image", providedRunImage}, + []string{providedTargetImage}, ) }) it("configures the phase with the expected network mode", func() { - 
lifecycle := newTestLifecycleExec(t, false) - fakePhaseFactory := fakes.NewFakePhaseFactory() - expectedNetworkMode := "some-network-mode" - - err := lifecycle.Create(context.Background(), false, "", false, "test", "test", expectedNetworkMode, fakeBuildCache, fakeLaunchCache, []string{}, []string{}, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertEq(t, configProvider.HostConfig().NetworkMode, container.NetworkMode(expectedNetworkMode)) + h.AssertEq(t, configProvider.HostConfig().NetworkMode, container.NetworkMode(providedNetworkMode)) }) when("clear cache", func() { - it("configures the phase with the expected arguments", func() { - verboseLifecycle := newTestLifecycleExec(t, true) - fakePhaseFactory := fakes.NewFakePhaseFactory() - - err := verboseLifecycle.Create(context.Background(), false, "", true, "test", "test", "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{}, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) + providedClearCache = true - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertEq(t, configProvider.Name(), "creator") + it("configures the phase with the expected arguments", func() { h.AssertIncludeAllExpectedPatterns(t, configProvider.ContainerConfig().Cmd, []string{"-skip-restore"}, @@ -441,17 +920,6 @@ func testLifecycleExecution(t *testing.T, when spec.G, it spec.S) { when("clear cache is false", func() { it("configures the phase with the expected arguments", func() { - verboseLifecycle := newTestLifecycleExec(t, true) - fakePhaseFactory := fakes.NewFakePhaseFactory() - - err := verboseLifecycle.Create(context.Background(), false, "", false, "test", "test", "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{}, 
fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertEq(t, configProvider.Name(), "creator") h.AssertIncludeAllExpectedPatterns(t, configProvider.ContainerConfig().Cmd, []string{"-cache-dir", "/cache"}, @@ -460,205 +928,79 @@ func testLifecycleExecution(t *testing.T, when spec.G, it spec.S) { }) when("using a cache image", func() { - it.Before(func() { - fakeBuildCache.ReturnForType = cache.Image - fakeBuildCache.ReturnForName = "some-cache-image" - }) - it("configures the phase with the expected arguments", func() { - verboseLifecycle := newTestLifecycleExec(t, true) - fakePhaseFactory := fakes.NewFakePhaseFactory() - - err := verboseLifecycle.Create(context.Background(), false, "", true, "test", "test", "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{}, fakePhaseFactory) - h.AssertNil(t, err) + providedClearCache = true + fakeBuildCache = newFakeImageCache() - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertEq(t, configProvider.Name(), "creator") + it("configures the phase with the expected arguments", func() { h.AssertIncludeAllExpectedPatterns(t, configProvider.ContainerConfig().Cmd, []string{"-skip-restore"}, []string{"-cache-image", "some-cache-image"}, ) - h.AssertSliceNotContains(t, configProvider.HostConfig().Binds, ":/cache") }) }) when("additional tags are specified", func() { it("configures phases with additional tags", func() { - lifecycle := newTestLifecycleExec(t, false) - fakePhaseFactory := fakes.NewFakePhaseFactory() - additionalTags := []string{"additional-tag-1", "additional-tag-2"} - - err := lifecycle.Create(context.Background(), false, "", false, "test", "test", "test", fakes.NewFakeCache(), fakes.NewFakeCache(), 
additionalTags, []string{}, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] h.AssertIncludeAllExpectedPatterns(t, configProvider.ContainerConfig().Cmd, - []string{"-tag", additionalTags[0], "-tag", additionalTags[1]}, + []string{"-tag", providedAdditionalTags[0], "-tag", providedAdditionalTags[1]}, ) }) }) when("publish", func() { - var ( - fakeBuildCache *fakes.FakeCache - fakeLaunchCache *fakes.FakeCache - ) - it.Before(func() { - fakeBuildCache = fakes.NewFakeCache() - fakeBuildCache.ReturnForName = "some-cache" - fakeBuildCache.ReturnForType = cache.Volume - - fakeLaunchCache = fakes.NewFakeCache() - fakeLaunchCache.ReturnForType = cache.Volume - fakeLaunchCache.ReturnForName = "some-launch-cache" - }) + providedPublish = true it("configures the phase with binds", func() { - lifecycle := newTestLifecycleExec(t, false) - fakePhaseFactory := fakes.NewFakePhaseFactory() - volumeMount := "custom-mount-source:/custom-mount-target" - expectedBinds := []string{volumeMount, "some-cache:/cache"} - - err := lifecycle.Create(context.Background(), true, "", false, "test", "test", "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{volumeMount}, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) + expectedBinds := providedVolumes + expectedBinds = append(expectedBinds, "some-cache:/cache") - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] h.AssertSliceContains(t, configProvider.HostConfig().Binds, expectedBinds...) 
}) it("configures the phase with root", func() { - lifecycle := newTestLifecycleExec(t, false) - fakePhaseFactory := fakes.NewFakePhaseFactory() - - err := lifecycle.Create(context.Background(), true, "", false, "test", "test", "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{}, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] h.AssertEq(t, configProvider.ContainerConfig().User, "root") }) it("configures the phase with registry access", func() { - lifecycle := newTestLifecycleExec(t, false) - fakePhaseFactory := fakes.NewFakePhaseFactory() - expectedRepos := "some-repo-name" - - err := lifecycle.Create(context.Background(), true, "", false, "test", expectedRepos, "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{}, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] h.AssertSliceContains(t, configProvider.ContainerConfig().Env, "CNB_REGISTRY_AUTH={}") }) when("using a cache image", func() { - it.Before(func() { - fakeBuildCache.ReturnForType = cache.Image - fakeBuildCache.ReturnForName = "some-cache-image" - }) + fakeBuildCache = newFakeImageCache() it("configures the phase with the expected arguments", func() { - verboseLifecycle := newTestLifecycleExec(t, true) - fakePhaseFactory := fakes.NewFakePhaseFactory() - - err := verboseLifecycle.Create(context.Background(), true, "", true, "test", "test", "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{}, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertEq(t, 
configProvider.Name(), "creator") h.AssertIncludeAllExpectedPatterns(t, configProvider.ContainerConfig().Cmd, - []string{"-skip-restore"}, []string{"-cache-image", "some-cache-image"}, ) - h.AssertSliceNotContains(t, configProvider.HostConfig().Binds, ":/cache") }) }) when("platform 0.3", func() { - var ( - fakeBuildCache *fakes.FakeCache - fakeLaunchCache *fakes.FakeCache - ) - it.Before(func() { - fakeBuildCache = fakes.NewFakeCache() - fakeBuildCache.ReturnForName = "some-cache" - fakeBuildCache.ReturnForType = cache.Volume - - fakeLaunchCache = fakes.NewFakeCache() - fakeLaunchCache.ReturnForType = cache.Volume - fakeLaunchCache.ReturnForName = "some-launch-cache" - }) + platformAPI = api.MustParse("0.3") it("doesn't hint at default process type", func() { - fakeBuilder, err := fakes.NewFakeBuilder(fakes.WithSupportedPlatformAPIs([]*api.Version{api.MustParse("0.3")})) - h.AssertNil(t, err) - lifecycle := newTestLifecycleExec(t, true, fakes.WithBuilder(fakeBuilder)) - fakePhaseFactory := fakes.NewFakePhaseFactory() - - err = lifecycle.Export(context.Background(), "test", "test", true, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] h.AssertSliceNotContains(t, configProvider.ContainerConfig().Cmd, "-process-type") }) }) when("platform 0.4", func() { + platformAPI = api.MustParse("0.4") + it("hints at default process type", func() { - fakeBuilder, err := fakes.NewFakeBuilder(fakes.WithSupportedPlatformAPIs([]*api.Version{api.MustParse("0.4")})) - h.AssertNil(t, err) - lifecycle := newTestLifecycleExec(t, true, fakes.WithBuilder(fakeBuilder)) - fakePhaseFactory := fakes.NewFakePhaseFactory() - - err = lifecycle.Export(context.Background(), "test", "test", true, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory) - 
h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertIncludeAllExpectedPatterns(t, configProvider.ContainerConfig().Cmd, []string{"-process-type", "web"}) - }) - }) + h.AssertIncludeAllExpectedPatterns(t, configProvider.ContainerConfig().Cmd, []string{"-process-type", "web"}) + }) + }) when("platform >= 0.6", func() { + platformAPI = api.MustParse("0.6") + when("no user provided process type is present", func() { it("doesn't provide 'web' as default process type", func() { - fakeBuilder, err := fakes.NewFakeBuilder(fakes.WithSupportedPlatformAPIs([]*api.Version{api.MustParse("0.6")})) - h.AssertNil(t, err) - lifecycle := newTestLifecycleExec(t, true, fakes.WithBuilder(fakeBuilder)) - fakePhaseFactory := fakes.NewFakePhaseFactory() - - err = lifecycle.Export(context.Background(), "test", "test", true, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] h.AssertSliceNotContains(t, configProvider.ContainerConfig().Cmd, "-process-type") }) }) @@ -666,31 +1008,7 @@ func testLifecycleExecution(t *testing.T, when spec.G, it spec.S) { }) when("publish is false", func() { - var ( - fakeBuildCache *fakes.FakeCache - fakeLaunchCache *fakes.FakeCache - ) - it.Before(func() { - fakeBuildCache = fakes.NewFakeCache() - fakeBuildCache.ReturnForName = "some-cache" - fakeBuildCache.ReturnForType = cache.Volume - - fakeLaunchCache = fakes.NewFakeCache() - fakeLaunchCache.ReturnForType = cache.Volume - fakeLaunchCache.ReturnForName = "some-launch-cache" - }) it("configures the phase with the expected arguments", func() { - verboseLifecycle := newTestLifecycleExec(t, true) - fakePhaseFactory := 
fakes.NewFakePhaseFactory() - - err := verboseLifecycle.Create(context.Background(), false, "", false, "test", "test", "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{}, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertEq(t, configProvider.Name(), "creator") h.AssertIncludeAllExpectedPatterns(t, configProvider.ContainerConfig().Cmd, []string{"-daemon"}, @@ -698,74 +1016,52 @@ func testLifecycleExecution(t *testing.T, when spec.G, it spec.S) { ) }) - it("configures the phase with daemon access", func() { - lifecycle := newTestLifecycleExec(t, false) - fakePhaseFactory := fakes.NewFakePhaseFactory() - - err := lifecycle.Create(context.Background(), false, "", false, "test", "test", "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{}, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertEq(t, configProvider.ContainerConfig().User, "root") - h.AssertSliceContains(t, configProvider.HostConfig().Binds, "/var/run/docker.sock:/var/run/docker.sock") + when("no docker-host", func() { + it("configures the phase with daemon access", func() { + h.AssertEq(t, configProvider.ContainerConfig().User, "root") + h.AssertSliceContains(t, configProvider.HostConfig().Binds, "/var/run/docker.sock:/var/run/docker.sock") + }) }) - it("configures the phase with daemon access with tcp docker-host", func() { - lifecycle := newTestLifecycleExec(t, false) - fakePhaseFactory := fakes.NewFakePhaseFactory() - - err := lifecycle.Create(context.Background(), false, "tcp://localhost:1234", false, "test", "test", "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{}, fakePhaseFactory) - h.AssertNil(t, err) - 
- lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) + when("tcp docker-host", func() { + providedDockerHost = `tcp://localhost:1234` - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertSliceNotContains(t, configProvider.HostConfig().Binds, "/var/run/docker.sock:/var/run/docker.sock") - h.AssertSliceContains(t, configProvider.ContainerConfig().Env, "DOCKER_HOST=tcp://localhost:1234") + it("configures the phase with daemon access with tcp docker-host", func() { + h.AssertSliceNotContains(t, configProvider.HostConfig().Binds, "/var/run/docker.sock:/var/run/docker.sock") + h.AssertSliceContains(t, configProvider.ContainerConfig().Env, "DOCKER_HOST=tcp://localhost:1234") + }) }) - it("configures the phase with daemon access with alternative unix socket docker-host", func() { - lifecycle := newTestLifecycleExec(t, false) - fakePhaseFactory := fakes.NewFakePhaseFactory() - - err := lifecycle.Create(context.Background(), false, "unix:///home/user/docker.sock", false, "test", "test", "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{}, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) + when("alternative unix socket docker-host", func() { + providedDockerHost = `unix:///home/user/docker.sock` - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertSliceContains(t, configProvider.HostConfig().Binds, "/home/user/docker.sock:/var/run/docker.sock") + it("configures the phase with daemon access", func() { + h.AssertSliceContains(t, configProvider.HostConfig().Binds, "/home/user/docker.sock:/var/run/docker.sock") + }) }) - it("configures the phase with daemon access with alternative windows pipe docker-host", func() { - lifecycle := newTestLifecycleExec(t, false) - fakePhaseFactory := fakes.NewFakePhaseFactory() + when("alternative windows pipe docker-host", 
func() { + providedDockerHost = `npipe:\\\\.\pipe\docker_engine_alt` - err := lifecycle.Create(context.Background(), false, `npipe:\\\\.\pipe\docker_engine_alt`, false, "test", "test", "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{}, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertSliceNotContains(t, configProvider.HostConfig().Binds, "/home/user/docker.sock:/var/run/docker.sock") - h.AssertSliceContains(t, configProvider.HostConfig().Binds, `\\.\pipe\docker_engine_alt:\\.\pipe\docker_engine`) + it("configures the phase with daemon access", func() { + h.AssertSliceNotContains(t, configProvider.HostConfig().Binds, "/home/user/docker.sock:/var/run/docker.sock") + h.AssertSliceContains(t, configProvider.HostConfig().Binds, `\\.\pipe\docker_engine_alt:\\.\pipe\docker_engine`) + }) }) when("environment variable DOCKER_HOST is set", func() { + providedDockerHost = `inherit` + var ( oldDH string oldDHExists bool ) + it.Before(func() { oldDH, oldDHExists = os.LookupEnv("DOCKER_HOST") os.Setenv("DOCKER_HOST", "tcp://example.com:1234") }) + it.After(func() { if oldDHExists { os.Setenv("DOCKER_HOST", oldDH) @@ -773,11 +1069,12 @@ func testLifecycleExecution(t *testing.T, when spec.G, it spec.S) { os.Unsetenv("DOCKER_HOST") } }) - it("configures the phase with daemon access with inherited docker-host", func() { - lifecycle := newTestLifecycleExec(t, false) - fakePhaseFactory := fakes.NewFakePhaseFactory() - err := lifecycle.Create(context.Background(), false, `inherit`, false, "test", "test", "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{}, fakePhaseFactory) + it("configures the phase with daemon access with inherited docker-host", func() { + lifecycle := newTestLifecycleExec(t, true, "some-temp-dir", lifecycleOps...) 
+ fakePhase := &fakes.FakePhase{} + fakePhaseFactory := fakes.NewFakePhaseFactory(fakes.WhichReturnsForNew(fakePhase)) + err := lifecycle.Create(context.Background(), fakeBuildCache, fakeLaunchCache, fakePhaseFactory) h.AssertNil(t, err) lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 @@ -788,86 +1085,42 @@ func testLifecycleExecution(t *testing.T, when spec.G, it spec.S) { }) }) - it("configures the phase with daemon access with docker-host with unknown protocol", func() { - lifecycle := newTestLifecycleExec(t, false) - fakePhaseFactory := fakes.NewFakePhaseFactory() - err := lifecycle.Create(context.Background(), false, `withoutprotocol`, false, "test", "test", "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{}, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) + when("docker-host with unknown protocol", func() { + providedDockerHost = `withoutprotocol` - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertSliceContains(t, configProvider.ContainerConfig().Env, "DOCKER_HOST=withoutprotocol") + it("configures the phase with daemon access", func() { + h.AssertSliceContains(t, configProvider.ContainerConfig().Env, "DOCKER_HOST=withoutprotocol") + }) }) it("configures the phase with binds", func() { - lifecycle := newTestLifecycleExec(t, false) - fakePhaseFactory := fakes.NewFakePhaseFactory() - volumeMount := "custom-mount-source:/custom-mount-target" - expectedBinds := []string{volumeMount, "some-cache:/cache", "some-launch-cache:/launch-cache"} + expectedBinds := providedVolumes + expectedBinds = append(expectedBinds, "some-cache:/cache", "some-launch-cache:/launch-cache") - err := lifecycle.Create(context.Background(), false, "", false, "test", "test", "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{volumeMount}, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := 
len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] h.AssertSliceContains(t, configProvider.HostConfig().Binds, expectedBinds...) }) when("platform 0.3", func() { - it("doesn't hint at default process type", func() { - fakeBuilder, err := fakes.NewFakeBuilder(fakes.WithSupportedPlatformAPIs([]*api.Version{api.MustParse("0.3")})) - h.AssertNil(t, err) - lifecycle := newTestLifecycleExec(t, true, fakes.WithBuilder(fakeBuilder)) - fakePhaseFactory := fakes.NewFakePhaseFactory() - - err = lifecycle.Export(context.Background(), "test", "test", false, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) + platformAPI = api.MustParse("0.3") - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] + it("doesn't hint at default process type", func() { h.AssertSliceNotContains(t, configProvider.ContainerConfig().Cmd, "-process-type") }) }) when("platform 0.4", func() { - it("hints at default process type", func() { - fakeBuilder, err := fakes.NewFakeBuilder(fakes.WithSupportedPlatformAPIs([]*api.Version{api.MustParse("0.4")})) - h.AssertNil(t, err) - lifecycle := newTestLifecycleExec(t, true, fakes.WithBuilder(fakeBuilder)) - fakePhaseFactory := fakes.NewFakePhaseFactory() + platformAPI = api.MustParse("0.4") - err = lifecycle.Export(context.Background(), "test", "test", false, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] + it("hints at default process type", func() { h.AssertIncludeAllExpectedPatterns(t, configProvider.ContainerConfig().Cmd, []string{"-process-type", 
"web"}) }) }) when("platform >= 0.6", func() { + platformAPI = api.MustParse("0.6") + when("no user provided process type is present", func() { it("doesn't provide 'web' as default process type", func() { - fakeBuilder, err := fakes.NewFakeBuilder(fakes.WithSupportedPlatformAPIs([]*api.Version{api.MustParse("0.6")})) - h.AssertNil(t, err) - lifecycle := newTestLifecycleExec(t, true, fakes.WithBuilder(fakeBuilder)) - fakePhaseFactory := fakes.NewFakePhaseFactory() - - err = lifecycle.Export(context.Background(), "test", "test", false, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] h.AssertSliceNotContains(t, configProvider.ContainerConfig().Cmd, "-process-type") }) }) @@ -876,42 +1129,50 @@ func testLifecycleExecution(t *testing.T, when spec.G, it spec.S) { when("override GID", func() { when("override GID is provided", func() { - it("configures the phase with the expected arguments", func() { - verboseLifecycle := newTestLifecycleExec(t, true, func(options *build.LifecycleOptions) { - options.GID = 2 - }) - fakePhaseFactory := fakes.NewFakePhaseFactory() - - err := verboseLifecycle.Create(context.Background(), false, "", true, "test", "test", "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{}, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) + lifecycleOps = append(lifecycleOps, func(options *build.LifecycleOptions) { + options.GID = 2 + }) - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertEq(t, configProvider.Name(), "creator") + it("configures the phase with the expected arguments", func() { h.AssertIncludeAllExpectedPatterns(t, configProvider.ContainerConfig().Cmd, []string{"-gid", "2"}, ) }) 
}) + when("override GID is not provided", func() { + lifecycleOps = append(lifecycleOps, func(options *build.LifecycleOptions) { + options.GID = -1 + }) + it("gid is not added to the expected arguments", func() { - verboseLifecycle := newTestLifecycleExec(t, true, func(options *build.LifecycleOptions) { - options.GID = -1 - }) - fakePhaseFactory := fakes.NewFakePhaseFactory() + h.AssertSliceNotContains(t, configProvider.ContainerConfig().Cmd, "-gid") + }) + }) + }) - err := verboseLifecycle.Create(context.Background(), false, "", true, "test", "test", "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{}, fakePhaseFactory) - h.AssertNil(t, err) + when("override UID", func() { + when("override UID is provided", func() { + lifecycleOps = append(lifecycleOps, func(options *build.LifecycleOptions) { + options.UID = 1001 + }) - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) + it("configures the phase with the expected arguments", func() { + h.AssertIncludeAllExpectedPatterns(t, + configProvider.ContainerConfig().Cmd, + []string{"-uid", "1001"}, + ) + }) + }) - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertEq(t, configProvider.Name(), "creator") - h.AssertSliceNotContains(t, configProvider.ContainerConfig().Cmd, "-gid") + when("override UID is not provided", func() { + lifecycleOps = append(lifecycleOps, func(options *build.LifecycleOptions) { + options.UID = -1 + }) + + it("uid is not added to the expected arguments", func() { + h.AssertSliceNotContains(t, configProvider.ContainerConfig().Cmd, "-uid") }) }) }) @@ -919,90 +1180,77 @@ func testLifecycleExecution(t *testing.T, when spec.G, it spec.S) { when("-previous-image is used and builder is trusted", func() { when("image is invalid", func() { it("errors", func() { - var imageName name.Tag imageName, err := name.NewTag("/x/y/?!z", name.WeakValidation) h.AssertError(t, err, "repository can only contain the characters 
`abcdefghijklmnopqrstuvwxyz0123456789_-./`") - lifecycle := newTestLifecycleExec(t, true, func(options *build.LifecycleOptions) { + + lifecycleOps := append(lifecycleOps, func(options *build.LifecycleOptions) { options.Image = imageName - options.PreviousImage = "previous-image" + options.PreviousImage = "some-previous-image" }) - fakePhaseFactory := fakes.NewFakePhaseFactory() - err = lifecycle.Create(context.Background(), false, "", false, "test", "test", "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{}, fakePhaseFactory) + lifecycle := newTestLifecycleExec(t, true, "some-temp-dir", lifecycleOps...) + + err = lifecycle.Create(context.Background(), fakeBuildCache, fakeLaunchCache, fakePhaseFactory) h.AssertError(t, err, "invalid image name") }) }) when("previous-image is invalid", func() { it("errors", func() { - var imageName name.Tag imageName, err := name.NewTag("/some/image", name.WeakValidation) h.AssertNil(t, err) - lifecycle := newTestLifecycleExec(t, true, func(options *build.LifecycleOptions) { + + lifecycleOps := append(lifecycleOps, func(options *build.LifecycleOptions) { options.PreviousImage = "%%%" options.Image = imageName }) - fakePhaseFactory := fakes.NewFakePhaseFactory() - err = lifecycle.Create(context.Background(), false, "", false, "test", "test", "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{}, fakePhaseFactory) + lifecycle := newTestLifecycleExec(t, true, "some-temp-dir", lifecycleOps...) 
+ + err = lifecycle.Create(context.Background(), fakeBuildCache, fakeLaunchCache, fakePhaseFactory) h.AssertError(t, err, "invalid previous image name") }) }) when("--publish is false", func() { - it("successfully passes previous-image to creator", func() { - var imageName name.Tag - imageName, err := name.NewTag("/some/image", name.WeakValidation) - h.AssertNil(t, err) - lifecycle := newTestLifecycleExec(t, true, func(options *build.LifecycleOptions) { - options.PreviousImage = "previous-image" - options.Image = imageName - }) - fakePhaseFactory := fakes.NewFakePhaseFactory() - err = lifecycle.Create(context.Background(), false, "", false, "test", "test", "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{}, fakePhaseFactory) - h.AssertNil(t, err) + imageName, _ := name.NewTag("/some/image", name.WeakValidation) - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) + lifecycleOps = append(lifecycleOps, func(options *build.LifecycleOptions) { + options.PreviousImage = "some-previous-image" + options.Image = imageName + }) - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertEq(t, configProvider.Name(), "creator") - h.AssertIncludeAllExpectedPatterns(t, configProvider.ContainerConfig().Cmd, []string{"-previous-image", "previous-image"}) + it("successfully passes previous-image to creator", func() { + h.AssertIncludeAllExpectedPatterns(t, configProvider.ContainerConfig().Cmd, []string{"-previous-image", "some-previous-image"}) }) }) when("--publish is true", func() { + providedPublish = true + when("previous-image and image are in the same registry", func() { - it("successfully passes previous-image to creator", func() { - var imageName name.Tag - imageName, err := name.NewTag("/some/image", name.WeakValidation) - h.AssertNil(t, err) - lifecycle := newTestLifecycleExec(t, true, func(options *build.LifecycleOptions) { - options.PreviousImage = 
"index.docker.io/some/previous:latest" - options.Image = imageName - }) - fakePhaseFactory := fakes.NewFakePhaseFactory() - err = lifecycle.Create(context.Background(), true, "", false, "test", "test", "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{}, fakePhaseFactory) - h.AssertNil(t, err) + imageName, _ := name.NewTag("/some/image", name.WeakValidation) - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) + lifecycleOps = append(lifecycleOps, func(options *build.LifecycleOptions) { + options.PreviousImage = "index.docker.io/some/previous:latest" + options.Image = imageName + }) - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertEq(t, configProvider.Name(), "creator") + it("successfully passes previous-image to creator", func() { h.AssertIncludeAllExpectedPatterns(t, configProvider.ContainerConfig().Cmd, []string{"-previous-image", "index.docker.io/some/previous:latest"}) }) }) when("previous-image and image are not in the same registry", func() { it("errors", func() { - var imageName name.Tag imageName, err := name.NewTag("/some/image", name.WeakValidation) h.AssertNil(t, err) - lifecycle := newTestLifecycleExec(t, true, func(options *build.LifecycleOptions) { + + lifecycleOps := append(lifecycleOps, func(options *build.LifecycleOptions) { options.PreviousImage = "example.io/some/previous:latest" options.Image = imageName }) - fakePhaseFactory := fakes.NewFakePhaseFactory() - err = lifecycle.Create(context.Background(), true, "", false, "test", "test", "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{}, fakePhaseFactory) + lifecycle := newTestLifecycleExec(t, true, "some-temp-dir", lifecycleOps...) 
+ + err = lifecycle.Create(context.Background(), fakeBuildCache, fakeLaunchCache, fakePhaseFactory) h.AssertError(t, err, fmt.Sprintf("%s", err)) }) }) @@ -1010,76 +1258,128 @@ func testLifecycleExecution(t *testing.T, when spec.G, it spec.S) { }) when("interactive mode", func() { - it("provides the termui readLayersFunc as a post container operation", func() { - lifecycle := newTestLifecycleExec(t, false, func(opts *build.LifecycleOptions) { - opts.Interactive = true - opts.Termui = &fakes.FakeTermui{ReadLayersFunc: func(_ io.ReadCloser) { - // no-op - }} - }) - fakePhase := &fakes.FakePhase{} - fakePhaseFactory := fakes.NewFakePhaseFactory(fakes.WhichReturnsForNew(fakePhase)) - - err := lifecycle.Create(context.Background(), false, "", false, "test", "test", "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{}, fakePhaseFactory) - h.AssertNil(t, err) + lifecycleOps = append(lifecycleOps, func(opts *build.LifecycleOptions) { + opts.Interactive = true + opts.Termui = &fakes.FakeTermui{ReadLayersFunc: func(_ io.ReadCloser) { + // no-op + }} + }) + it("provides the termui readLayersFunc as a post container operation", func() { h.AssertEq(t, fakePhase.CleanupCallCount, 1) h.AssertEq(t, fakePhase.RunCallCount, 1) - provider := fakePhaseFactory.NewCalledWithProvider[0] - h.AssertEq(t, len(provider.PostContainerRunOps()), 2) - h.AssertFunctionName(t, provider.PostContainerRunOps()[0], "EnsureVolumeAccess") - h.AssertFunctionName(t, provider.PostContainerRunOps()[1], "CopyOut") + h.AssertEq(t, len(configProvider.PostContainerRunOps()), 2) + h.AssertFunctionName(t, configProvider.PostContainerRunOps()[0], "EnsureVolumeAccess") + h.AssertFunctionName(t, configProvider.PostContainerRunOps()[1], "CopyOut") }) }) when("sbom destination directory is provided", func() { + lifecycleOps = append(lifecycleOps, func(opts *build.LifecycleOptions) { + opts.SBOMDestinationDir = "some-destination-dir" + }) + it("provides copy-sbom-func as a post container operation", func() { 
- lifecycle := newTestLifecycleExec(t, false, func(opts *build.LifecycleOptions) { - opts.SBOMDestinationDir = "some-destination-dir" - }) - fakePhase := &fakes.FakePhase{} - fakePhaseFactory := fakes.NewFakePhaseFactory(fakes.WhichReturnsForNew(fakePhase)) + h.AssertEq(t, fakePhase.CleanupCallCount, 1) + h.AssertEq(t, fakePhase.RunCallCount, 1) - err := lifecycle.Create(context.Background(), false, "", false, "test", "test", "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{}, fakePhaseFactory) - h.AssertNil(t, err) + h.AssertEq(t, len(configProvider.PostContainerRunOps()), 2) + h.AssertFunctionName(t, configProvider.PostContainerRunOps()[0], "EnsureVolumeAccess") + h.AssertFunctionName(t, configProvider.PostContainerRunOps()[1], "CopyOut") + }) + }) + + when("report destination directory is provided", func() { + lifecycleOps = append(lifecycleOps, func(opts *build.LifecycleOptions) { + opts.ReportDestinationDir = "a-destination-dir" + }) + it("provides copy-sbom-func as a post container operation", func() { h.AssertEq(t, fakePhase.CleanupCallCount, 1) h.AssertEq(t, fakePhase.RunCallCount, 1) - provider := fakePhaseFactory.NewCalledWithProvider[0] - h.AssertEq(t, len(provider.PostContainerRunOps()), 2) - h.AssertFunctionName(t, provider.PostContainerRunOps()[0], "EnsureVolumeAccess") - h.AssertFunctionName(t, provider.PostContainerRunOps()[1], "CopyOut") + h.AssertEq(t, len(configProvider.PostContainerRunOps()), 2) + h.AssertFunctionName(t, configProvider.PostContainerRunOps()[0], "EnsureVolumeAccess") + h.AssertFunctionName(t, configProvider.PostContainerRunOps()[1], "CopyOut") }) }) - }) - when("#Detect", func() { - it("creates a phase and then runs it", func() { - lifecycle := newTestLifecycleExec(t, false) - fakePhase := &fakes.FakePhase{} - fakePhaseFactory := fakes.NewFakePhaseFactory(fakes.WhichReturnsForNew(fakePhase)) + when("--creation-time", func() { + when("platform < 0.9", func() { + platformAPI = api.MustParse("0.8") - err := 
lifecycle.Detect(context.Background(), "test", []string{}, fakePhaseFactory) - h.AssertNil(t, err) + intTime, _ := strconv.ParseInt("1234567890", 10, 64) + providedTime := time.Unix(intTime, 0).UTC() - h.AssertEq(t, fakePhase.CleanupCallCount, 1) - h.AssertEq(t, fakePhase.RunCallCount, 1) + lifecycleOps = append(lifecycleOps, func(baseOpts *build.LifecycleOptions) { + baseOpts.CreationTime = &providedTime + }) + + it("is ignored", func() { + h.AssertSliceNotContains(t, configProvider.ContainerConfig().Env, "SOURCE_DATE_EPOCH=1234567890") + }) + }) + + when("platform >= 0.9", func() { + platformAPI = api.MustParse("0.9") + + when("provided", func() { + intTime, _ := strconv.ParseInt("1234567890", 10, 64) + providedTime := time.Unix(intTime, 0).UTC() + + lifecycleOps = append(lifecycleOps, func(baseOpts *build.LifecycleOptions) { + baseOpts.CreationTime = &providedTime + }) + + it("configures the phase with env SOURCE_DATE_EPOCH", func() { + h.AssertSliceContains(t, configProvider.ContainerConfig().Env, "SOURCE_DATE_EPOCH=1234567890") + }) + }) + + when("not provided", func() { + lifecycleOps = append(lifecycleOps, func(baseOpts *build.LifecycleOptions) { + baseOpts.CreationTime = nil + }) + + it("does not panic", func() { + // no-op + }) + }) + }) }) - it("configures the phase with the expected arguments", func() { - verboseLifecycle := newTestLifecycleExec(t, true) - fakePhaseFactory := fakes.NewFakePhaseFactory() + when("layout", func() { + providedLayout = true + layoutRepo := filepath.Join(paths.RootDir, "layout-repo") + platformAPI = api.MustParse("0.12") + + it("configures the phase with oci layout environment variables", func() { + h.AssertSliceContains(t, configProvider.ContainerConfig().Env, "CNB_USE_LAYOUT=true") + h.AssertSliceContains(t, configProvider.ContainerConfig().Env, fmt.Sprintf("CNB_LAYOUT_DIR=%s", layoutRepo)) + h.AssertSliceContains(t, configProvider.ContainerConfig().Env, "CNB_EXPERIMENTAL_MODE=warn") + }) + }) + }) - err := 
verboseLifecycle.Detect(context.Background(), "test", []string{"test"}, fakePhaseFactory) + when("#Detect", func() { + it.Before(func() { + err := lifecycle.Detect(context.Background(), fakePhaseFactory) h.AssertNil(t, err) lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 h.AssertNotEq(t, lastCallIndex, -1) - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] + configProvider = fakePhaseFactory.NewCalledWithProvider[lastCallIndex] h.AssertEq(t, configProvider.Name(), "detector") + }) + + it("creates a phase and then runs it", func() { + h.AssertEq(t, fakePhase.CleanupCallCount, 1) + h.AssertEq(t, fakePhase.RunCallCount, 1) + }) + + it("configures the phase with the expected arguments", func() { h.AssertIncludeAllExpectedPatterns(t, configProvider.ContainerConfig().Cmd, []string{"-log-level", "debug"}, @@ -1087,90 +1387,63 @@ func testLifecycleExecution(t *testing.T, when spec.G, it spec.S) { }) it("configures the phase with the expected network mode", func() { - lifecycle := newTestLifecycleExec(t, false) - fakePhaseFactory := fakes.NewFakePhaseFactory() - expectedNetworkMode := "some-network-mode" - - err := lifecycle.Detect(context.Background(), expectedNetworkMode, []string{}, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertEq(t, configProvider.HostConfig().NetworkMode, container.NetworkMode(expectedNetworkMode)) + h.AssertEq(t, configProvider.HostConfig().NetworkMode, container.NetworkMode(providedNetworkMode)) }) it("configures the phase to copy app dir", func() { - lifecycle := newTestLifecycleExec(t, false) - fakePhaseFactory := fakes.NewFakePhaseFactory() - expectedBind := "some-mount-source:/some-mount-target" + h.AssertSliceContains(t, configProvider.HostConfig().Binds, providedVolumes...) 
+ h.AssertEq(t, len(configProvider.ContainerOps()), 2) + h.AssertFunctionName(t, configProvider.ContainerOps()[0], "EnsureVolumeAccess") + h.AssertFunctionName(t, configProvider.ContainerOps()[1], "CopyDir") + }) - err := lifecycle.Detect(context.Background(), "test", []string{expectedBind}, fakePhaseFactory) - h.AssertNil(t, err) + when("extensions", func() { + platformAPI = api.MustParse("0.10") - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) + when("present in the order", func() { + providedOrderExt = dist.Order{dist.OrderEntry{Group: []dist.ModuleRef{ /* don't care */ }}} - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertSliceContains(t, configProvider.HostConfig().Binds, expectedBind) + it("sets CNB_EXPERIMENTAL_MODE=warn in the environment", func() { + h.AssertSliceContains(t, configProvider.ContainerConfig().Env, "CNB_EXPERIMENTAL_MODE=warn") + }) + }) - h.AssertEq(t, len(configProvider.ContainerOps()), 2) - h.AssertFunctionName(t, configProvider.ContainerOps()[0], "EnsureVolumeAccess") - h.AssertFunctionName(t, configProvider.ContainerOps()[1], "CopyDir") + when("not present in the order", func() { + it("sets CNB_EXPERIMENTAL_MODE=warn in the environment", func() { + h.AssertSliceNotContains(t, configProvider.ContainerConfig().Env, "CNB_EXPERIMENTAL_MODE=warn") + }) + }) }) }) when("#Analyze", func() { - var fakeCache *fakes.FakeCache it.Before(func() { - fakeCache = fakes.NewFakeCache() - fakeCache.ReturnForType = cache.Volume - }) - it("creates a phase and then runs it", func() { - lifecycle := newTestLifecycleExec(t, false) - fakePhase := &fakes.FakePhase{} - fakePhaseFactory := fakes.NewFakePhaseFactory(fakes.WhichReturnsForNew(fakePhase)) - - err := lifecycle.Analyze(context.Background(), "test", "test", false, "", false, "test", []string{}, fakeCache, fakePhaseFactory) + err := lifecycle.Analyze(context.Background(), fakeBuildCache, fakeLaunchCache, 
fakePhaseFactory) h.AssertNil(t, err) + lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 + h.AssertNotEq(t, lastCallIndex, -1) + + configProvider = fakePhaseFactory.NewCalledWithProvider[lastCallIndex] + h.AssertEq(t, configProvider.Name(), "analyzer") + }) + + it("creates a phase and then runs it", func() { h.AssertEq(t, fakePhase.CleanupCallCount, 1) h.AssertEq(t, fakePhase.RunCallCount, 1) }) + when("platform < 0.7", func() { when("clear cache", func() { - it("configures the phase with the expected arguments", func() { - lifecycle := newTestLifecycleExec(t, false) - fakePhaseFactory := fakes.NewFakePhaseFactory() - expectedRepoName := "some-repo-name" - - err := lifecycle.Analyze(context.Background(), expectedRepoName, "test", false, "", true, "test", []string{}, fakeCache, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) + providedClearCache = true - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertEq(t, configProvider.Name(), "analyzer") + it("configures the phase with the expected arguments", func() { h.AssertSliceContains(t, configProvider.ContainerConfig().Cmd, "-skip-layers") }) }) when("clear cache is false", func() { it("configures the phase with the expected arguments", func() { - lifecycle := newTestLifecycleExec(t, false) - fakePhaseFactory := fakes.NewFakePhaseFactory() - expectedRepoName := "some-repo-name" - - err := lifecycle.Analyze(context.Background(), expectedRepoName, "test", false, "", false, "test", []string{}, fakeCache, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertEq(t, configProvider.Name(), "analyzer") h.AssertIncludeAllExpectedPatterns(t, configProvider.ContainerConfig().Cmd, []string{"-cache-dir", 
"/cache"}, @@ -1179,49 +1452,24 @@ func testLifecycleExecution(t *testing.T, when spec.G, it spec.S) { }) when("using a cache image", func() { - var ( - lifecycle *build.LifecycleExecution - fakePhaseFactory *fakes.FakePhaseFactory - expectedRepoName = "some-repo-name" - ) - it.Before(func() { - fakeCache.ReturnForType = cache.Image - fakeCache.ReturnForName = "some-cache-image" - - lifecycle = newTestLifecycleExec(t, false, func(options *build.LifecycleOptions) { - options.GID = -1 - }) - fakePhaseFactory = fakes.NewFakePhaseFactory() - }) - it("configures the phase with a build cache images", func() { - err := lifecycle.Analyze(context.Background(), expectedRepoName, "", false, "", false, "test", []string{}, fakeCache, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) + fakeBuildCache = newFakeImageCache() - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertEq(t, configProvider.Name(), "analyzer") + it("configures the phase with a build cache image", func() { h.AssertSliceNotContains(t, configProvider.HostConfig().Binds, ":/cache") h.AssertIncludeAllExpectedPatterns(t, configProvider.ContainerConfig().Cmd, []string{"-cache-image", "some-cache-image"}, ) - h.AssertIncludeAllExpectedPatterns(t, + h.AssertSliceNotContains(t, configProvider.ContainerConfig().Cmd, - []string{"-cache-dir", "/cache"}, + "-cache-dir", ) }) - when("clear-cache", func() { - it("cache is omitted from Analyze", func() { - err := lifecycle.Analyze(context.Background(), expectedRepoName, "", false, "", true, "test", []string{}, fakeCache, fakePhaseFactory) - h.AssertNil(t, err) - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) + when("clear-cache", func() { + providedClearCache = true - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertEq(t, configProvider.Name(), "analyzer") + 
it("cache is omitted from Analyze", func() { h.AssertSliceNotContains(t, configProvider.ContainerConfig().Cmd, "-cache-image") }) }) @@ -1229,394 +1477,244 @@ func testLifecycleExecution(t *testing.T, when spec.G, it spec.S) { }) when("platform >= 0.7", func() { - var ( - lifecycle *build.LifecycleExecution - fakePhaseFactory *fakes.FakePhaseFactory - fakeBuilder *fakes.FakeBuilder - expectedRepoName = "some-repo-name" - ) - - it.Before(func() { - fakePhaseFactory = fakes.NewFakePhaseFactory() - var err error - fakeBuilder, err = fakes.NewFakeBuilder(fakes.WithSupportedPlatformAPIs([]*api.Version{api.MustParse("0.7")})) - h.AssertNil(t, err) - lifecycle = newTestLifecycleExec(t, true, fakes.WithBuilder(fakeBuilder)) - }) + platformAPI = api.MustParse("0.7") it("doesn't set cache dir", func() { - err := lifecycle.Analyze(context.Background(), expectedRepoName, "test", false, "", false, "test", []string{}, fakeCache, fakePhaseFactory) - - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertEq(t, configProvider.Name(), "analyzer") h.AssertSliceNotContains(t, configProvider.HostConfig().Binds, ":/cache") }) it("passes additional tags", func() { - err := lifecycle.Analyze(context.Background(), expectedRepoName, "test", false, "", false, "test", []string{"additional-tag"}, fakeCache, fakePhaseFactory) - - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertEq(t, configProvider.Name(), "analyzer") h.AssertIncludeAllExpectedPatterns(t, configProvider.ContainerConfig().Cmd, - []string{"-tag", "additional-tag"}, + []string{"-tag", "some-additional-tag2", "-tag", "some-additional-tag1"}, ) }) it("passes run image", func() { - err := 
lifecycle.Analyze(context.Background(), expectedRepoName, "test", false, "", false, "test", []string{"additional-tag"}, fakeCache, fakePhaseFactory) - - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertEq(t, configProvider.Name(), "analyzer") h.AssertIncludeAllExpectedPatterns(t, configProvider.ContainerConfig().Cmd, - []string{"-run-image", "test"}, + []string{"-run-image", "some-run-image"}, ) }) it("passes stack", func() { - err := lifecycle.Analyze(context.Background(), expectedRepoName, "test", false, "", false, "test", []string{"additional-tag"}, fakeCache, fakePhaseFactory) - - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertEq(t, configProvider.Name(), "analyzer") h.AssertIncludeAllExpectedPatterns(t, configProvider.ContainerConfig().Cmd, []string{"-stack", "/layers/stack.toml"}, ) }) - it("passes previous image", func() { - lifecycle := newTestLifecycleExec(t, true, func(options *build.LifecycleOptions) { - options.Image = name.MustParseReference("image") - options.PreviousImage = "previous-image" - }, fakes.WithBuilder(fakeBuilder)) - - err := lifecycle.Analyze(context.Background(), expectedRepoName, "test", false, "", false, "test", []string{"additional-tag"}, fakeCache, fakePhaseFactory) + when("previous image", func() { + lifecycleOps = append(lifecycleOps, func(options *build.LifecycleOptions) { + options.PreviousImage = "some-previous-image" + }) - h.AssertNil(t, err) + it("passes previous image", func() { + h.AssertIncludeAllExpectedPatterns(t, + configProvider.ContainerConfig().Cmd, + []string{"-previous-image", "some-previous-image"}, + ) + }) + }) + }) - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - 
h.AssertNotEq(t, lastCallIndex, -1) + when("platform >= 0.12", func() { + platformAPI = api.MustParse("0.12") - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertEq(t, configProvider.Name(), "analyzer") + it("passes run", func() { h.AssertIncludeAllExpectedPatterns(t, configProvider.ContainerConfig().Cmd, - []string{"-previous-image", "previous-image"}, + []string{"-run", "/layers/run.toml"}, ) + h.AssertSliceNotContains(t, + configProvider.ContainerConfig().Cmd, + "-stack", + ) + }) + + when("layout is true", func() { + providedLayout = true + + it("configures the phase with the expected environment variables", func() { + layoutDir := filepath.Join(paths.RootDir, "layout-repo") + h.AssertSliceContains(t, + configProvider.ContainerConfig().Env, "CNB_USE_LAYOUT=true", fmt.Sprintf("CNB_LAYOUT_DIR=%s", layoutDir), + ) + }) }) }) when("publish", func() { - it("runs the phase with the lifecycle image", func() { - lifecycle := newTestLifecycleExec(t, true, func(options *build.LifecycleOptions) { + providedPublish = true + + when("lifecycle image", func() { + lifecycleOps = append(lifecycleOps, func(options *build.LifecycleOptions) { options.LifecycleImage = "some-lifecycle-image" }) - fakePhaseFactory := fakes.NewFakePhaseFactory() - - err := lifecycle.Analyze(context.Background(), "test", "test", true, "", false, "test", []string{}, fakeCache, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertEq(t, configProvider.ContainerConfig().Image, "some-lifecycle-image") + it("runs the phase with the lifecycle image", func() { + h.AssertEq(t, configProvider.ContainerConfig().Image, "some-lifecycle-image") + }) }) it("sets the CNB_USER_ID and CNB_GROUP_ID in the environment", func() { - fakeBuilder, err := fakes.NewFakeBuilder(fakes.WithUID(2222), fakes.WithGID(3333)) - 
h.AssertNil(t, err) - lifecycle := newTestLifecycleExec(t, false, fakes.WithBuilder(fakeBuilder)) - fakePhaseFactory := fakes.NewFakePhaseFactory() - - err = lifecycle.Analyze(context.Background(), "test", "test", true, "", false, "test", []string{}, fakeCache, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertSliceContains(t, configProvider.ContainerConfig().Env, "CNB_USER_ID=2222") - h.AssertSliceContains(t, configProvider.ContainerConfig().Env, "CNB_GROUP_ID=3333") + h.AssertSliceContains(t, configProvider.ContainerConfig().Env, fmt.Sprintf("CNB_USER_ID=%d", providedUID)) + h.AssertSliceContains(t, configProvider.ContainerConfig().Env, fmt.Sprintf("CNB_GROUP_ID=%d", providedGID)) }) it("configures the phase with registry access", func() { - lifecycle := newTestLifecycleExec(t, false) - fakePhaseFactory := fakes.NewFakePhaseFactory() - expectedRepos := "some-repo-name" - expectedNetworkMode := "some-network-mode" - - err := lifecycle.Analyze(context.Background(), expectedRepos, expectedNetworkMode, true, "", false, "test", []string{}, fakeCache, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] h.AssertSliceContains(t, configProvider.ContainerConfig().Env, "CNB_REGISTRY_AUTH={}") - h.AssertEq(t, configProvider.HostConfig().NetworkMode, container.NetworkMode(expectedNetworkMode)) + h.AssertEq(t, configProvider.HostConfig().NetworkMode, container.NetworkMode(providedNetworkMode)) }) it("configures the phase with root", func() { - lifecycle := newTestLifecycleExec(t, false) - fakePhaseFactory := fakes.NewFakePhaseFactory() - - err := lifecycle.Analyze(context.Background(), "test", "test", true, "", false, "test", 
[]string{}, fakeCache, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] h.AssertEq(t, configProvider.ContainerConfig().User, "root") }) it("configures the phase with the expected arguments", func() { - verboseLifecycle := newTestLifecycleExec(t, true) - fakePhaseFactory := fakes.NewFakePhaseFactory() - expectedRepoName := "some-repo-name" - - err := verboseLifecycle.Analyze(context.Background(), expectedRepoName, "test", true, "", false, "test", []string{}, fakeCache, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertEq(t, configProvider.Name(), "analyzer") h.AssertIncludeAllExpectedPatterns(t, configProvider.ContainerConfig().Cmd, []string{"-log-level", "debug"}, - []string{expectedRepoName}, + []string{providedTargetImage}, ) }) it("configures the phase with binds", func() { - fakeCache.ReturnForName = "some-cache" - lifecycle := newTestLifecycleExec(t, false) - fakePhaseFactory := fakes.NewFakePhaseFactory() expectedBind := "some-cache:/cache" - err := lifecycle.Analyze(context.Background(), "test", "test", true, "", false, "test", []string{}, fakeCache, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] h.AssertSliceContains(t, configProvider.HostConfig().Binds, expectedBind) }) when("using a cache image", func() { - it.Before(func() { - fakeCache.ReturnForName = "some-cache-image" - fakeCache.ReturnForType = cache.Image - }) + fakeBuildCache = newFakeImageCache() it("configures the phase with a build cache images", func() { - lifecycle := 
newTestLifecycleExec(t, false, func(options *build.LifecycleOptions) { - options.GID = -1 - }) - fakePhaseFactory := fakes.NewFakePhaseFactory() - expectedRepoName := "some-repo-name" - - err := lifecycle.Analyze(context.Background(), expectedRepoName, "test", true, "", false, "test", []string{}, fakeCache, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] h.AssertSliceNotContains(t, configProvider.HostConfig().Binds, ":/cache") h.AssertIncludeAllExpectedPatterns(t, configProvider.ContainerConfig().Cmd, []string{"-cache-image", "some-cache-image"}, ) - h.AssertIncludeAllExpectedPatterns(t, + h.AssertSliceNotContains(t, configProvider.ContainerConfig().Cmd, - []string{"-cache-dir", "/cache"}, + "-cache-dir", ) }) }) }) when("publish is false", func() { - it("runs the phase with the lifecycle image", func() { - lifecycle := newTestLifecycleExec(t, true, func(options *build.LifecycleOptions) { + when("lifecycle image", func() { + lifecycleOps = append(lifecycleOps, func(options *build.LifecycleOptions) { options.LifecycleImage = "some-lifecycle-image" }) - fakePhaseFactory := fakes.NewFakePhaseFactory() - - err := lifecycle.Analyze(context.Background(), "test", "test", false, "", false, "test", []string{}, fakeCache, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertEq(t, configProvider.ContainerConfig().Image, "some-lifecycle-image") + it("runs the phase with the lifecycle image", func() { + h.AssertEq(t, configProvider.ContainerConfig().Image, "some-lifecycle-image") + }) }) it("sets the CNB_USER_ID and CNB_GROUP_ID in the environment", func() { - fakeBuilder, err := fakes.NewFakeBuilder(fakes.WithUID(2222), 
fakes.WithGID(3333)) - h.AssertNil(t, err) - lifecycle := newTestLifecycleExec(t, false, fakes.WithBuilder(fakeBuilder)) - fakePhaseFactory := fakes.NewFakePhaseFactory() - - err = lifecycle.Analyze(context.Background(), "test", "test", false, "", false, "test", []string{}, fakeCache, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertSliceContains(t, configProvider.ContainerConfig().Env, "CNB_USER_ID=2222") - h.AssertSliceContains(t, configProvider.ContainerConfig().Env, "CNB_GROUP_ID=3333") + h.AssertSliceContains(t, configProvider.ContainerConfig().Env, fmt.Sprintf("CNB_USER_ID=%d", providedUID)) + h.AssertSliceContains(t, configProvider.ContainerConfig().Env, fmt.Sprintf("CNB_GROUP_ID=%d", providedGID)) }) it("configures the phase with daemon access", func() { - lifecycle := newTestLifecycleExec(t, false) - fakePhaseFactory := fakes.NewFakePhaseFactory() - - err := lifecycle.Analyze(context.Background(), "test", "test", false, "", false, "test", []string{}, fakeCache, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] h.AssertEq(t, configProvider.ContainerConfig().User, "root") h.AssertSliceContains(t, configProvider.HostConfig().Binds, "/var/run/docker.sock:/var/run/docker.sock") }) - it("configures the phase with daemon access with TCP docker-host", func() { - lifecycle := newTestLifecycleExec(t, false) - fakePhaseFactory := fakes.NewFakePhaseFactory() - - err := lifecycle.Analyze(context.Background(), "test", "test", false, "tcp://localhost:1234", false, "test", []string{}, fakeCache, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, 
lastCallIndex, -1) + when("tcp docker-host", func() { + providedDockerHost = `tcp://localhost:1234` - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertSliceNotContains(t, configProvider.HostConfig().Binds, "/var/run/docker.sock:/var/run/docker.sock") - h.AssertSliceContains(t, configProvider.ContainerConfig().Env, "DOCKER_HOST=tcp://localhost:1234") + it("configures the phase with daemon access with TCP docker-host", func() { + h.AssertSliceNotContains(t, configProvider.HostConfig().Binds, "/var/run/docker.sock:/var/run/docker.sock") + h.AssertSliceContains(t, configProvider.ContainerConfig().Env, "DOCKER_HOST=tcp://localhost:1234") + }) }) it("configures the phase with the expected arguments", func() { - verboseLifecycle := newTestLifecycleExec(t, true) - fakePhaseFactory := fakes.NewFakePhaseFactory() - expectedRepoName := "some-repo-name" - - err := verboseLifecycle.Analyze(context.Background(), expectedRepoName, "test", false, "", true, "test", []string{}, fakeCache, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertEq(t, configProvider.Name(), "analyzer") h.AssertIncludeAllExpectedPatterns(t, configProvider.ContainerConfig().Cmd, []string{"-log-level", "debug"}, []string{"-daemon"}, - []string{expectedRepoName}, + []string{providedTargetImage}, ) }) it("configures the phase with the expected network mode", func() { - lifecycle := newTestLifecycleExec(t, false) - fakePhaseFactory := fakes.NewFakePhaseFactory() - expectedNetworkMode := "some-network-mode" - - err := lifecycle.Analyze(context.Background(), "test", expectedNetworkMode, false, "", false, "test", []string{}, fakeCache, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := 
fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertEq(t, configProvider.HostConfig().NetworkMode, container.NetworkMode(expectedNetworkMode)) + h.AssertEq(t, configProvider.HostConfig().NetworkMode, container.NetworkMode(providedNetworkMode)) }) it("configures the phase with binds", func() { - fakeCache.ReturnForName = "some-cache" - lifecycle := newTestLifecycleExec(t, false) - fakePhaseFactory := fakes.NewFakePhaseFactory() expectedBind := "some-cache:/cache" - err := lifecycle.Analyze(context.Background(), "test", "test", false, "", true, "test", []string{}, fakeCache, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] h.AssertSliceContains(t, configProvider.HostConfig().Binds, expectedBind) }) - }) - when("override GID", func() { - var ( - lifecycle *build.LifecycleExecution - fakePhaseFactory *fakes.FakePhaseFactory - ) - fakePhase := &fakes.FakePhase{} - fakePhaseFactory = fakes.NewFakePhaseFactory(fakes.WhichReturnsForNew(fakePhase)) + when("platform >= 0.9", func() { + platformAPI = api.MustParse("0.9") + + providedClearCache = true + + it("configures the phase with launch cache and skip layers", func() { + expectedBinds := []string{"some-launch-cache:/launch-cache"} - when("override GID is provided", func() { - it.Before(func() { - lifecycle = newTestLifecycleExec(t, true, func(options *build.LifecycleOptions) { - options.GID = 2 - }) - }) - it("configures the phase with the expected arguments", func() { - err := lifecycle.Analyze(context.Background(), "test", "test", false, "", false, "test", []string{}, fakeCache, fakePhaseFactory) - h.AssertNil(t, err) - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] h.AssertIncludeAllExpectedPatterns(t, 
configProvider.ContainerConfig().Cmd, - []string{"-gid", "2"}, + []string{"-skip-layers"}, + []string{"-launch-cache", "/launch-cache"}, ) + h.AssertSliceContains(t, configProvider.HostConfig().Binds, expectedBinds...) }) - }) - when("override GID is not provided", func() { - it.Before(func() { - lifecycle = newTestLifecycleExec(t, true, func(options *build.LifecycleOptions) { - options.GID = -1 + + when("override GID", func() { + when("override GID is provided", func() { + lifecycleOps = append(lifecycleOps, func(options *build.LifecycleOptions) { + options.GID = 2 + }) + + it("configures the phase with the expected arguments", func() { + h.AssertIncludeAllExpectedPatterns(t, + configProvider.ContainerConfig().Cmd, + []string{"-gid", "2"}, + ) + }) + }) + + when("override GID is not provided", func() { + lifecycleOps = append(lifecycleOps, func(options *build.LifecycleOptions) { + options.GID = -1 + }) + + it("gid is not added to the expected arguments", func() { + h.AssertSliceNotContains(t, configProvider.ContainerConfig().Cmd, "-gid") + }) }) }) - it("gid is not added to the expected arguments", func() { - err := lifecycle.Analyze(context.Background(), "test", "test", false, "", false, "test", []string{}, fakeCache, fakePhaseFactory) - h.AssertNil(t, err) - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertSliceNotContains(t, configProvider.ContainerConfig().Cmd, "-gid") + + when("override UID", func() { + when("override UID is provided", func() { + lifecycleOps = append(lifecycleOps, func(options *build.LifecycleOptions) { + options.UID = 1001 + }) + + it("configures the phase with the expected arguments", func() { + h.AssertIncludeAllExpectedPatterns(t, + configProvider.ContainerConfig().Cmd, + []string{"-uid", "1001"}, + ) + }) + }) + + when("override UID is not provided", func() { + lifecycleOps = append(lifecycleOps, 
func(options *build.LifecycleOptions) { + options.UID = -1 + }) + + it("uid is not added to the expected arguments", func() { + h.AssertSliceNotContains(t, configProvider.ContainerConfig().Cmd, "-uid") + }) + }) }) }) }) @@ -1627,13 +1725,14 @@ func testLifecycleExecution(t *testing.T, when spec.G, it spec.S) { var imageName name.Tag imageName, err := name.NewTag("/x/y/?!z", name.WeakValidation) h.AssertError(t, err, "repository can only contain the characters `abcdefghijklmnopqrstuvwxyz0123456789_-./`") - lifecycle := newTestLifecycleExec(t, true, func(options *build.LifecycleOptions) { + + lifecycleOps := append(lifecycleOps, func(options *build.LifecycleOptions) { options.Image = imageName - options.PreviousImage = "previous-image" + options.PreviousImage = "some-previous-image" }) - fakePhaseFactory := fakes.NewFakePhaseFactory() + lifecycle := newTestLifecycleExec(t, true, "some-temp-dir", lifecycleOps...) - err = lifecycle.Analyze(context.Background(), "test", "test", false, "", false, "test", []string{}, fakeCache, fakePhaseFactory) + err = lifecycle.Analyze(context.Background(), fakeBuildCache, fakeLaunchCache, fakePhaseFactory) h.AssertError(t, err, "invalid image name") }) }) @@ -1643,79 +1742,66 @@ func testLifecycleExecution(t *testing.T, when spec.G, it spec.S) { var imageName name.Tag imageName, err := name.NewTag("/some/image", name.WeakValidation) h.AssertNil(t, err) - lifecycle := newTestLifecycleExec(t, true, func(options *build.LifecycleOptions) { + + lifecycleOps := append(lifecycleOps, func(options *build.LifecycleOptions) { options.PreviousImage = "%%%" options.Image = imageName }) - fakePhaseFactory := fakes.NewFakePhaseFactory() - err = lifecycle.Analyze(context.Background(), "test", "test", false, "", false, "test", []string{}, fakeCache, fakePhaseFactory) + lifecycle := newTestLifecycleExec(t, true, "some-temp-dir", lifecycleOps...) 
+ + err = lifecycle.Analyze(context.Background(), fakeBuildCache, fakeLaunchCache, fakePhaseFactory) h.AssertError(t, err, "invalid previous image name") }) }) when("--publish is false", func() { - it("successfully passes previous-image to analyzer", func() { - var imageName name.Tag - imageName, err := name.NewTag("/some/image", name.WeakValidation) - h.AssertNil(t, err) - lifecycle := newTestLifecycleExec(t, true, func(options *build.LifecycleOptions) { + when("previous image", func() { + imageName, _ := name.NewTag("/some/image", name.WeakValidation) + + lifecycleOps = append(lifecycleOps, func(options *build.LifecycleOptions) { options.PreviousImage = "previous-image" options.Image = imageName }) - prevImage, err := name.ParseReference(lifecycle.PrevImageName(), name.WeakValidation) - h.AssertNil(t, err) - - fakePhaseFactory := fakes.NewFakePhaseFactory() - err = lifecycle.Analyze(context.Background(), "test", "test", false, "", false, "test", []string{}, fakeCache, fakePhaseFactory) - h.AssertNil(t, err) - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertEq(t, configProvider.Name(), "analyzer") - h.AssertEq(t, lifecycle.ImageName().Name(), prevImage.Name()) + it("successfully passes previous-image to analyzer", func() { + prevImage, err := name.ParseReference(lifecycle.PrevImageName(), name.WeakValidation) + h.AssertNil(t, err) + h.AssertEq(t, lifecycle.ImageName().Name(), prevImage.Name()) + }) }) }) when("--publish is true", func() { + providedPublish = true + when("previous-image and image are in the same registry", func() { + imageName, _ := name.NewTag("/some/image", name.WeakValidation) + + lifecycleOps = append(lifecycleOps, func(options *build.LifecycleOptions) { + options.PreviousImage = "index.docker.io/some/previous:latest" + options.Image = imageName + }) + it("successfully passes previous-image to analyzer", 
func() { - var imageName name.Tag - imageName, err := name.NewTag("/some/image", name.WeakValidation) - h.AssertNil(t, err) - lifecycle := newTestLifecycleExec(t, true, func(options *build.LifecycleOptions) { - options.PreviousImage = "index.docker.io/some/previous:latest" - options.Image = imageName - }) prevImage, err := name.ParseReference(lifecycle.PrevImageName(), name.WeakValidation) h.AssertNil(t, err) - - fakePhaseFactory := fakes.NewFakePhaseFactory() - err = lifecycle.Analyze(context.Background(), "test", "test", true, "", false, "test", []string{}, fakeCache, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertEq(t, configProvider.Name(), "analyzer") h.AssertEq(t, lifecycle.ImageName().Name(), prevImage.Name()) }) }) when("previous-image and image are not in the same registry", func() { it("errors", func() { - var imageName name.Tag imageName, err := name.NewTag("/some/image", name.WeakValidation) h.AssertNil(t, err) - lifecycle := newTestLifecycleExec(t, true, func(options *build.LifecycleOptions) { + + lifecycleOps := append(lifecycleOps, func(options *build.LifecycleOptions) { options.PreviousImage = "example.io/some/previous:latest" options.Image = imageName }) - fakePhaseFactory := fakes.NewFakePhaseFactory() - err = lifecycle.Analyze(context.Background(), "test", "test", true, "", false, "test", []string{}, fakeCache, fakePhaseFactory) - h.AssertError(t, err, fmt.Sprintf("%s", err)) + lifecycle := newTestLifecycleExec(t, true, "some-temp-dir", lifecycleOps...) 
+ + err = lifecycle.Analyze(context.Background(), fakeBuildCache, fakeLaunchCache, fakePhaseFactory) + h.AssertNotNil(t, err) }) }) }) @@ -1723,83 +1809,42 @@ func testLifecycleExecution(t *testing.T, when spec.G, it spec.S) { }) when("#Restore", func() { - var fakeCache *fakes.FakeCache it.Before(func() { - fakeCache = fakes.NewFakeCache() - fakeCache.ReturnForName = "some-cache" - fakeCache.ReturnForType = cache.Volume - }) - it("runs the phase with the lifecycle image", func() { - lifecycle := newTestLifecycleExec(t, true, func(options *build.LifecycleOptions) { - options.LifecycleImage = "some-lifecycle-image" - }) - fakePhaseFactory := fakes.NewFakePhaseFactory() - - err := lifecycle.Restore(context.Background(), "test", fakeCache, fakePhaseFactory) + err := lifecycle.Restore(context.Background(), fakeBuildCache, fakeKanikoCache, fakePhaseFactory) h.AssertNil(t, err) lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 h.AssertNotEq(t, lastCallIndex, -1) - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertEq(t, configProvider.ContainerConfig().Image, "some-lifecycle-image") + configProvider = fakePhaseFactory.NewCalledWithProvider[lastCallIndex] + h.AssertEq(t, configProvider.Name(), "restorer") }) - it("sets the CNB_USER_ID and CNB_GROUP_ID in the environment", func() { - fakeBuilder, err := fakes.NewFakeBuilder(fakes.WithUID(2222), fakes.WithGID(3333)) - h.AssertNil(t, err) - lifecycle := newTestLifecycleExec(t, false, fakes.WithBuilder(fakeBuilder)) - fakePhaseFactory := fakes.NewFakePhaseFactory() - - err = lifecycle.Restore(context.Background(), "test", fakeCache, fakePhaseFactory) - h.AssertNil(t, err) + when("lifecycle image", func() { + lifecycleOps = append(lifecycleOps, func(options *build.LifecycleOptions) { + options.LifecycleImage = "some-lifecycle-image" + }) - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) + it("runs the phase with the lifecycle 
image", func() { + h.AssertEq(t, configProvider.ContainerConfig().Image, "some-lifecycle-image") + }) + }) - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertSliceContains(t, configProvider.ContainerConfig().Env, "CNB_USER_ID=2222") - h.AssertSliceContains(t, configProvider.ContainerConfig().Env, "CNB_GROUP_ID=3333") + it("sets the CNB_USER_ID and CNB_GROUP_ID in the environment", func() { + h.AssertSliceContains(t, configProvider.ContainerConfig().Env, fmt.Sprintf("CNB_USER_ID=%d", providedUID)) + h.AssertSliceContains(t, configProvider.ContainerConfig().Env, fmt.Sprintf("CNB_GROUP_ID=%d", providedGID)) }) it("creates a phase and then runs it", func() { - lifecycle := newTestLifecycleExec(t, false) - fakePhase := &fakes.FakePhase{} - fakePhaseFactory := fakes.NewFakePhaseFactory(fakes.WhichReturnsForNew(fakePhase)) - - err := lifecycle.Restore(context.Background(), "test", fakeCache, fakePhaseFactory) - h.AssertNil(t, err) - h.AssertEq(t, fakePhase.CleanupCallCount, 1) h.AssertEq(t, fakePhase.RunCallCount, 1) }) it("configures the phase with root access", func() { - lifecycle := newTestLifecycleExec(t, false) - fakePhaseFactory := fakes.NewFakePhaseFactory() - - err := lifecycle.Restore(context.Background(), "test", fakeCache, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] h.AssertEq(t, configProvider.ContainerConfig().User, "root") }) it("configures the phase with the expected arguments", func() { - verboseLifecycle := newTestLifecycleExec(t, true) - fakePhaseFactory := fakes.NewFakePhaseFactory() - - err := verboseLifecycle.Restore(context.Background(), "test", fakeCache, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := 
fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertEq(t, configProvider.Name(), "restorer") h.AssertIncludeAllExpectedPatterns(t, configProvider.ContainerConfig().Cmd, []string{"-log-level", "debug"}, @@ -1808,137 +1853,237 @@ func testLifecycleExecution(t *testing.T, when spec.G, it spec.S) { }) it("configures the phase with the expected network mode", func() { - lifecycle := newTestLifecycleExec(t, false) - fakePhaseFactory := fakes.NewFakePhaseFactory() - expectedNetworkMode := "some-network-mode" - - err := lifecycle.Restore(context.Background(), expectedNetworkMode, fakeCache, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertEq(t, configProvider.HostConfig().NetworkMode, container.NetworkMode(expectedNetworkMode)) + h.AssertEq(t, configProvider.HostConfig().NetworkMode, container.NetworkMode(providedNetworkMode)) }) it("configures the phase with binds", func() { - lifecycle := newTestLifecycleExec(t, false) - fakePhaseFactory := fakes.NewFakePhaseFactory() expectedBind := "some-cache:/cache" - err := lifecycle.Restore(context.Background(), "test", fakeCache, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] h.AssertSliceContains(t, configProvider.HostConfig().Binds, expectedBind) }) - when("using cache image", func() { - var ( - lifecycle *build.LifecycleExecution - fakePhaseFactory *fakes.FakePhaseFactory - ) + when("there are extensions", func() { + platformAPI = api.MustParse("0.12") + providedOrderExt = dist.Order{dist.OrderEntry{Group: []dist.ModuleRef{ /* don't care */ }}} - it.Before(func() { - fakeCache.ReturnForType = cache.Image - fakeCache.ReturnForName = "some-cache-image" + when("for 
build", func() { + extensionsForBuild = true - lifecycle = newTestLifecycleExec(t, false, func(options *build.LifecycleOptions) { - options.GID = -1 + it("configures the phase with registry access", func() { + h.AssertSliceContains(t, configProvider.ContainerConfig().Env, "CNB_REGISTRY_AUTH={}") }) - fakePhaseFactory = fakes.NewFakePhaseFactory() }) - it("configures the phase with a cache image", func() { - err := lifecycle.Restore(context.Background(), "test", fakeCache, fakePhaseFactory) - h.AssertNil(t, err) - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) + when("for run", func() { + extensionsForRun = true + + it("configures the phase with registry access", func() { + h.AssertSliceContains(t, configProvider.ContainerConfig().Env, "CNB_REGISTRY_AUTH={}") + }) + }) + }) + + when("using cache image", func() { + fakeBuildCache = newFakeImageCache() - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] + it("configures the phase with a cache image", func() { h.AssertSliceNotContains(t, configProvider.HostConfig().Binds, ":/cache") h.AssertIncludeAllExpectedPatterns(t, configProvider.ContainerConfig().Cmd, []string{"-cache-image", "some-cache-image"}, ) + h.AssertSliceNotContains(t, configProvider.ContainerConfig().Cmd, "-cache-dir") + }) + + it("configures the phase with registry access", func() { + h.AssertSliceContains(t, configProvider.ContainerConfig().Env, "CNB_REGISTRY_AUTH={}") }) }) when("override GID", func() { - var ( - lifecycle *build.LifecycleExecution - fakePhaseFactory *fakes.FakePhaseFactory - ) - fakePhase := &fakes.FakePhase{} - fakePhaseFactory = fakes.NewFakePhaseFactory(fakes.WhichReturnsForNew(fakePhase)) - when("override GID is provided", func() { - it.Before(func() { - lifecycle = newTestLifecycleExec(t, true, func(options *build.LifecycleOptions) { - options.GID = 2 - }) + lifecycleOps = append(lifecycleOps, func(options *build.LifecycleOptions) { + options.GID = 2 
}) + it("configures the phase with the expected arguments", func() { - err := lifecycle.Restore(context.Background(), "test", fakeCache, fakePhaseFactory) - h.AssertNil(t, err) - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] h.AssertIncludeAllExpectedPatterns(t, configProvider.ContainerConfig().Cmd, []string{"-gid", "2"}, ) }) }) + when("override GID is not provided", func() { - it.Before(func() { - lifecycle = newTestLifecycleExec(t, true, func(options *build.LifecycleOptions) { - options.GID = -1 - }) + lifecycleOps = append(lifecycleOps, func(options *build.LifecycleOptions) { + options.GID = -1 }) + it("gid is not added to the expected arguments", func() { - err := lifecycle.Restore(context.Background(), "test", fakeCache, fakePhaseFactory) - h.AssertNil(t, err) - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] h.AssertSliceNotContains(t, configProvider.ContainerConfig().Cmd, "-gid") }) }) }) - }) - when("#Build", func() { - it("creates a phase and then runs it", func() { - lifecycle := newTestLifecycleExec(t, false) - fakePhase := &fakes.FakePhase{} - fakePhaseFactory := fakes.NewFakePhaseFactory(fakes.WhichReturnsForNew(fakePhase)) + when("override UID", func() { + when("override UID is provided", func() { + lifecycleOps = append(lifecycleOps, func(options *build.LifecycleOptions) { + options.UID = 1001 + }) - err := lifecycle.Build(context.Background(), "test", []string{}, fakePhaseFactory) - h.AssertNil(t, err) + it("configures the phase with the expected arguments", func() { + h.AssertIncludeAllExpectedPatterns(t, + configProvider.ContainerConfig().Cmd, + []string{"-uid", "1001"}, + ) + }) + }) - h.AssertEq(t, fakePhase.CleanupCallCount, 1) - h.AssertEq(t, fakePhase.RunCallCount, 1) + when("override 
UID is not provided", func() { + lifecycleOps = append(lifecycleOps, func(options *build.LifecycleOptions) { + options.UID = -1 + }) + + it("uid is not added to the expected arguments", func() { + h.AssertSliceNotContains(t, configProvider.ContainerConfig().Cmd, "-uid") + }) + }) }) - it("configures the phase with the expected arguments", func() { - fakeBuilder, err := fakes.NewFakeBuilder() - h.AssertNil(t, err) - verboseLifecycle := newTestLifecycleExec(t, true, fakes.WithBuilder(fakeBuilder)) - fakePhaseFactory := fakes.NewFakePhaseFactory() + when("--clear-cache", func() { + providedClearCache = true + + it("provides -skip-layers", func() { + h.AssertSliceContains(t, configProvider.ContainerConfig().Cmd, "-skip-layers") + }) + }) + + when("extensions", func() { + providedOrderExt = dist.Order{dist.OrderEntry{Group: []dist.ModuleRef{ /* don't care */ }}} + + when("for build", func() { + when("present in /generated/build", func() { + extensionsForBuild = true + + when("platform < 0.10", func() { + platformAPI = api.MustParse("0.9") + + it("does not provide -build-image or /kaniko bind", func() { + h.AssertSliceNotContains(t, configProvider.ContainerConfig().Cmd, "-build-image") + h.AssertSliceNotContains(t, configProvider.HostConfig().Binds, "some-kaniko-cache:/kaniko") + }) + }) + + when("platform >= 0.10", func() { + platformAPI = api.MustParse("0.10") + + it("provides -build-image and /kaniko bind", func() { + h.AssertSliceContainsInOrder(t, configProvider.ContainerConfig().Cmd, "-build-image", providedBuilderImage) + h.AssertSliceContains(t, configProvider.ContainerConfig().Env, "CNB_REGISTRY_AUTH={}") + h.AssertSliceContains(t, configProvider.HostConfig().Binds, "some-kaniko-cache:/kaniko") + }) + }) + }) + + when("not present in /generated/build", func() { + platformAPI = api.MustParse("0.10") + + it("does not provide -build-image or /kaniko bind", func() { + h.AssertSliceNotContains(t, configProvider.ContainerConfig().Cmd, "-build-image") + 
h.AssertSliceNotContains(t, configProvider.HostConfig().Binds, "some-kaniko-cache:/kaniko") + }) + }) + }) + + when("for run", func() { + when("analyzed.toml extend", func() { + when("true", func() { + extensionsForRun = true + + when("platform >= 0.12", func() { + platformAPI = api.MustParse("0.12") + + it("provides /kaniko bind", func() { + h.AssertSliceContains(t, configProvider.HostConfig().Binds, "some-kaniko-cache:/kaniko") + }) + }) + + when("platform < 0.12", func() { + platformAPI = api.MustParse("0.11") + + it("does not provide /kaniko bind", func() { + h.AssertSliceNotContains(t, configProvider.HostConfig().Binds, "some-kaniko-cache:/kaniko") + }) + }) + }) - err = verboseLifecycle.Build(context.Background(), "test", []string{}, fakePhaseFactory) + when("false", func() { + platformAPI = api.MustParse("0.12") + + it("does not provide /kaniko bind", func() { + h.AssertSliceNotContains(t, configProvider.HostConfig().Binds, "some-kaniko-cache:/kaniko") + }) + }) + }) + }) + }) + + when("publish is false", func() { + when("platform >= 0.12", func() { + platformAPI = api.MustParse("0.12") + + it("configures the phase with daemon access", func() { + h.AssertEq(t, configProvider.ContainerConfig().User, "root") + h.AssertSliceContains(t, configProvider.HostConfig().Binds, "/var/run/docker.sock:/var/run/docker.sock") + }) + + it("configures the phase with the expected arguments", func() { + h.AssertIncludeAllExpectedPatterns(t, + configProvider.ContainerConfig().Cmd, + []string{"-daemon"}, + ) + }) + }) + }) + + when("layout is true", func() { + when("platform >= 0.12", func() { + platformAPI = api.MustParse("0.12") + providedLayout = true + + it("it configures the phase with access to provided volumes", func() { + // this is required to read the /layout-repo + h.AssertSliceContains(t, configProvider.HostConfig().Binds, providedVolumes...) 
+ }) + + it("configures the phase with the expected environment variables", func() { + layoutDir := filepath.Join(paths.RootDir, "layout-repo") + h.AssertSliceContains(t, + configProvider.ContainerConfig().Env, "CNB_USE_LAYOUT=true", fmt.Sprintf("CNB_LAYOUT_DIR=%s", layoutDir), + ) + }) + }) + }) + }) + + when("#Build", func() { + it.Before(func() { + err := lifecycle.Build(context.Background(), fakePhaseFactory) h.AssertNil(t, err) lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 h.AssertNotEq(t, lastCallIndex, -1) - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] + configProvider = fakePhaseFactory.NewCalledWithProvider[lastCallIndex] h.AssertEq(t, configProvider.Name(), "builder") + }) + + it("creates a phase and then runs it", func() { + h.AssertEq(t, fakePhase.CleanupCallCount, 1) + h.AssertEq(t, fakePhase.RunCallCount, 1) + }) + + it("configures the phase with the expected arguments", func() { h.AssertIncludeAllExpectedPatterns(t, configProvider.ContainerConfig().Cmd, []string{"-log-level", "debug"}, @@ -1946,162 +2091,249 @@ func testLifecycleExecution(t *testing.T, when spec.G, it spec.S) { }) it("configures the phase with the expected network mode", func() { - lifecycle := newTestLifecycleExec(t, false) - fakePhaseFactory := fakes.NewFakePhaseFactory() - expectedNetworkMode := "some-network-mode" + h.AssertEq(t, configProvider.HostConfig().NetworkMode, container.NetworkMode(providedNetworkMode)) + }) - err := lifecycle.Build(context.Background(), expectedNetworkMode, []string{}, fakePhaseFactory) + it("configures the phase with binds", func() { + h.AssertSliceContains(t, configProvider.HostConfig().Binds, providedVolumes...) 
+ }) + }) + + when("#ExtendBuild", func() { + var experimental bool + it.Before(func() { + experimental = true + err := lifecycle.ExtendBuild(context.Background(), fakeKanikoCache, fakePhaseFactory, experimental) h.AssertNil(t, err) lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 h.AssertNotEq(t, lastCallIndex, -1) - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertEq(t, configProvider.HostConfig().NetworkMode, container.NetworkMode(expectedNetworkMode)) + configProvider = fakePhaseFactory.NewCalledWithProvider[lastCallIndex] + h.AssertEq(t, configProvider.Name(), "extender") }) - it("configures the phase with binds", func() { - lifecycle := newTestLifecycleExec(t, false) - fakePhaseFactory := fakes.NewFakePhaseFactory() - expectedBind := "some-mount-source:/some-mount-target" + it("creates a phase and then runs it", func() { + h.AssertEq(t, fakePhase.CleanupCallCount, 1) + h.AssertEq(t, fakePhase.RunCallCount, 1) + }) - err := lifecycle.Build(context.Background(), "test", []string{expectedBind}, fakePhaseFactory) - h.AssertNil(t, err) + it("configures the phase with the expected arguments", func() { + h.AssertSliceContainsInOrder(t, configProvider.ContainerConfig().Cmd, "-log-level", "debug") + h.AssertSliceContainsInOrder(t, configProvider.ContainerConfig().Cmd, "-app", "/workspace") + }) - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) + it("configures the phase with binds", func() { + expectedBinds := providedVolumes + expectedBinds = append(expectedBinds, "some-kaniko-cache:/kaniko") - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertSliceContains(t, configProvider.HostConfig().Binds, expectedBind) + h.AssertSliceContains(t, configProvider.HostConfig().Binds, expectedBinds...) 
}) - }) - when("#Export", func() { - var ( - fakeBuildCache *fakes.FakeCache - fakeLaunchCache *fakes.FakeCache - ) + it("sets CNB_EXPERIMENTAL_MODE=warn in the environment", func() { + h.AssertSliceContains(t, configProvider.ContainerConfig().Env, "CNB_EXPERIMENTAL_MODE=warn") + }) - it.Before(func() { - fakeBuildCache = fakes.NewFakeCache() - fakeBuildCache.ReturnForType = cache.Volume - fakeBuildCache.ReturnForName = "some-cache" + it("configures the phase with the expected network mode", func() { + h.AssertEq(t, configProvider.HostConfig().NetworkMode, container.NetworkMode(providedNetworkMode)) + }) - fakeLaunchCache = fakes.NewFakeCache() - fakeLaunchCache.ReturnForType = cache.Volume - fakeLaunchCache.ReturnForName = "some-launch-cache" + it("configures the phase with root", func() { + h.AssertEq(t, configProvider.ContainerConfig().User, "root") }) - it("creates a phase and then runs it", func() { - lifecycle := newTestLifecycleExec(t, false) - fakePhase := &fakes.FakePhase{} - fakePhaseFactory := fakes.NewFakePhaseFactory(fakes.WhichReturnsForNew(fakePhase)) + when("experimental is false", func() { + it.Before(func() { + experimental = false + err := lifecycle.ExtendBuild(context.Background(), fakeKanikoCache, fakePhaseFactory, experimental) + h.AssertNil(t, err) - err := lifecycle.Export(context.Background(), "test", "test", false, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory) + lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 + h.AssertNotEq(t, lastCallIndex, -1) + + configProvider = fakePhaseFactory.NewCalledWithProvider[lastCallIndex] + h.AssertEq(t, configProvider.Name(), "extender") + }) + + it("CNB_EXPERIMENTAL_MODE=warn is not enable in the environment", func() { + h.AssertSliceNotContains(t, configProvider.ContainerConfig().Env, "CNB_EXPERIMENTAL_MODE=warn") + }) + }) + }) + + when("#ExtendRun", func() { + var experimental bool + it.Before(func() { + experimental = true + err := 
lifecycle.ExtendRun(context.Background(), fakeKanikoCache, fakePhaseFactory, "some-run-image", experimental) h.AssertNil(t, err) + lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 + h.AssertNotEq(t, lastCallIndex, -1) + + configProvider = fakePhaseFactory.NewCalledWithProvider[lastCallIndex] + h.AssertEq(t, configProvider.Name(), "extender") + }) + + it("creates a phase and then runs it", func() { h.AssertEq(t, fakePhase.CleanupCallCount, 1) h.AssertEq(t, fakePhase.RunCallCount, 1) }) + it("runs the phase with the run image", func() { + h.AssertEq(t, configProvider.ContainerConfig().Image, "some-run-image") + }) + it("configures the phase with the expected arguments", func() { - verboseLifecycle := newTestLifecycleExec(t, true) - fakePhaseFactory := fakes.NewFakePhaseFactory() - expectedRepoName := "some-repo-name" - expectedRunImage := "some-run-image" + h.AssertSliceContainsInOrder(t, configProvider.ContainerConfig().Entrypoint, "") // the run image may have an entrypoint configured, override it + h.AssertSliceContainsInOrder(t, configProvider.ContainerConfig().Cmd, "-log-level", "debug") + h.AssertSliceContainsInOrder(t, configProvider.ContainerConfig().Cmd, "-app", "/workspace") + h.AssertSliceContainsInOrder(t, configProvider.ContainerConfig().Cmd, "-kind", "run") + }) + + it("configures the phase with binds", func() { + expectedBinds := providedVolumes + expectedBinds = append(expectedBinds, "some-kaniko-cache:/kaniko") + + h.AssertSliceContains(t, configProvider.HostConfig().Binds, expectedBinds...) 
+ }) + + it("sets CNB_EXPERIMENTAL_MODE=warn in the environment", func() { + h.AssertSliceContains(t, configProvider.ContainerConfig().Env, "CNB_EXPERIMENTAL_MODE=warn") + }) + + it("configures the phase with the expected network mode", func() { + h.AssertEq(t, configProvider.HostConfig().NetworkMode, container.NetworkMode(providedNetworkMode)) + }) + + it("configures the phase with root", func() { + h.AssertEq(t, configProvider.ContainerConfig().User, "root") + }) + + when("experimental is false", func() { + it.Before(func() { + experimental = false + err := lifecycle.ExtendRun(context.Background(), fakeKanikoCache, fakePhaseFactory, "some-run-image", experimental) + h.AssertNil(t, err) + + lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 + h.AssertNotEq(t, lastCallIndex, -1) + + configProvider = fakePhaseFactory.NewCalledWithProvider[lastCallIndex] + h.AssertEq(t, configProvider.Name(), "extender") + }) - err := verboseLifecycle.Export(context.Background(), expectedRepoName, expectedRunImage, false, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory) + it("CNB_EXPERIMENTAL_MODE=warn is not enable in the environment", func() { + h.AssertSliceNotContains(t, configProvider.ContainerConfig().Env, "CNB_EXPERIMENTAL_MODE=warn") + }) + }) + }) + + when("#Export", func() { + it.Before(func() { + err := lifecycle.Export(context.Background(), fakeBuildCache, fakeLaunchCache, fakeKanikoCache, fakePhaseFactory) h.AssertNil(t, err) lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 h.AssertNotEq(t, lastCallIndex, -1) - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] + configProvider = fakePhaseFactory.NewCalledWithProvider[lastCallIndex] h.AssertEq(t, configProvider.Name(), "exporter") + }) + + it("creates a phase and then runs it", func() { + h.AssertEq(t, fakePhase.CleanupCallCount, 1) + h.AssertEq(t, fakePhase.RunCallCount, 1) + }) + + it("configures the phase with the expected arguments", func() 
{ h.AssertIncludeAllExpectedPatterns(t, configProvider.ContainerConfig().Cmd, []string{"-log-level", "debug"}, []string{"-cache-dir", "/cache"}, - []string{"-run-image", expectedRunImage}, - []string{expectedRepoName}, + []string{"-run-image", providedRunImage}, + []string{"-stack", "/layers/stack.toml"}, + []string{providedTargetImage}, ) + h.AssertSliceNotContains(t, configProvider.ContainerConfig().Cmd, "-run") }) - when("additional tags are specified", func() { - it("passes tag arguments to the exporter", func() { - verboseLifecycle := newTestLifecycleExec(t, true) - fakePhaseFactory := fakes.NewFakePhaseFactory() - expectedRepoName := "some-repo-name" - expectedRunImage := "some-run-image" - additionalTags := []string{"additional-tag-1", "additional-tag-2"} + when("platform >= 0.12", func() { + platformAPI = api.MustParse("0.12") - err := verboseLifecycle.Export(context.Background(), expectedRepoName, expectedRunImage, false, "", "test", fakes.NewFakeCache(), fakes.NewFakeCache(), additionalTags, fakePhaseFactory) - h.AssertNil(t, err) + it("provides -run instead of -stack", func() { + h.AssertIncludeAllExpectedPatterns(t, + configProvider.ContainerConfig().Cmd, + []string{"-run", "/layers/run.toml"}, + ) + h.AssertSliceNotContains(t, configProvider.ContainerConfig().Cmd, "-stack") + }) - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) + when("there are extensions", func() { + providedOrderExt = dist.Order{dist.OrderEntry{Group: []dist.ModuleRef{ /* don't care */ }}} + + when("for run", func() { + extensionsForRun = true + + it("sets CNB_EXPERIMENTAL_MODE=warn in the environment", func() { + h.AssertSliceContains(t, configProvider.ContainerConfig().Env, "CNB_EXPERIMENTAL_MODE=warn") + }) + + it("configures the phase with binds", func() { + expectedBinds := []string{"some-cache:/cache", "some-launch-cache:/launch-cache", "some-kaniko-cache:/kaniko"} + + h.AssertSliceContains(t, 
configProvider.HostConfig().Binds, expectedBinds...) + }) + }) + }) + + when("layout is true", func() { + providedLayout = true + + it("it configures the phase with access to provided volumes", func() { + // this is required to read the /layout-repo + h.AssertSliceContains(t, configProvider.HostConfig().Binds, providedVolumes...) + }) + + it("configures the phase with the expected environment variables", func() { + layoutDir := filepath.Join(paths.RootDir, "layout-repo") + h.AssertSliceContains(t, + configProvider.ContainerConfig().Env, "CNB_USE_LAYOUT=true", fmt.Sprintf("CNB_LAYOUT_DIR=%s", layoutDir), + ) + }) + }) + }) - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertEq(t, configProvider.Name(), "exporter") + when("additional tags are specified", func() { + it("passes tag arguments to the exporter", func() { h.AssertIncludeAllExpectedPatterns(t, configProvider.ContainerConfig().Cmd, []string{"-log-level", "debug"}, []string{"-cache-dir", "/cache"}, - []string{"-run-image", expectedRunImage}, - []string{expectedRepoName, additionalTags[0], additionalTags[1]}, + []string{"-run-image", providedRunImage}, + []string{providedTargetImage, providedAdditionalTags[0], providedAdditionalTags[1]}, ) }) }) when("platform >= 0.7", func() { - it("doesn't hint at default process type", func() { - expectedRepoName := "some-repo-name" - expectedRunImage := "some-run-image" - additionalTags := []string{"additional-tag-1", "additional-tag-2"} - fakeBuilder, err := fakes.NewFakeBuilder(fakes.WithSupportedPlatformAPIs([]*api.Version{api.MustParse("0.7")})) - h.AssertNil(t, err) - lifecycle := newTestLifecycleExec(t, true, fakes.WithBuilder(fakeBuilder)) - fakePhaseFactory := fakes.NewFakePhaseFactory() - - err = lifecycle.Export(context.Background(), expectedRepoName, expectedRunImage, false, "", "test", fakes.NewFakeCache(), fakes.NewFakeCache(), additionalTags, fakePhaseFactory) - h.AssertNil(t, err) + platformAPI = api.MustParse("0.7") - 
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertEq(t, configProvider.Name(), "exporter") + it("doesn't hint at default process type", func() { h.AssertIncludeAllExpectedPatterns(t, configProvider.ContainerConfig().Cmd, []string{"-log-level", "debug"}, []string{"-cache-dir", "/cache"}, - []string{expectedRepoName, additionalTags[0], additionalTags[1]}, + []string{providedTargetImage, providedAdditionalTags[0], providedAdditionalTags[1]}, ) h.AssertSliceNotContains(t, configProvider.ContainerConfig().Cmd, "-run-image") }) }) when("using cache image", func() { - it.Before(func() { - fakeBuildCache.ReturnForType = cache.Image - fakeBuildCache.ReturnForName = "some-cache-image" - }) + fakeBuildCache = newFakeImageCache() it("configures phase with cache image", func() { - verboseLifecycle := newTestLifecycleExec(t, true) - fakePhaseFactory := fakes.NewFakePhaseFactory() - expectedRepoName := "some-repo-name" - expectedRunImage := "some-run-image" - - err := verboseLifecycle.Export(context.Background(), expectedRepoName, expectedRunImage, false, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertEq(t, configProvider.Name(), "exporter") - h.AssertSliceNotContains(t, configProvider.HostConfig().Binds, ":/cache") h.AssertIncludeAllExpectedPatterns(t, configProvider.ContainerConfig().Cmd, @@ -2111,155 +2343,66 @@ func testLifecycleExecution(t *testing.T, when spec.G, it spec.S) { }) when("publish", func() { - it("runs the phase with the lifecycle image", func() { - lifecycle := newTestLifecycleExec(t, true, func(options *build.LifecycleOptions) { - options.LifecycleImage = "some-lifecycle-image" - }) - 
fakePhaseFactory := fakes.NewFakePhaseFactory() - - err := lifecycle.Export(context.Background(), "test", "test", true, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) + providedPublish = true - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertEq(t, configProvider.ContainerConfig().Image, "some-lifecycle-image") - }) - - it("sets the CNB_USER_ID and CNB_GROUP_ID in the environment", func() { - fakeBuilder, err := fakes.NewFakeBuilder(fakes.WithUID(2222), fakes.WithGID(3333)) - h.AssertNil(t, err) - lifecycle := newTestLifecycleExec(t, false, fakes.WithBuilder(fakeBuilder)) - fakePhaseFactory := fakes.NewFakePhaseFactory() - - err = lifecycle.Export(context.Background(), "test", "test", true, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory) - h.AssertNil(t, err) + when("lifecycle image", func() { + lifecycleOps = append(lifecycleOps, func(options *build.LifecycleOptions) { + options.LifecycleImage = "some-lifecycle-image" + }) - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) + it("runs the phase with the lifecycle image", func() { + h.AssertEq(t, configProvider.ContainerConfig().Image, "some-lifecycle-image") + }) + }) - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertSliceContains(t, configProvider.ContainerConfig().Env, "CNB_USER_ID=2222") - h.AssertSliceContains(t, configProvider.ContainerConfig().Env, "CNB_GROUP_ID=3333") + it("sets the CNB_USER_ID and CNB_GROUP_ID in the environment", func() { + h.AssertSliceContains(t, configProvider.ContainerConfig().Env, fmt.Sprintf("CNB_USER_ID=%d", providedUID)) + h.AssertSliceContains(t, configProvider.ContainerConfig().Env, fmt.Sprintf("CNB_GROUP_ID=%d", providedGID)) }) it("configures the phase with registry access", 
func() { - lifecycle := newTestLifecycleExec(t, false) - fakePhaseFactory := fakes.NewFakePhaseFactory() - expectedRepos := []string{"some-repo-name", "some-run-image"} - - err := lifecycle.Export(context.Background(), expectedRepos[0], expectedRepos[1], true, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] h.AssertSliceContains(t, configProvider.ContainerConfig().Env, "CNB_REGISTRY_AUTH={}") }) it("configures the phase with root", func() { - lifecycle := newTestLifecycleExec(t, false) - fakePhaseFactory := fakes.NewFakePhaseFactory() - - err := lifecycle.Export(context.Background(), "test", "test", true, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] h.AssertEq(t, configProvider.ContainerConfig().User, "root") }) it("configures the phase with the expected network mode", func() { - lifecycle := newTestLifecycleExec(t, false) - fakePhaseFactory := fakes.NewFakePhaseFactory() - expectedNetworkMode := "some-network-mode" - - err := lifecycle.Export(context.Background(), "test", "test", true, "", expectedNetworkMode, fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertEq(t, configProvider.HostConfig().NetworkMode, container.NetworkMode(expectedNetworkMode)) + h.AssertEq(t, configProvider.HostConfig().NetworkMode, container.NetworkMode(providedNetworkMode)) }) it("configures the phase with 
binds", func() { - lifecycle := newTestLifecycleExec(t, false) - fakePhaseFactory := fakes.NewFakePhaseFactory() expectedBind := "some-cache:/cache" - err := lifecycle.Export(context.Background(), "test", "test", true, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] h.AssertSliceContains(t, configProvider.HostConfig().Binds, expectedBind) }) it("configures the phase to write stack toml", func() { - lifecycle := newTestLifecycleExec(t, false) - fakePhaseFactory := fakes.NewFakePhaseFactory() - expectedBinds := []string{"some-cache:/cache", "some-launch-cache:/launch-cache"} - - err := lifecycle.Export(context.Background(), "test", "test", false, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] + expectedBinds := []string{"some-cache:/cache"} h.AssertSliceContains(t, configProvider.HostConfig().Binds, expectedBinds...) 
- h.AssertEq(t, len(configProvider.ContainerOps()), 2) + h.AssertEq(t, len(configProvider.ContainerOps()), 3) h.AssertFunctionName(t, configProvider.ContainerOps()[0], "WriteStackToml") - h.AssertFunctionName(t, configProvider.ContainerOps()[1], "WriteProjectMetadata") + h.AssertFunctionName(t, configProvider.ContainerOps()[1], "WriteRunToml") + h.AssertFunctionName(t, configProvider.ContainerOps()[2], "WriteProjectMetadata") }) - it("configures the phase with default process type", func() { - lifecycle := newTestLifecycleExec(t, true, func(options *build.LifecycleOptions) { + when("default process type", func() { + lifecycleOps = append(lifecycleOps, func(options *build.LifecycleOptions) { options.DefaultProcessType = "test-process" }) - fakePhaseFactory := fakes.NewFakePhaseFactory() - expectedDefaultProc := []string{"-process-type", "test-process"} - - err := lifecycle.Export(context.Background(), "test", "test", true, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertIncludeAllExpectedPatterns(t, configProvider.ContainerConfig().Cmd, expectedDefaultProc) + it("configures the phase with default process type", func() { + expectedDefaultProc := []string{"-process-type", "test-process"} + h.AssertIncludeAllExpectedPatterns(t, configProvider.ContainerConfig().Cmd, expectedDefaultProc) + }) }) when("using cache image and publishing", func() { - it.Before(func() { - fakeBuildCache.ReturnForType = cache.Image - fakeBuildCache.ReturnForName = "some-cache-image" - }) + fakeBuildCache = newFakeImageCache() it("configures phase with cache image", func() { - verboseLifecycle := newTestLifecycleExec(t, true) - fakePhaseFactory := fakes.NewFakePhaseFactory() - expectedRepoName := "some-repo-name" - expectedRunImage := "some-run-image" - - err 
:= verboseLifecycle.Export(context.Background(), expectedRepoName, expectedRunImage, true, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertEq(t, configProvider.Name(), "exporter") - h.AssertSliceNotContains(t, configProvider.HostConfig().Binds, ":/cache") h.AssertIncludeAllExpectedPatterns(t, configProvider.ContainerConfig().Cmd, @@ -2269,56 +2412,26 @@ func testLifecycleExecution(t *testing.T, when spec.G, it spec.S) { }) when("platform 0.3", func() { - it("doesn't hint at default process type", func() { - fakeBuilder, err := fakes.NewFakeBuilder(fakes.WithSupportedPlatformAPIs([]*api.Version{api.MustParse("0.3")})) - h.AssertNil(t, err) - lifecycle := newTestLifecycleExec(t, true, fakes.WithBuilder(fakeBuilder)) - fakePhaseFactory := fakes.NewFakePhaseFactory() - - err = lifecycle.Export(context.Background(), "test", "test", true, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory) - h.AssertNil(t, err) + platformAPI = api.MustParse("0.3") - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] + it("doesn't hint at default process type", func() { h.AssertSliceNotContains(t, configProvider.ContainerConfig().Cmd, "-process-type") }) }) when("platform 0.4", func() { - it("hints at default process type", func() { - fakeBuilder, err := fakes.NewFakeBuilder(fakes.WithSupportedPlatformAPIs([]*api.Version{api.MustParse("0.4")})) - h.AssertNil(t, err) - lifecycle := newTestLifecycleExec(t, true, fakes.WithBuilder(fakeBuilder)) - fakePhaseFactory := fakes.NewFakePhaseFactory() - - err = lifecycle.Export(context.Background(), "test", "test", true, "", "test", fakeBuildCache, 
fakeLaunchCache, []string{}, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) + platformAPI = api.MustParse("0.4") - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] + it("hints at default process type", func() { h.AssertIncludeAllExpectedPatterns(t, configProvider.ContainerConfig().Cmd, []string{"-process-type", "web"}) }) }) when("platform >= 0.6", func() { + platformAPI = api.MustParse("0.6") + when("no user provided process type is present", func() { it("doesn't provide 'web' as default process type", func() { - fakeBuilder, err := fakes.NewFakeBuilder(fakes.WithSupportedPlatformAPIs([]*api.Version{api.MustParse("0.6")})) - h.AssertNil(t, err) - lifecycle := newTestLifecycleExec(t, true, fakes.WithBuilder(fakeBuilder)) - fakePhaseFactory := fakes.NewFakePhaseFactory() - - err = lifecycle.Export(context.Background(), "test", "test", true, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] h.AssertSliceNotContains(t, configProvider.ContainerConfig().Cmd, "-process-type") }) }) @@ -2326,81 +2439,36 @@ func testLifecycleExecution(t *testing.T, when spec.G, it spec.S) { }) when("publish is false", func() { - it("runs the phase with the lifecycle image", func() { - lifecycle := newTestLifecycleExec(t, true, func(options *build.LifecycleOptions) { + when("lifecycle image", func() { + lifecycleOps = append(lifecycleOps, func(options *build.LifecycleOptions) { options.LifecycleImage = "some-lifecycle-image" }) - fakePhaseFactory := fakes.NewFakePhaseFactory() - - err := lifecycle.Export(context.Background(), "test", "test", false, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory) - h.AssertNil(t, err) 
- - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertEq(t, configProvider.ContainerConfig().Image, "some-lifecycle-image") + it("runs the phase with the lifecycle image", func() { + h.AssertEq(t, configProvider.ContainerConfig().Image, "some-lifecycle-image") + }) }) it("sets the CNB_USER_ID and CNB_GROUP_ID in the environment", func() { - fakeBuilder, err := fakes.NewFakeBuilder(fakes.WithUID(2222), fakes.WithGID(3333)) - h.AssertNil(t, err) - lifecycle := newTestLifecycleExec(t, false, fakes.WithBuilder(fakeBuilder)) - fakePhaseFactory := fakes.NewFakePhaseFactory() - - err = lifecycle.Export(context.Background(), "test", "test", false, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertSliceContains(t, configProvider.ContainerConfig().Env, "CNB_USER_ID=2222") - h.AssertSliceContains(t, configProvider.ContainerConfig().Env, "CNB_GROUP_ID=3333") + h.AssertSliceContains(t, configProvider.ContainerConfig().Env, fmt.Sprintf("CNB_USER_ID=%d", providedUID)) + h.AssertSliceContains(t, configProvider.ContainerConfig().Env, fmt.Sprintf("CNB_GROUP_ID=%d", providedGID)) }) it("configures the phase with daemon access", func() { - lifecycle := newTestLifecycleExec(t, false) - fakePhaseFactory := fakes.NewFakePhaseFactory() - - err := lifecycle.Export(context.Background(), "test", "test", false, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] h.AssertEq(t, 
configProvider.ContainerConfig().User, "root") h.AssertSliceContains(t, configProvider.HostConfig().Binds, "/var/run/docker.sock:/var/run/docker.sock") }) - it("configures the phase with daemon access with tcp docker-host", func() { - lifecycle := newTestLifecycleExec(t, false) - fakePhaseFactory := fakes.NewFakePhaseFactory() - - err := lifecycle.Export(context.Background(), "test", "test", false, "tcp://localhost:1234", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) + when("tcp docker-host", func() { + providedDockerHost = `tcp://localhost:1234` - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertSliceNotContains(t, configProvider.HostConfig().Binds, "/var/run/docker.sock:/var/run/docker.sock") - h.AssertSliceContains(t, configProvider.ContainerConfig().Env, "DOCKER_HOST=tcp://localhost:1234") + it("configures the phase with daemon access with tcp docker-host", func() { + h.AssertSliceNotContains(t, configProvider.HostConfig().Binds, "/var/run/docker.sock:/var/run/docker.sock") + h.AssertSliceContains(t, configProvider.ContainerConfig().Env, "DOCKER_HOST=tcp://localhost:1234") + }) }) it("configures the phase with the expected arguments", func() { - verboseLifecycle := newTestLifecycleExec(t, true) - fakePhaseFactory := fakes.NewFakePhaseFactory() - - err := verboseLifecycle.Export(context.Background(), "test", "test", false, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertEq(t, configProvider.Name(), "exporter") h.AssertIncludeAllExpectedPatterns(t, configProvider.ContainerConfig().Cmd, []string{"-daemon"}, @@ -2409,220 +2477,249 @@ func 
testLifecycleExecution(t *testing.T, when spec.G, it spec.S) { }) it("configures the phase with the expected network mode", func() { - lifecycle := newTestLifecycleExec(t, false) - fakePhaseFactory := fakes.NewFakePhaseFactory() - expectedNetworkMode := "some-network-mode" - - err := lifecycle.Export(context.Background(), "test", "test", false, "", expectedNetworkMode, fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertEq(t, configProvider.HostConfig().NetworkMode, container.NetworkMode(expectedNetworkMode)) + h.AssertEq(t, configProvider.HostConfig().NetworkMode, container.NetworkMode(providedNetworkMode)) }) it("configures the phase with binds", func() { - lifecycle := newTestLifecycleExec(t, false) - fakePhaseFactory := fakes.NewFakePhaseFactory() expectedBinds := []string{"some-cache:/cache", "some-launch-cache:/launch-cache"} - err := lifecycle.Export(context.Background(), "test", "test", false, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] h.AssertSliceContains(t, configProvider.HostConfig().Binds, expectedBinds...) 
}) it("configures the phase to write stack toml", func() { - lifecycle := newTestLifecycleExec(t, false) - fakePhaseFactory := fakes.NewFakePhaseFactory() expectedBinds := []string{"some-cache:/cache", "some-launch-cache:/launch-cache"} - - err := lifecycle.Export(context.Background(), "test", "test", false, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] h.AssertSliceContains(t, configProvider.HostConfig().Binds, expectedBinds...) - h.AssertEq(t, len(configProvider.ContainerOps()), 2) + h.AssertEq(t, len(configProvider.ContainerOps()), 3) h.AssertFunctionName(t, configProvider.ContainerOps()[0], "WriteStackToml") - h.AssertFunctionName(t, configProvider.ContainerOps()[1], "WriteProjectMetadata") - }) - - it("configures the phase with default process type", func() { - lifecycle := newTestLifecycleExec(t, true, func(options *build.LifecycleOptions) { - options.DefaultProcessType = "test-process" - }) - fakePhaseFactory := fakes.NewFakePhaseFactory() - expectedDefaultProc := []string{"-process-type", "test-process"} - - err := lifecycle.Export(context.Background(), "test", "test", false, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertIncludeAllExpectedPatterns(t, configProvider.ContainerConfig().Cmd, expectedDefaultProc) + h.AssertFunctionName(t, configProvider.ContainerOps()[1], "WriteRunToml") + h.AssertFunctionName(t, configProvider.ContainerOps()[2], "WriteProjectMetadata") }) - when("platform 0.3", func() { - it("doesn't hint at default process type", func() { - fakeBuilder, err := 
fakes.NewFakeBuilder(fakes.WithSupportedPlatformAPIs([]*api.Version{api.MustParse("0.3")})) - h.AssertNil(t, err) - lifecycle := newTestLifecycleExec(t, true, fakes.WithBuilder(fakeBuilder)) - fakePhaseFactory := fakes.NewFakePhaseFactory() - - err = lifecycle.Export(context.Background(), "test", "test", false, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory) - h.AssertNil(t, err) - - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) + when("default process type", func() { + when("provided", func() { + lifecycleOps = append(lifecycleOps, func(options *build.LifecycleOptions) { + options.DefaultProcessType = "test-process" + }) - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertSliceNotContains(t, configProvider.ContainerConfig().Cmd, "-process-type") + it("configures the phase with default process type", func() { + expectedDefaultProc := []string{"-process-type", "test-process"} + h.AssertIncludeAllExpectedPatterns(t, configProvider.ContainerConfig().Cmd, expectedDefaultProc) + }) }) - }) - - when("platform 0.4", func() { - it("hints at default process type", func() { - fakeBuilder, err := fakes.NewFakeBuilder(fakes.WithSupportedPlatformAPIs([]*api.Version{api.MustParse("0.4")})) - h.AssertNil(t, err) - lifecycle := newTestLifecycleExec(t, true, fakes.WithBuilder(fakeBuilder)) - fakePhaseFactory := fakes.NewFakePhaseFactory() - - err = lifecycle.Export(context.Background(), "test", "test", false, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory) - h.AssertNil(t, err) - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) + when("platform 0.3", func() { + platformAPI = api.MustParse("0.3") - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertIncludeAllExpectedPatterns(t, configProvider.ContainerConfig().Cmd, []string{"-process-type", "web"}) + 
it("doesn't hint at default process type", func() { + h.AssertSliceNotContains(t, configProvider.ContainerConfig().Cmd, "-process-type") + }) }) - }) - when("platform >= 0.6", func() { - when("no user provided process type is present", func() { - it("doesn't provide 'web' as default process type", func() { - fakeBuilder, err := fakes.NewFakeBuilder(fakes.WithSupportedPlatformAPIs([]*api.Version{api.MustParse("0.6")})) - h.AssertNil(t, err) - lifecycle := newTestLifecycleExec(t, true, fakes.WithBuilder(fakeBuilder)) - fakePhaseFactory := fakes.NewFakePhaseFactory() + when("platform 0.4", func() { + platformAPI = api.MustParse("0.4") - err = lifecycle.Export(context.Background(), "test", "test", false, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory) - h.AssertNil(t, err) + it("hints at default process type", func() { + h.AssertIncludeAllExpectedPatterns(t, configProvider.ContainerConfig().Cmd, []string{"-process-type", "web"}) + }) + }) - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) + when("platform >= 0.6", func() { + platformAPI = api.MustParse("0.6") - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] - h.AssertSliceNotContains(t, configProvider.ContainerConfig().Cmd, "-process-type") + when("no user provided process type is present", func() { + it("doesn't provide 'web' as default process type", func() { + h.AssertSliceNotContains(t, configProvider.ContainerConfig().Cmd, "-process-type") + }) }) }) }) }) when("override GID", func() { - var ( - lifecycle *build.LifecycleExecution - fakePhaseFactory *fakes.FakePhaseFactory - ) - fakePhase := &fakes.FakePhase{} - fakePhaseFactory = fakes.NewFakePhaseFactory(fakes.WhichReturnsForNew(fakePhase)) - when("override GID is provided", func() { - it.Before(func() { - lifecycle = newTestLifecycleExec(t, true, func(options *build.LifecycleOptions) { - options.GID = 2 - }) + lifecycleOps = append(lifecycleOps, 
func(options *build.LifecycleOptions) { + options.GID = 2 }) + it("configures the phase with the expected arguments", func() { - err := lifecycle.Export(context.Background(), "test", "test", false, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory) - h.AssertNil(t, err) - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] h.AssertIncludeAllExpectedPatterns(t, configProvider.ContainerConfig().Cmd, []string{"-gid", "2"}, ) }) }) + when("override GID is not provided", func() { - it.Before(func() { - lifecycle = newTestLifecycleExec(t, true, func(options *build.LifecycleOptions) { - options.GID = -1 - }) + lifecycleOps = append(lifecycleOps, func(options *build.LifecycleOptions) { + options.GID = -1 }) + it("gid is not added to the expected arguments", func() { - err := lifecycle.Export(context.Background(), "test", "test", false, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory) - h.AssertNil(t, err) - lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1 - h.AssertNotEq(t, lastCallIndex, -1) - configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex] h.AssertSliceNotContains(t, configProvider.ContainerConfig().Cmd, "-gid") }) }) }) - when("interactive mode", func() { - it("provides the termui readLayersFunc as a post container operation", func() { - lifecycle := newTestLifecycleExec(t, false, func(opts *build.LifecycleOptions) { - opts.Interactive = true - opts.Termui = &fakes.FakeTermui{ReadLayersFunc: func(_ io.ReadCloser) { - // no-op - }} + when("override UID", func() { + when("override UID is provided", func() { + lifecycleOps = append(lifecycleOps, func(options *build.LifecycleOptions) { + options.UID = 1001 }) - fakePhase := &fakes.FakePhase{} - fakePhaseFactory := fakes.NewFakePhaseFactory(fakes.WhichReturnsForNew(fakePhase)) - err := 
lifecycle.Export(context.Background(), "test", "test", false, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory) - h.AssertNil(t, err) + it("configures the phase with the expected arguments", func() { + h.AssertIncludeAllExpectedPatterns(t, + configProvider.ContainerConfig().Cmd, + []string{"-uid", "1001"}, + ) + }) + }) - h.AssertEq(t, fakePhase.CleanupCallCount, 1) - h.AssertEq(t, fakePhase.RunCallCount, 1) + when("override UID is not provided", func() { + lifecycleOps = append(lifecycleOps, func(options *build.LifecycleOptions) { + options.UID = -1 + }) + + it("uid is not added to the expected arguments", func() { + h.AssertSliceNotContains(t, configProvider.ContainerConfig().Cmd, "-uid") + }) + }) + }) + + when("interactive mode", func() { + lifecycleOps = append(lifecycleOps, func(opts *build.LifecycleOptions) { + opts.Interactive = true + opts.Termui = &fakes.FakeTermui{ReadLayersFunc: func(_ io.ReadCloser) { + // no-op + }} + }) - provider := fakePhaseFactory.NewCalledWithProvider[0] - h.AssertEq(t, len(provider.PostContainerRunOps()), 2) - h.AssertFunctionName(t, provider.PostContainerRunOps()[0], "EnsureVolumeAccess") - h.AssertFunctionName(t, provider.PostContainerRunOps()[1], "CopyOut") + it("provides the termui readLayersFunc as a post container operation", func() { + h.AssertEq(t, len(configProvider.PostContainerRunOps()), 2) + h.AssertFunctionName(t, configProvider.PostContainerRunOps()[0], "EnsureVolumeAccess") + h.AssertFunctionName(t, configProvider.PostContainerRunOps()[1], "CopyOut") }) }) when("sbom destination directory is provided", func() { + lifecycleOps = append(lifecycleOps, func(opts *build.LifecycleOptions) { + opts.SBOMDestinationDir = "some-destination-dir" + }) + + it("provides copy-sbom-func as a post container operation", func() { + h.AssertEq(t, len(configProvider.PostContainerRunOps()), 2) + h.AssertFunctionName(t, configProvider.PostContainerRunOps()[0], "EnsureVolumeAccess") + h.AssertFunctionName(t, 
configProvider.PostContainerRunOps()[1], "CopyOut") + }) + }) + + when("report destination directory is provided", func() { + lifecycleOps = append(lifecycleOps, func(opts *build.LifecycleOptions) { + opts.ReportDestinationDir = "a-destination-dir" + }) + it("provides copy-sbom-func as a post container operation", func() { - lifecycle := newTestLifecycleExec(t, false, func(opts *build.LifecycleOptions) { - opts.SBOMDestinationDir = "some-destination-dir" + h.AssertEq(t, len(configProvider.PostContainerRunOps()), 2) + h.AssertFunctionName(t, configProvider.PostContainerRunOps()[0], "EnsureVolumeAccess") + h.AssertFunctionName(t, configProvider.PostContainerRunOps()[1], "CopyOut") + }) + }) + + when("--creation-time", func() { + when("platform < 0.9", func() { + platformAPI = api.MustParse("0.8") + + intTime, _ := strconv.ParseInt("1234567890", 10, 64) + providedTime := time.Unix(intTime, 0).UTC() + + lifecycleOps = append(lifecycleOps, func(baseOpts *build.LifecycleOptions) { + baseOpts.CreationTime = &providedTime }) - fakePhase := &fakes.FakePhase{} - fakePhaseFactory := fakes.NewFakePhaseFactory(fakes.WhichReturnsForNew(fakePhase)) - err := lifecycle.Create(context.Background(), false, "", false, "test", "test", "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{}, fakePhaseFactory) - h.AssertNil(t, err) + it("is ignored", func() { + h.AssertSliceNotContains(t, configProvider.ContainerConfig().Env, "SOURCE_DATE_EPOCH=1234567890") + }) + }) - h.AssertEq(t, fakePhase.CleanupCallCount, 1) - h.AssertEq(t, fakePhase.RunCallCount, 1) + when("platform >= 0.9", func() { + platformAPI = api.MustParse("0.9") + + when("provided", func() { + intTime, _ := strconv.ParseInt("1234567890", 10, 64) + providedTime := time.Unix(intTime, 0).UTC() + + lifecycleOps = append(lifecycleOps, func(baseOpts *build.LifecycleOptions) { + baseOpts.CreationTime = &providedTime + }) + + it("configures the phase with env SOURCE_DATE_EPOCH", func() { + h.AssertSliceContains(t, 
configProvider.ContainerConfig().Env, "SOURCE_DATE_EPOCH=1234567890") + }) + }) - provider := fakePhaseFactory.NewCalledWithProvider[0] - h.AssertEq(t, len(provider.PostContainerRunOps()), 2) - h.AssertFunctionName(t, provider.PostContainerRunOps()[0], "EnsureVolumeAccess") - h.AssertFunctionName(t, provider.PostContainerRunOps()[1], "CopyOut") + when("not provided", func() { + it("does not panic", func() { + // no-op + }) + }) }) }) }) } -func newTestLifecycleExecErr(t *testing.T, logVerbose bool, ops ...func(*build.LifecycleOptions)) (*build.LifecycleExecution, error) { - docker, err := client.NewClientWithOpts(client.FromEnv, client.WithVersion("1.38")) +func newFakeVolumeCache() *fakes.FakeCache { + c := fakes.NewFakeCache() + c.ReturnForType = cache.Volume + c.ReturnForName = "some-cache" + return c +} + +func newFakeImageCache() *fakes.FakeCache { + c := fakes.NewFakeCache() + c.ReturnForType = cache.Image + c.ReturnForName = "some-cache-image" + return c +} + +func newFakeFetchRunImageFunc(f *fakeImageFetcher) func(name string) (string, error) { + return func(name string) (string, error) { + return fmt.Sprintf("ephemeral-%s", name), f.fetchRunImage(name) + } +} + +type fakeImageFetcher struct { + callCount int + calledWithArgAtCall map[int]string +} + +func (f *fakeImageFetcher) fetchRunImage(name string) error { + f.calledWithArgAtCall[f.callCount] = name + f.callCount++ + return nil +} + +type fakeDockerClient struct { + nNetworks int + build.DockerClient +} + +func (f *fakeDockerClient) NetworkList(ctx context.Context, opts client.NetworkListOptions) (client.NetworkListResult, error) { + ret := make([]network.Summary, f.nNetworks) + return client.NetworkListResult{Items: ret}, nil +} + +func (f *fakeDockerClient) NetworkCreate(ctx context.Context, name string, options client.NetworkCreateOptions) (client.NetworkCreateResult, error) { + f.nNetworks++ + return client.NetworkCreateResult{}, nil +} + +func (f *fakeDockerClient) NetworkRemove(ctx 
context.Context, network string, options client.NetworkRemoveOptions) (client.NetworkRemoveResult, error) { + f.nNetworks-- + return client.NetworkRemoveResult{}, nil +} + +func newTestLifecycleExecErr(t *testing.T, logVerbose bool, tmpDir string, ops ...func(*build.LifecycleOptions)) (*build.LifecycleExecution, error) { + docker, err := client.New(client.FromEnv) h.AssertNil(t, err) var outBuf bytes.Buffer @@ -2647,13 +2744,13 @@ func newTestLifecycleExecErr(t *testing.T, logVerbose bool, ops ...func(*build.L op(&opts) } - return build.NewLifecycleExecution(logger, docker, opts) + return build.NewLifecycleExecution(logger, docker, tmpDir, opts) } -func newTestLifecycleExec(t *testing.T, logVerbose bool, ops ...func(*build.LifecycleOptions)) *build.LifecycleExecution { +func newTestLifecycleExec(t *testing.T, logVerbose bool, tmpDir string, ops ...func(*build.LifecycleOptions)) *build.LifecycleExecution { t.Helper() - lifecycleExec, err := newTestLifecycleExecErr(t, logVerbose, ops...) + lifecycleExec, err := newTestLifecycleExecErr(t, logVerbose, tmpDir, ops...) 
h.AssertNil(t, err) return lifecycleExec } diff --git a/internal/build/lifecycle_executor.go b/internal/build/lifecycle_executor.go index b0c5973b9b..c15dcc5a78 100644 --- a/internal/build/lifecycle_executor.go +++ b/internal/build/lifecycle_executor.go @@ -3,18 +3,19 @@ package build import ( "context" "io" - "math/rand" + "os" "time" "github.com/buildpacks/imgutil" "github.com/buildpacks/lifecycle/api" - "github.com/buildpacks/lifecycle/platform" - "github.com/docker/docker/client" + "github.com/buildpacks/lifecycle/platform/files" + "github.com/google/go-containerregistry/pkg/authn" "github.com/google/go-containerregistry/pkg/name" "github.com/buildpacks/pack/internal/builder" - "github.com/buildpacks/pack/internal/cache" "github.com/buildpacks/pack/internal/container" + "github.com/buildpacks/pack/pkg/cache" + "github.com/buildpacks/pack/pkg/dist" "github.com/buildpacks/pack/pkg/logging" ) @@ -27,6 +28,13 @@ var ( api.MustParse("0.6"), api.MustParse("0.7"), api.MustParse("0.8"), + api.MustParse("0.9"), + api.MustParse("0.10"), + api.MustParse("0.11"), + api.MustParse("0.12"), + api.MustParse("0.13"), + api.MustParse("0.14"), + api.MustParse("0.15"), } ) @@ -36,12 +44,15 @@ type Builder interface { GID() int LifecycleDescriptor() builder.LifecycleDescriptor Stack() builder.StackMetadata + RunImages() []builder.RunImageMetadata Image() imgutil.Image + OrderExtensions() dist.Order + System() dist.System } type LifecycleExecutor struct { logger logging.Logger - docker client.CommonAPIClient + docker DockerClient } type Cache interface { @@ -58,45 +69,59 @@ type Termui interface { ReadLayers(reader io.ReadCloser) error } -func init() { - rand.Seed(time.Now().UTC().UnixNano()) -} - type LifecycleOptions struct { - AppPath string - Image name.Reference - Builder Builder - LifecycleImage string - RunImage string - ProjectMetadata platform.ProjectMetadata - ClearCache bool - Publish bool - TrustBuilder bool - UseCreator bool - Interactive bool - Termui Termui - 
DockerHost string - CacheImage string - HTTPProxy string - HTTPSProxy string - NoProxy string - Network string - AdditionalTags []string - Volumes []string - DefaultProcessType string - FileFilter func(string) bool - Workspace string - GID int - PreviousImage string - SBOMDestinationDir string + AppPath string + Image name.Reference + Builder Builder + BuilderImage string // differs from Builder.Name() and Builder.Image().Name() in that it includes the registry context + LifecycleImage string + LifecycleApis []string // optional - populated only if custom lifecycle image is downloaded, from that lifecycle image's labels. + RunImage string + FetchRunImageWithLifecycleLayer func(name string) (string, error) + ProjectMetadata files.ProjectMetadata + ClearCache bool + Publish bool + TrustBuilder bool + UseCreator bool + UseCreatorWithExtensions bool + Interactive bool + Layout bool + Termui Termui + DockerHost string + Cache cache.CacheOpts + ExecutionEnvironment string + CacheImage string + HTTPProxy string + HTTPSProxy string + NoProxy string + Network string + AdditionalTags []string + Volumes []string + InsecureRegistries []string + DefaultProcessType string + FileFilter func(string) bool + Workspace string + GID int + UID int + PreviousImage string + ReportDestinationDir string + SBOMDestinationDir string + CreationTime *time.Time + Keychain authn.Keychain + EnableUsernsHost bool } -func NewLifecycleExecutor(logger logging.Logger, docker client.CommonAPIClient) *LifecycleExecutor { +func NewLifecycleExecutor(logger logging.Logger, docker DockerClient) *LifecycleExecutor { return &LifecycleExecutor{logger: logger, docker: docker} } func (l *LifecycleExecutor) Execute(ctx context.Context, opts LifecycleOptions) error { - lifecycleExec, err := NewLifecycleExecution(l.logger, l.docker, opts) + tmpDir, err := os.MkdirTemp("", "pack.tmp") + if err != nil { + return err + } + + lifecycleExec, err := NewLifecycleExecution(l.logger, l.docker, tmpDir, opts) if err != nil { 
return err } diff --git a/internal/build/mount_paths.go b/internal/build/mount_paths.go index 133919ee7f..ad4bd91136 100644 --- a/internal/build/mount_paths.go +++ b/internal/build/mount_paths.go @@ -38,10 +38,18 @@ func (m mountPaths) stackPath() string { return m.join(m.layersDir(), "stack.toml") } +func (m mountPaths) runPath() string { + return m.join(m.layersDir(), "run.toml") +} + func (m mountPaths) projectPath() string { return m.join(m.layersDir(), "project-metadata.toml") } +func (m mountPaths) reportPath() string { + return m.join(m.layersDir(), "report.toml") +} + func (m mountPaths) appDirName() string { return m.workspace } @@ -54,6 +62,10 @@ func (m mountPaths) cacheDir() string { return m.join(m.volume, "cache") } +func (m mountPaths) kanikoCacheDir() string { + return m.join(m.volume, "kaniko") +} + func (m mountPaths) launchCacheDir() string { return m.join(m.volume, "launch-cache") } diff --git a/internal/build/phase.go b/internal/build/phase.go index f1930d8cc1..613fad653e 100644 --- a/internal/build/phase.go +++ b/internal/build/phase.go @@ -4,9 +4,8 @@ import ( "context" "io" - "github.com/docker/docker/api/types" - dcontainer "github.com/docker/docker/api/types/container" - "github.com/docker/docker/client" + dcontainer "github.com/moby/moby/api/types/container" + "github.com/moby/moby/client" "github.com/pkg/errors" "github.com/buildpacks/pack/internal/container" @@ -16,11 +15,11 @@ type Phase struct { name string infoWriter io.Writer errorWriter io.Writer - docker client.CommonAPIClient + docker DockerClient handler container.Handler ctrConf *dcontainer.Config hostConf *dcontainer.HostConfig - ctr dcontainer.ContainerCreateCreatedBody + ctr client.ContainerCreateResult uid, gid int appPath string containerOps []ContainerOperation @@ -30,7 +29,10 @@ type Phase struct { func (p *Phase) Run(ctx context.Context) error { var err error - p.ctr, err = p.docker.ContainerCreate(ctx, p.ctrConf, p.hostConf, nil, nil, "") + p.ctr, err = 
p.docker.ContainerCreate(ctx, client.ContainerCreateOptions{ + Config: p.ctrConf, + HostConfig: p.hostConf, + }) if err != nil { return errors.Wrapf(err, "failed to create '%s' container", p.name) } @@ -65,5 +67,6 @@ func (p *Phase) Run(ctx context.Context) error { } func (p *Phase) Cleanup() error { - return p.docker.ContainerRemove(context.Background(), p.ctr.ID, types.ContainerRemoveOptions{Force: true}) + _, err := p.docker.ContainerRemove(context.Background(), p.ctr.ID, client.ContainerRemoveOptions{Force: true}) + return err } diff --git a/internal/build/phase_config_provider.go b/internal/build/phase_config_provider.go index be248ccec6..36e452931e 100644 --- a/internal/build/phase_config_provider.go +++ b/internal/build/phase_config_provider.go @@ -6,7 +6,7 @@ import ( "os" "strings" - "github.com/docker/docker/api/types/container" + "github.com/moby/moby/api/types/container" pcontainer "github.com/buildpacks/pack/internal/container" "github.com/buildpacks/pack/internal/style" @@ -17,6 +17,7 @@ const ( linuxContainerAdmin = "root" windowsContainerAdmin = "ContainerAdministrator" platformAPIEnvVar = "CNB_PLATFORM_API" + executionEnvVar = "CNB_EXEC_ENV" ) type PhaseConfigProviderOperation func(*PhaseConfigProvider) @@ -34,9 +35,16 @@ type PhaseConfigProvider struct { } func NewPhaseConfigProvider(name string, lifecycleExec *LifecycleExecution, ops ...PhaseConfigProviderOperation) *PhaseConfigProvider { + hostConf := new(container.HostConfig) + if lifecycleExec.opts.EnableUsernsHost { + hostConf.UsernsMode = "host" + } + if lifecycleExec.os != "windows" { + hostConf.SecurityOpt = []string{"no-new-privileges=true"} + } provider := &PhaseConfigProvider{ ctrConf: new(container.Config), - hostConf: new(container.HostConfig), + hostConf: hostConf, name: name, os: lifecycleExec.os, infoWriter: logging.GetWriterForLevel(lifecycleExec.logger, logging.InfoLevel), @@ -52,6 +60,7 @@ func NewPhaseConfigProvider(name string, lifecycleExec *LifecycleExecution, ops ops = 
append(ops, WithEnv(fmt.Sprintf("%s=%s", platformAPIEnvVar, lifecycleExec.platformAPI.String())), + If(lifecycleExec.platformAPI.AtLeast("0.15"), WithEnv(fmt.Sprintf("%s=%s", executionEnvVar, lifecycleExec.opts.ExecutionEnvironment))), WithLifecycleProxy(lifecycleExec), WithBinds([]string{ fmt.Sprintf("%s:%s", lifecycleExec.layersVolume, lifecycleExec.mountPaths.layersDir()), @@ -63,12 +72,13 @@ func NewPhaseConfigProvider(name string, lifecycleExec *LifecycleExecution, ops op(provider) } + provider.ctrConf.Entrypoint = []string{""} // override entrypoint in case it is set provider.ctrConf.Cmd = append([]string{"/cnb/lifecycle/" + name}, provider.ctrConf.Cmd...) - lifecycleExec.logger.Debugf("Running the %s on OS %s with:", style.Symbol(provider.Name()), style.Symbol(provider.os)) + lifecycleExec.logger.Debugf("Running the %s on OS %s from image %s with:", style.Symbol(provider.Name()), style.Symbol(provider.os), style.Symbol(provider.ctrConf.Image)) lifecycleExec.logger.Debug("Container Settings:") lifecycleExec.logger.Debugf(" Args: %s", style.Symbol(strings.Join(provider.ctrConf.Cmd, " "))) - lifecycleExec.logger.Debugf(" System Envs: %s", style.Symbol(strings.Join(provider.ctrConf.Env, " "))) + lifecycleExec.logger.Debugf(" System Envs: %s", style.Symbol(strings.Join(sanitized(provider.ctrConf.Env), " "))) lifecycleExec.logger.Debugf(" Image: %s", style.Symbol(provider.ctrConf.Image)) lifecycleExec.logger.Debugf(" User: %s", style.Symbol(provider.ctrConf.User)) lifecycleExec.logger.Debugf(" Labels: %s", style.Symbol(fmt.Sprintf("%s", provider.ctrConf.Labels))) @@ -84,6 +94,18 @@ func NewPhaseConfigProvider(name string, lifecycleExec *LifecycleExecution, ops return provider } +func sanitized(origEnv []string) []string { + var sanitizedEnv []string + for _, env := range origEnv { + if strings.HasPrefix(env, "CNB_REGISTRY_AUTH") { + sanitizedEnv = append(sanitizedEnv, "CNB_REGISTRY_AUTH=") + continue + } + sanitizedEnv = append(sanitizedEnv, env) + } + return 
sanitizedEnv +} + func (p *PhaseConfigProvider) ContainerConfig() *container.Config { return p.ctrConf } diff --git a/internal/build/phase_config_provider_test.go b/internal/build/phase_config_provider_test.go index 6339dd248a..b204935e87 100644 --- a/internal/build/phase_config_provider_test.go +++ b/internal/build/phase_config_provider_test.go @@ -3,15 +3,13 @@ package build_test import ( "bytes" "io" - "math/rand" "testing" - "time" ifakes "github.com/buildpacks/imgutil/fakes" - "github.com/docker/docker/api/types/container" - "github.com/docker/docker/api/types/strslice" - "github.com/docker/docker/client" + "github.com/buildpacks/lifecycle/api" "github.com/heroku/color" + dcontainer "github.com/moby/moby/api/types/container" + "github.com/moby/moby/client" "github.com/pkg/errors" "github.com/sclevine/spec" "github.com/sclevine/spec/report" @@ -23,8 +21,6 @@ import ( ) func TestPhaseConfigProvider(t *testing.T) { - rand.Seed(time.Now().UTC().UnixNano()) - color.Disable(true) defer color.Disable(false) @@ -37,9 +33,9 @@ func testPhaseConfigProvider(t *testing.T, when spec.G, it spec.S) { expectedBuilderImage := ifakes.NewImage("some-builder-name", "", nil) fakeBuilder, err := fakes.NewFakeBuilder(fakes.WithImage(expectedBuilderImage)) h.AssertNil(t, err) - lifecycle := newTestLifecycleExec(t, false, fakes.WithBuilder(fakeBuilder)) + lifecycle := newTestLifecycleExec(t, false, "some-temp-dir", fakes.WithBuilder(fakeBuilder)) expectedPhaseName := "some-name" - expectedCmd := strslice.StrSlice{"/cnb/lifecycle/" + expectedPhaseName} + expectedCmd := []string{"/cnb/lifecycle/" + expectedPhaseName} phaseConfigProvider := build.NewPhaseConfigProvider(expectedPhaseName, lifecycle) @@ -62,7 +58,23 @@ func testPhaseConfigProvider(t *testing.T, when spec.G, it spec.S) { h.AssertSliceContainsMatch(t, phaseConfigProvider.HostConfig().Binds, "pack-layers-.*:/layers") h.AssertSliceContainsMatch(t, phaseConfigProvider.HostConfig().Binds, "pack-app-.*:/workspace") - 
h.AssertEq(t, phaseConfigProvider.HostConfig().Isolation, container.IsolationEmpty) + h.AssertEq(t, phaseConfigProvider.HostConfig().Isolation, dcontainer.IsolationEmpty) + h.AssertEq(t, phaseConfigProvider.HostConfig().UsernsMode, dcontainer.UsernsMode("")) + h.AssertSliceContains(t, phaseConfigProvider.HostConfig().SecurityOpt, "no-new-privileges=true") + }) + + when("userns-host is enabled", func() { + it("sets user namespace mode to host", func() { + expectedBuilderImage := ifakes.NewImage("some-builder-name", "", nil) + fakeBuilder, err := fakes.NewFakeBuilder(fakes.WithImage(expectedBuilderImage)) + h.AssertNil(t, err) + lifecycle := newTestLifecycleExec(t, false, "some-temp-dir", fakes.WithBuilder(fakeBuilder), fakes.WithEnableUsernsHost()) + expectedPhaseName := "some-name" + + phaseConfigProvider := build.NewPhaseConfigProvider(expectedPhaseName, lifecycle) + + h.AssertEq(t, phaseConfigProvider.HostConfig().UsernsMode, dcontainer.UsernsMode("host")) + }) }) when("building for Windows", func() { @@ -71,22 +83,23 @@ func testPhaseConfigProvider(t *testing.T, when spec.G, it spec.S) { h.AssertNil(t, fakeBuilderImage.SetOS("windows")) fakeBuilder, err := fakes.NewFakeBuilder(fakes.WithImage(fakeBuilderImage)) h.AssertNil(t, err) - lifecycle := newTestLifecycleExec(t, false, fakes.WithBuilder(fakeBuilder)) + lifecycle := newTestLifecycleExec(t, false, "some-temp-dir", fakes.WithBuilder(fakeBuilder)) phaseConfigProvider := build.NewPhaseConfigProvider("some-name", lifecycle) - h.AssertEq(t, phaseConfigProvider.HostConfig().Isolation, container.IsolationProcess) + h.AssertEq(t, phaseConfigProvider.HostConfig().Isolation, dcontainer.IsolationProcess) + h.AssertSliceNotContains(t, phaseConfigProvider.HostConfig().SecurityOpt, "no-new-privileges=true") }) }) when("building with interactive mode", func() { it("returns a phase config provider with interactive args", func() { - handler := func(bodyChan <-chan container.ContainerWaitOKBody, errChan <-chan error, reader 
io.Reader) error { + handler := func(bodyChan <-chan dcontainer.WaitResponse, errChan <-chan error, reader io.Reader) error { return errors.New("i was called") } fakeTermui := &fakes.FakeTermui{HandlerFunc: handler} - lifecycle := newTestLifecycleExec(t, false, fakes.WithTermui(fakeTermui)) + lifecycle := newTestLifecycleExec(t, false, "some-temp-dir", fakes.WithTermui(fakeTermui)) phaseConfigProvider := build.NewPhaseConfigProvider("some-name", lifecycle) h.AssertError(t, phaseConfigProvider.Handler()(nil, nil, nil), "i was called") @@ -95,8 +108,8 @@ func testPhaseConfigProvider(t *testing.T, when spec.G, it spec.S) { when("called with WithArgs", func() { it("sets args on the config", func() { - lifecycle := newTestLifecycleExec(t, false) - expectedArgs := strslice.StrSlice{"some-arg-1", "some-arg-2"} + lifecycle := newTestLifecycleExec(t, false, "some-temp-dir") + expectedArgs := []string{"some-arg-1", "some-arg-2"} phaseConfigProvider := build.NewPhaseConfigProvider( "some-name", @@ -111,7 +124,7 @@ func testPhaseConfigProvider(t *testing.T, when spec.G, it spec.S) { when("called with WithFlags", func() { it("sets args on the config", func() { - lifecycle := newTestLifecycleExec(t, false) + lifecycle := newTestLifecycleExec(t, false, "some-temp-dir") phaseConfigProvider := build.NewPhaseConfigProvider( "some-name", @@ -127,7 +140,7 @@ func testPhaseConfigProvider(t *testing.T, when spec.G, it spec.S) { when("called with WithBinds", func() { it("sets binds on the config", func() { - lifecycle := newTestLifecycleExec(t, false) + lifecycle := newTestLifecycleExec(t, false, "some-temp-dir") expectedBinds := []string{"some-bind-1", "some-bind-2"} phaseConfigProvider := build.NewPhaseConfigProvider( @@ -143,7 +156,7 @@ func testPhaseConfigProvider(t *testing.T, when spec.G, it spec.S) { when("called with WithDaemonAccess", func() { when("building for non-Windows", func() { it("sets daemon access on the config", func() { - lifecycle := newTestLifecycleExec(t, false) + 
lifecycle := newTestLifecycleExec(t, false, "some-temp-dir") phaseConfigProvider := build.NewPhaseConfigProvider( "some-name", @@ -163,7 +176,7 @@ func testPhaseConfigProvider(t *testing.T, when spec.G, it spec.S) { h.AssertNil(t, fakeBuilderImage.SetOS("windows")) fakeBuilder, err := fakes.NewFakeBuilder(fakes.WithImage(fakeBuilderImage)) h.AssertNil(t, err) - lifecycle := newTestLifecycleExec(t, false, fakes.WithBuilder(fakeBuilder)) + lifecycle := newTestLifecycleExec(t, false, "some-temp-dir", fakes.WithBuilder(fakeBuilder)) phaseConfigProvider := build.NewPhaseConfigProvider( "some-name", @@ -179,7 +192,7 @@ func testPhaseConfigProvider(t *testing.T, when spec.G, it spec.S) { when("called with WithEnv", func() { it("sets the environment on the config", func() { - lifecycle := newTestLifecycleExec(t, false) + lifecycle := newTestLifecycleExec(t, false, "some-temp-dir") phaseConfigProvider := build.NewPhaseConfigProvider( "some-name", @@ -191,9 +204,49 @@ func testPhaseConfigProvider(t *testing.T, when spec.G, it spec.S) { }) }) + when("execution environment is set", func() { + when("platform API >= 0.15", func() { + it("sets CNB_EXEC_ENV environment variable", func() { + expectedBuilderImage := ifakes.NewImage("some-builder-name", "", nil) + fakeBuilder, err := fakes.NewFakeBuilder( + fakes.WithImage(expectedBuilderImage), + fakes.WithSupportedPlatformAPIs([]*api.Version{api.MustParse("0.15")}), + ) + h.AssertNil(t, err) + lifecycle := newTestLifecycleExec(t, false, "some-temp-dir", + fakes.WithBuilder(fakeBuilder), + fakes.WithExecutionEnvironment("test"), + ) + + phaseConfigProvider := build.NewPhaseConfigProvider("some-name", lifecycle) + + h.AssertSliceContains(t, phaseConfigProvider.ContainerConfig().Env, "CNB_EXEC_ENV=test") + }) + }) + + when("platform API < 0.15", func() { + it("does not set CNB_EXEC_ENV environment variable", func() { + expectedBuilderImage := ifakes.NewImage("some-builder-name", "", nil) + fakeBuilder, err := fakes.NewFakeBuilder( + 
fakes.WithImage(expectedBuilderImage), + fakes.WithSupportedPlatformAPIs([]*api.Version{api.MustParse("0.14")}), + ) + h.AssertNil(t, err) + lifecycle := newTestLifecycleExec(t, false, "some-temp-dir", + fakes.WithBuilder(fakeBuilder), + fakes.WithExecutionEnvironment("test"), + ) + + phaseConfigProvider := build.NewPhaseConfigProvider("some-name", lifecycle) + + h.AssertSliceNotContains(t, phaseConfigProvider.ContainerConfig().Env, "CNB_EXEC_ENV=test") + }) + }) + }) + when("called with WithImage", func() { it("sets the image on the config", func() { - lifecycle := newTestLifecycleExec(t, false) + lifecycle := newTestLifecycleExec(t, false, "some-temp-dir") phaseConfigProvider := build.NewPhaseConfigProvider( "some-name", @@ -207,7 +260,7 @@ func testPhaseConfigProvider(t *testing.T, when spec.G, it spec.S) { when("called with WithNetwork", func() { it("sets the network mode on the config", func() { - lifecycle := newTestLifecycleExec(t, false) + lifecycle := newTestLifecycleExec(t, false, "some-temp-dir") expectedNetworkMode := "some-network-mode" phaseConfigProvider := build.NewPhaseConfigProvider( @@ -219,14 +272,14 @@ func testPhaseConfigProvider(t *testing.T, when spec.G, it spec.S) { h.AssertEq( t, phaseConfigProvider.HostConfig().NetworkMode, - container.NetworkMode(expectedNetworkMode), + dcontainer.NetworkMode(expectedNetworkMode), ) }) }) when("called with WithRegistryAccess", func() { it("sets registry access on the config", func() { - lifecycle := newTestLifecycleExec(t, false) + lifecycle := newTestLifecycleExec(t, false, "some-temp-dir") authConfig := "some-auth-config" phaseConfigProvider := build.NewPhaseConfigProvider( @@ -246,7 +299,7 @@ func testPhaseConfigProvider(t *testing.T, when spec.G, it spec.S) { when("called with WithRoot", func() { when("building for non-Windows", func() { it("sets root user on the config", func() { - lifecycle := newTestLifecycleExec(t, false) + lifecycle := newTestLifecycleExec(t, false, "some-temp-dir") 
phaseConfigProvider := build.NewPhaseConfigProvider( "some-name", @@ -264,7 +317,7 @@ func testPhaseConfigProvider(t *testing.T, when spec.G, it spec.S) { h.AssertNil(t, fakeBuilderImage.SetOS("windows")) fakeBuilder, err := fakes.NewFakeBuilder(fakes.WithImage(fakeBuilderImage)) h.AssertNil(t, err) - lifecycle := newTestLifecycleExec(t, false, fakes.WithBuilder(fakeBuilder)) + lifecycle := newTestLifecycleExec(t, false, "some-temp-dir", fakes.WithBuilder(fakeBuilder)) phaseConfigProvider := build.NewPhaseConfigProvider( "some-name", @@ -279,7 +332,7 @@ func testPhaseConfigProvider(t *testing.T, when spec.G, it spec.S) { when("called with WithLogPrefix", func() { it("sets prefix writers", func() { - lifecycle := newTestLifecycleExec(t, false) + lifecycle := newTestLifecycleExec(t, false, "some-temp-dir") phaseConfigProvider := build.NewPhaseConfigProvider( "some-name", @@ -300,7 +353,7 @@ func testPhaseConfigProvider(t *testing.T, when spec.G, it spec.S) { var outBuf bytes.Buffer logger := logging.NewLogWithWriters(&outBuf, &outBuf, logging.WithVerbose()) - docker, err := client.NewClientWithOpts(client.FromEnv, client.WithVersion("1.38")) + docker, err := client.New(client.FromEnv) h.AssertNil(t, err) defaultBuilder, err := fakes.NewFakeBuilder() @@ -311,7 +364,7 @@ func testPhaseConfigProvider(t *testing.T, when spec.G, it spec.S) { Builder: defaultBuilder, } - lifecycleExec, err := build.NewLifecycleExecution(logger, docker, opts) + lifecycleExec, err := build.NewLifecycleExecution(logger, docker, "some-temp-dir", opts) h.AssertNil(t, err) _ = build.NewPhaseConfigProvider( @@ -329,6 +382,37 @@ func testPhaseConfigProvider(t *testing.T, when spec.G, it spec.S) { h.AssertContainsMatch(t, outBuf.String(), `Binds: \'\S+:\S+layers \S+:\S+workspace'`) h.AssertContains(t, outBuf.String(), "Network Mode: ''") }) + + when("there is registry auth", func() { + it("sanitizes the output", func() { + authConfig := "some-auth-config" + + var outBuf bytes.Buffer + logger := 
logging.NewLogWithWriters(&outBuf, &outBuf, logging.WithVerbose()) + + docker, err := client.New(client.FromEnv) + h.AssertNil(t, err) + + defaultBuilder, err := fakes.NewFakeBuilder() + h.AssertNil(t, err) + + opts := build.LifecycleOptions{ + AppPath: "some-app-path", + Builder: defaultBuilder, + } + + lifecycleExec, err := build.NewLifecycleExecution(logger, docker, "some-temp-dir", opts) + h.AssertNil(t, err) + + _ = build.NewPhaseConfigProvider( + "some-name", + lifecycleExec, + build.WithRegistryAccess(authConfig), + ) + + h.AssertContains(t, outBuf.String(), "System Envs: 'CNB_REGISTRY_AUTH= CNB_PLATFORM_API=0.4'") + }) + }) }) }) } diff --git a/internal/build/phase_test.go b/internal/build/phase_test.go index 07765bdeee..a9cc740eed 100644 --- a/internal/build/phase_test.go +++ b/internal/build/phase_test.go @@ -5,8 +5,6 @@ import ( "context" "fmt" "io" - "io/ioutil" - "math/rand" "net" "os" "path/filepath" @@ -15,15 +13,17 @@ import ( "strconv" "sync" "testing" - "time" + + // "github.com/docker/docker/api/types/volume" "github.com/buildpacks/imgutil/local" "github.com/buildpacks/lifecycle/auth" - dcontainer "github.com/docker/docker/api/types/container" - "github.com/docker/docker/api/types/filters" - "github.com/docker/docker/client" + dcontainer "github.com/moby/moby/api/types/container" + + // "github.com/docker/docker/api/types/filters" "github.com/google/go-containerregistry/pkg/authn" "github.com/heroku/color" + "github.com/moby/moby/client" "github.com/sclevine/spec" "github.com/sclevine/spec/report" @@ -37,27 +37,22 @@ import ( const phaseName = "phase" -var ( - repoName string - ctrClient client.CommonAPIClient -) +var repoName string // TestPhase is a integration test suite to ensure that the phase options are propagated to the container. 
func TestPhase(t *testing.T) { - rand.Seed(time.Now().UTC().UnixNano()) - color.Disable(true) defer color.Disable(false) h.RequireDocker(t) var err error - ctrClient, err = client.NewClientWithOpts(client.FromEnv, client.WithVersion("1.38")) + ctrClient, err = client.New(client.FromEnv) h.AssertNil(t, err) - info, err := ctrClient.Info(context.TODO()) + info, err := ctrClient.Info(context.TODO(), client.InfoOptions{}) h.AssertNil(t, err) - h.SkipIf(t, info.OSType == "windows", "These tests are not yet compatible with Windows-based containers") + h.SkipIf(t, info.Info.OSType == "windows", "These tests are not yet compatible with Windows-based containers") repoName = "phase.test.lc-" + h.RandString(10) wd, err := os.Getwd() @@ -73,7 +68,7 @@ func testPhase(t *testing.T, when spec.G, it spec.S) { lifecycleExec *build.LifecycleExecution phaseFactory build.PhaseFactory outBuf, errBuf bytes.Buffer - docker client.CommonAPIClient + docker client.APIClient logger logging.Logger osType string ) @@ -82,12 +77,12 @@ func testPhase(t *testing.T, when spec.G, it spec.S) { logger = logging.NewLogWithWriters(&outBuf, &outBuf) var err error - docker, err = client.NewClientWithOpts(client.FromEnv, client.WithVersion("1.38")) + docker, err = client.New(client.FromEnv) h.AssertNil(t, err) - info, err := ctrClient.Info(context.Background()) + info, err := ctrClient.Info(context.Background(), client.InfoOptions{}) h.AssertNil(t, err) - osType = info.OSType + osType = info.Info.OSType lifecycleExec, err = CreateFakeLifecycleExecution(logger, docker, filepath.Join("testdata", "fake-app"), repoName) h.AssertNil(t, err) @@ -141,8 +136,8 @@ func testPhase(t *testing.T, when spec.G, it spec.S) { it("runs the phase with provided handlers", func() { var actual string - var handler container.Handler = func(bodyChan <-chan dcontainer.ContainerWaitOKBody, errChan <-chan error, reader io.Reader) error { - data, _ := ioutil.ReadAll(reader) + var handler container.Handler = func(bodyChan <-chan 
dcontainer.WaitResponse, errChan <-chan error, reader io.Reader) error { + data, _ := io.ReadAll(reader) actual = string(data) return nil } @@ -225,7 +220,7 @@ func testPhase(t *testing.T, when spec.G, it spec.S) { it.Before(func() { h.SkipIf(t, os.Getuid() == 0, "Skipping b/c current user is root") - tmpFakeAppDir, err = ioutil.TempDir("", "fake-app") + tmpFakeAppDir, err = os.MkdirTemp("", "fake-app") h.AssertNil(t, err) dirWithoutAccess = filepath.Join(tmpFakeAppDir, "bad-dir") err := os.MkdirAll(dirWithoutAccess, 0222) @@ -299,7 +294,7 @@ func testPhase(t *testing.T, when spec.G, it spec.S) { }) it("allows daemon access inside the container", func() { - tmp, err := ioutil.TempDir("", "testSocketDir") + tmp, err := os.MkdirTemp("", "testSocketDir") if err != nil { t.Fatal(err) } @@ -363,7 +358,8 @@ func testPhase(t *testing.T, when spec.G, it spec.S) { when("#WithBinds", func() { it.After(func() { - h.AssertNilE(t, docker.VolumeRemove(context.TODO(), "some-volume", true)) + _, err := docker.VolumeRemove(context.TODO(), "some-volume", client.VolumeRemoveOptions{Force: true}) + h.AssertNilE(t, err) }) it("mounts volumes inside container", func() { @@ -371,12 +367,9 @@ func testPhase(t *testing.T, when spec.G, it spec.S) { phase := phaseFactory.New(configProvider) assertRunSucceeds(t, phase, &outBuf, &errBuf) h.AssertContains(t, outBuf.String(), "binds test") - body, err := docker.VolumeList(context.TODO(), filters.NewArgs(filters.KeyValuePair{ - Key: "name", - Value: "some-volume", - })) + body, err := docker.VolumeList(context.TODO(), client.VolumeListOptions{Filters: client.Filters{"name": {"some-volume": true}}}) h.AssertNil(t, err) - h.AssertEq(t, len(body.Volumes), 1) + h.AssertEq(t, len(body.Items), 1) }) }) @@ -426,17 +419,17 @@ func testPhase(t *testing.T, when spec.G, it spec.S) { when("#WithPostContainerRunOperations", func() { it("runs the operation after the container command", func() { - tarDestinationPath, err := ioutil.TempFile("", 
"pack.phase.test.") + tarDestinationPath, err := os.CreateTemp("", "pack.phase.test.") h.AssertNil(t, err) defer os.RemoveAll(tarDestinationPath.Name()) handler := func(reader io.ReadCloser) error { defer reader.Close() - contents, err := ioutil.ReadAll(reader) + contents, err := io.ReadAll(reader) h.AssertNil(t, err) - err = ioutil.WriteFile(tarDestinationPath.Name(), contents, 0600) + err = os.WriteFile(tarDestinationPath.Name(), contents, 0600) h.AssertNil(t, err) return nil } @@ -464,23 +457,17 @@ func testPhase(t *testing.T, when spec.G, it spec.S) { }) it("should delete the layers volume", func() { - body, err := docker.VolumeList(context.TODO(), - filters.NewArgs(filters.KeyValuePair{ - Key: "name", - Value: lifecycleExec.LayersVolume(), - })) + body, err := docker.VolumeList(context.TODO(), client.VolumeListOptions{ + Filters: client.Filters{"name": {lifecycleExec.LayersVolume(): true}}}) h.AssertNil(t, err) - h.AssertEq(t, len(body.Volumes), 0) + h.AssertEq(t, len(body.Items), 0) }) it("should delete the app volume", func() { - body, err := docker.VolumeList(context.TODO(), - filters.NewArgs(filters.KeyValuePair{ - Key: "name", - Value: lifecycleExec.AppVolume(), - })) + body, err := docker.VolumeList(context.TODO(), client.VolumeListOptions{ + Filters: client.Filters{"name": {lifecycleExec.AppVolume(): true}}}) h.AssertNil(t, err) - h.AssertEq(t, len(body.Volumes), 0) + h.AssertEq(t, len(body.Items), 0) }) }) } @@ -511,7 +498,7 @@ func assertRunSucceeds(t *testing.T, phase build.RunnerCleaner, outBuf *bytes.Bu h.AssertNilE(t, phase.Cleanup()) } -func CreateFakeLifecycleExecution(logger logging.Logger, docker client.CommonAPIClient, appDir string, repoName string, handler ...container.Handler) (*build.LifecycleExecution, error) { +func CreateFakeLifecycleExecution(logger logging.Logger, docker client.APIClient, appDir string, repoName string, handler ...container.Handler) (*build.LifecycleExecution, error) { builderImage, err := local.NewImage(repoName, 
docker, local.FromBaseImage(repoName)) if err != nil { return nil, err @@ -535,7 +522,7 @@ func CreateFakeLifecycleExecution(logger logging.Logger, docker client.CommonAPI termui = &fakes.FakeTermui{HandlerFunc: handler[0]} } - return build.NewLifecycleExecution(logger, docker, build.LifecycleOptions{ + return build.NewLifecycleExecution(logger, docker, "some-temp-dir", build.LifecycleOptions{ AppPath: appDir, Builder: fakeBuilder, HTTPProxy: "some-http-proxy", diff --git a/internal/build/testdata/fake-lifecycle/Dockerfile b/internal/build/testdata/fake-lifecycle/Dockerfile index 5086489c40..de89901a25 100644 --- a/internal/build/testdata/fake-lifecycle/Dockerfile +++ b/internal/build/testdata/fake-lifecycle/Dockerfile @@ -1,9 +1,9 @@ -FROM golang +FROM golang:1.23 RUN mkdir /lifecycle WORKDIR /go/src/step COPY . . -RUN GO111MODULE=on go build -o /cnb/lifecycle/phase ./phase.go +RUN GO111MODULE=on GOFLAGS=-mod=mod go build -o /cnb/lifecycle/phase ./phase.go ENV CNB_USER_ID 111 ENV CNB_GROUP_ID 222 diff --git a/internal/build/testdata/fake-lifecycle/go.mod b/internal/build/testdata/fake-lifecycle/go.mod index df5fe54279..9be36d47ea 100644 --- a/internal/build/testdata/fake-lifecycle/go.mod +++ b/internal/build/testdata/fake-lifecycle/go.mod @@ -1,9 +1,85 @@ module step require ( - github.com/buildpacks/lifecycle v0.5.1-0.20191212164213-3b2b120be460 - github.com/docker/docker v1.4.2-0.20190924003213-a8608b5b67c7 - github.com/google/go-containerregistry v0.0.0-20191018211754-b77a90c667af + github.com/buildpacks/lifecycle v0.19.3 + github.com/docker/docker v28.5.1+incompatible + github.com/google/go-containerregistry v0.19.2 ) -go 1.17 +require ( + github.com/Azure/azure-sdk-for-go v68.0.0+incompatible // indirect + github.com/Azure/go-autorest v14.2.0+incompatible // indirect + github.com/Azure/go-autorest/autorest v0.11.30 // indirect + github.com/Azure/go-autorest/autorest/adal v0.9.24 // indirect + github.com/Azure/go-autorest/autorest/azure/auth v0.5.13 // 
indirect + github.com/Azure/go-autorest/autorest/azure/cli v0.4.7 // indirect + github.com/Azure/go-autorest/autorest/date v0.3.1 // indirect + github.com/Azure/go-autorest/logger v0.2.2 // indirect + github.com/Azure/go-autorest/tracing v0.6.1 // indirect + github.com/Microsoft/go-winio v0.6.2 // indirect + github.com/aws/aws-sdk-go-v2 v1.36.3 // indirect + github.com/aws/aws-sdk-go-v2/config v1.29.14 // indirect + github.com/aws/aws-sdk-go-v2/credentials v1.17.67 // indirect + github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.30 // indirect + github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.34 // indirect + github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.34 // indirect + github.com/aws/aws-sdk-go-v2/internal/ini v1.8.3 // indirect + github.com/aws/aws-sdk-go-v2/service/ecr v1.44.0 // indirect + github.com/aws/aws-sdk-go-v2/service/ecrpublic v1.33.0 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.3 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.15 // indirect + github.com/aws/aws-sdk-go-v2/service/sso v1.25.3 // indirect + github.com/aws/aws-sdk-go-v2/service/ssooidc v1.30.1 // indirect + github.com/aws/aws-sdk-go-v2/service/sts v1.33.19 // indirect + github.com/aws/smithy-go v1.22.3 // indirect + github.com/awslabs/amazon-ecr-credential-helper/ecr-login v0.9.1 // indirect + github.com/beorn7/perks v1.0.1 // indirect + github.com/cespare/xxhash/v2 v2.3.0 // indirect + github.com/chrismellard/docker-credential-acr-env v0.0.0-20230304212654-82a0ddb27589 // indirect + github.com/containerd/errdefs v1.0.0 // indirect + github.com/containerd/errdefs/pkg v0.3.0 // indirect + github.com/containerd/log v0.1.0 // indirect + github.com/containerd/stargz-snapshotter/estargz v0.16.3 // indirect + github.com/dimchansky/utfbom v1.1.1 // indirect + github.com/distribution/reference v0.6.0 // indirect + github.com/docker/cli v28.2.2+incompatible // indirect + github.com/docker/distribution 
v2.8.3+incompatible // indirect + github.com/docker/docker-credential-helpers v0.9.3 // indirect + github.com/docker/go-connections v0.5.0 // indirect + github.com/docker/go-metrics v0.0.1 // indirect + github.com/docker/go-units v0.5.0 // indirect + github.com/felixge/httpsnoop v1.0.4 // indirect + github.com/go-logr/logr v1.4.3 // indirect + github.com/go-logr/stdr v1.2.2 // indirect + github.com/golang-jwt/jwt/v4 v4.5.2 // indirect + github.com/gorilla/mux v1.8.1 // indirect + github.com/klauspost/compress v1.18.0 // indirect + github.com/mitchellh/go-homedir v1.1.0 // indirect + github.com/moby/docker-image-spec v1.3.1 // indirect + github.com/moby/sys/atomicwriter v0.1.0 // indirect + github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect + github.com/opencontainers/go-digest v1.0.0 // indirect + github.com/opencontainers/image-spec v1.1.1 // indirect + github.com/pkg/errors v0.9.1 // indirect + github.com/prometheus/client_golang v1.22.0 // indirect + github.com/prometheus/client_model v0.6.2 // indirect + github.com/prometheus/common v0.64.0 // indirect + github.com/prometheus/procfs v0.16.1 // indirect + github.com/sirupsen/logrus v1.9.3 // indirect + github.com/vbatts/tar-split v0.12.1 // indirect + go.opentelemetry.io/auto/sdk v1.1.0 // indirect + go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 // indirect + go.opentelemetry.io/otel v1.38.0 // indirect + go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.38.0 // indirect + go.opentelemetry.io/otel/metric v1.38.0 // indirect + go.opentelemetry.io/otel/sdk v1.38.0 // indirect + go.opentelemetry.io/otel/sdk/metric v1.38.0 // indirect + go.opentelemetry.io/otel/trace v1.38.0 // indirect + go.opentelemetry.io/proto/otlp v1.9.0 // indirect + golang.org/x/crypto v0.38.0 // indirect + golang.org/x/sync v0.15.0 // indirect + golang.org/x/sys v0.35.0 // indirect + google.golang.org/protobuf v1.36.10 // indirect +) + +go 1.23.0 diff --git 
a/internal/build/testdata/fake-lifecycle/go.sum b/internal/build/testdata/fake-lifecycle/go.sum index 989a8cdf7a..3b99d393bf 100644 --- a/internal/build/testdata/fake-lifecycle/go.sum +++ b/internal/build/testdata/fake-lifecycle/go.sum @@ -1,238 +1,318 @@ -cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -github.com/Azure/azure-sdk-for-go v19.1.1+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= -github.com/Azure/go-ansiterm v0.0.0-20170929234023-d6e3b3328b78 h1:w+iIsaOQNcT7OZ575w+acHgRric5iCyQh+xv+KJ4HB8= -github.com/Azure/go-ansiterm v0.0.0-20170929234023-d6e3b3328b78/go.mod h1:LmzpDX56iTiv29bbRTIsUNlaFfuhWRQBWjQdVyAevI8= -github.com/Azure/go-autorest v10.15.5+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24= -github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ= -github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/Microsoft/go-winio v0.4.14 h1:+hMXMk01us9KgxGb7ftKQt2Xpf5hH/yky+TDA+qxleU= -github.com/Microsoft/go-winio v0.4.14/go.mod h1:qXqCSQ3Xa7+6tgxaGTIe4Kpcdsi+P8jBhyzoq1bpyYA= -github.com/apex/log v1.1.2-0.20190827100214-baa5455d1012 h1:r9k3B0K539tmbDOdyCIuz/6qtn8q+lp+qvEStcFUIdM= -github.com/apex/log v1.1.2-0.20190827100214-baa5455d1012/go.mod h1:Ls949n1HFtXfbDcjiTTFQqkVUrte0puoIBfO3SVgwOA= -github.com/aphistic/golf v0.0.0-20180712155816-02c07f170c5a/go.mod h1:3NqKYiepwy8kCu4PNA+aP7WUV72eXWJeP9/r3/K9aLE= -github.com/aphistic/sweet v0.2.0/go.mod h1:fWDlIh/isSE9n6EPsRmC0det+whmX6dJid3stzu0Xys= -github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8= -github.com/aws/aws-sdk-go v1.15.90/go.mod h1:es1KtYUFs7le0xQ3rOihkuoVD90z7D0fR2Qm4S00/gU= -github.com/aws/aws-sdk-go v1.20.6/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= -github.com/aybabtme/rgbterm v0.0.0-20170906152045-cc83f3b3ce59/go.mod h1:q/89r3U2H7sSsE2t6Kca0lfwTK8JdoNGS/yzM/4iH5I= 
-github.com/buildpacks/imgutil v0.0.0-20191212154113-dc184e0d403b h1:SDsB0hJtURA+5i5vIjLntzRNPEqdoz6q+0MShCWdctw= -github.com/buildpacks/imgutil v0.0.0-20191212154113-dc184e0d403b/go.mod h1:E3lXJcNXcRefJQAHW5rqboonet+jtOml4qImbJhYGAo= -github.com/buildpacks/lifecycle v0.5.1-0.20191212164213-3b2b120be460 h1:wwnys/9oBN1XX86SuePWXeOIyh07O4gTFZuD3o+iL/g= -github.com/buildpacks/lifecycle v0.5.1-0.20191212164213-3b2b120be460/go.mod h1:1517xYIXEWiehR7ndWdGCkIlD0KJDLvtVuzZGp7K6Ok= -github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= -github.com/containerd/containerd v1.3.0 h1:xjvXQWABwS2uiv3TWgQt5Uth60Gu86LTGZXMJkjc7rY= -github.com/containerd/containerd v1.3.0/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA= -github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= -github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk= -github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= -github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE= +github.com/Azure/azure-sdk-for-go v68.0.0+incompatible h1:fcYLmCpyNYRnvJbPerq7U0hS+6+I79yEDJBqVNcqUzU= +github.com/Azure/azure-sdk-for-go v68.0.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= +github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 h1:L/gRVlceqvL25UVaW/CKtUDjefjrs0SPonmDGUVOYP0= +github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= +github.com/Azure/go-autorest v14.2.0+incompatible h1:V5VMDjClD3GiElqLWO7mz2MxNAK/vTfRHdAubSIPRgs= +github.com/Azure/go-autorest v14.2.0+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24= +github.com/Azure/go-autorest/autorest v0.11.28/go.mod h1:MrkzG3Y3AH668QyF9KRk5neJnGgmhQ6krbhR8Q5eMvA= +github.com/Azure/go-autorest/autorest v0.11.30 h1:iaZ1RGz/ALZtN5eq4Nr1SOFSlf2E4pDI3Tcsl+dZPVE= 
+github.com/Azure/go-autorest/autorest v0.11.30/go.mod h1:t1kpPIOpIVX7annvothKvb0stsrXa37i7b+xpmBW8Fs= +github.com/Azure/go-autorest/autorest/adal v0.9.18/go.mod h1:XVVeme+LZwABT8K5Lc3hA4nAe8LDBVle26gTrguhhPQ= +github.com/Azure/go-autorest/autorest/adal v0.9.22/go.mod h1:XuAbAEUv2Tta//+voMI038TrJBqjKam0me7qR+L8Cmk= +github.com/Azure/go-autorest/autorest/adal v0.9.24 h1:BHZfgGsGwdkHDyZdtQRQk1WeUdW0m2WPAwuHZwUi5i4= +github.com/Azure/go-autorest/autorest/adal v0.9.24/go.mod h1:7T1+g0PYFmACYW5LlG2fcoPiPlFHjClyRGL7dRlP5c8= +github.com/Azure/go-autorest/autorest/azure/auth v0.5.13 h1:Ov8avRZi2vmrE2JcXw+tu5K/yB41r7xK9GZDiBF7NdM= +github.com/Azure/go-autorest/autorest/azure/auth v0.5.13/go.mod h1:5BAVfWLWXihP47vYrPuBKKf4cS0bXI+KM9Qx6ETDJYo= +github.com/Azure/go-autorest/autorest/azure/cli v0.4.6/go.mod h1:piCfgPho7BiIDdEQ1+g4VmKyD5y+p/XtSNqE6Hc4QD0= +github.com/Azure/go-autorest/autorest/azure/cli v0.4.7 h1:Q9R3utmFg9K1B4OYtAZ7ZUUvIUdzQt7G2MN5Hi/d670= +github.com/Azure/go-autorest/autorest/azure/cli v0.4.7/go.mod h1:bVrAueELJ0CKLBpUHDIvD516TwmHmzqwCpvONWRsw3s= +github.com/Azure/go-autorest/autorest/date v0.3.0/go.mod h1:BI0uouVdmngYNUzGWeSYnokU+TrmwEsOqdt8Y6sso74= +github.com/Azure/go-autorest/autorest/date v0.3.1 h1:o9Z8Jyt+VJJTCZ/UORishuHOusBwolhjokt9s5k8I4w= +github.com/Azure/go-autorest/autorest/date v0.3.1/go.mod h1:Dz/RDmXlfiFFS/eW+b/xMUSFs1tboPVy6UjgADToWDM= +github.com/Azure/go-autorest/autorest/mocks v0.4.1/go.mod h1:LTp+uSrOhSkaKrUy935gNZuuIPPVsHlr9DSOxSayd+k= +github.com/Azure/go-autorest/autorest/mocks v0.4.2 h1:PGN4EDXnuQbojHbU0UWoNvmu9AGVwYHG9/fkDYhtAfw= +github.com/Azure/go-autorest/autorest/mocks v0.4.2/go.mod h1:Vy7OitM9Kei0i1Oj+LvyAWMXJHeKH1MVlzFugfVrmyU= +github.com/Azure/go-autorest/logger v0.2.1/go.mod h1:T9E3cAhj2VqvPOtCYAvby9aBXkZmbF5NWuPV8+WeEW8= +github.com/Azure/go-autorest/logger v0.2.2 h1:hYqBsEBywrrOSW24kkOCXRcKfKhK76OzLTfF+MYDE2o= +github.com/Azure/go-autorest/logger v0.2.2/go.mod h1:I5fg9K52o+iuydlWfa9T5K6WFos9XYr9dYTFzpqgibw= 
+github.com/Azure/go-autorest/tracing v0.6.0/go.mod h1:+vhtPC754Xsa23ID7GlGsrdKBpUA79WCAKPPZVC2DeU= +github.com/Azure/go-autorest/tracing v0.6.1 h1:YUMSrC/CeD1ZnnXcNYU4a/fzsO35u2Fsful9L/2nyR0= +github.com/Azure/go-autorest/tracing v0.6.1/go.mod h1:/3EgjbsjraOqiicERAeu3m7/z0x1TzjQGAwDrJrXGkc= +github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= +github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= +github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/apex/log v1.9.0 h1:FHtw/xuaM8AgmvDDTI9fiwoAL25Sq2cxojnZICUU8l0= +github.com/apex/log v1.9.0/go.mod h1:m82fZlWIuiWzWP04XCTXmnX0xRkYYbCdYn8jbJeLBEA= +github.com/aws/aws-sdk-go-v2 v1.36.3 h1:mJoei2CxPutQVxaATCzDUjcZEjVRdpsiiXi2o38yqWM= +github.com/aws/aws-sdk-go-v2 v1.36.3/go.mod h1:LLXuLpgzEbD766Z5ECcRmi8AzSwfZItDtmABVkRLGzg= +github.com/aws/aws-sdk-go-v2/config v1.29.14 h1:f+eEi/2cKCg9pqKBoAIwRGzVb70MRKqWX4dg1BDcSJM= +github.com/aws/aws-sdk-go-v2/config v1.29.14/go.mod h1:wVPHWcIFv3WO89w0rE10gzf17ZYy+UVS1Geq8Iei34g= +github.com/aws/aws-sdk-go-v2/credentials v1.17.67 h1:9KxtdcIA/5xPNQyZRgUSpYOE6j9Bc4+D7nZua0KGYOM= +github.com/aws/aws-sdk-go-v2/credentials v1.17.67/go.mod h1:p3C44m+cfnbv763s52gCqrjaqyPikj9Sg47kUVaNZQQ= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.30 h1:x793wxmUWVDhshP8WW2mlnXuFrO4cOd3HLBroh1paFw= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.30/go.mod h1:Jpne2tDnYiFascUEs2AWHJL9Yp7A5ZVy3TNyxaAjD6M= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.34 h1:ZK5jHhnrioRkUNOc+hOgQKlUL5JeC3S6JgLxtQ+Rm0Q= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.34/go.mod h1:p4VfIceZokChbA9FzMbRGz5OV+lekcVtHlPKEO0gSZY= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.34 h1:SZwFm17ZUNNg5Np0ioo/gq8Mn6u9w19Mri8DnJ15Jf0= 
+github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.34/go.mod h1:dFZsC0BLo346mvKQLWmoJxT+Sjp+qcVR1tRVHQGOH9Q= +github.com/aws/aws-sdk-go-v2/internal/ini v1.8.3 h1:bIqFDwgGXXN1Kpp99pDOdKMTTb5d2KyU5X/BZxjOkRo= +github.com/aws/aws-sdk-go-v2/internal/ini v1.8.3/go.mod h1:H5O/EsxDWyU+LP/V8i5sm8cxoZgc2fdNR9bxlOFrQTo= +github.com/aws/aws-sdk-go-v2/service/ecr v1.44.0 h1:E+UTVTDH6XTSjqxHWRuY8nB6s+05UllneWxnycplHFk= +github.com/aws/aws-sdk-go-v2/service/ecr v1.44.0/go.mod h1:iQ1skgw1XRK+6Lgkb0I9ODatAP72WoTILh0zXQ5DtbU= +github.com/aws/aws-sdk-go-v2/service/ecrpublic v1.33.0 h1:wA2O6pZ2r5smqJunFP4hp7qptMW4EQxs8O6RVHPulOE= +github.com/aws/aws-sdk-go-v2/service/ecrpublic v1.33.0/go.mod h1:RZL7ov7c72wSmoM8bIiVxRHgcVdzhNkVW2J36C8RF4s= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.3 h1:eAh2A4b5IzM/lum78bZ590jy36+d/aFLgKF/4Vd1xPE= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.3/go.mod h1:0yKJC/kb8sAnmlYa6Zs3QVYqaC8ug2AbnNChv5Ox3uA= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.15 h1:dM9/92u2F1JbDaGooxTq18wmmFzbJRfXfVfy96/1CXM= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.15/go.mod h1:SwFBy2vjtA0vZbjjaFtfN045boopadnoVPhu4Fv66vY= +github.com/aws/aws-sdk-go-v2/service/sso v1.25.3 h1:1Gw+9ajCV1jogloEv1RRnvfRFia2cL6c9cuKV2Ps+G8= +github.com/aws/aws-sdk-go-v2/service/sso v1.25.3/go.mod h1:qs4a9T5EMLl/Cajiw2TcbNt2UNo/Hqlyp+GiuG4CFDI= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.30.1 h1:hXmVKytPfTy5axZ+fYbR5d0cFmC3JvwLm5kM83luako= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.30.1/go.mod h1:MlYRNmYu/fGPoxBQVvBYr9nyr948aY/WLUvwBMBJubs= +github.com/aws/aws-sdk-go-v2/service/sts v1.33.19 h1:1XuUZ8mYJw9B6lzAkXhqHlJd/XvaX32evhproijJEZY= +github.com/aws/aws-sdk-go-v2/service/sts v1.33.19/go.mod h1:cQnB8CUnxbMU82JvlqjKR2HBOm3fe9pWorWBza6MBJ4= +github.com/aws/smithy-go v1.22.3 h1:Z//5NuZCSW6R4PhQ93hShNbyBbn8BWCmCVCt+Q8Io5k= +github.com/aws/smithy-go v1.22.3/go.mod 
h1:t1ufH5HMublsJYulve2RKmHDC15xu1f26kHCp/HgceI= +github.com/awslabs/amazon-ecr-credential-helper/ecr-login v0.9.1 h1:50sS0RWhGpW/yZx2KcDNEb1u1MANv5BMEkJgcieEDTA= +github.com/awslabs/amazon-ecr-credential-helper/ecr-login v0.9.1/go.mod h1:ErZOtbzuHabipRTDTor0inoRlYwbsV1ovwSxjGs/uJo= +github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= +github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= +github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= +github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= +github.com/buildpacks/lifecycle v0.19.3 h1:T6dwX+/Nq7Q41Pb2zVu54MLrJPt93KEMNj4dHkXINbA= +github.com/buildpacks/lifecycle v0.19.3/go.mod h1:BoLvGP1fjOqab59dariHDhVh5uIQuQ7yoIfj0orvL8M= +github.com/cenkalti/backoff/v5 v5.0.3 h1:ZN+IMa753KfX5hd8vVaMixjnqRZ3y8CuJKRKj1xcsSM= +github.com/cenkalti/backoff/v5 v5.0.3/go.mod h1:rkhZdG3JZukswDf7f0cwqPNk4K0sa+F97BxZthm/crw= +github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= +github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/chrismellard/docker-credential-acr-env v0.0.0-20230304212654-82a0ddb27589 h1:krfRl01rzPzxSxyLyrChD+U+MzsBXbm0OwYYB67uF+4= +github.com/chrismellard/docker-credential-acr-env v0.0.0-20230304212654-82a0ddb27589/go.mod h1:OuDyvmLnMCwa2ep4Jkm6nyA0ocJuZlGyk2gGseVzERM= +github.com/containerd/errdefs v1.0.0 h1:tg5yIfIlQIrxYtu9ajqY42W3lpS19XqdxRQeEwYG8PI= +github.com/containerd/errdefs v1.0.0/go.mod h1:+YBYIdtsnF4Iw6nWZhJcqGSg/dwvV7tyJ/kCkyJ2k+M= +github.com/containerd/errdefs/pkg v0.3.0 h1:9IKJ06FvyNlexW690DXuQNx2KA2cUJXx151Xdx3ZPPE= +github.com/containerd/errdefs/pkg v0.3.0/go.mod h1:NJw6s9HwNuRhnjJhM7pylWwMyAkmCQvQ4GpJHEqRLVk= +github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I= +github.com/containerd/log v0.1.0/go.mod 
h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo= +github.com/containerd/stargz-snapshotter/estargz v0.16.3 h1:7evrXtoh1mSbGj/pfRccTampEyKpjpOnS3CyiV1Ebr8= +github.com/containerd/stargz-snapshotter/estargz v0.16.3/go.mod h1:uyr4BfYfOj3G9WBVE8cOlQmXAbPN9VEQpBBeJIuOipU= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= -github.com/docker/cli v0.0.0-20191017083524-a8ff7f821017 h1:2HQmlpI3yI9deH18Q6xiSOIjXD4sLI55Y/gfpa8/558= -github.com/docker/cli v0.0.0-20191017083524-a8ff7f821017/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= -github.com/docker/distribution v2.6.0-rc.1.0.20180327202408-83389a148052+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= -github.com/docker/distribution v2.7.1+incompatible h1:a5mlkVzth6W5A4fOsS3D2EO5BUmsJpcB+cRlLU7cSug= -github.com/docker/distribution v2.7.1+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= -github.com/docker/docker v1.4.2-0.20190924003213-a8608b5b67c7 h1:Cvj7S8I4Xpx78KAl6TwTmMHuHlZ/0SM60NUneGJQ7IE= -github.com/docker/docker v1.4.2-0.20190924003213-a8608b5b67c7/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= -github.com/docker/docker-credential-helpers v0.6.3 h1:zI2p9+1NQYdnG6sMU26EX4aVGlqbInSQxQXLvzJ4RPQ= -github.com/docker/docker-credential-helpers v0.6.3/go.mod h1:WRaJzqw3CTB9bk10avuGsjVBZsD05qeibJ1/TYlvc0Y= -github.com/docker/go-connections v0.4.0 h1:El9xVISelRB7BuFusrZozjnkIM5YnzCViNKohAFqRJQ= -github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec= -github.com/docker/go-units v0.4.0 h1:3uh0PgVws3nIA0Q+MwDC8yjEPf9zjRfZZWXZYDct3Tw= -github.com/docker/go-units v0.4.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= 
-github.com/fatih/color v1.7.0 h1:DkWD4oS2D8LGGgTQ6IvwJJXSL5Vp2ffcQg58nFV38Ys= -github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= -github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= -github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= +github.com/dimchansky/utfbom v1.1.1 h1:vV6w1AhK4VMnhBno/TPVCoK9U/LP0PkLCS9tbxHdi/U= +github.com/dimchansky/utfbom v1.1.1/go.mod h1:SxdoEBH5qIqFocHMyGOXVAybYJdr71b1Q/j0mACtrfE= +github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk= +github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E= +github.com/docker/cli v28.2.2+incompatible h1:qzx5BNUDFqlvyq4AHzdNB7gSyVTmU4cgsyN9SdInc1A= +github.com/docker/cli v28.2.2+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= +github.com/docker/distribution v2.8.3+incompatible h1:AtKxIZ36LoNK51+Z6RpzLpddBirtxJnzDrHLEKxTAYk= +github.com/docker/distribution v2.8.3+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= +github.com/docker/docker v28.5.1+incompatible h1:Bm8DchhSD2J6PsFzxC35TZo4TLGR2PdW/E69rU45NhM= +github.com/docker/docker v28.5.1+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/docker-credential-helpers v0.9.3 h1:gAm/VtF9wgqJMoxzT3Gj5p4AqIjCBS4wrsOh9yRqcz8= +github.com/docker/docker-credential-helpers v0.9.3/go.mod h1:x+4Gbw9aGmChi3qTLZj8Dfn0TD20M/fuWy0E5+WDeCo= +github.com/docker/go-connections v0.5.0 h1:USnMq7hx7gwdVZq1L49hLXaFtUdTADjXGp+uj1Br63c= +github.com/docker/go-connections v0.5.0/go.mod h1:ov60Kzw0kKElRwhNs9UlUHAE/F9Fe6GLaXnqyDdmEXc= +github.com/docker/go-metrics v0.0.1 h1:AgB/0SvBxihN0X8OR4SjsblXkbMvalQ8cjmtKQ2rQV8= +github.com/docker/go-metrics v0.0.1/go.mod h1:cG1hvH2utMXtqgqqYE9plW6lDxS3/5ayHzueweSI3Vw= +github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4= +github.com/docker/go-units v0.5.0/go.mod 
h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= +github.com/docker/libtrust v0.0.0-20160708172513-aabc10ec26b7 h1:UhxFibDNY/bfvqU5CAUmr9zpesgbU6SWc8/B4mflAE4= +github.com/docker/libtrust v0.0.0-20160708172513-aabc10ec26b7/go.mod h1:cyGadeNEkKy96OOhEzfZl+yxihPEzKnqJwvfuSUqbZE= +github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= +github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= +github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= +github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI= +github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= -github.com/gogo/protobuf v1.2.1 h1:/s5zKNz0uPFCZ5hddgPdo2TK2TVrUNMn0OOX8/aZMTE= -github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4= -github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b h1:VKtxabqXZkF25pY9ekfRL6a582T4P37/31XEstQ5p58= -github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= -github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= -github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= +github.com/golang-jwt/jwt/v4 v4.0.0/go.mod h1:/xlHOz8bRuivTWchD4jCa+NbatV+wEUSzwAxVc6locg= 
+github.com/golang-jwt/jwt/v4 v4.2.0/go.mod h1:/xlHOz8bRuivTWchD4jCa+NbatV+wEUSzwAxVc6locg= +github.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= +github.com/golang-jwt/jwt/v4 v4.5.2 h1:YtQM7lnr8iZ+j5q71MGKkNw9Mn7AjHM68uc9g5fXeUI= +github.com/golang-jwt/jwt/v4 v4.5.2/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.2 h1:6nsPYzhq5kReh6QImI3k5qWzO4PEbvbIW2cwSfR/6xs= +github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/google/btree v0.0.0-20180124185431-e89373fe6b4a/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= -github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= -github.com/google/go-cmp v0.3.0 h1:crn/baboCvb5fXaQ0IJ1SGTsTVrWpDsCWC8EGETZijY= github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= -github.com/google/go-containerregistry v0.0.0-20191018211754-b77a90c667af h1:gCcst+zXdvatJYFDtPd3C3VNEGcoFOOMwFydnFIfnF0= -github.com/google/go-containerregistry v0.0.0-20191018211754-b77a90c667af/go.mod h1:9kIomAeXUmwhqeYS2zoEuQ0sc2GOVmNW7t3y9aNQL1o= -github.com/google/gofuzz v0.0.0-20170612174753-24818f796faf/go.mod h1:HP5RmnzzSNb993RKQDq4+1A4ia9nllfqcQFTQJedwGI= -github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= -github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/googleapis/gnostic v0.2.2/go.mod h1:sJBsCZ4ayReDTBIg8b9dl28c5xFWyhBTVRp3pOg5EKY= -github.com/gorilla/mux v1.7.3 h1:gnP5JzjVOuiZD07fKKToCAOjS0yOpj/qPETTXCCS6hw= -github.com/gorilla/mux v1.7.3/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= -github.com/gregjones/httpcache v0.0.0-20180305231024-9cad4c3443a7/go.mod 
h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA= -github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= -github.com/heroku/color v0.0.6 h1:UTFFMrmMLFcL3OweqP1lAdp8i1y/9oHqkeHjQ/b/Ny0= -github.com/heroku/color v0.0.6/go.mod h1:ZBvOcx7cTF2QKOv4LbmoBtNl5uB17qWxGuzZrsi1wLU= -github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= -github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= -github.com/jmespath/go-jmespath v0.0.0-20160202185014-0b12d6b521d8/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= -github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= -github.com/jpillora/backoff v0.0.0-20180909062703-3050d21c67d7/go.mod h1:2iMrUgbbvHEiQClaW2NsSzMyGHqN+rDFqY705q49KG0= -github.com/json-iterator/go v0.0.0-20180701071628-ab8a2e0c74be/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= -github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q= -github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= +github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +github.com/google/go-containerregistry v0.19.2 h1:TannFKE1QSajsP6hPWb5oJNgKe1IKjHukIKDUmvsV6w= +github.com/google/go-containerregistry v0.19.2/go.mod h1:YCMFNQeeXeLF+dnhhWkqDItx/JSkH01j1Kis4PsjzFI= +github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY= +github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.2 
h1:8Tjv8EJ+pM1xP8mK6egEbD1OgnVTyacbefKhmbLhIhU= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.2/go.mod h1:pkJQ2tZHJ0aFOVEEot6oZmaVEZcRme73eIFmhiVuRWs= +github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= +github.com/json-iterator/go v1.1.7/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= +github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo= +github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= -github.com/konsorten/go-windows-terminal-sequences v1.0.2 h1:DB17ag19krx9CFsz4o3enTrPXyIXCl+2iCXH/aMAp9s= -github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= -github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI= -github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= -github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= -github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= -github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= -github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= -github.com/mattn/go-colorable v0.1.1/go.mod h1:FuOcm+DKB9mbwrcAfNl7/TZVBZ6rcnceauSikq3lYCQ= -github.com/mattn/go-colorable v0.1.2 h1:/bC9yWikZXAL9uJdulbSfyVNIR3n3trXl+v8+1sx8mU= -github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= -github.com/mattn/go-colorable v0.1.4 h1:snbPLB8fVfU9iwbbo30TPtbLRzwWu6aJS6Xh4eaaviA= -github.com/mattn/go-colorable v0.1.4/go.mod 
h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= -github.com/mattn/go-isatty v0.0.5/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= -github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= -github.com/mattn/go-isatty v0.0.10 h1:qxFzApOv4WsAL965uUPIsXzAKCZxN2p9UqdhFS4ZW10= -github.com/mattn/go-isatty v0.0.10/go.mod h1:qgIWMr58cqv1PHHyhnkY9lrL7etaEgOFcMEpPG5Rm84= -github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE= +github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= +github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= +github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= +github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= -github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= +github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0= +github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo= +github.com/moby/sys/atomicwriter v0.1.0 h1:kw5D/EqkBwsBFi0ss9v1VG3wIkVhzGvLklJ+w3A14Sw= +github.com/moby/sys/atomicwriter v0.1.0/go.mod h1:Ul8oqv2ZMNHOceF643P6FKPXeCmYtlQMvpizfsSoaWs= +github.com/moby/sys/sequential v0.6.0 h1:qrx7XFUd/5DxtqcoH1h438hF5TmOvzC/lspjy7zgvCU= +github.com/moby/sys/sequential v0.6.0/go.mod h1:uyv8EUTrca5PnDsdMGXhZe6CCe8U/UiTWd+lL+7b/Ko= +github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0= +github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod 
h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A= github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= -github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= -github.com/onsi/ginkgo v1.10.1/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= -github.com/onsi/gomega v1.5.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= -github.com/onsi/gomega v1.7.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= -github.com/opencontainers/go-digest v1.0.0-rc1 h1:WzifXhOVOEOuFYOJAW6aQqW0TooG2iki3E3Ii+WN7gQ= -github.com/opencontainers/go-digest v1.0.0-rc1/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s= -github.com/opencontainers/image-spec v1.0.1 h1:JMemWkRwHx4Zj+fVxWoMCFm/8sYGGrUVojFA6h/TRcI= -github.com/opencontainers/image-spec v1.0.1/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0= -github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= -github.com/peterbourgon/diskv v2.0.1+incompatible/go.mod h1:uqqh8zWWbv1HBMNONnaR/tNboyR3/BZd58JJSHlUSCU= -github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I= -github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA= +github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= +github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= 
+github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= +github.com/opencontainers/image-spec v1.1.1 h1:y0fUlFfIZhPF1W537XOLg0/fcx6zcHCJwooC2xJA040= +github.com/opencontainers/image-spec v1.1.1/go.mod h1:qpqAh3Dmcf36wStyyWU+kCeDgrGnAve2nCC8+7h8Q0M= +github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/rogpeppe/fastuuid v1.1.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= -github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= -github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g= -github.com/sclevine/spec v1.0.0/go.mod h1:W4J29eT/Kzv7/b9IWLB055Z+qvVC9vt0Arko24q7p+U= -github.com/sclevine/spec v1.2.0 h1:1Jwdf9jSfDl9NVmt8ndHqbTZ7XCCPbh1jI3hkDBHVYA= -github.com/sclevine/spec v1.2.0/go.mod h1:W4J29eT/Kzv7/b9IWLB055Z+qvVC9vt0Arko24q7p+U= -github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= -github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q= -github.com/sirupsen/logrus v1.4.2 h1:SPIRibHv4MatM3XXNO2BJeFLZwZ2LvZgfQ5+UNI2im4= -github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= -github.com/smartystreets/assertions v1.0.0/go.mod h1:kHHU4qYBaI3q23Pp3VPrmWhuIUrLW/7eUrw0BU5VaoM= -github.com/smartystreets/go-aws-auth v0.0.0-20180515143844-0c1422d1fdb9/go.mod h1:SnhjPscd9TpLiy1LpzGSKh3bXCfxxXuqd9xmQJy3slM= -github.com/smartystreets/gunit v1.0.0/go.mod h1:qwPWnhz6pn0NnRBP++URONOVyNkPyr4SauJk4cUOwJs= -github.com/spf13/afero v1.1.2/go.mod 
h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ= -github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= -github.com/spf13/cobra v0.0.5/go.mod h1:3K3wKZymM7VvHMDS9+Akkh4K60UwM26emMESw8tLCHU= -github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo= -github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= -github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= -github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s= +github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= +github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= +github.com/prometheus/client_golang v1.1.0/go.mod h1:I1FGZT9+L76gKKOs5djB6ezCbFQP1xR9D75/vuwEF3g= +github.com/prometheus/client_golang v1.22.0 h1:rb93p9lokFEsctTys46VnV1kLCDpVZ0a/Y92Vm0Zc6Q= +github.com/prometheus/client_golang v1.22.0/go.mod h1:R7ljNsLXhuQXYZYtw6GAE9AZg8Y7vEW5scdCXrWRXC0= +github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.6.2 h1:oBsgwpGs7iVziMvrGhE53c/GrLUsZdHnqNwqPLxwZyk= +github.com/prometheus/client_model v0.6.2/go.mod h1:y3m2F6Gdpfy6Ut/GBsUqTWZqCUvMVzSfMLjcu6wAwpE= +github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/common v0.6.0/go.mod h1:eBmuwkDJBwy6iBfxCBob6t6dR6ENT/y+J+Zk0j9GMYc= +github.com/prometheus/common v0.64.0 h1:pdZeA+g617P7oGv1CzdTzyeShxAGrTBsolKNOLQPGO4= +github.com/prometheus/common v0.64.0/go.mod h1:0gZns+BLRQ3V6NdaerOhMbwwRbNh9hkGINtQAsP5GS8= +github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= 
+github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= +github.com/prometheus/procfs v0.0.3/go.mod h1:4A/X28fw3Fc593LaREMrKMqOKvUAntwMDaekg4FpcdQ= +github.com/prometheus/procfs v0.16.1 h1:hZ15bTNuirocR6u0JZ6BAHHmwS1p8B4P6MRqxtzMyRg= +github.com/prometheus/procfs v0.16.1/go.mod h1:teAbpZRB1iIAJYREa1LsoWUXykVXA1KlTmWl8x/U+Is= +github.com/sclevine/spec v1.4.0 h1:z/Q9idDcay5m5irkZ28M7PtQM4aOISzOpj4bUPkDee8= +github.com/sclevine/spec v1.4.0/go.mod h1:LvpgJaFyvQzRvc1kaDs0bulYwzC70PbiYjC4QnFHkOM= +github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= +github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= +github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= -github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk= -github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= -github.com/tj/assert v0.0.0-20171129193455-018094318fb0/go.mod h1:mZ9/Rh9oLWpLLDRpvE+3b7gP/C2YyLFYxNmcLnPTMe0= -github.com/tj/go-elastic v0.0.0-20171221160941-36157cbbebc2/go.mod h1:WjeM0Oo1eNAjXGDx2yma7uG2XoyRZTq1uv3M/o7imD0= -github.com/tj/go-kinesis v0.0.0-20171128231115-08b17f58cb1b/go.mod h1:/yhzCV0xPfx6jb1bBgRFjl5lytqVqZXEaeqWP8lTEao= -github.com/tj/go-spin v1.1.0/go.mod h1:Mg1mzmePZm4dva8Qz60H2lHwmJ2loum4VIrLgVnKwh4= -github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod 
h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0= -github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q= -golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= +github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= +github.com/vbatts/tar-split v0.12.1 h1:CqKoORW7BUWBe7UL/iqTVvkTBOF8UvOMKOIZykxnnbo= +github.com/vbatts/tar-split v0.12.1/go.mod h1:eF6B6i6ftWQcDqEn3/iGFRFRo8cBIMSJVOpnNdfTMFA= +github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= +go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 h1:F7Jx+6hwnZ41NSFTO5q4LYDtJRXBf2PD0rNBkeB/lus= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0/go.mod h1:UHB22Z8QsdRDrnAtX4PntOl36ajSxcdUMt1sF7Y6E7Q= +go.opentelemetry.io/otel v1.38.0 h1:RkfdswUDRimDg0m2Az18RKOsnI8UDzppJAtj01/Ymk8= +go.opentelemetry.io/otel v1.38.0/go.mod h1:zcmtmQ1+YmQM9wrNsTGV/q/uyusom3P8RxwExxkZhjM= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.38.0 h1:GqRJVj7UmLjCVyVJ3ZFLdPRmhDUp2zFmQe3RHIOsw24= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.38.0/go.mod h1:ri3aaHSmCTVYu2AWv44YMauwAQc0aqI9gHKIcSbI1pU= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.38.0 
h1:aTL7F04bJHUlztTsNGJ2l+6he8c+y/b//eR0jjjemT4= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.38.0/go.mod h1:kldtb7jDTeol0l3ewcmd8SDvx3EmIE7lyvqbasU3QC4= +go.opentelemetry.io/otel/metric v1.38.0 h1:Kl6lzIYGAh5M159u9NgiRkmoMKjvbsKtYRwgfrA6WpA= +go.opentelemetry.io/otel/metric v1.38.0/go.mod h1:kB5n/QoRM8YwmUahxvI3bO34eVtQf2i4utNVLr9gEmI= +go.opentelemetry.io/otel/sdk v1.38.0 h1:l48sr5YbNf2hpCUj/FoGhW9yDkl+Ma+LrVl8qaM5b+E= +go.opentelemetry.io/otel/sdk v1.38.0/go.mod h1:ghmNdGlVemJI3+ZB5iDEuk4bWA3GkTpW+DOoZMYBVVg= +go.opentelemetry.io/otel/sdk/metric v1.38.0 h1:aSH66iL0aZqo//xXzQLYozmWrXxyFkBJ6qT5wthqPoM= +go.opentelemetry.io/otel/sdk/metric v1.38.0/go.mod h1:dg9PBnW9XdQ1Hd6ZnRz689CbtrUp0wMMs9iPcgT9EZA= +go.opentelemetry.io/otel/trace v1.38.0 h1:Fxk5bKrDZJUH+AMyyIXGcFAPah0oRcT+LuNtJrmcNLE= +go.opentelemetry.io/otel/trace v1.38.0/go.mod h1:j1P9ivuFsTceSWe1oY+EeW3sc+Pp42sO++GHkg4wwhs= +go.opentelemetry.io/proto/otlp v1.9.0 h1:l706jCMITVouPOqEnii2fIAuO3IVGBRPV5ICjceRb/A= +go.opentelemetry.io/proto/otlp v1.9.0/go.mod h1:xE+Cx5E/eEHw+ISFkwPLwCZefwVjY+pqKg1qcK03+/4= +golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20190426145343-a29dc8fdc734/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= -golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= -golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint 
v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= -golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= +golang.org/x/crypto v0.38.0 h1:jt+WWG8IZlBnVbomuhg2Mdq0+BBQaHbtqHEFEigjUV8= +golang.org/x/crypto v0.38.0/go.mod h1:MvrbAqul58NNYPKnOra203SB9vpuZW0e+RRZV+Ggqjw= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190724013045-ca1201d0de80 h1:Ao/3l156eZf2AW5wK8a7/smtodRU+gha3+BeqJ69lRk= -golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod 
h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= -golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f h1:wMNYb4v58l5UBM7MYRLPG6ZhfOqbKu7X5eyFl8ZhKvA= -golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= +golang.org/x/net v0.43.0 h1:lat02VYK2j4aLzMzecihNvTlJNQUq316m2Mr9rnM6YE= +golang.org/x/net v0.43.0/go.mod h1:vhO1fvI4dGsIjh73sWfUVjj3N7CA9WkKJNQm2svM6Jg= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190423024810-112230192c58 h1:8gQV6CLnAEikrhgkHFbMAEhagSSnXWGV915qUMm9mrU= +golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.15.0 h1:KWH3jNZsfyT6xfAfKiz6MRNmd46ByHDYaZ7KSkCtdW8= +golang.org/x/sync v0.15.0/go.mod 
h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191010194322-b09406accb47 h1:/XfQ9z7ib8eEJX2hdgFTZJ/ntt0swNk5oYBziWeTCvY= -golang.org/x/sys v0.0.0-20191010194322-b09406accb47/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190801041406-cbf593c0f2f3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= 
+golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.35.0 h1:vz1N37gP5bs89s7He8XuIYXpyY0+QlsKmzipCbUtyxI= +golang.org/x/sys v0.35.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= +golang.org/x/term v0.15.0/go.mod h1:BDl952bC7+uMoWR75FIrCDx79TPU9oHkTZ9yRbYOrX0= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs= -golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= -golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20190308202827-9d24e82272b4 h1:SvFZT6jyqRaOeXpc5h/JSfZenJ2O330aBsf7JfSUXmQ= -golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.9.0/go.mod 
h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= +golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.28.0 h1:rhazDwis8INMIwQ4tpjLDzUhx6RlXqZNPEM0huQojng= +golang.org/x/text v0.28.0/go.mod h1:U8nCwOR8jO/marOQ0QbDiOngZVEBB7MAiitBuMjXiNU= +golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk= +golang.org/x/time v0.5.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= -golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20191017205301-920acffc3e65/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= +golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= -google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod 
h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= -google.golang.org/genproto v0.0.0-20190508193815-b515fa19cec8 h1:x913Lq/RebkvUmRSdQ8MNb0GZKn+SR1ESfoetcQSeak= -google.golang.org/genproto v0.0.0-20190508193815-b515fa19cec8/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= -google.golang.org/grpc v1.24.0 h1:vb/1TCsVn3DcJlQ0Gs1yB1pKI6Do2/QNwxdKqmc/b0s= -google.golang.org/grpc v1.24.0/go.mod h1:XDChyiUovWa60DnaeDeZmSW86xtLtjtZbwvSiRnRtcA= +google.golang.org/genproto v0.0.0-20240213162025-012b6fc9bca9 h1:9+tzLLstTlPTRyJTh+ah5wIMsBW5c4tQwGTN3thOW9Y= +google.golang.org/genproto/googleapis/api v0.0.0-20250825161204-c5933d9347a5 h1:BIRfGDEjiHRrk0QKZe3Xv2ieMhtgRGeLcZQ0mIVn4EY= +google.golang.org/genproto/googleapis/api v0.0.0-20250825161204-c5933d9347a5/go.mod h1:j3QtIyytwqGr1JUDtYXwtMXWPKsEa5LtzIFN1Wn5WvE= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250825161204-c5933d9347a5 h1:eaY8u2EuxbRv7c3NiGK0/NedzVsCcV6hDuU5qPX5EGE= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250825161204-c5933d9347a5/go.mod h1:M4/wBTSeyLxupu3W3tJtOgB14jILAS/XWPSSa3TAlJc= +google.golang.org/grpc v1.75.1 h1:/ODCNEuf9VghjgO3rqLcfg8fiOP0nSluljWFlDxELLI= +google.golang.org/grpc v1.75.1/go.mod h1:JtPAzKiq4v1xcAB2hydNlWI2RnF85XXcV0mhKXr2ecQ= +google.golang.org/protobuf v1.36.10 h1:AYd7cD/uASjIL6Q9LiTjz8JLcrh/88q5UObnmY3aOOE= +google.golang.org/protobuf v1.36.10/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco= +gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY= -gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= 
-gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= -gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= -gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw= -gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gotest.tools v2.2.0+incompatible h1:VsBPFP1AI068pPrMxtb/S8Zkgf9xEmTLJjfM+P5UIEo= -gotest.tools v2.2.0+incompatible/go.mod h1:DsYFclhRJ6vuDpmuTbkuFWG+y2sxOXAzmJt81HFBacw= -honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= -k8s.io/api v0.0.0-20180904230853-4e7be11eab3f/go.mod h1:iuAfoD4hCxJ8Onx9kaTIt30j7jUFS00AXQi6QMi99vA= -k8s.io/apimachinery v0.0.0-20180904193909-def12e63c512/go.mod h1:ccL7Eh7zubPUSh9A3USN90/OzHNSVN6zxzde07TDCL0= -k8s.io/client-go v0.0.0-20180910083459-2cefa64ff137/go.mod h1:7vJpHMYJwNQCWgzmNV+VYUl1zCObLyodBc8nIyt8L5s= -k8s.io/kube-openapi v0.0.0-20180731170545-e3762e86a74c/go.mod h1:BXM9ceUBTj2QnfH2MK1odQs778ajze1RxcmP6S8RVVc= -k8s.io/kubernetes v1.11.10/go.mod h1:ocZa8+6APFNC2tX1DZASIbocyYT5jHzqFVsY5aoB7Jk= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gotest.tools/v3 v3.0.3 h1:4AuOwCGf4lLR9u3YOe2awrHygurzhO/HeQ6laiA6Sx0= +gotest.tools/v3 v3.0.3/go.mod h1:Z7Lb0S5l+klDB31fvDQX8ss/FlKDxtlFlw3Oa8Ymbl8= diff --git a/internal/build/testdata/fake-lifecycle/phase.go 
b/internal/build/testdata/fake-lifecycle/phase.go index 33871f428d..b097b806e1 100644 --- a/internal/build/testdata/fake-lifecycle/phase.go +++ b/internal/build/testdata/fake-lifecycle/phase.go @@ -3,7 +3,6 @@ package main import ( "context" "fmt" - "io/ioutil" "net" "net/http" "os" @@ -12,7 +11,7 @@ import ( "syscall" "github.com/buildpacks/lifecycle/auth" - "github.com/docker/docker/api/types" + "github.com/docker/docker/api/types/container" dockercli "github.com/docker/docker/client" v1remote "github.com/google/go-containerregistry/pkg/v1/remote" ) @@ -72,12 +71,12 @@ func testWrite(filename, contents string) { func testDaemon() { fmt.Println("daemon test") - cli, err := dockercli.NewClientWithOpts(dockercli.FromEnv, dockercli.WithVersion("1.38")) + cli, err := dockercli.NewClientWithOpts(dockercli.FromEnv, dockercli.WithAPIVersionNegotiation()) if err != nil { fmt.Printf("failed to create new docker client: %s\n", err) os.Exit(1) } - _, err = cli.ContainerList(context.TODO(), types.ContainerListOptions{}) + _, err = cli.ContainerList(context.TODO(), container.ListOptions{}) if err != nil { fmt.Printf("failed to access docker daemon: %s\n", err) os.Exit(1) @@ -87,7 +86,12 @@ func testDaemon() { func testRegistryAccess(repoName string) { fmt.Println("registry test") fmt.Printf("CNB_REGISTRY_AUTH=%+v\n", os.Getenv("CNB_REGISTRY_AUTH")) - ref, authenticator, err := auth.ReferenceForRepoName(auth.EnvKeychain("CNB_REGISTRY_AUTH"), repoName) + keychain, err := auth.NewEnvKeychain("CNB_REGISTRY_AUTH") + if err != nil { + fmt.Println("fail creating keychain:", err) + os.Exit(1) + } + ref, authenticator, err := auth.ReferenceForRepoName(keychain, repoName) if err != nil { fmt.Println("fail:", err) os.Exit(1) @@ -102,7 +106,7 @@ func testRegistryAccess(repoName string) { func testRead(filename string) { fmt.Println("read test") - contents, err := ioutil.ReadFile(filepath.Clean(filename)) + contents, err := os.ReadFile(filepath.Clean(filename)) if err != nil { 
fmt.Printf("failed to read file '%s'\n", filename) os.Exit(1) @@ -120,13 +124,13 @@ func testRead(filename string) { func testEnv() { fmt.Println("env test") - fis, err := ioutil.ReadDir("/platform/env") + fis, err := os.ReadDir("/platform/env") if err != nil { fmt.Printf("failed to read /plaform/env dir: %s\n", err) os.Exit(1) } for _, fi := range fis { - contents, err := ioutil.ReadFile(filepath.Join("/", "platform", "env", fi.Name())) + contents, err := os.ReadFile(filepath.Join("/", "platform", "env", fi.Name())) if err != nil { fmt.Printf("failed to read file /plaform/env/%s: %s\n", fi.Name(), err) os.Exit(1) @@ -186,14 +190,19 @@ func testBinds() { } func readDir(dir string) { - fis, err := ioutil.ReadDir(dir) + fis, err := os.ReadDir(dir) if err != nil { fmt.Printf("failed to read %s dir: %s\n", dir, err) os.Exit(1) } for _, fi := range fis { absPath := filepath.Join(dir, fi.Name()) - stat := fi.Sys().(*syscall.Stat_t) + info, err := fi.Info() + if err != nil { + fmt.Printf("failed to dir info %s err: %s\n", fi.Name(), err) + os.Exit(1) + } + stat := info.Sys().(*syscall.Stat_t) fmt.Printf("%s %d/%d \n", absPath, stat.Uid, stat.Gid) if fi.IsDir() { readDir(absPath) diff --git a/internal/build/testdata/fake-lifecycle/test-phase b/internal/build/testdata/fake-lifecycle/test-phase new file mode 100755 index 0000000000..25c77951bc Binary files /dev/null and b/internal/build/testdata/fake-lifecycle/test-phase differ diff --git a/internal/builder/builder.go b/internal/builder/builder.go index 9bf14b233b..5c1c95d1d5 100644 --- a/internal/builder/builder.go +++ b/internal/builder/builder.go @@ -3,13 +3,14 @@ package builder import ( "archive/tar" "bytes" + e "errors" "fmt" "io" - "io/ioutil" "os" "path" "path/filepath" "regexp" + "sort" "strconv" "strings" "time" @@ -21,13 +22,18 @@ import ( "github.com/buildpacks/pack/builder" "github.com/buildpacks/pack/internal/layer" "github.com/buildpacks/pack/internal/stack" + istrings 
"github.com/buildpacks/pack/internal/strings" "github.com/buildpacks/pack/internal/style" "github.com/buildpacks/pack/pkg/archive" "github.com/buildpacks/pack/pkg/buildpack" "github.com/buildpacks/pack/pkg/dist" "github.com/buildpacks/pack/pkg/logging" + + lifecycleplatform "github.com/buildpacks/lifecycle/platform" ) +var buildConfigDir = cnbBuildConfigDir() + const ( packName = "Pack CLI" @@ -36,23 +42,27 @@ const ( orderPath = "/cnb/order.toml" stackPath = "/cnb/stack.toml" + systemPath = "/cnb/system.toml" + runPath = "/cnb/run.toml" platformDir = "/platform" lifecycleDir = "/cnb/lifecycle" compatLifecycleDir = "/lifecycle" workspaceDir = "/workspace" layersDir = "/layers" + emptyTarDiffID = "sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855" + metadataLabel = "io.buildpacks.builder.metadata" stackLabel = "io.buildpacks.stack.id" EnvUID = "CNB_USER_ID" EnvGID = "CNB_GROUP_ID" - BuildpackOnBuilderMessage = `buildpack %s already exists on builder and will be overwritten + ModuleOnBuilderMessage = `%s %s already exists on builder and will be overwritten - existing diffID: %s - new diffID: %s` - BuildpackPreviouslyDefinedMessage = `buildpack %s was previously defined with different contents and will be overwritten + ModulePreviouslyDefinedMessage = `%s %s was previously defined with different contents and will be overwritten - previous diffID: %s - using diffID: %s` ) @@ -60,11 +70,13 @@ const ( // Builder represents a pack builder, used to build images type Builder struct { baseImageName string + buildConfigEnv map[string]string image imgutil.Image layerWriterFactory archive.TarWriterFactory lifecycle Lifecycle lifecycleDescriptor LifecycleDescriptor - additionalBuildpacks []buildpack.Buildpack + additionalBuildpacks buildpack.ManagedCollection + additionalExtensions buildpack.ManagedCollection metadata Metadata mixins []string env map[string]string @@ -72,33 +84,81 @@ type Builder struct { StackID string replaceOrder bool order dist.Order + 
orderExtensions dist.Order + system dist.System + validateMixins bool + saveProhibited bool } type orderTOML struct { - Order dist.Order `toml:"order"` + Order dist.Order `toml:"order,omitempty"` + OrderExt dist.Order `toml:"order-extensions,omitempty"` +} + +type systemTOML struct { + System dist.System `toml:"system"` +} + +// moduleWithDiffID is a Build Module which content was written on disk in a tar file and the content hash was calculated +type moduleWithDiffID struct { + tarPath string + diffID string + module buildpack.BuildModule +} + +type BuilderOption func(*options) error + +type options struct { + toFlatten buildpack.FlattenModuleInfos + labels map[string]string + runImage string + saveProhibited bool +} + +func WithRunImage(name string) BuilderOption { + return func(o *options) error { + o.runImage = name + return nil + } +} + +func WithoutSave() BuilderOption { + return func(o *options) error { + o.saveProhibited = true + return nil + } } // FromImage constructs a builder from a builder image func FromImage(img imgutil.Image) (*Builder, error) { + return constructBuilder(img, "", true) +} + +// New constructs a new builder from a base image +func New(baseImage imgutil.Image, name string, ops ...BuilderOption) (*Builder, error) { + return constructBuilder(baseImage, name, false, ops...) 
+} + +func constructBuilder(img imgutil.Image, newName string, errOnMissingLabel bool, ops ...BuilderOption) (*Builder, error) { var metadata Metadata if ok, err := dist.GetLabel(img, metadataLabel, &metadata); err != nil { return nil, errors.Wrapf(err, "getting label %s", metadataLabel) - } else if !ok { + } else if !ok && errOnMissingLabel { return nil, fmt.Errorf("builder %s missing label %s -- try recreating builder", style.Symbol(img.Name()), style.Symbol(metadataLabel)) } - return constructBuilder(img, "", metadata) -} -// New constructs a new builder from a base image -func New(baseImage imgutil.Image, name string) (*Builder, error) { - var metadata Metadata - if _, err := dist.GetLabel(baseImage, metadataLabel, &metadata); err != nil { - return nil, errors.Wrapf(err, "getting label %s", metadataLabel) + system := dist.System{} + if _, err := dist.GetLabel(img, SystemLabel, &system); err != nil { + return nil, errors.Wrapf(err, "getting label %s", SystemLabel) + } + + opts := &options{} + for _, op := range ops { + if err := op(opts); err != nil { + return nil, err + } } - return constructBuilder(baseImage, name, metadata) -} -func constructBuilder(img imgutil.Image, newName string, metadata Metadata) (*Builder, error) { imageOS, err := img.OS() if err != nil { return nil, errors.Wrap(err, "getting image OS") @@ -108,13 +168,32 @@ func constructBuilder(img imgutil.Image, newName string, metadata Metadata) (*Bu return nil, err } + if opts.runImage != "" { + // FIXME: for now the mirrors are gone if you override the run-image (open an issue if preserving the mirrors is desired) + metadata.RunImages = []RunImageMetadata{{Image: opts.runImage}} + metadata.Stack.RunImage = RunImageMetadata{Image: opts.runImage} + } + + for labelKey, labelValue := range opts.labels { + err = img.SetLabel(labelKey, labelValue) + if err != nil { + return nil, errors.Wrapf(err, "adding label %s=%s", labelKey, labelValue) + } + } + bldr := &Builder{ - baseImageName: img.Name(), - 
image: img, - layerWriterFactory: layerWriterFactory, - metadata: metadata, - lifecycleDescriptor: constructLifecycleDescriptor(metadata), - env: map[string]string{}, + baseImageName: img.Name(), + image: img, + layerWriterFactory: layerWriterFactory, + metadata: metadata, + lifecycleDescriptor: constructLifecycleDescriptor(metadata), + env: map[string]string{}, + buildConfigEnv: map[string]string{}, + validateMixins: true, + additionalBuildpacks: buildpack.NewManagedCollectionV2(opts.toFlatten), + additionalExtensions: buildpack.NewManagedCollectionV2(opts.toFlatten), + saveProhibited: opts.saveProhibited, + system: system, } if err := addImgLabelsToBuildr(bldr); err != nil { @@ -128,6 +207,20 @@ func constructBuilder(img imgutil.Image, newName string, metadata Metadata) (*Bu return bldr, nil } +func WithFlattened(modules buildpack.FlattenModuleInfos) BuilderOption { + return func(o *options) error { + o.toFlatten = modules + return nil + } +} + +func WithLabels(labels map[string]string) BuilderOption { + return func(o *options) error { + o.labels = labels + return nil + } +} + func constructLifecycleDescriptor(metadata Metadata) LifecycleDescriptor { return CompatDescriptor(LifecycleDescriptor{ Info: LifecycleInfo{ @@ -149,9 +242,6 @@ func addImgLabelsToBuildr(bldr *Builder) error { if err != nil { return errors.Wrapf(err, "get label %s from image %s", style.Symbol(stackLabel), style.Symbol(bldr.image.Name())) } - if bldr.StackID == "" { - return fmt.Errorf("image %s missing label %s", style.Symbol(bldr.image.Name()), style.Symbol(stackLabel)) - } if _, err = dist.GetLabel(bldr.image, stack.MixinsLabel, &bldr.mixins); err != nil { return errors.Wrapf(err, "getting label %s", stack.MixinsLabel) @@ -161,6 +251,10 @@ func addImgLabelsToBuildr(bldr *Builder) error { return errors.Wrapf(err, "getting label %s", OrderLabel) } + if _, err = dist.GetLabel(bldr.image, OrderExtensionsLabel, &bldr.orderExtensions); err != nil { + return errors.Wrapf(err, "getting label %s", 
OrderExtensionsLabel) + } + return nil } @@ -177,10 +271,15 @@ func (b *Builder) LifecycleDescriptor() LifecycleDescriptor { } // Buildpacks returns the buildpack list -func (b *Builder) Buildpacks() []dist.BuildpackInfo { +func (b *Builder) Buildpacks() []dist.ModuleInfo { return b.metadata.Buildpacks } +// Extensions returns the extensions list +func (b *Builder) Extensions() []dist.ModuleInfo { + return b.metadata.Extensions +} + // CreatedBy returns metadata around the creation of the builder func (b *Builder) CreatedBy() CreatorMetadata { return b.metadata.CreatedBy @@ -191,6 +290,11 @@ func (b *Builder) Order() dist.Order { return b.order } +// OrderExtensions returns the order for extensions +func (b *Builder) OrderExtensions() dist.Order { + return b.orderExtensions +} + // BaseImageName returns the name of the builder base image func (b *Builder) BaseImageName() string { return b.baseImageName @@ -211,6 +315,21 @@ func (b *Builder) Stack() StackMetadata { return b.metadata.Stack } +// System returns the system buildpacks configuration +func (b *Builder) System() dist.System { return b.system } + +// RunImages returns all run image metadata +func (b *Builder) RunImages() []RunImageMetadata { + return append(b.metadata.RunImages, b.Stack().RunImage) +} + +// DefaultRunImage returns the default run image metadata +func (b *Builder) DefaultRunImage() RunImageMetadata { + // run.images are ensured in builder.ValidateConfig() + // per the spec, we use the first one as the default + return b.RunImages()[0] +} + // Mixins returns the mixins of the builder func (b *Builder) Mixins() []string { return b.mixins @@ -226,12 +345,49 @@ func (b *Builder) GID() int { return b.gid } +func (b *Builder) AllModules(kind string) []buildpack.BuildModule { + return b.moduleManager(kind).AllModules() +} + +func (b *Builder) moduleManager(kind string) buildpack.ManagedCollection { + switch kind { + case buildpack.KindBuildpack: + return b.additionalBuildpacks + case 
buildpack.KindExtension: + return b.additionalExtensions + } + return nil +} + +func (b *Builder) FlattenedModules(kind string) [][]buildpack.BuildModule { + manager := b.moduleManager(kind) + return manager.FlattenedModules() +} + +func (b *Builder) ShouldFlatten(module buildpack.BuildModule) bool { + return b.additionalBuildpacks.ShouldFlatten(module) +} + // Setters // AddBuildpack adds a buildpack to the builder -func (b *Builder) AddBuildpack(bp buildpack.Buildpack) { - b.additionalBuildpacks = append(b.additionalBuildpacks, bp) - b.metadata.Buildpacks = append(b.metadata.Buildpacks, bp.Descriptor().Info) +func (b *Builder) AddBuildpack(bp buildpack.BuildModule) { + b.additionalBuildpacks.AddModules(bp) + b.metadata.Buildpacks = append(b.metadata.Buildpacks, bp.Descriptor().Info()) +} + +func (b *Builder) AddBuildpacks(main buildpack.BuildModule, dependencies []buildpack.BuildModule) { + b.additionalBuildpacks.AddModules(main, dependencies...) + b.metadata.Buildpacks = append(b.metadata.Buildpacks, main.Descriptor().Info()) + for _, dep := range dependencies { + b.metadata.Buildpacks = append(b.metadata.Buildpacks, dep.Descriptor().Info()) + } +} + +// AddExtension adds an extension to the builder +func (b *Builder) AddExtension(bp buildpack.BuildModule) { + b.additionalExtensions.AddModules(bp) + b.metadata.Extensions = append(b.metadata.Extensions, bp.Descriptor().Info()) } // SetLifecycle sets the lifecycle of the builder @@ -245,12 +401,30 @@ func (b *Builder) SetEnv(env map[string]string) { b.env = env } +// SetBuildConfigEnv sets an environment variable to a value that will take action on platform environment variables basedon filename suffix +func (b *Builder) SetBuildConfigEnv(env map[string]string) { + b.buildConfigEnv = env +} + // SetOrder sets the order of the builder func (b *Builder) SetOrder(order dist.Order) { b.order = order b.replaceOrder = true } +// SetOrderExtensions sets the order of the builder +func (b *Builder) SetOrderExtensions(order 
dist.Order) { + for i, entry := range order { + for j, ref := range entry.Group { + ref.Optional = false // ensure `optional = true` isn't redundantly printed for extensions (as they are always optional) + entry.Group[j] = ref + } + order[i] = entry + } + b.orderExtensions = order + b.replaceOrder = true +} + // SetDescription sets the description of the builder func (b *Builder) SetDescription(description string) { b.metadata.Description = description @@ -266,19 +440,40 @@ func (b *Builder) SetStack(stackConfig builder.StackConfig) { } } +// SetSystem sets the system buildpacks of the builder +func (b *Builder) SetSystem(system dist.System) { + b.system = system +} + +// SetRunImage sets the run image of the builder +func (b *Builder) SetRunImage(runConfig builder.RunConfig) { + var runImages []RunImageMetadata + for _, i := range runConfig.Images { + runImages = append(runImages, RunImageMetadata{ + Image: i.Image, + Mirrors: i.Mirrors, + }) + } + b.metadata.RunImages = runImages +} + +// SetValidateMixins if true instructs the builder to validate mixins +func (b *Builder) SetValidateMixins(to bool) { + b.validateMixins = to +} + // Save saves the builder -func (b *Builder) Save(logger logging.Logger, creatorMetadata CreatorMetadata) error { +func (b *Builder) Save(logger logging.Logger, creatorMetadata CreatorMetadata, additionalTags ...string) error { + if b.saveProhibited { + return fmt.Errorf("failed to save builder %s as saving is not allowed", b.Name()) + } + logger.Debugf("Creating builder with the following buildpacks:") for _, bpInfo := range b.metadata.Buildpacks { logger.Debugf("-> %s", style.Symbol(bpInfo.FullName())) } - resolvedOrder, err := processOrder(b.metadata.Buildpacks, b.order) - if err != nil { - return errors.Wrap(err, "processing order") - } - - tmpDir, err := ioutil.TempDir("", "create-builder-scratch") + tmpDir, err := os.MkdirTemp("", "create-builder-scratch") if err != nil { return err } @@ -306,36 +501,96 @@ func (b *Builder) 
Save(logger logging.Logger, creatorMetadata CreatorMetadata) e } } - if err := validateBuildpacks(b.StackID, b.Mixins(), b.LifecycleDescriptor(), b.Buildpacks(), b.additionalBuildpacks); err != nil { - return errors.Wrap(err, "validating buildpacks") + if b.validateMixins { + if err := b.validateBuildpacks(); err != nil { + return errors.Wrap(err, "validating buildpacks") + } + } + + if err := validateExtensions(b.lifecycleDescriptor, b.Extensions(), b.AllModules(buildpack.KindExtension)); err != nil { + return errors.Wrap(err, "validating extensions") } - bpLayers := dist.BuildpackLayers{} + bpLayers := dist.ModuleLayers{} if _, err := dist.GetLabel(b.image, dist.BuildpackLayersLabel, &bpLayers); err != nil { return errors.Wrapf(err, "getting label %s", dist.BuildpackLayersLabel) } - err = b.addBuildpacks(logger, tmpDir, b.image, b.additionalBuildpacks, bpLayers) + var excludedBuildpacks []buildpack.BuildModule + excludedBuildpacks, err = b.addFlattenedModules(buildpack.KindBuildpack, logger, tmpDir, b.image, b.additionalBuildpacks.FlattenedModules(), bpLayers) if err != nil { return err } + err = b.addExplodedModules(buildpack.KindBuildpack, logger, tmpDir, b.image, append(b.additionalBuildpacks.ExplodedModules(), excludedBuildpacks...), bpLayers) + if err != nil { + return err + } if err := dist.SetLabel(b.image, dist.BuildpackLayersLabel, bpLayers); err != nil { return err } + extLayers := dist.ModuleLayers{} + if _, err := dist.GetLabel(b.image, dist.ExtensionLayersLabel, &extLayers); err != nil { + return errors.Wrapf(err, "getting label %s", dist.ExtensionLayersLabel) + } + + var excludedExtensions []buildpack.BuildModule + excludedExtensions, err = b.addFlattenedModules(buildpack.KindExtension, logger, tmpDir, b.image, b.additionalExtensions.FlattenedModules(), extLayers) + if err != nil { + return err + } + + err = b.addExplodedModules(buildpack.KindExtension, logger, tmpDir, b.image, append(b.additionalExtensions.ExplodedModules(), excludedExtensions...), 
extLayers) + if err != nil { + return err + } + + if err := dist.SetLabel(b.image, dist.ExtensionLayersLabel, extLayers); err != nil { + return err + } + if b.replaceOrder { - orderTar, err := b.orderLayer(resolvedOrder, tmpDir) + resolvedOrderBp, err := processOrder(b.metadata.Buildpacks, b.order, buildpack.KindBuildpack) + if err != nil { + return errors.Wrap(err, "processing buildpacks order") + } + resolvedOrderExt, err := processOrder(b.metadata.Extensions, b.orderExtensions, buildpack.KindExtension) + if err != nil { + return errors.Wrap(err, "processing extensions order") + } + + orderTar, err := b.orderLayer(resolvedOrderBp, resolvedOrderExt, tmpDir) if err != nil { return err } if err := b.image.AddLayer(orderTar); err != nil { return errors.Wrap(err, "adding order.tar layer") } - if err := dist.SetLabel(b.image, OrderLabel, b.order); err != nil { return err } + if err := dist.SetLabel(b.image, OrderExtensionsLabel, b.orderExtensions); err != nil { + return err + } + } + + if len(b.system.Pre.Buildpacks) > 0 || len(b.system.Post.Buildpacks) > 0 { + resolvedSystemBp, err := processSystem(b.metadata.Buildpacks, b.system, buildpack.KindBuildpack) + if err != nil { + return errors.Wrap(err, "processing system buildpacks") + } + + systemTar, err := b.systemLayer(resolvedSystemBp, tmpDir) + if err != nil { + return err + } + if err := b.image.AddLayer(systemTar); err != nil { + return errors.Wrap(err, "adding system.tar layer") + } + if err := dist.SetLabel(b.image, SystemLabel, b.system); err != nil { + return err + } } stackTar, err := b.stackLayer(tmpDir) @@ -346,6 +601,26 @@ func (b *Builder) Save(logger logging.Logger, creatorMetadata CreatorMetadata) e return errors.Wrap(err, "adding stack.tar layer") } + runImageTar, err := b.runImageLayer(tmpDir) + if err != nil { + return err + } + if err := b.image.AddLayer(runImageTar); err != nil { + return errors.Wrap(err, "adding run.tar layer") + } + + if len(b.buildConfigEnv) > 0 { + logger.Debugf("Provided Build 
Config Environment Variables\n %s", style.Map(b.env, " ", "\n")) + buildConfigEnvTar, err := b.buildConfigEnvLayer(tmpDir, b.buildConfigEnv) + if err != nil { + return errors.Wrap(err, "retrieving build-config-env layer") + } + + if err := b.image.AddLayer(buildConfigEnvTar); err != nil { + return errors.Wrap(err, "adding build-config-env layer") + } + } + if len(b.env) > 0 { logger.Debugf("Provided Environment Variables\n %s", style.Map(b.env, " ", "\n")) } @@ -377,49 +652,31 @@ func (b *Builder) Save(logger logging.Logger, creatorMetadata CreatorMetadata) e return errors.Wrap(err, "failed to set working dir") } - return b.image.Save() + logger.Debugf("Builder creation completed, starting image save") + err = b.image.Save(additionalTags...) + logger.Debugf("Image save completed") + return err } // Helpers -func (b *Builder) addBuildpacks(logger logging.Logger, tmpDir string, image imgutil.Image, additionalBuildpacks []buildpack.Buildpack, bpLayers dist.BuildpackLayers) error { - type buildpackToAdd struct { - tarPath string - diffID string - buildpack buildpack.Buildpack +func (b *Builder) addExplodedModules(kind string, logger logging.Logger, tmpDir string, image imgutil.Image, additionalModules []buildpack.BuildModule, layers dist.ModuleLayers) error { + collectionToAdd := map[string]moduleWithDiffID{} + toAdd, errs := explodeModules(kind, tmpDir, additionalModules, logger) + if len(errs) > 0 { + return e.Join(errs...) 
} - buildpacksToAdd := map[string]buildpackToAdd{} - for i, bp := range additionalBuildpacks { - // create buildpack directory - bpTmpDir := filepath.Join(tmpDir, strconv.Itoa(i)) - if err := os.MkdirAll(bpTmpDir, os.ModePerm); err != nil { - return errors.Wrap(err, "creating buildpack temp dir") - } - - // create tar file - bpLayerTar, err := buildpack.ToLayerTar(bpTmpDir, bp) - if err != nil { - return err - } - - // generate diff id - diffID, err := dist.LayerDiffID(bpLayerTar) - if err != nil { - return errors.Wrapf(err, - "getting content hashes for buildpack %s", - style.Symbol(bp.Descriptor().Info.FullName()), - ) - } + for i, additionalModule := range toAdd { + info, diffID, layerTar, module := additionalModule.module.Descriptor().Info(), additionalModule.diffID, additionalModule.tarPath, additionalModule.module - bpInfo := bp.Descriptor().Info // check against builder layers - if existingBPInfo, ok := bpLayers[bpInfo.ID][bpInfo.Version]; ok { - if existingBPInfo.LayerDiffID == diffID.String() { - logger.Debugf("Buildpack %s already exists on builder with same contents, skipping...", style.Symbol(bpInfo.FullName())) + if existingInfo, ok := layers[info.ID][info.Version]; ok { + if existingInfo.LayerDiffID == diffID { + logger.Debugf("%s %s already exists on builder with same contents, skipping...", istrings.Title(kind), style.Symbol(info.FullName())) continue } else { - whiteoutsTar, err := b.whiteoutLayer(tmpDir, i, bpInfo) + whiteoutsTar, err := b.whiteoutLayer(tmpDir, i, info) if err != nil { return err } @@ -429,123 +686,211 @@ func (b *Builder) addBuildpacks(logger logging.Logger, tmpDir string, image imgu } } - logger.Debugf(BuildpackOnBuilderMessage, style.Symbol(bpInfo.FullName()), style.Symbol(existingBPInfo.LayerDiffID), style.Symbol(diffID.String())) + logger.Debugf(ModuleOnBuilderMessage, kind, style.Symbol(info.FullName()), style.Symbol(existingInfo.LayerDiffID), style.Symbol(diffID)) } - // check against other buildpacks to be added - if 
otherAdditionalBP, ok := buildpacksToAdd[bp.Descriptor().Info.FullName()]; ok { - if otherAdditionalBP.diffID == diffID.String() { - logger.Debugf("Buildpack %s with same contents is already being added, skipping...", style.Symbol(bpInfo.FullName())) + // check against other modules to be added + if otherAdditionalMod, ok := collectionToAdd[info.FullName()]; ok { + if otherAdditionalMod.diffID == diffID { + logger.Debugf("%s %s with same contents is already being added, skipping...", istrings.Title(kind), style.Symbol(info.FullName())) continue } - logger.Debugf(BuildpackPreviouslyDefinedMessage, style.Symbol(bpInfo.FullName()), style.Symbol(otherAdditionalBP.diffID), style.Symbol(diffID.String())) + logger.Debugf(ModulePreviouslyDefinedMessage, kind, style.Symbol(info.FullName()), style.Symbol(otherAdditionalMod.diffID), style.Symbol(diffID)) } - // note: if same id@version is in additionalBuildpacks, last one wins (see warnings above) - buildpacksToAdd[bp.Descriptor().Info.FullName()] = buildpackToAdd{ - tarPath: bpLayerTar, - diffID: diffID.String(), - buildpack: bp, + // note: if same id@version is in additionalModules, last one wins (see warnings above) + collectionToAdd[info.FullName()] = moduleWithDiffID{ + tarPath: layerTar, + diffID: diffID, + module: module, } } - for _, bp := range buildpacksToAdd { - logger.Debugf("Adding buildpack %s (diffID=%s)", style.Symbol(bp.buildpack.Descriptor().Info.FullName()), bp.diffID) - if err := image.AddLayerWithDiffID(bp.tarPath, bp.diffID); err != nil { + // Fixes 1453 + keys := sortKeys(collectionToAdd) + for _, k := range keys { + module := collectionToAdd[k] + logger.Debugf("Adding %s %s (diffID=%s)", kind, style.Symbol(module.module.Descriptor().Info().FullName()), module.diffID) + if err := image.AddLayerWithDiffID(module.tarPath, module.diffID); err != nil { return errors.Wrapf(err, - "adding layer tar for buildpack %s", - style.Symbol(bp.buildpack.Descriptor().Info.FullName()), + "adding layer tar for %s %s", + 
kind, + style.Symbol(module.module.Descriptor().Info().FullName()), ) } - dist.AddBuildpackToLayersMD(bpLayers, bp.buildpack.Descriptor(), bp.diffID) + dist.AddToLayersMD(layers, module.module.Descriptor(), module.diffID) } return nil } -func processOrder(buildpacks []dist.BuildpackInfo, order dist.Order) (dist.Order, error) { - resolvedOrder := dist.Order{} +func (b *Builder) addFlattenedModules(kind string, logger logging.Logger, tmpDir string, image imgutil.Image, flattenModules [][]buildpack.BuildModule, layers dist.ModuleLayers) ([]buildpack.BuildModule, error) { + collectionToAdd := map[string]moduleWithDiffID{} + var ( + buildModuleExcluded []buildpack.BuildModule + finalTarPath string + err error + ) - for gi, g := range order { - resolvedOrder = append(resolvedOrder, dist.OrderEntry{}) + buildModuleWriter := buildpack.NewBuildModuleWriter(logger, b.layerWriterFactory) - for _, bpRef := range g.Group { - var matchingBps []dist.BuildpackInfo - for _, bp := range buildpacks { - if bpRef.ID == bp.ID { - matchingBps = append(matchingBps, bp) - } + for i, additionalModules := range flattenModules { + modFlattenTmpDir := filepath.Join(tmpDir, fmt.Sprintf("%s-%s-flatten", kind, strconv.Itoa(i))) + if err := os.MkdirAll(modFlattenTmpDir, os.ModePerm); err != nil { + return nil, errors.Wrap(err, "creating flatten temp dir") + } + + finalTarPath, buildModuleExcluded, err = buildModuleWriter.NToLayerTar(modFlattenTmpDir, fmt.Sprintf("%s-flatten-%s", kind, strconv.Itoa(i)), additionalModules, nil) + if err != nil { + return nil, errors.Wrapf(err, "writing layer %s", finalTarPath) + } + + diffID, err := dist.LayerDiffID(finalTarPath) + if err != nil { + return nil, errors.Wrapf(err, "calculating diff layer %s", finalTarPath) + } + + for _, module := range additionalModules { + collectionToAdd[module.Descriptor().Info().FullName()] = moduleWithDiffID{ + tarPath: finalTarPath, + diffID: diffID.String(), + module: module, } + } + } - if len(matchingBps) == 0 { - return 
dist.Order{}, fmt.Errorf("no versions of buildpack %s were found on the builder", style.Symbol(bpRef.ID)) + // Fixes 1453 + keys := sortKeys(collectionToAdd) + diffIDAdded := map[string]string{} + for _, k := range keys { + module := collectionToAdd[k] + bp := module.module + addLayer := true + if b.ShouldFlatten(bp) { + if _, ok := diffIDAdded[module.diffID]; !ok { + diffIDAdded[module.diffID] = module.tarPath + } else { + addLayer = false + logger.Debugf("Squashing %s %s (diffID=%s)", kind, style.Symbol(bp.Descriptor().Info().FullName()), module.diffID) + } + } + if addLayer { + logger.Debugf("Adding %s %s (diffID=%s)", kind, style.Symbol(bp.Descriptor().Info().FullName()), module.diffID) + if err = image.AddLayerWithDiffID(module.tarPath, module.diffID); err != nil { + return nil, errors.Wrapf(err, + "adding layer tar for %s %s", + kind, + style.Symbol(module.module.Descriptor().Info().FullName()), + ) } + } + dist.AddToLayersMD(layers, bp.Descriptor(), module.diffID) + } - if bpRef.Version == "" { - if len(uniqueVersions(matchingBps)) > 1 { - return dist.Order{}, fmt.Errorf("unable to resolve version: multiple versions of %s - must specify an explicit version", style.Symbol(bpRef.ID)) - } + return buildModuleExcluded, nil +} - bpRef.Version = matchingBps[0].Version +func processOrder(modulesOnBuilder []dist.ModuleInfo, order dist.Order, kind string) (dist.Order, error) { + resolved := dist.Order{} + for idx, g := range order { + resolved = append(resolved, dist.OrderEntry{}) + for _, ref := range g.Group { + var err error + if ref, err = resolveRef(modulesOnBuilder, ref, kind); err != nil { + return dist.Order{}, err } + resolved[idx].Group = append(resolved[idx].Group, ref) + } + } + return resolved, nil +} - if !hasBuildpackWithVersion(matchingBps, bpRef.Version) { - return dist.Order{}, fmt.Errorf("buildpack %s with version %s was not found on the builder", style.Symbol(bpRef.ID), style.Symbol(bpRef.Version)) - } +func processSystem(modulesOnBuilder 
[]dist.ModuleInfo, system dist.System, kind string) (dist.System, error) { + resolved := dist.System{} + + // Pre buildpacks + for _, bp := range system.Pre.Buildpacks { + var ( + ref dist.ModuleRef + err error + ) + if ref, err = resolveRef(modulesOnBuilder, bp, kind); err != nil { + return dist.System{}, err + } + resolved.Pre.Buildpacks = append(resolved.Pre.Buildpacks, ref) + } + + // Post buildpacks + for _, bp := range system.Post.Buildpacks { + var ( + ref dist.ModuleRef + err error + ) + if ref, err = resolveRef(modulesOnBuilder, bp, kind); err != nil { + return dist.System{}, err + } + resolved.Post.Buildpacks = append(resolved.Post.Buildpacks, ref) + } + + return resolved, nil +} + +func resolveRef(moduleList []dist.ModuleInfo, ref dist.ModuleRef, kind string) (dist.ModuleRef, error) { + var matching []dist.ModuleInfo + for _, bp := range moduleList { + if ref.ID == bp.ID { + matching = append(matching, bp) + } + } + + if len(matching) == 0 { + return dist.ModuleRef{}, + fmt.Errorf("no versions of %s %s were found on the builder", kind, style.Symbol(ref.ID)) + } - resolvedOrder[gi].Group = append(resolvedOrder[gi].Group, bpRef) + if ref.Version == "" { + if len(uniqueVersions(matching)) > 1 { + return dist.ModuleRef{}, + fmt.Errorf("unable to resolve version: multiple versions of %s - must specify an explicit version", style.Symbol(ref.ID)) } + + ref.Version = matching[0].Version + } + + if !hasElementWithVersion(matching, ref.Version) { + return dist.ModuleRef{}, + fmt.Errorf("%s %s with version %s was not found on the builder", kind, style.Symbol(ref.ID), style.Symbol(ref.Version)) } - return resolvedOrder, nil + return ref, nil } -func hasBuildpackWithVersion(bps []dist.BuildpackInfo, version string) bool { - for _, bp := range bps { - if bp.Version == version { +func hasElementWithVersion(moduleList []dist.ModuleInfo, version string) bool { + for _, el := range moduleList { + if el.Version == version { return true } } return false } -func 
validateBuildpacks(stackID string, mixins []string, lifecycleDescriptor LifecycleDescriptor, allBuildpacks []dist.BuildpackInfo, bpsToValidate []buildpack.Buildpack) error { +func (b *Builder) validateBuildpacks() error { bpLookup := map[string]interface{}{} - for _, bp := range allBuildpacks { + for _, bp := range b.Buildpacks() { bpLookup[bp.FullName()] = nil } - for _, bp := range bpsToValidate { + for _, bp := range b.AllModules(buildpack.KindBuildpack) { bpd := bp.Descriptor() - - // TODO: Warn when Buildpack API is deprecated - https://github.com/buildpacks/pack/issues/788 - compatible := false - for _, version := range append(lifecycleDescriptor.APIs.Buildpack.Supported, lifecycleDescriptor.APIs.Buildpack.Deprecated...) { - compatible = version.Compare(bpd.API) == 0 - if compatible { - break - } - } - - if !compatible { - return fmt.Errorf( - "buildpack %s (Buildpack API %s) is incompatible with lifecycle %s (Buildpack API(s) %s)", - style.Symbol(bpd.Info.FullName()), - bpd.API.String(), - style.Symbol(lifecycleDescriptor.Info.Version.String()), - strings.Join(lifecycleDescriptor.APIs.Buildpack.Supported.AsStrings(), ", "), - ) + if err := validateLifecycleCompat(bpd, b.LifecycleDescriptor()); err != nil { + return err } - if len(bpd.Stacks) >= 1 { // standard buildpack - if err := bpd.EnsureStackSupport(stackID, mixins, false); err != nil { - return err - } - } else { // order buildpack - for _, g := range bpd.Order { + if len(bpd.Order()) > 0 { // order buildpack + for _, g := range bpd.Order() { for _, r := range g.Group { if _, ok := bpLookup[r.FullName()]; !ok { return fmt.Errorf( @@ -555,12 +900,76 @@ func validateBuildpacks(stackID string, mixins []string, lifecycleDescriptor Lif } } } + } else if err := bpd.EnsureStackSupport(b.StackID, b.Mixins(), false); err != nil { + return err + } else { + buildOS, err := b.Image().OS() + if err != nil { + return err + } + buildArch, err := b.Image().Architecture() + if err != nil { + return err + } + 
buildDistroName, err := b.Image().Label(lifecycleplatform.OSDistroNameLabel) + if err != nil { + return err + } + buildDistroVersion, err := b.Image().Label(lifecycleplatform.OSDistroVersionLabel) + if err != nil { + return err + } + if err := bpd.EnsureTargetSupport(buildOS, buildArch, buildDistroName, buildDistroVersion); err != nil { + return err + } + + // TODO ensure at least one run-image + } + } + + return nil +} + +func validateExtensions(lifecycleDescriptor LifecycleDescriptor, allExtensions []dist.ModuleInfo, extsToValidate []buildpack.BuildModule) error { + extLookup := map[string]interface{}{} + + for _, ext := range allExtensions { + extLookup[ext.FullName()] = nil + } + + for _, ext := range extsToValidate { + extd := ext.Descriptor() + if err := validateLifecycleCompat(extd, lifecycleDescriptor); err != nil { + return err } } return nil } +func validateLifecycleCompat(descriptor buildpack.Descriptor, lifecycleDescriptor LifecycleDescriptor) error { + compatible := false + for _, version := range append(lifecycleDescriptor.APIs.Buildpack.Supported, lifecycleDescriptor.APIs.Buildpack.Deprecated...) 
{ + compatible = version.Compare(descriptor.API()) == 0 + if compatible { + break + } + } + + if !compatible { + return fmt.Errorf( + "%s %s (Buildpack API %s) is incompatible with lifecycle %s (Buildpack API(s) %s)", + descriptor.Kind(), + style.Symbol(descriptor.Info().FullName()), + descriptor.API().String(), + style.Symbol(lifecycleDescriptor.Info.Version.String()), + strings.Join(lifecycleDescriptor.APIs.Buildpack.Supported.AsStrings(), ", "), + ) + } + + return nil +} + func userAndGroupIDs(img imgutil.Image) (int, int, error) { sUID, err := img.Env(EnvUID) if err != nil { @@ -590,7 +999,7 @@ func userAndGroupIDs(img imgutil.Image) (int, int, error) { return uid, gid, nil } -func uniqueVersions(buildpacks []dist.BuildpackInfo) []string { +func uniqueVersions(buildpacks []dist.ModuleInfo) []string { results := []string{} set := map[string]interface{}{} for _, bpInfo := range buildpacks { @@ -622,7 +1031,7 @@ func (b *Builder) defaultDirsLayer(dest string) (string, error) { } // can't use filepath.Join(), to ensure Windows doesn't transform it to Windows join - for _, path := range []string{cnbDir, dist.BuildpacksDir, platformDir, platformDir + "/env"} { + for _, path := range []string{cnbDir, dist.BuildpacksDir, dist.ExtensionsDir, platformDir, platformDir + "/env", buildConfigDir, buildConfigDir + "/env"} { if err := lw.WriteHeader(b.rootOwnedDir(path, ts)); err != nil { return "", errors.Wrapf(err, "creating %s dir in layer", style.Symbol(path)) } @@ -716,7 +1125,7 @@ func (b *Builder) embedLifecycleTar(tw archive.TarWriter) error { return errors.Wrapf(err, "failed to write header for '%s'", header.Name) } - buf, err := ioutil.ReadAll(tr) + buf, err := io.ReadAll(tr) if err != nil { return errors.Wrapf(err, "failed to read contents of '%s'", header.Name) } @@ -731,8 +1140,8 @@ func (b *Builder) embedLifecycleTar(tw archive.TarWriter) error { return nil } -func (b *Builder) orderLayer(order dist.Order, dest string) (string, error) { - contents, err := 
orderFileContents(order) +func (b *Builder) orderLayer(order dist.Order, orderExt dist.Order, dest string) (string, error) { + contents, err := orderFileContents(order, orderExt) if err != nil { return "", err } @@ -746,19 +1155,46 @@ func (b *Builder) orderLayer(order dist.Order, dest string) (string, error) { return layerTar, nil } -func orderFileContents(order dist.Order) (string, error) { +func orderFileContents(order dist.Order, orderExt dist.Order) (string, error) { buf := &bytes.Buffer{} - - tomlData := orderTOML{Order: order} + tomlData := orderTOML{Order: order, OrderExt: orderExt} if err := toml.NewEncoder(buf).Encode(tomlData); err != nil { return "", errors.Wrapf(err, "failed to marshal order.toml") } return buf.String(), nil } +func (b *Builder) systemLayer(system dist.System, dest string) (string, error) { + contents, err := systemFileContents(system) + if err != nil { + return "", err + } + layerTar := filepath.Join(dest, "system.tar") + err = layer.CreateSingleFileTar(layerTar, systemPath, contents, b.layerWriterFactory) + if err != nil { + return "", errors.Wrapf(err, "failed to create system.toml layer tar") + } + + return layerTar, nil +} + +func systemFileContents(system dist.System) (string, error) { + buf := &bytes.Buffer{} + tomlData := systemTOML{System: system} + if err := toml.NewEncoder(buf).Encode(tomlData); err != nil { + return "", errors.Wrapf(err, "failed to marshal system.toml") + } + return buf.String(), nil +} + func (b *Builder) stackLayer(dest string) (string, error) { buf := &bytes.Buffer{} - err := toml.NewEncoder(buf).Encode(b.metadata.Stack) + var err error + if b.metadata.Stack.RunImage.Image != "" { + err = toml.NewEncoder(buf).Encode(b.metadata.Stack) + } else if len(b.metadata.RunImages) > 0 { + err = toml.NewEncoder(buf).Encode(StackMetadata{RunImage: b.metadata.RunImages[0]}) + } if err != nil { return "", errors.Wrapf(err, "failed to marshal stack.toml") } @@ -772,6 +1208,24 @@ func (b *Builder) stackLayer(dest 
string) (string, error) { return layerTar, nil } +func (b *Builder) runImageLayer(dest string) (string, error) { + buf := &bytes.Buffer{} + err := toml.NewEncoder(buf).Encode(RunImages{ + Images: b.metadata.RunImages, + }) + if err != nil { + return "", errors.Wrapf(err, "failed to marshal run.toml") + } + + layerTar := filepath.Join(dest, "run.tar") + err = layer.CreateSingleFileTar(layerTar, runPath, buf.String(), b.layerWriterFactory) + if err != nil { + return "", errors.Wrapf(err, "failed to create run.toml layer tar") + } + + return layerTar, nil +} + func (b *Builder) envLayer(dest string, env map[string]string) (string, error) { fh, err := os.Create(filepath.Join(dest, "env.tar")) if err != nil { @@ -799,7 +1253,32 @@ func (b *Builder) envLayer(dest string, env map[string]string) (string, error) { return fh.Name(), nil } -func (b *Builder) whiteoutLayer(tmpDir string, i int, bpInfo dist.BuildpackInfo) (string, error) { +func (b *Builder) buildConfigEnvLayer(dest string, env map[string]string) (string, error) { + fh, err := os.Create(filepath.Join(dest, "build-config-env.tar")) + if err != nil { + return "", err + } + defer fh.Close() + lw := b.layerWriterFactory.NewWriter(fh) + defer lw.Close() + for k, v := range env { + if err := lw.WriteHeader(&tar.Header{ + Name: path.Join(cnbBuildConfigDir(), "env", k), + Size: int64(len(v)), + Mode: 0644, + ModTime: archive.NormalizedDateTime, + }); err != nil { + return "", err + } + if _, err := lw.Write([]byte(v)); err != nil { + return "", err + } + } + + return fh.Name(), nil +} + +func (b *Builder) whiteoutLayer(tmpDir string, i int, bpInfo dist.ModuleInfo) (string, error) { bpWhiteoutsTmpDir := filepath.Join(tmpDir, strconv.Itoa(i)+"_whiteouts") if err := os.MkdirAll(bpWhiteoutsTmpDir, os.ModePerm); err != nil { return "", errors.Wrap(err, "creating buildpack whiteouts temp dir") @@ -830,3 +1309,135 @@ func (b *Builder) whiteoutLayer(tmpDir string, i int, bpInfo dist.BuildpackInfo) return fh.Name(), nil } + 
+func sortKeys(collection map[string]moduleWithDiffID) []string { + keys := make([]string, 0, len(collection)) + for k := range collection { + keys = append(keys, k) + } + sort.Strings(keys) + return keys +} + +// explodeModules takes a collection of build modules and concurrently reads their tar files. +// It assumes the modules were extracted with `buildpack.extractBuildpacks`, which when provided a flattened buildpack package containing N buildpacks, +// will return N modules: 1 module with a single tar containing ALL N buildpacks, and N-1 modules with empty tar files. +// As we iterate through the modules, in case a flattened module (tar containing all N buildpacks) is found, +// explodeModules will split the module into N modules, each with a single tar containing a single buildpack. +// In case a module with an empty tar file is found, it is ignored. +func explodeModules(kind, tmpDir string, additionalModules []buildpack.BuildModule, logger logging.Logger) ([]moduleWithDiffID, []error) { + modInfoChans := make([]chan modInfo, len(additionalModules)) + for i := range modInfoChans { + modInfoChans[i] = make(chan modInfo, 1) + } + + // Explode modules concurrently + for i, module := range additionalModules { + go func(i int, module buildpack.BuildModule) { + modTmpDir := filepath.Join(tmpDir, fmt.Sprintf("%s-%s", kind, strconv.Itoa(i))) + if err := os.MkdirAll(modTmpDir, os.ModePerm); err != nil { + modInfoChans[i] <- handleError(module, err, fmt.Sprintf("creating %s temp dir %s", kind, modTmpDir)) + } + moduleTars, err := buildpack.ToNLayerTar(modTmpDir, module) + if err != nil { + modInfoChans[i] <- handleError(module, err, fmt.Sprintf("creating %s tar file at path %s", module.Descriptor().Info().FullName(), modTmpDir)) + } + modInfoChans[i] <- modInfo{moduleTars: moduleTars} + }(i, module) + } + + // Iterate over modules sequentially, building up the result. 
+ var ( + result []moduleWithDiffID + errs []error + ) + for i, module := range additionalModules { + mi := <-modInfoChans[i] + if mi.err != nil { + errs = append(errs, mi.err) + continue + } + if len(mi.moduleTars) == 1 { + // This entry is an individual buildpack or extension, or a module with empty tar + moduleTar := mi.moduleTars[0] + diffID, err := dist.LayerDiffID(moduleTar.Path()) + if err != nil { + errs = append(errs, errors.Wrapf(err, "calculating layer diffID for path %s", moduleTar.Path())) + continue + } + if diffID.String() == emptyTarDiffID { + logger.Debugf("%s %s is a component of a flattened buildpack that will be added elsewhere, skipping...", istrings.Title(kind), style.Symbol(moduleTar.Info().FullName())) + continue // we don't need to keep modules with empty tars + } + result = append(result, moduleWithDiffID{ + tarPath: moduleTar.Path(), + diffID: diffID.String(), + module: module, + }) + } else { + // This entry is a flattened buildpack that was exploded, we need to add each exploded buildpack to the result in order + for _, moduleTar := range mi.moduleTars { + diffID, err := dist.LayerDiffID(moduleTar.Path()) + if err != nil { + errs = append(errs, errors.Wrapf(err, "calculating layer diffID for path %s", moduleTar.Path())) + continue + } + explodedMod := moduleWithDiffID{ + tarPath: moduleTar.Path(), + diffID: diffID.String(), + } + // find the module "info" for this buildpack - it could be the current module, or one of the modules with empty tars that was ignored + if namesMatch(module, moduleTar) { + explodedMod.module = module + } else { + for _, additionalModule := range additionalModules { + if namesMatch(additionalModule, moduleTar) { + explodedMod.module = additionalModule + break + } + } + } + result = append(result, explodedMod) + } + } + } + + return result, errs +} + +func handleError(module buildpack.BuildModule, err error, message string) modInfo { + moduleTar := errModuleTar{ + module: module, + } + return modInfo{moduleTars: 
[]buildpack.ModuleTar{moduleTar}, err: errors.Wrap(err, message)} +} + +func namesMatch(module buildpack.BuildModule, moduleOnDisk buildpack.ModuleTar) bool { + return moduleOnDisk.Info().FullName() == fmt.Sprintf("%s@%s", module.Descriptor().EscapedID(), module.Descriptor().Info().Version) || + moduleOnDisk.Info().FullName() == module.Descriptor().Info().FullName() +} + +type modInfo struct { + moduleTars []buildpack.ModuleTar + err error +} + +type errModuleTar struct { + module buildpack.BuildModule +} + +func (e errModuleTar) Info() dist.ModuleInfo { + return e.module.Descriptor().Info() +} + +func (e errModuleTar) Path() string { + return "" +} + +func cnbBuildConfigDir() string { + if env, ok := os.LookupEnv("CNB_BUILD_CONFIG_DIR"); ok { + return env + } + + return "/cnb/build-config" +} diff --git a/internal/builder/builder_test.go b/internal/builder/builder_test.go index 99562ec10c..bf9cde660e 100644 --- a/internal/builder/builder_test.go +++ b/internal/builder/builder_test.go @@ -6,20 +6,20 @@ import ( "encoding/json" "fmt" "io" - "io/ioutil" "os" "path" "path/filepath" "runtime" + "slices" + "strings" "testing" - "github.com/pkg/errors" - "github.com/buildpacks/imgutil" "github.com/buildpacks/imgutil/fakes" "github.com/buildpacks/lifecycle/api" "github.com/golang/mock/gomock" "github.com/heroku/color" + "github.com/pkg/errors" "github.com/sclevine/spec" "github.com/sclevine/spec/report" @@ -46,10 +46,14 @@ func testBuilder(t *testing.T, when spec.G, it spec.S) { subject *builder.Builder mockController *gomock.Controller mockLifecycle *testmocks.MockLifecycle - bp1v1 buildpack.Buildpack - bp1v2 buildpack.Buildpack - bp2v1 buildpack.Buildpack - bpOrder buildpack.Buildpack + bp1v1 buildpack.BuildModule + bp1v2 buildpack.BuildModule + bp2v1 buildpack.BuildModule + bp2v2 buildpack.BuildModule + ext1v1 buildpack.BuildModule + ext1v2 buildpack.BuildModule + ext2v1 buildpack.BuildModule + bpOrder buildpack.BuildModule outBuf bytes.Buffer logger logging.Logger ) 
@@ -64,7 +68,7 @@ func testBuilder(t *testing.T, when spec.G, it spec.S) { ".", 0, 0, 0755, true, false, nil, ) - descriptorContents, err := ioutil.ReadFile(filepath.Join("testdata", "lifecycle", "platform-0.4", "lifecycle.toml")) + descriptorContents, err := os.ReadFile(filepath.Join("testdata", "lifecycle", "platform-0.4", "lifecycle.toml")) h.AssertNil(t, err) lifecycleDescriptor, err := builder.ParseDescriptor(string(descriptorContents)) @@ -75,12 +79,12 @@ func testBuilder(t *testing.T, when spec.G, it spec.S) { mockLifecycle.EXPECT().Descriptor().Return(builder.CompatDescriptor(lifecycleDescriptor)).AnyTimes() bp1v1, err = ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: dist.BuildpackInfo{ + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ ID: "buildpack-1-id", Version: "buildpack-1-version-1", }, - Stacks: []dist.Stack{{ + WithStacks: []dist.Stack{{ ID: "some.stack.id", Mixins: []string{"mixinX", "mixinY"}, }}, @@ -88,12 +92,12 @@ func testBuilder(t *testing.T, when spec.G, it spec.S) { h.AssertNil(t, err) bp1v2, err = ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: dist.BuildpackInfo{ + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ ID: "buildpack-1-id", Version: "buildpack-1-version-2", }, - Stacks: []dist.Stack{{ + WithStacks: []dist.Stack{{ ID: "some.stack.id", Mixins: []string{"mixinX", "mixinY"}, }}, @@ -101,33 +105,60 @@ func testBuilder(t *testing.T, when spec.G, it spec.S) { h.AssertNil(t, err) bp2v1, err = ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: dist.BuildpackInfo{ + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ ID: "buildpack-2-id", Version: "buildpack-2-version-1", }, - Stacks: []dist.Stack{{ + WithStacks: []dist.Stack{{ ID: "some.stack.id", Mixins: []string{"build:mixinA", "run:mixinB"}, }}, }, 0644) h.AssertNil(t, err) + ext1v1, err = ifakes.NewFakeExtension(dist.ExtensionDescriptor{ 
+ WithAPI: api.MustParse("0.9"), + WithInfo: dist.ModuleInfo{ + ID: "extension-1-id", + Version: "extension-1-version-1", + }, + }, 0644) + h.AssertNil(t, err) + + ext1v2, err = ifakes.NewFakeExtension(dist.ExtensionDescriptor{ + WithAPI: api.MustParse("0.9"), + WithInfo: dist.ModuleInfo{ + ID: "extension-1-id", + Version: "extension-1-version-2", + }, + }, 0644) + h.AssertNil(t, err) + + ext2v1, err = ifakes.NewFakeExtension(dist.ExtensionDescriptor{ + WithAPI: api.MustParse("0.9"), + WithInfo: dist.ModuleInfo{ + ID: "extension-2-id", + Version: "extension-2-version-1", + }, + }, 0644) + h.AssertNil(t, err) + bpOrder, err = ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: dist.BuildpackInfo{ + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ ID: "order-buildpack-id", Version: "order-buildpack-version", }, - Order: []dist.OrderEntry{{ - Group: []dist.BuildpackRef{ + WithOrder: []dist.OrderEntry{{ + Group: []dist.ModuleRef{ { - BuildpackInfo: bp1v1.Descriptor().Info, - Optional: true, + ModuleInfo: bp1v1.Descriptor().Info(), + Optional: true, }, { - BuildpackInfo: bp2v1.Descriptor().Info, - Optional: false, + ModuleInfo: bp2v1.Descriptor().Info(), + Optional: false, }, }, }}, @@ -210,15 +241,15 @@ func testBuilder(t *testing.T, when spec.G, it spec.S) { }) }) - when("missing stack id label", func() { + when("missing stack id label and run image", func() { it.Before(func() { h.AssertNil(t, baseImage.SetEnv("CNB_USER_ID", "1234")) h.AssertNil(t, baseImage.SetEnv("CNB_GROUP_ID", "4321")) }) - it("returns an error", func() { + it("does not return an error", func() { _, err := builder.New(baseImage, "some/builder") - h.AssertError(t, err, "image 'base/image' missing label 'io.buildpacks.stack.id'") + h.AssertNilE(t, err) }) }) @@ -354,6 +385,19 @@ func testBuilder(t *testing.T, when spec.G, it spec.S) { ) }) + it("creates the build-config dir", func() { + h.AssertNil(t, subject.Save(logger, builder.CreatorMetadata{})) + 
h.AssertEq(t, baseImage.IsSaved(), true) + + layerTar, err := baseImage.FindLayerWithPath("/cnb/build-config") + h.AssertNil(t, err) + h.AssertOnTarEntry(t, layerTar, "/cnb/build-config", + h.IsDirectory(), + h.HasOwnerAndGroup(0, 0), + h.HasFileMode(0755), + h.HasModTime(archive.NormalizedDateTime), + ) + }) it("creates the buildpacks dir", func() { h.AssertNil(t, subject.Save(logger, builder.CreatorMetadata{})) h.AssertEq(t, baseImage.IsSaved(), true) @@ -392,11 +436,13 @@ func testBuilder(t *testing.T, when spec.G, it spec.S) { h.AssertNil(t, subject.Save(logger, builder.CreatorMetadata{})) h.AssertEq(t, baseImage.IsSaved(), true) - h.AssertEq(t, baseImage.WorkingDir(), "/layers") + workingDir, err := baseImage.WorkingDir() + h.AssertNil(t, err) + h.AssertEq(t, workingDir, "/layers") }) it("does not overwrite the order layer when SetOrder has not been called", func() { - tmpDir, err := ioutil.TempDir("", "") + tmpDir, err := os.MkdirTemp("", "") h.AssertNil(t, err) defer os.RemoveAll(tmpDir) @@ -415,6 +461,18 @@ func testBuilder(t *testing.T, when spec.G, it spec.S) { h.AssertOnTarEntry(t, layerTar, "/cnb/order.toml", h.ContentEquals("some content")) }) + it("adds additional tags as requested", func() { + h.AssertNil(t, subject.Save(logger, builder.CreatorMetadata{}, "additional-tag-one", "additional-tag-two")) + h.AssertEq(t, baseImage.IsSaved(), true) + h.AssertEq(t, baseImage.Name(), "some/builder") + savedNames := baseImage.SavedNames() + slices.Sort(savedNames) + h.AssertEq(t, 3, len(savedNames)) + h.AssertEq(t, "additional-tag-one", savedNames[0]) + h.AssertEq(t, "additional-tag-two", savedNames[1]) + h.AssertEq(t, "some/builder", savedNames[2]) + }) + when("validating order", func() { it.Before(func() { subject.SetLifecycle(mockLifecycle) @@ -427,8 +485,8 @@ func testBuilder(t *testing.T, when spec.G, it spec.S) { it("should resolve unset version (to legacy label and order.toml)", func() { subject.SetOrder(dist.Order{{ - Group: []dist.BuildpackRef{ - 
{BuildpackInfo: dist.BuildpackInfo{ID: bp1v1.Descriptor().Info.ID}}}, + Group: []dist.ModuleRef{ + {ModuleInfo: dist.ModuleInfo{ID: bp1v1.Descriptor().Info().ID}}}, }}) err := subject.Save(logger, builder.CreatorMetadata{}) @@ -447,8 +505,8 @@ func testBuilder(t *testing.T, when spec.G, it spec.S) { when("order points to missing buildpack id", func() { it("should error", func() { subject.SetOrder(dist.Order{{ - Group: []dist.BuildpackRef{ - {BuildpackInfo: dist.BuildpackInfo{ID: "missing-buildpack-id"}}}, + Group: []dist.ModuleRef{ + {ModuleInfo: dist.ModuleInfo{ID: "missing-buildpack-id"}}}, }}) err := subject.Save(logger, builder.CreatorMetadata{}) @@ -460,8 +518,8 @@ func testBuilder(t *testing.T, when spec.G, it spec.S) { when("order points to missing buildpack version", func() { it("should error", func() { subject.SetOrder(dist.Order{{ - Group: []dist.BuildpackRef{ - {BuildpackInfo: dist.BuildpackInfo{ID: "buildpack-1-id", Version: "missing-buildpack-version"}}}, + Group: []dist.ModuleRef{ + {ModuleInfo: dist.ModuleInfo{ID: "buildpack-1-id", Version: "missing-buildpack-version"}}}, }}) err := subject.Save(logger, builder.CreatorMetadata{}) @@ -480,16 +538,16 @@ func testBuilder(t *testing.T, when spec.G, it spec.S) { when("order omits version", func() { it("should de-duplicate identical buildpacks", func() { subject.SetOrder(dist.Order{ - {Group: []dist.BuildpackRef{{ - BuildpackInfo: dist.BuildpackInfo{ - ID: bp1v1.Descriptor().Info.ID, - Homepage: bp1v1.Descriptor().Info.Homepage, + {Group: []dist.ModuleRef{{ + ModuleInfo: dist.ModuleInfo{ + ID: bp1v1.Descriptor().Info().ID, + Homepage: bp1v1.Descriptor().Info().Homepage, }}}, }, - {Group: []dist.BuildpackRef{{ - BuildpackInfo: dist.BuildpackInfo{ - ID: bp1v1.Descriptor().Info.ID, - Homepage: bp1v1.Descriptor().Info.Homepage, + {Group: []dist.ModuleRef{{ + ModuleInfo: dist.ModuleInfo{ + ID: bp1v1.Descriptor().Info().ID, + Homepage: bp1v1.Descriptor().Info().Homepage, }}}, }, }) @@ -509,8 +567,8 @@ func 
testBuilder(t *testing.T, when spec.G, it spec.S) { when("order explicitly sets version", func() { it("should keep order version", func() { subject.SetOrder(dist.Order{{ - Group: []dist.BuildpackRef{ - {BuildpackInfo: bp1v1.Descriptor().Info}}, + Group: []dist.ModuleRef{ + {ModuleInfo: bp1v1.Descriptor().Info()}}, }}) err := subject.Save(logger, builder.CreatorMetadata{}) @@ -530,8 +588,8 @@ func testBuilder(t *testing.T, when spec.G, it spec.S) { when("order version is empty", func() { it("return error", func() { subject.SetOrder(dist.Order{{ - Group: []dist.BuildpackRef{ - {BuildpackInfo: dist.BuildpackInfo{ID: "buildpack-1-id"}}}, + Group: []dist.ModuleRef{ + {ModuleInfo: dist.ModuleInfo{ID: "buildpack-1-id"}}}, }}) err := subject.Save(logger, builder.CreatorMetadata{}) @@ -573,9 +631,9 @@ func testBuilder(t *testing.T, when spec.G, it spec.S) { when("buildpack stack id does not match", func() { it("returns an error", func() { bp, err := ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: bp1v1.Descriptor().Info, - Stacks: []dist.Stack{{ID: "other.stack.id"}}, + WithAPI: api.MustParse("0.2"), + WithInfo: bp1v1.Descriptor().Info(), + WithStacks: []dist.Stack{{ID: "other.stack.id"}}, }, 0644) h.AssertNil(t, err) @@ -589,9 +647,9 @@ func testBuilder(t *testing.T, when spec.G, it spec.S) { when("buildpack is not compatible with lifecycle", func() { it("returns an error", func() { bp, err := ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ - API: api.MustParse("0.1"), - Info: bp1v1.Descriptor().Info, - Stacks: []dist.Stack{{ID: "some.stack.id"}}, + WithAPI: api.MustParse("0.1"), + WithInfo: bp1v1.Descriptor().Info(), + WithStacks: []dist.Stack{{ID: "some.stack.id"}}, }, 0644) h.AssertNil(t, err) @@ -600,16 +658,16 @@ func testBuilder(t *testing.T, when spec.G, it spec.S) { h.AssertError(t, err, - "buildpack 'buildpack-1-id@buildpack-1-version-1' (Buildpack API 0.1) is incompatible with lifecycle '0.0.0' (Buildpack API(s) 0.2, 0.3, 
0.4)") + "buildpack 'buildpack-1-id@buildpack-1-version-1' (Buildpack API 0.1) is incompatible with lifecycle '0.0.0' (Buildpack API(s) 0.2, 0.3, 0.4, 0.9)") }) }) when("buildpack mixins are not satisfied", func() { it("returns an error", func() { bp, err := ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: bp1v1.Descriptor().Info, - Stacks: []dist.Stack{{ + WithAPI: api.MustParse("0.2"), + WithInfo: bp1v1.Descriptor().Info(), + WithStacks: []dist.Stack{{ ID: "some.stack.id", Mixins: []string{"missing"}, }}, @@ -653,21 +711,22 @@ func testBuilder(t *testing.T, when spec.G, it spec.S) { // - 2 from buildpacks // - 1 from orderLayer // - 1 from stackLayer - h.AssertEq(t, baseImage.NumberOfAddedLayers(), 6) + // - 1 from runImageLayer + h.AssertEq(t, baseImage.NumberOfAddedLayers(), 7) }) when("duplicated buildpack, has different contents", func() { - var bp1v1Alt buildpack.Buildpack - var bp1v1AltWithNewContent buildpack.Buildpack + var bp1v1Alt buildpack.BuildModule + var bp1v1AltWithNewContent buildpack.BuildModule it.Before(func() { var err error bp1v1Alt, err = ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: dist.BuildpackInfo{ + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ ID: "buildpack-1-id", Version: "buildpack-1-version-1", }, - Stacks: []dist.Stack{{ + WithStacks: []dist.Stack{{ ID: "some.stack.id", Mixins: []string{"mixinX", "mixinY"}, }}, @@ -676,12 +735,12 @@ func testBuilder(t *testing.T, when spec.G, it spec.S) { h.AssertNil(t, err) bp1v1AltWithNewContent, err = ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: dist.BuildpackInfo{ + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ ID: "buildpack-1-id", Version: "buildpack-1-version-1", }, - Stacks: []dist.Stack{{ + WithStacks: []dist.Stack{{ ID: "some.stack.id", Mixins: []string{"mixinX", "mixinY"}, }}, @@ -731,11 +790,12 @@ func testBuilder(t *testing.T, when 
spec.G, it spec.S) { // - 1 from buildpacks // - 1 from orderLayer // - 1 from stackLayer - h.AssertEq(t, baseImage.NumberOfAddedLayers(), 5) - oldSha256 := "4dc0072c61fc2bd7118bbc93a432eae0012082de094455cf0a9fed20e3c44789" - newSha256 := "29cb2bce4c2350f0e86f3dd30fa3810beb409b910126a18651de750f457fedfb" + // - 1 from runImageLayer + h.AssertEq(t, baseImage.NumberOfAddedLayers(), 6) + oldSha256 := "2ba2e8563f7f43533ba26047a44f3e8bb7dd009043bd73a0e6aadb02c084955c" + newSha256 := "719faea06424d01bb5788ce63c1167e8d382b2d9df8fcf3a0a54ea9b2e3b4045" if runtime.GOOS == "windows" { - newSha256 = "eaed4a1617bba5738ae5672f6aefda8add7abb2f8630c75dc97a6232879d4ae4" + newSha256 = "d99d31efba72ebf98e8101ada9e89464566e943c05367c561b116c2cb86837c9" } h.AssertContains(t, outBuf.String(), fmt.Sprintf(`buildpack 'buildpack-1-id@buildpack-1-version-1' was previously defined with different contents and will be overwritten @@ -760,9 +820,9 @@ func testBuilder(t *testing.T, when spec.G, it spec.S) { when("adding buildpack that already exists on the image", func() { it("skips adding buildpack that already exists", func() { logger := logging.NewLogWithWriters(&outBuf, &outBuf, logging.WithVerbose()) - diffID := "4dc0072c61fc2bd7118bbc93a432eae0012082de094455cf0a9fed20e3c44789" - bpLayer := dist.BuildpackLayers{ - "buildpack-1-id": map[string]dist.BuildpackLayerInfo{ + diffID := "2ba2e8563f7f43533ba26047a44f3e8bb7dd009043bd73a0e6aadb02c084955c" + bpLayer := dist.ModuleLayers{ + "buildpack-1-id": map[string]dist.ModuleLayerInfo{ "buildpack-1-version-1": { API: api.MustParse("0.2"), Stacks: nil, @@ -775,7 +835,7 @@ func testBuilder(t *testing.T, when spec.G, it spec.S) { bpLayerString, err := json.Marshal(bpLayer) h.AssertNil(t, err) - h.AssertNil(t, baseImage.SetLabel( + h.AssertNil(t, baseImage.SetLabel( // label builder as already having a buildpack with diffID `diffID` dist.BuildpackLayersLabel, string(bpLayerString), )) @@ -793,20 +853,20 @@ func testBuilder(t *testing.T, when spec.G, it 
spec.S) { when("error adding buildpacks to builder", func() { when("unable to convert buildpack to layer tar", func() { - var bp1v1Err buildpack.Buildpack + var bp1v1Err buildpack.BuildModule it.Before(func() { var err error bp1v1Err, err = ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: dist.BuildpackInfo{ + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ ID: "buildpack-1-id", Version: "buildpack-1-version-1", }, - Stacks: []dist.Stack{{ + WithStacks: []dist.Stack{{ ID: "some.stack.id", Mixins: []string{"mixinX", "mixinY"}, }}, - }, 0644, ifakes.WithOpenError(errors.New("unable to open buildpack"))) + }, 0644, ifakes.WithBpOpenError(errors.New("unable to open buildpack"))) h.AssertNil(t, err) }) it("errors", func() { @@ -818,8 +878,90 @@ func testBuilder(t *testing.T, when spec.G, it spec.S) { }) }) }) - }) + when("modules are added in random order", func() { + var fakeLayerImage *h.FakeAddedLayerImage + + it.Before(func() { + var err error + fakeLayerImage = &h.FakeAddedLayerImage{Image: baseImage} + subject, err = builder.New(fakeLayerImage, "some/builder") + h.AssertNil(t, err) + subject.SetLifecycle(mockLifecycle) + + bp2v2, err = ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ + ID: "buildpack-2-id", + Version: "buildpack-2-version-2", + }, + WithStacks: []dist.Stack{{ + ID: "some.stack.id", + Mixins: []string{"build:mixinA", "run:mixinB"}, + }}, + }, 0644) + h.AssertNil(t, err) + }) + + it("layers are written ordered by buildpacks ID & Version", func() { + // add buildpacks in a random order + subject.AddBuildpack(bp2v2) + subject.AddBuildpack(bp1v2) + subject.AddBuildpack(bp1v1) + subject.AddBuildpack(bp2v1) + h.AssertNil(t, subject.Save(logger, builder.CreatorMetadata{})) + + layers := fakeLayerImage.AddedLayersOrder() + h.AssertEq(t, len(layers), 4) + h.AssertTrue(t, strings.Contains(layers[0], h.LayerFileName(bp1v1))) + h.AssertTrue(t, 
strings.Contains(layers[1], h.LayerFileName(bp1v2))) + h.AssertTrue(t, strings.Contains(layers[2], h.LayerFileName(bp2v1))) + h.AssertTrue(t, strings.Contains(layers[3], h.LayerFileName(bp2v2))) + }) + + it("extensions are written ordered by buildpacks ID & Version", func() { + // add buildpacks in a random order + subject.AddBuildpack(ext2v1) + subject.AddBuildpack(ext1v2) + subject.AddBuildpack(ext1v1) + h.AssertNil(t, subject.Save(logger, builder.CreatorMetadata{})) + + layers := fakeLayerImage.AddedLayersOrder() + h.AssertEq(t, len(layers), 3) + h.AssertTrue(t, strings.Contains(layers[0], h.LayerFileName(ext1v1))) + h.AssertTrue(t, strings.Contains(layers[1], h.LayerFileName(ext1v2))) + h.AssertTrue(t, strings.Contains(layers[2], h.LayerFileName(ext2v1))) + }) + }) + + when("system buildpacks", func() { + it.Before(func() { + subject.SetLifecycle(mockLifecycle) + subject.AddBuildpack(bp1v1) + subject.SetSystem(dist.System{ + Pre: dist.SystemBuildpacks{ + Buildpacks: []dist.ModuleRef{ + {ModuleInfo: dist.ModuleInfo{ID: bp1v1.Descriptor().Info().ID}}}, + }, + }) + }) + + it("should write system buildpacks to system.toml)", func() { + err := subject.Save(logger, builder.CreatorMetadata{}) + h.AssertNil(t, err) + + layerTar, err := baseImage.FindLayerWithPath("/cnb/system.toml") + h.AssertNil(t, err) + h.AssertOnTarEntry(t, layerTar, "/cnb/system.toml", h.ContentEquals(`[system] + [system.pre] + + [[system.pre.buildpacks]] + id = "buildpack-1-id" + version = "buildpack-1-version-1" +`)) + }) + }) + }) when("#SetLifecycle", func() { it.Before(func() { h.AssertNil(t, subject.Save(logger, builder.CreatorMetadata{})) @@ -895,7 +1037,7 @@ func testBuilder(t *testing.T, when spec.G, it spec.S) { h.AssertEq(t, metadata.Lifecycle.API.PlatformVersion.String(), "0.2") h.AssertNotNil(t, metadata.Lifecycle.APIs) h.AssertEq(t, metadata.Lifecycle.APIs.Buildpack.Deprecated.AsStrings(), []string{}) - h.AssertEq(t, metadata.Lifecycle.APIs.Buildpack.Supported.AsStrings(), 
[]string{"0.2", "0.3", "0.4"}) + h.AssertEq(t, metadata.Lifecycle.APIs.Buildpack.Supported.AsStrings(), []string{"0.2", "0.3", "0.4", "0.9"}) h.AssertEq(t, metadata.Lifecycle.APIs.Platform.Deprecated.AsStrings(), []string{"0.2"}) h.AssertEq(t, metadata.Lifecycle.APIs.Platform.Supported.AsStrings(), []string{"0.3", "0.4"}) }) @@ -949,7 +1091,7 @@ func testBuilder(t *testing.T, when spec.G, it spec.S) { label, err := baseImage.Label("io.buildpacks.buildpack.layers") h.AssertNil(t, err) - var layers dist.BuildpackLayers + var layers dist.ModuleLayers h.AssertNil(t, json.Unmarshal([]byte(label), &layers)) h.AssertEq(t, len(layers), 3) h.AssertEq(t, len(layers["buildpack-1-id"]), 2) @@ -1019,7 +1161,7 @@ func testBuilder(t *testing.T, when spec.G, it spec.S) { label, err := baseImage.Label("io.buildpacks.buildpack.layers") h.AssertNil(t, err) - var layers dist.BuildpackLayers + var layers dist.ModuleLayers h.AssertNil(t, json.Unmarshal([]byte(label), &layers)) h.AssertEq(t, len(layers), 2) h.AssertEq(t, len(layers["buildpack-1-id"]), 2) @@ -1047,7 +1189,7 @@ func testBuilder(t *testing.T, when spec.G, it spec.S) { label, err := baseImage.Label("io.buildpacks.buildpack.layers") h.AssertNil(t, err) - var layers dist.BuildpackLayers + var layers dist.ModuleLayers h.AssertNil(t, json.Unmarshal([]byte(label), &layers)) h.AssertContains(t, @@ -1064,7 +1206,7 @@ func testBuilder(t *testing.T, when spec.G, it spec.S) { label, err := baseImage.Label("io.buildpacks.buildpack.layers") h.AssertNil(t, err) - var layers dist.BuildpackLayers + var layers dist.ModuleLayers h.AssertNil(t, json.Unmarshal([]byte(label), &layers)) h.AssertNotContains(t, @@ -1117,22 +1259,205 @@ func testBuilder(t *testing.T, when spec.G, it spec.S) { }) }) + when("#AddExtension", func() { + it.Before(func() { + subject.AddExtension(ext1v1) + subject.AddExtension(ext1v2) + subject.AddExtension(ext2v1) + }) + + it("adds the extension as an image layer", func() { + h.AssertNil(t, subject.Save(logger, 
builder.CreatorMetadata{})) + h.AssertEq(t, baseImage.IsSaved(), true) + assertImageHasExtLayer(t, baseImage, ext1v1) + assertImageHasExtLayer(t, baseImage, ext1v2) + assertImageHasExtLayer(t, baseImage, ext2v1) + }) + + it("adds the extension metadata", func() { + h.AssertNil(t, subject.Save(logger, builder.CreatorMetadata{})) + h.AssertEq(t, baseImage.IsSaved(), true) + + label, err := baseImage.Label("io.buildpacks.builder.metadata") + h.AssertNil(t, err) + + var metadata builder.Metadata + h.AssertNil(t, json.Unmarshal([]byte(label), &metadata)) + h.AssertEq(t, len(metadata.Extensions), 3) + + h.AssertEq(t, metadata.Extensions[0].ID, "extension-1-id") + h.AssertEq(t, metadata.Extensions[0].Version, "extension-1-version-1") + + h.AssertEq(t, metadata.Extensions[1].ID, "extension-1-id") + h.AssertEq(t, metadata.Extensions[1].Version, "extension-1-version-2") + + h.AssertEq(t, metadata.Extensions[2].ID, "extension-2-id") + h.AssertEq(t, metadata.Extensions[2].Version, "extension-2-version-1") + }) + + it("adds the extension layers label", func() { + h.AssertNil(t, subject.Save(logger, builder.CreatorMetadata{})) + h.AssertEq(t, baseImage.IsSaved(), true) + + label, err := baseImage.Label("io.buildpacks.extension.layers") + h.AssertNil(t, err) + + var layers dist.ModuleLayers + h.AssertNil(t, json.Unmarshal([]byte(label), &layers)) + h.AssertEq(t, len(layers), 2) + h.AssertEq(t, len(layers["extension-1-id"]), 2) + h.AssertEq(t, len(layers["extension-2-id"]), 1) + + h.AssertEq(t, layers["extension-1-id"]["extension-1-version-1"].API.String(), "0.9") + h.AssertEq(t, layers["extension-1-id"]["extension-1-version-2"].API.String(), "0.9") + h.AssertEq(t, layers["extension-2-id"]["extension-2-version-1"].API.String(), "0.9") + }) + + when("base image already has extension layers label", func() { + it.Before(func() { + var mdJSON bytes.Buffer + h.AssertNil(t, json.Compact( + &mdJSON, + []byte(`{ + "extension-1-id": { + "extension-1-version-1": { + "layerDiffID": 
"sha256:extension-1-version-1-diff-id" + }, + "extension-1-version-2": { + "layerDiffID": "sha256:extension-1-version-2-diff-id" + } + } + } + `))) + + h.AssertNil(t, baseImage.SetLabel( + "io.buildpacks.extension.layers", + mdJSON.String(), + )) + + var err error + subject, err = builder.New(baseImage, "some/builder") + h.AssertNil(t, err) + + subject.AddExtension(ext1v2) + subject.AddExtension(ext2v1) + + subject.SetLifecycle(mockLifecycle) + }) + + it("appends extension layer info", func() { + h.AssertNil(t, subject.Save(logger, builder.CreatorMetadata{})) + h.AssertEq(t, baseImage.IsSaved(), true) + + label, err := baseImage.Label("io.buildpacks.extension.layers") + h.AssertNil(t, err) + + var layers dist.ModuleLayers + h.AssertNil(t, json.Unmarshal([]byte(label), &layers)) + h.AssertEq(t, len(layers), 2) + h.AssertEq(t, len(layers["extension-1-id"]), 2) + h.AssertEq(t, len(layers["extension-2-id"]), 1) + + h.AssertEq(t, layers["extension-1-id"]["extension-1-version-1"].LayerDiffID, "sha256:extension-1-version-1-diff-id") + + h.AssertUnique(t, + layers["extension-1-id"]["extension-1-version-1"].LayerDiffID, + layers["extension-1-id"]["extension-1-version-2"].LayerDiffID, + layers["extension-2-id"]["extension-2-version-1"].LayerDiffID, + ) + }) + + it("informs when overriding existing extension, and log level is DEBUG", func() { + logger := logging.NewLogWithWriters(&outBuf, &outBuf, logging.WithVerbose()) + + h.AssertNil(t, subject.Save(logger, builder.CreatorMetadata{})) + h.AssertEq(t, baseImage.IsSaved(), true) + + label, err := baseImage.Label("io.buildpacks.extension.layers") + h.AssertNil(t, err) + + var layers dist.ModuleLayers + h.AssertNil(t, json.Unmarshal([]byte(label), &layers)) + + h.AssertContains(t, + outBuf.String(), + "extension 'extension-1-id@extension-1-version-2' already exists on builder and will be overwritten", + ) + h.AssertNotContains(t, layers["extension-1-id"]["extension-1-version-2"].LayerDiffID, "extension-1-version-2-diff-id") + 
}) + + it("doesn't message when overriding existing extension when log level is INFO", func() { + h.AssertNil(t, subject.Save(logger, builder.CreatorMetadata{})) + h.AssertEq(t, baseImage.IsSaved(), true) + + label, err := baseImage.Label("io.buildpacks.extension.layers") + h.AssertNil(t, err) + + var layers dist.ModuleLayers + h.AssertNil(t, json.Unmarshal([]byte(label), &layers)) + + h.AssertNotContains(t, + outBuf.String(), + "extension 'extension-1-id@extension-1-version-2' already exists on builder and will be overwritten", + ) + h.AssertNotContains(t, layers["extension-1-id"]["extension-1-version-2"].LayerDiffID, "extension-1-version-2-diff-id") + }) + }) + + when("base image already has metadata", func() { + it.Before(func() { + h.AssertNil(t, baseImage.SetLabel( + "io.buildpacks.builder.metadata", + `{ + "extensions":[{"id":"prev.id"}], + "lifecycle":{"version":"6.6.6","apis":{"buildpack":{"deprecated":["0.1"],"supported":["0.2","0.3","0.9"]},"platform":{"deprecated":[],"supported":["2.3","2.4"]}}} + }`, + )) + + var err error + subject, err = builder.New(baseImage, "some/builder") + h.AssertNil(t, err) + + subject.AddExtension(ext1v1) + h.AssertNil(t, subject.Save(logger, builder.CreatorMetadata{})) + h.AssertEq(t, baseImage.IsSaved(), true) + }) + + it("appends the extensions to the metadata", func() { + label, err := baseImage.Label("io.buildpacks.builder.metadata") + h.AssertNil(t, err) + + var metadata builder.Metadata + h.AssertNil(t, json.Unmarshal([]byte(label), &metadata)) + h.AssertEq(t, len(metadata.Extensions), 2) + + // keeps original metadata + h.AssertEq(t, metadata.Extensions[0].ID, "prev.id") + h.AssertEq(t, subject.LifecycleDescriptor().Info.Version.String(), "6.6.6") + + // adds new extension + h.AssertEq(t, metadata.Extensions[1].ID, "extension-1-id") + h.AssertEq(t, metadata.Extensions[1].Version, "extension-1-version-1") + }) + }) + }) + when("#SetOrder", func() { when("the buildpacks exist in the image", func() { it.Before(func() { 
subject.AddBuildpack(bp1v1) subject.AddBuildpack(bp2v1) subject.SetOrder(dist.Order{ - {Group: []dist.BuildpackRef{ + {Group: []dist.ModuleRef{ { - BuildpackInfo: dist.BuildpackInfo{ - ID: bp1v1.Descriptor().Info.ID, + ModuleInfo: dist.ModuleInfo{ + ID: bp1v1.Descriptor().Info().ID, // Version excluded intentionally }, }, { - BuildpackInfo: bp2v1.Descriptor().Info, - Optional: true, + ModuleInfo: bp2v1.Descriptor().Info(), + Optional: true, }, }}, }) @@ -1178,6 +1503,66 @@ func testBuilder(t *testing.T, when spec.G, it spec.S) { }) }) + when("#SetOrderExtensions", func() { + when("the extensions exist in the image", func() { + it.Before(func() { + subject.AddExtension(ext1v1) + subject.AddExtension(ext2v1) + subject.SetOrderExtensions(dist.Order{ + {Group: []dist.ModuleRef{ + { + ModuleInfo: dist.ModuleInfo{ + ID: ext1v1.Descriptor().Info().ID, + // Version excluded intentionally + }, + }, + { + ModuleInfo: ext2v1.Descriptor().Info(), + Optional: true, // extensions are always optional; this shouldn't be redundantly printed + }, + }}, + }) + + h.AssertNil(t, subject.Save(logger, builder.CreatorMetadata{})) + h.AssertEq(t, baseImage.IsSaved(), true) + }) + + it("adds the order.toml to the image", func() { + layerTar, err := baseImage.FindLayerWithPath("/cnb/order.toml") + h.AssertNil(t, err) + h.AssertOnTarEntry(t, layerTar, "/cnb/order.toml", + h.ContentEquals(`[[order-extensions]] + + [[order-extensions.group]] + id = "extension-1-id" + version = "extension-1-version-1" + + [[order-extensions.group]] + id = "extension-2-id" + version = "extension-2-version-1" +`), + h.HasModTime(archive.NormalizedDateTime), + ) + }) + + it("adds the order for extensions to the order-extensions label", func() { + label, err := baseImage.Label("io.buildpacks.buildpack.order-extensions") + h.AssertNil(t, err) + + var orderExt dist.Order + h.AssertNil(t, json.Unmarshal([]byte(label), &orderExt)) + h.AssertEq(t, len(orderExt), 1) + h.AssertEq(t, len(orderExt[0].Group), 2) + 
h.AssertEq(t, orderExt[0].Group[0].ID, "extension-1-id") + h.AssertEq(t, orderExt[0].Group[0].Version, "") + h.AssertEq(t, orderExt[0].Group[0].Optional, false) + h.AssertEq(t, orderExt[0].Group[1].ID, "extension-2-id") + h.AssertEq(t, orderExt[0].Group[1].Version, "extension-2-version-1") + h.AssertEq(t, orderExt[0].Group[1].Optional, false) + }) + }) + }) + when("#SetDescription", func() { it.Before(func() { subject.SetDescription("Some description") @@ -1229,6 +1614,117 @@ func testBuilder(t *testing.T, when spec.G, it spec.S) { }) }) + when("#SetRunImage", func() { + it.Before(func() { + subject.SetRunImage(pubbldr.RunConfig{Images: []pubbldr.RunImageConfig{{ + Image: "some/run", + Mirrors: []string{"some/mirror", "other/mirror"}, + }}}) + h.AssertNil(t, subject.Save(logger, builder.CreatorMetadata{})) + h.AssertEq(t, baseImage.IsSaved(), true) + }) + + it("adds the run.toml to the image", func() { + layerTar, err := baseImage.FindLayerWithPath("/cnb/run.toml") + h.AssertNil(t, err) + h.AssertOnTarEntry(t, layerTar, "/cnb/run.toml", + h.ContentEquals(`[[images]] + image = "some/run" + mirrors = ["some/mirror", "other/mirror"] +`), + h.HasModTime(archive.NormalizedDateTime), + ) + }) + + it("adds the stack.toml to the image", func() { + layerTar, err := baseImage.FindLayerWithPath("/cnb/stack.toml") + h.AssertNil(t, err) + h.AssertOnTarEntry(t, layerTar, "/cnb/stack.toml", + h.ContentEquals(`[run-image] + image = "some/run" + mirrors = ["some/mirror", "other/mirror"] +`), + h.HasModTime(archive.NormalizedDateTime), + ) + }) + + it("adds the run image to the metadata", func() { + label, err := baseImage.Label("io.buildpacks.builder.metadata") + h.AssertNil(t, err) + + var metadata builder.Metadata + h.AssertNil(t, json.Unmarshal([]byte(label), &metadata)) + h.AssertEq(t, metadata.RunImages[0].Image, "some/run") + h.AssertEq(t, metadata.RunImages[0].Mirrors[0], "some/mirror") + h.AssertEq(t, metadata.RunImages[0].Mirrors[1], "other/mirror") + }) + }) + + 
when("when CNB_BUILD_CONFIG_DIR is defined", func() { + var buildConfigEnvName = "CNB_BUILD_CONFIG_DIR" + var buildConfigEnvValue = "/cnb/dup-build-config-dir" + it.Before(func() { + os.Setenv(buildConfigEnvName, buildConfigEnvValue) + subject.SetBuildConfigEnv(map[string]string{ + "SOME_KEY": "some-val", + "OTHER_KEY.append": "other-val", + "OTHER_KEY.delim": ":", + }) + h.AssertNil(t, subject.Save(logger, builder.CreatorMetadata{})) + h.AssertEq(t, baseImage.IsSaved(), true) + }) + it.After(func() { + os.Unsetenv(buildConfigEnvName) + }) + + it("adds the env vars as files to the image", func() { + layerTar, err := baseImage.FindLayerWithPath(buildConfigEnvValue + "/env/SOME_KEY") + h.AssertNil(t, err) + h.AssertOnTarEntry(t, layerTar, buildConfigEnvValue+"/env/SOME_KEY", + h.ContentEquals(`some-val`), + h.HasModTime(archive.NormalizedDateTime), + ) + h.AssertOnTarEntry(t, layerTar, buildConfigEnvValue+"/env/OTHER_KEY.append", + h.ContentEquals(`other-val`), + h.HasModTime(archive.NormalizedDateTime), + ) + h.AssertOnTarEntry(t, layerTar, buildConfigEnvValue+"/env/OTHER_KEY.delim", + h.ContentEquals(`:`), + h.HasModTime(archive.NormalizedDateTime), + ) + }) + }) + + when("#SetBuildConfigEnv", func() { + it.Before(func() { + os.Unsetenv("CNB_BUILD_CONFIG_DIR") + subject.SetBuildConfigEnv(map[string]string{ + "SOME_KEY": "some-val", + "OTHER_KEY.append": "other-val", + "OTHER_KEY.delim": ":", + }) + h.AssertNil(t, subject.Save(logger, builder.CreatorMetadata{})) + h.AssertEq(t, baseImage.IsSaved(), true) + }) + + it("adds the env vars as files to the image", func() { + layerTar, err := baseImage.FindLayerWithPath("/cnb/build-config/env/SOME_KEY") + h.AssertNil(t, err) + h.AssertOnTarEntry(t, layerTar, "/cnb/build-config/env/SOME_KEY", + h.ContentEquals(`some-val`), + h.HasModTime(archive.NormalizedDateTime), + ) + h.AssertOnTarEntry(t, layerTar, "/cnb/build-config/env/OTHER_KEY.append", + h.ContentEquals(`other-val`), + h.HasModTime(archive.NormalizedDateTime), + ) 
+ h.AssertOnTarEntry(t, layerTar, "/cnb/build-config/env/OTHER_KEY.delim", + h.ContentEquals(`:`), + h.HasModTime(archive.NormalizedDateTime), + ) + }) + }) + when("#SetEnv", func() { it.Before(func() { subject.SetEnv(map[string]string{ @@ -1252,6 +1748,23 @@ func testBuilder(t *testing.T, when spec.G, it spec.S) { ) }) }) + + when("#DefaultRunImage", func() { + it.Before(func() { + subject.SetRunImage(pubbldr.RunConfig{Images: []pubbldr.RunImageConfig{{ + Image: "some/run", + Mirrors: []string{"some/mirror", "other/mirror"}, + }}}) + h.AssertNil(t, subject.Save(logger, builder.CreatorMetadata{})) + h.AssertEq(t, baseImage.IsSaved(), true) + }) + + it("adds the run.toml to the image", func() { + actual := subject.DefaultRunImage() + h.AssertEq(t, actual.Image, "some/run") + h.AssertEq(t, actual.Mirrors, []string{"some/mirror", "other/mirror"}) + }) + }) }) when("builder exists", func() { @@ -1373,13 +1886,159 @@ func testBuilder(t *testing.T, when spec.G, it spec.S) { }) }) }) + + when("#New", func() { + when("#WithRunImage", func() { + // Current runImage information in builder image: + // "stack": {"runImage": {"image": "prev/run", "mirrors": ["prev/mirror"]}} + var newBuilder *builder.Builder + newRunImage := "another/run" + + it.Before(func() { + var err error + newBuilder, err = builder.New(builderImage, "newBuilder/image", builder.WithRunImage(newRunImage)) + h.AssertNil(t, err) + }) + + it("overrides the run image metadata (which becomes run.toml)", func() { + // RunImages() returns Stacks + RunImages metadata. 
+ metadata := newBuilder.RunImages() + h.AssertTrue(t, len(metadata) == 2) + for _, m := range metadata { + // Both images must be equal to the expected run-image + h.AssertEq(t, m.Image, newRunImage) + h.AssertEq(t, len(m.Mirrors), 0) + } + }) + }) + }) + }) + + when("flatten", func() { + var ( + bldr *builder.Builder + builderImage imgutil.Image + deps []buildpack.BuildModule + ) + + it.Before(func() { + h.AssertNil(t, baseImage.SetEnv("CNB_USER_ID", "1234")) + h.AssertNil(t, baseImage.SetEnv("CNB_GROUP_ID", "4321")) + h.AssertNil(t, baseImage.SetLabel("io.buildpacks.stack.id", "some.stack.id")) + h.AssertNil(t, baseImage.SetLabel("io.buildpacks.stack.mixins", `["mixinX", "mixinY", "build:mixinA"]`)) + h.AssertNil(t, baseImage.SetLabel( + "io.buildpacks.builder.metadata", + `{"description": "some-description", "createdBy": {"name": "some-name", "version": "1.2.3"}, "buildpacks": [{"id": "buildpack-1-id"}, {"id": "buildpack-2-id"}], "groups": [{"buildpacks": [{"id": "buildpack-1-id", "version": "buildpack-1-version", "optional": false}, {"id": "buildpack-2-id", "version": "buildpack-2-version-1", "optional": true}]}], "stack": {"runImage": {"image": "prev/run", "mirrors": ["prev/mirror"]}}, "lifecycle": {"version": "6.6.6"}}`, + )) + h.AssertNil(t, baseImage.SetLabel( + "io.buildpacks.buildpack.order", + `[{"group": [{"id": "buildpack-1-id", "optional": false}, {"id": "buildpack-2-id", "version": "buildpack-2-version-1", "optional": true}]}]`, + )) + + builderImage = baseImage + deps = []buildpack.BuildModule{bp2v1, bp1v2} + }) + + when("buildpacks to be flattened are NOT defined", func() { + it.Before(func() { + var err error + bldr, err = builder.New(builderImage, "some-builder") + h.AssertNil(t, err) + + // Let's add the buildpacks + bldr.AddBuildpacks(bp1v1, deps) + }) + + when("#FlattenedModules", func() { + it("it return an empty array", func() { + h.AssertEq(t, len(bldr.FlattenedModules(buildpack.KindBuildpack)), 0) + }) + }) + + when("#AllModules", func() 
{ + it("it returns each buildpack individually", func() { + h.AssertEq(t, len(bldr.AllModules(buildpack.KindBuildpack)), 3) + }) + }) + + when("#ShouldFlatten", func() { + it("it returns false for each buildpack", func() { + h.AssertFalse(t, bldr.ShouldFlatten(bp1v1)) + h.AssertFalse(t, bldr.ShouldFlatten(bp2v1)) + h.AssertFalse(t, bldr.ShouldFlatten(bp1v2)) + }) + }) + }) + + when("buildpacks to be flattened are defined", func() { + it.Before(func() { + var err error + flattenModules, err := buildpack.ParseFlattenBuildModules([]string{"buildpack-1-id@buildpack-1-version-1,buildpack-1-id@buildpack-1-version-2,buildpack-2-id@buildpack-2-version-1"}) + h.AssertNil(t, err) + + bldr, err = builder.New(builderImage, "some-builder", builder.WithFlattened(flattenModules)) + h.AssertNil(t, err) + + // Let's add the buildpacks + bldr.AddBuildpacks(bp1v1, deps) + }) + + when("#FlattenedModules", func() { + it("it return one array with all buildpacks on it", func() { + h.AssertEq(t, len(bldr.FlattenedModules(buildpack.KindBuildpack)), 1) + h.AssertEq(t, len(bldr.FlattenedModules(buildpack.KindBuildpack)[0]), 3) + }) + }) + + when("#AllModules", func() { + it("it returns each buildpack individually", func() { + h.AssertEq(t, len(bldr.AllModules(buildpack.KindBuildpack)), 3) + }) + }) + + when("#ShouldFlatten", func() { + it("it returns true for each buildpack", func() { + h.AssertTrue(t, bldr.ShouldFlatten(bp1v1)) + h.AssertTrue(t, bldr.ShouldFlatten(bp2v1)) + h.AssertTrue(t, bldr.ShouldFlatten(bp1v2)) + }) + }) + }) + }) + + when("labels", func() { + var ( + customLabels, imageLabels map[string]string + err error + ) + it.Before(func() { + h.AssertNil(t, baseImage.SetEnv("CNB_USER_ID", "1234")) + h.AssertNil(t, baseImage.SetEnv("CNB_GROUP_ID", "4321")) + h.AssertNil(t, baseImage.SetLabel("io.buildpacks.stack.id", "some.stack.id")) + h.AssertNil(t, baseImage.SetLabel("io.buildpacks.stack.mixins", `["mixinX", "mixinY", "build:mixinA"]`)) + }) + + it.After(func() { + 
h.AssertNilE(t, baseImage.Cleanup()) + }) + + it("should set labels to the image", func() { + customLabels = map[string]string{"test.label.one": "1", "test.label.two": "2"} + subject, err = builder.New(baseImage, "some/builder", builder.WithLabels(customLabels)) + h.AssertNil(t, err) + + imageLabels, err = baseImage.Labels() + h.AssertNil(t, err) + h.AssertEq(t, imageLabels["test.label.one"], "1") + h.AssertEq(t, imageLabels["test.label.two"], "2") + }) }) } -func assertImageHasBPLayer(t *testing.T, image *fakes.Image, bp buildpack.Buildpack) { +func assertImageHasBPLayer(t *testing.T, image *fakes.Image, bp buildpack.BuildModule) { t.Helper() - dirPath := fmt.Sprintf("/cnb/buildpacks/%s/%s", bp.Descriptor().Info.ID, bp.Descriptor().Info.Version) + dirPath := fmt.Sprintf("/cnb/buildpacks/%s/%s", bp.Descriptor().Info().ID, bp.Descriptor().Info().Version) layerTar, err := image.FindLayerWithPath(dirPath) h.AssertNil(t, err) @@ -1400,10 +2059,34 @@ func assertImageHasBPLayer(t *testing.T, image *fakes.Image, bp buildpack.Buildp ) } -func assertImageHasOrderBpLayer(t *testing.T, image *fakes.Image, bp buildpack.Buildpack) { +func assertImageHasExtLayer(t *testing.T, image *fakes.Image, ext buildpack.BuildModule) { + t.Helper() + + dirPath := fmt.Sprintf("/cnb/extensions/%s/%s", ext.Descriptor().Info().ID, ext.Descriptor().Info().Version) + layerTar, err := image.FindLayerWithPath(dirPath) + h.AssertNil(t, err) + + h.AssertOnTarEntry(t, layerTar, dirPath, + h.IsDirectory(), + ) + + h.AssertOnTarEntry(t, layerTar, path.Dir(dirPath), + h.IsDirectory(), + ) + + h.AssertOnTarEntry(t, layerTar, dirPath+"/bin/generate", + h.ContentEquals("generate-contents"), + ) + + h.AssertOnTarEntry(t, layerTar, dirPath+"/bin/detect", + h.ContentEquals("detect-contents"), + ) +} + +func assertImageHasOrderBpLayer(t *testing.T, image *fakes.Image, bp buildpack.BuildModule) { t.Helper() - dirPath := fmt.Sprintf("/cnb/buildpacks/%s/%s", bp.Descriptor().Info.ID, bp.Descriptor().Info.Version) + 
dirPath := fmt.Sprintf("/cnb/buildpacks/%s/%s", bp.Descriptor().Info().ID, bp.Descriptor().Info().Version) layerTar, err := image.FindLayerWithPath(dirPath) h.AssertNil(t, err) diff --git a/internal/builder/detection_order_calculator.go b/internal/builder/detection_order_calculator.go index d62ad49057..2936b95804 100644 --- a/internal/builder/detection_order_calculator.go +++ b/internal/builder/detection_order_calculator.go @@ -12,11 +12,11 @@ func NewDetectionOrderCalculator() *DetectionOrderCalculator { } type detectionOrderRecurser struct { - layers dist.BuildpackLayers + layers dist.ModuleLayers maxDepth int } -func newDetectionOrderRecurser(layers dist.BuildpackLayers, maxDepth int) *detectionOrderRecurser { +func newDetectionOrderRecurser(layers dist.ModuleLayers, maxDepth int) *detectionOrderRecurser { return &detectionOrderRecurser{ layers: layers, maxDepth: maxDepth, @@ -25,17 +25,17 @@ func newDetectionOrderRecurser(layers dist.BuildpackLayers, maxDepth int) *detec func (c *DetectionOrderCalculator) Order( order dist.Order, - layers dist.BuildpackLayers, + layers dist.ModuleLayers, maxDepth int, ) (pubbldr.DetectionOrder, error) { recurser := newDetectionOrderRecurser(layers, maxDepth) - return recurser.detectionOrderFromOrder(order, dist.BuildpackRef{}, 0, map[string]interface{}{}), nil + return recurser.detectionOrderFromOrder(order, dist.ModuleRef{}, 0, map[string]interface{}{}), nil } func (r *detectionOrderRecurser) detectionOrderFromOrder( order dist.Order, - parentBuildpack dist.BuildpackRef, + parentBuildpack dist.ModuleRef, currentDepth int, visited map[string]interface{}, ) pubbldr.DetectionOrder { @@ -45,7 +45,7 @@ func (r *detectionOrderRecurser) detectionOrderFromOrder( groupDetectionOrder := r.detectionOrderFromGroup(orderEntry.Group, currentDepth, visitedCopy) detectionOrderEntry := pubbldr.DetectionOrderEntry{ - BuildpackRef: parentBuildpack, + ModuleRef: parentBuildpack, GroupDetectionOrder: groupDetectionOrder, } @@ -56,7 +56,7 @@ func 
(r *detectionOrderRecurser) detectionOrderFromOrder( } func (r *detectionOrderRecurser) detectionOrderFromGroup( - group []dist.BuildpackRef, + group []dist.ModuleRef, currentDepth int, visited map[string]interface{}, ) pubbldr.DetectionOrder { @@ -74,8 +74,8 @@ func (r *detectionOrderRecurser) detectionOrderFromGroup( groupDetectionOrder = append(groupDetectionOrder, groupOrder...) } else { groupDetectionOrderEntry := pubbldr.DetectionOrderEntry{ - BuildpackRef: bp, - Cyclical: bpSeen, + ModuleRef: bp, + Cyclical: bpSeen, } groupDetectionOrder = append(groupDetectionOrder, groupDetectionOrderEntry) } diff --git a/internal/builder/detection_order_calculator_test.go b/internal/builder/detection_order_calculator_test.go index f84bd4b2e3..426ce275b5 100644 --- a/internal/builder/detection_order_calculator_test.go +++ b/internal/builder/detection_order_calculator_test.go @@ -26,59 +26,59 @@ func testDetectionOrderCalculator(t *testing.T, when spec.G, it spec.S) { var ( assert = h.NewAssertionManager(t) - testBuildpackOne = dist.BuildpackInfo{ + testBuildpackOne = dist.ModuleInfo{ ID: "test.buildpack", Version: "test.buildpack.version", } - testBuildpackTwo = dist.BuildpackInfo{ + testBuildpackTwo = dist.ModuleInfo{ ID: "test.buildpack.2", Version: "test.buildpack.2.version", } - testTopNestedBuildpack = dist.BuildpackInfo{ + testTopNestedBuildpack = dist.ModuleInfo{ ID: "test.top.nested", Version: "test.top.nested.version", } - testLevelOneNestedBuildpack = dist.BuildpackInfo{ + testLevelOneNestedBuildpack = dist.ModuleInfo{ ID: "test.nested.level.one", Version: "test.nested.level.one.version", } - testLevelOneNestedBuildpackTwo = dist.BuildpackInfo{ + testLevelOneNestedBuildpackTwo = dist.ModuleInfo{ ID: "test.nested.level.one.two", Version: "test.nested.level.one.two.version", } - testLevelOneNestedBuildpackThree = dist.BuildpackInfo{ + testLevelOneNestedBuildpackThree = dist.ModuleInfo{ ID: "test.nested.level.one.three", Version: 
"test.nested.level.one.three.version", } - testLevelTwoNestedBuildpack = dist.BuildpackInfo{ + testLevelTwoNestedBuildpack = dist.ModuleInfo{ ID: "test.nested.level.two", Version: "test.nested.level.two.version", } topLevelOrder = dist.Order{ { - Group: []dist.BuildpackRef{ - {BuildpackInfo: testBuildpackOne}, - {BuildpackInfo: testBuildpackTwo}, - {BuildpackInfo: testTopNestedBuildpack}, + Group: []dist.ModuleRef{ + {ModuleInfo: testBuildpackOne}, + {ModuleInfo: testBuildpackTwo}, + {ModuleInfo: testTopNestedBuildpack}, }, }, } - buildpackLayers = dist.BuildpackLayers{ + buildpackLayers = dist.ModuleLayers{ "test.buildpack": { - "test.buildpack.version": dist.BuildpackLayerInfo{ + "test.buildpack.version": dist.ModuleLayerInfo{ API: api.MustParse("0.2"), LayerDiffID: "layer:diff", }, }, "test.top.nested": { - "test.top.nested.version": dist.BuildpackLayerInfo{ + "test.top.nested.version": dist.ModuleLayerInfo{ API: api.MustParse("0.2"), Order: dist.Order{ { - Group: []dist.BuildpackRef{ - {BuildpackInfo: testLevelOneNestedBuildpack}, - {BuildpackInfo: testLevelOneNestedBuildpackTwo}, - {BuildpackInfo: testLevelOneNestedBuildpackThree}, + Group: []dist.ModuleRef{ + {ModuleInfo: testLevelOneNestedBuildpack}, + {ModuleInfo: testLevelOneNestedBuildpackTwo}, + {ModuleInfo: testLevelOneNestedBuildpackThree}, }, }, }, @@ -86,12 +86,12 @@ func testDetectionOrderCalculator(t *testing.T, when spec.G, it spec.S) { }, }, "test.nested.level.one": { - "test.nested.level.one.version": dist.BuildpackLayerInfo{ + "test.nested.level.one.version": dist.ModuleLayerInfo{ API: api.MustParse("0.2"), Order: dist.Order{ { - Group: []dist.BuildpackRef{ - {BuildpackInfo: testLevelTwoNestedBuildpack}, + Group: []dist.ModuleRef{ + {ModuleInfo: testLevelTwoNestedBuildpack}, }, }, }, @@ -99,13 +99,13 @@ func testDetectionOrderCalculator(t *testing.T, when spec.G, it spec.S) { }, }, "test.nested.level.one.three": { - "test.nested.level.one.three.version": dist.BuildpackLayerInfo{ + 
"test.nested.level.one.three.version": dist.ModuleLayerInfo{ API: api.MustParse("0.2"), Order: dist.Order{ { - Group: []dist.BuildpackRef{ - {BuildpackInfo: testLevelTwoNestedBuildpack}, - {BuildpackInfo: testTopNestedBuildpack}, + Group: []dist.ModuleRef{ + {ModuleInfo: testLevelTwoNestedBuildpack}, + {ModuleInfo: testTopNestedBuildpack}, }, }, }, @@ -124,9 +124,9 @@ func testDetectionOrderCalculator(t *testing.T, when spec.G, it spec.S) { expectedOrder := pubbldr.DetectionOrder{ { GroupDetectionOrder: pubbldr.DetectionOrder{ - {BuildpackRef: dist.BuildpackRef{BuildpackInfo: testBuildpackOne}}, - {BuildpackRef: dist.BuildpackRef{BuildpackInfo: testBuildpackTwo}}, - {BuildpackRef: dist.BuildpackRef{BuildpackInfo: testTopNestedBuildpack}}, + {ModuleRef: dist.ModuleRef{ModuleInfo: testBuildpackOne}}, + {ModuleRef: dist.ModuleRef{ModuleInfo: testBuildpackTwo}}, + {ModuleRef: dist.ModuleRef{ModuleInfo: testTopNestedBuildpack}}, }, }, } @@ -144,28 +144,28 @@ func testDetectionOrderCalculator(t *testing.T, when spec.G, it spec.S) { expectedOrder := pubbldr.DetectionOrder{ { GroupDetectionOrder: pubbldr.DetectionOrder{ - {BuildpackRef: dist.BuildpackRef{BuildpackInfo: testBuildpackOne}}, - {BuildpackRef: dist.BuildpackRef{BuildpackInfo: testBuildpackTwo}}, + {ModuleRef: dist.ModuleRef{ModuleInfo: testBuildpackOne}}, + {ModuleRef: dist.ModuleRef{ModuleInfo: testBuildpackTwo}}, { - BuildpackRef: dist.BuildpackRef{BuildpackInfo: testTopNestedBuildpack}, + ModuleRef: dist.ModuleRef{ModuleInfo: testTopNestedBuildpack}, GroupDetectionOrder: pubbldr.DetectionOrder{ { - BuildpackRef: dist.BuildpackRef{BuildpackInfo: testLevelOneNestedBuildpack}, + ModuleRef: dist.ModuleRef{ModuleInfo: testLevelOneNestedBuildpack}, GroupDetectionOrder: pubbldr.DetectionOrder{ - {BuildpackRef: dist.BuildpackRef{BuildpackInfo: testLevelTwoNestedBuildpack}}, + {ModuleRef: dist.ModuleRef{ModuleInfo: testLevelTwoNestedBuildpack}}, }, }, - {BuildpackRef: dist.BuildpackRef{BuildpackInfo: 
testLevelOneNestedBuildpackTwo}}, + {ModuleRef: dist.ModuleRef{ModuleInfo: testLevelOneNestedBuildpackTwo}}, { - BuildpackRef: dist.BuildpackRef{BuildpackInfo: testLevelOneNestedBuildpackThree}, + ModuleRef: dist.ModuleRef{ModuleInfo: testLevelOneNestedBuildpackThree}, GroupDetectionOrder: pubbldr.DetectionOrder{ { - BuildpackRef: dist.BuildpackRef{BuildpackInfo: testLevelTwoNestedBuildpack}, - Cyclical: false, + ModuleRef: dist.ModuleRef{ModuleInfo: testLevelTwoNestedBuildpack}, + Cyclical: false, }, { - BuildpackRef: dist.BuildpackRef{BuildpackInfo: testTopNestedBuildpack}, - Cyclical: true, + ModuleRef: dist.ModuleRef{ModuleInfo: testTopNestedBuildpack}, + Cyclical: true, }, }, }, @@ -188,14 +188,14 @@ func testDetectionOrderCalculator(t *testing.T, when spec.G, it spec.S) { expectedOrder := pubbldr.DetectionOrder{ { GroupDetectionOrder: pubbldr.DetectionOrder{ - {BuildpackRef: dist.BuildpackRef{BuildpackInfo: testBuildpackOne}}, - {BuildpackRef: dist.BuildpackRef{BuildpackInfo: testBuildpackTwo}}, + {ModuleRef: dist.ModuleRef{ModuleInfo: testBuildpackOne}}, + {ModuleRef: dist.ModuleRef{ModuleInfo: testBuildpackTwo}}, { - BuildpackRef: dist.BuildpackRef{BuildpackInfo: testTopNestedBuildpack}, + ModuleRef: dist.ModuleRef{ModuleInfo: testTopNestedBuildpack}, GroupDetectionOrder: pubbldr.DetectionOrder{ - {BuildpackRef: dist.BuildpackRef{BuildpackInfo: testLevelOneNestedBuildpack}}, - {BuildpackRef: dist.BuildpackRef{BuildpackInfo: testLevelOneNestedBuildpackTwo}}, - {BuildpackRef: dist.BuildpackRef{BuildpackInfo: testLevelOneNestedBuildpackThree}}, + {ModuleRef: dist.ModuleRef{ModuleInfo: testLevelOneNestedBuildpack}}, + {ModuleRef: dist.ModuleRef{ModuleInfo: testLevelOneNestedBuildpackTwo}}, + {ModuleRef: dist.ModuleRef{ModuleInfo: testLevelOneNestedBuildpackThree}}, }, }, }, @@ -208,14 +208,14 @@ func testDetectionOrderCalculator(t *testing.T, when spec.G, it spec.S) { when("a buildpack is referenced in a sub detection group", func() { it("marks the buildpack is 
cyclic and doesn't attempt to calculate that buildpacks order", func() { - cyclicBuildpackLayers := dist.BuildpackLayers{ + cyclicBuildpackLayers := dist.ModuleLayers{ "test.top.nested": { - "test.top.nested.version": dist.BuildpackLayerInfo{ + "test.top.nested.version": dist.ModuleLayerInfo{ API: api.MustParse("0.2"), Order: dist.Order{ { - Group: []dist.BuildpackRef{ - {BuildpackInfo: testLevelOneNestedBuildpack}, + Group: []dist.ModuleRef{ + {ModuleInfo: testLevelOneNestedBuildpack}, }, }, }, @@ -223,12 +223,12 @@ func testDetectionOrderCalculator(t *testing.T, when spec.G, it spec.S) { }, }, "test.nested.level.one": { - "test.nested.level.one.version": dist.BuildpackLayerInfo{ + "test.nested.level.one.version": dist.ModuleLayerInfo{ API: api.MustParse("0.2"), Order: dist.Order{ { - Group: []dist.BuildpackRef{ - {BuildpackInfo: testTopNestedBuildpack}, + Group: []dist.ModuleRef{ + {ModuleInfo: testTopNestedBuildpack}, }, }, }, @@ -238,7 +238,7 @@ func testDetectionOrderCalculator(t *testing.T, when spec.G, it spec.S) { } cyclicOrder := dist.Order{ { - Group: []dist.BuildpackRef{{BuildpackInfo: testTopNestedBuildpack}}, + Group: []dist.ModuleRef{{ModuleInfo: testTopNestedBuildpack}}, }, } @@ -250,14 +250,14 @@ func testDetectionOrderCalculator(t *testing.T, when spec.G, it spec.S) { { GroupDetectionOrder: pubbldr.DetectionOrder{ { - BuildpackRef: dist.BuildpackRef{BuildpackInfo: testTopNestedBuildpack}, + ModuleRef: dist.ModuleRef{ModuleInfo: testTopNestedBuildpack}, GroupDetectionOrder: pubbldr.DetectionOrder{ { - BuildpackRef: dist.BuildpackRef{BuildpackInfo: testLevelOneNestedBuildpack}, + ModuleRef: dist.ModuleRef{ModuleInfo: testLevelOneNestedBuildpack}, GroupDetectionOrder: pubbldr.DetectionOrder{ { - BuildpackRef: dist.BuildpackRef{ - BuildpackInfo: testTopNestedBuildpack, + ModuleRef: dist.ModuleRef{ + ModuleInfo: testTopNestedBuildpack, }, Cyclical: true, }, diff --git a/internal/builder/fakes/fake_detection_calculator.go 
b/internal/builder/fakes/fake_detection_calculator.go index e3aa1c52fe..e393a79c7f 100644 --- a/internal/builder/fakes/fake_detection_calculator.go +++ b/internal/builder/fakes/fake_detection_calculator.go @@ -11,13 +11,13 @@ type FakeDetectionCalculator struct { ErrorForOrder error ReceivedTopOrder dist.Order - ReceivedLayers dist.BuildpackLayers + ReceivedLayers dist.ModuleLayers ReceivedDepth int } func (c *FakeDetectionCalculator) Order( topOrder dist.Order, - layers dist.BuildpackLayers, + layers dist.ModuleLayers, depth int, ) (builder.DetectionOrder, error) { c.ReceivedTopOrder = topOrder diff --git a/internal/builder/fakes/fake_label_manager.go b/internal/builder/fakes/fake_label_manager.go index 7641b87acb..e3dbadc4e3 100644 --- a/internal/builder/fakes/fake_label_manager.go +++ b/internal/builder/fakes/fake_label_manager.go @@ -10,13 +10,15 @@ type FakeLabelManager struct { ReturnForStackID string ReturnForMixins []string ReturnForOrder dist.Order - ReturnForBuildpackLayers dist.BuildpackLayers + ReturnForBuildpackLayers dist.ModuleLayers + ReturnForOrderExtensions dist.Order ErrorForMetadata error ErrorForStackID error ErrorForMixins error ErrorForOrder error ErrorForBuildpackLayers error + ErrorForOrderExtensions error } func (m *FakeLabelManager) Metadata() (builder.Metadata, error) { @@ -35,6 +37,10 @@ func (m *FakeLabelManager) Order() (dist.Order, error) { return m.ReturnForOrder, m.ErrorForOrder } -func (m *FakeLabelManager) BuildpackLayers() (dist.BuildpackLayers, error) { +func (m *FakeLabelManager) OrderExtensions() (dist.Order, error) { + return m.ReturnForOrder, m.ErrorForOrderExtensions +} + +func (m *FakeLabelManager) BuildpackLayers() (dist.ModuleLayers, error) { return m.ReturnForBuildpackLayers, m.ErrorForBuildpackLayers } diff --git a/internal/builder/image_fetcher_wrapper.go b/internal/builder/image_fetcher_wrapper.go index dfb0de4a83..16bf39b2a7 100644 --- a/internal/builder/image_fetcher_wrapper.go +++ 
b/internal/builder/image_fetcher_wrapper.go @@ -13,6 +13,14 @@ type ImageFetcher interface { // If daemon is true, it will look return a `local.Image`. Pull, applicable only when daemon is true, will // attempt to pull a remote image first. Fetch(ctx context.Context, name string, options image.FetchOptions) (imgutil.Image, error) + + // CheckReadAccess verifies if an image is accessible with read permissions + // When FetchOptions.Daemon is true and the image doesn't exist in the daemon, + // the behavior is dictated by the pull policy, which can have the following behavior + // - PullNever: returns false + // - PullAlways Or PullIfNotPresent: it will check read access for the remote image. + // When FetchOptions.Daemon is false it will check read access for the remote image. + CheckReadAccess(repo string, options image.FetchOptions) bool } type ImageFetcherWrapper struct { @@ -32,3 +40,7 @@ func (w *ImageFetcherWrapper) Fetch( ) (Inspectable, error) { return w.fetcher.Fetch(ctx, name, options) } + +func (w *ImageFetcherWrapper) CheckReadAccessValidator(repo string, options image.FetchOptions) bool { + return w.fetcher.CheckReadAccess(repo, options) +} diff --git a/internal/builder/inspect.go b/internal/builder/inspect.go index 7a3cc6ca09..01f8fc9ea5 100644 --- a/internal/builder/inspect.go +++ b/internal/builder/inspect.go @@ -15,13 +15,14 @@ type Info struct { Description string StackID string Mixins []string - RunImage string - RunImageMirrors []string - Buildpacks []dist.BuildpackInfo + RunImages []pubbldr.RunImageConfig + Buildpacks []dist.ModuleInfo Order pubbldr.DetectionOrder - BuildpackLayers dist.BuildpackLayers + BuildpackLayers dist.ModuleLayers Lifecycle LifecycleDescriptor CreatedBy CreatorMetadata + Extensions []dist.ModuleInfo + OrderExtensions pubbldr.DetectionOrder } type Inspectable interface { @@ -41,11 +42,12 @@ type LabelInspector interface { StackID() (string, error) Mixins() ([]string, error) Order() (dist.Order, error) - BuildpackLayers() 
(dist.BuildpackLayers, error) + BuildpackLayers() (dist.ModuleLayers, error) + OrderExtensions() (dist.Order, error) } type DetectionCalculator interface { - Order(topOrder dist.Order, layers dist.BuildpackLayers, depth int) (pubbldr.DetectionOrder, error) + Order(topOrder dist.Order, layers dist.ModuleLayers, depth int) (pubbldr.DetectionOrder, error) } type Inspector struct { @@ -75,10 +77,7 @@ func (i *Inspector) Inspect(name string, daemon bool, orderDetectionDepth int) ( return Info{}, fmt.Errorf("reading image metadata: %w", err) } - stackID, err := labelManager.StackID() - if err != nil { - return Info{}, fmt.Errorf("reading image stack id: %w", err) - } + stackID, _ := labelManager.StackID() // ignore error because stack is optional mixins, err := labelManager.Mixins() if err != nil { @@ -95,6 +94,11 @@ func (i *Inspector) Inspect(name string, daemon bool, orderDetectionDepth int) ( } } + orderExtensions, err := labelManager.OrderExtensions() + if err != nil { + return Info{}, fmt.Errorf("reading image order extensions: %w", err) + } + order, err := labelManager.Order() if err != nil { return Info{}, fmt.Errorf("reading image order: %w", err) @@ -110,29 +114,66 @@ func (i *Inspector) Inspect(name string, daemon bool, orderDetectionDepth int) ( return Info{}, fmt.Errorf("calculating detection order: %w", err) } + detectionOrderExtensions := orderExttoPubbldrDetectionOrderExt(orderExtensions) + lifecycle := CompatDescriptor(LifecycleDescriptor{ Info: LifecycleInfo{Version: metadata.Lifecycle.Version}, API: metadata.Lifecycle.API, APIs: metadata.Lifecycle.APIs, }) + var runImages []pubbldr.RunImageConfig + for _, ri := range metadata.RunImages { + runImages = append(runImages, pubbldr.RunImageConfig{ + Image: ri.Image, + Mirrors: ri.Mirrors, + }) + } + addStackRunImage := true + for _, ri := range runImages { + if ri.Image == metadata.Stack.RunImage.Image { + addStackRunImage = false + } + } + if addStackRunImage && metadata.Stack.RunImage.Image != "" { + 
runImages = append(runImages, pubbldr.RunImageConfig{ + Image: metadata.Stack.RunImage.Image, + Mirrors: metadata.Stack.RunImage.Mirrors, + }) + } + return Info{ Description: metadata.Description, StackID: stackID, Mixins: append(commonMixins, buildMixins...), - RunImage: metadata.Stack.RunImage.Image, - RunImageMirrors: metadata.Stack.RunImage.Mirrors, + RunImages: runImages, Buildpacks: sortBuildPacksByID(uniqueBuildpacks(metadata.Buildpacks)), Order: detectionOrder, BuildpackLayers: layers, Lifecycle: lifecycle, CreatedBy: metadata.CreatedBy, + Extensions: metadata.Extensions, + OrderExtensions: detectionOrderExtensions, }, nil } -func uniqueBuildpacks(buildpacks []dist.BuildpackInfo) []dist.BuildpackInfo { +func orderExttoPubbldrDetectionOrderExt(orderExt dist.Order) pubbldr.DetectionOrder { + var detectionOrderExt pubbldr.DetectionOrder + + for _, orderEntry := range orderExt { + var detectionOrderEntry pubbldr.DetectionOrderEntry + for _, moduleRef := range orderEntry.Group { + detectionOrderEntry.ModuleRef = moduleRef + } + detectionOrderExt = append(detectionOrderExt, detectionOrderEntry) + } + + return detectionOrderExt +} + +func uniqueBuildpacks(buildpacks []dist.ModuleInfo) []dist.ModuleInfo { foundBuildpacks := map[string]interface{}{} - var uniqueBuildpacks []dist.BuildpackInfo + var uniqueBuildpacks []dist.ModuleInfo for _, bp := range buildpacks { _, ok := foundBuildpacks[bp.FullName()] @@ -145,7 +186,7 @@ func uniqueBuildpacks(buildpacks []dist.BuildpackInfo) []dist.BuildpackInfo { return uniqueBuildpacks } -func sortBuildPacksByID(buildpacks []dist.BuildpackInfo) []dist.BuildpackInfo { +func sortBuildPacksByID(buildpacks []dist.ModuleInfo) []dist.ModuleInfo { sort.Slice(buildpacks, func(i int, j int) bool { if buildpacks[i].ID == buildpacks[j].ID { return buildpacks[i].Version < buildpacks[j].Version diff --git a/internal/builder/inspect_test.go b/internal/builder/inspect_test.go index 6285844f0c..38411ba2cf 100644 --- 
a/internal/builder/inspect_test.go +++ b/internal/builder/inspect_test.go @@ -23,27 +23,28 @@ const ( testBuilderDescription = "Test Builder Description" testStackID = "test-builder-stack-id" testRunImage = "test/run-image" + testStackRunImage = "test/stack-run-image" ) var ( - testTopNestedBuildpack = dist.BuildpackInfo{ + testTopNestedBuildpack = dist.ModuleInfo{ ID: "test.top.nested", Version: "test.top.nested.version", } - testNestedBuildpack = dist.BuildpackInfo{ + testNestedBuildpack = dist.ModuleInfo{ ID: "test.nested", Version: "test.nested.version", Homepage: "http://geocities.com/top-bp", } - testBuildpack = dist.BuildpackInfo{ + testBuildpack = dist.ModuleInfo{ ID: "test.bp.two", Version: "test.bp.two.version", } - testBuildpackVersionTwo = dist.BuildpackInfo{ + testBuildpackVersionTwo = dist.ModuleInfo{ ID: "test.bp.two", Version: "test.bp.two.version-2", } - testBuildpacks = []dist.BuildpackInfo{ + testBuildpacks = []dist.ModuleInfo{ testBuildpack, testNestedBuildpack, testTopNestedBuildpack, @@ -75,26 +76,41 @@ var ( Stack: testStack, Lifecycle: inspectTestLifecycle, CreatedBy: testCreatorData, + RunImages: []builder.RunImageMetadata{ + { + Image: testRunImage, + Mirrors: testRunImageMirrors, + }, + }, } testMixins = []string{"build:mixinA", "mixinX", "mixinY"} expectedTestMixins = []string{"mixinX", "mixinY", "build:mixinA"} testRunImageMirrors = []string{"test/first-run-image-mirror", "test/second-run-image-mirror"} testStack = builder.StackMetadata{ RunImage: builder.RunImageMetadata{ - Image: testRunImage, + Image: testStackRunImage, Mirrors: testRunImageMirrors, }, } testOrder = dist.Order{ - dist.OrderEntry{Group: []dist.BuildpackRef{ - {BuildpackInfo: testBuildpack, Optional: false}, + dist.OrderEntry{Group: []dist.ModuleRef{ + {ModuleInfo: testBuildpack, Optional: false}, + }}, + dist.OrderEntry{Group: []dist.ModuleRef{ + {ModuleInfo: testNestedBuildpack, Optional: false}, + {ModuleInfo: testTopNestedBuildpack, Optional: true}, }}, - 
dist.OrderEntry{Group: []dist.BuildpackRef{ - {BuildpackInfo: testNestedBuildpack, Optional: false}, - {BuildpackInfo: testTopNestedBuildpack, Optional: true}, + } + testOrderExtensions = dist.Order{ + dist.OrderEntry{Group: []dist.ModuleRef{ + {ModuleInfo: testBuildpack, Optional: false}, + }}, + dist.OrderEntry{Group: []dist.ModuleRef{ + {ModuleInfo: testNestedBuildpack, Optional: false}, + {ModuleInfo: testTopNestedBuildpack, Optional: true}, }}, } - testLayers = dist.BuildpackLayers{ + testLayers = dist.ModuleLayers{ "test.top.nested": { "test.top.nested.version": { API: api.MustParse("0.2"), @@ -125,18 +141,18 @@ var ( } expectedDetectionTestOrder = pubbldr.DetectionOrder{ { - BuildpackRef: dist.BuildpackRef{ - BuildpackInfo: testBuildpack, + ModuleRef: dist.ModuleRef{ + ModuleInfo: testBuildpack, }, }, { - BuildpackRef: dist.BuildpackRef{ - BuildpackInfo: testTopNestedBuildpack, + ModuleRef: dist.ModuleRef{ + ModuleInfo: testTopNestedBuildpack, }, GroupDetectionOrder: pubbldr.DetectionOrder{ { - BuildpackRef: dist.BuildpackRef{ - BuildpackInfo: testNestedBuildpack, + ModuleRef: dist.ModuleRef{ + ModuleInfo: testNestedBuildpack, }, }, }, @@ -209,8 +225,11 @@ func testInspect(t *testing.T, when spec.G, it spec.S) { assert.Equal(info.Description, testBuilderDescription) assert.Equal(info.StackID, testStackID) assert.Equal(info.Mixins, expectedTestMixins) - assert.Equal(info.RunImage, testRunImage) - assert.Equal(info.RunImageMirrors, testRunImageMirrors) + assert.Equal(len(info.RunImages), 2) + assert.Equal(info.RunImages[0].Image, testRunImage) + assert.Equal(info.RunImages[1].Image, testStackRunImage) + assert.Equal(info.RunImages[0].Mirrors, testRunImageMirrors) + assert.Equal(info.RunImages[1].Mirrors, testRunImageMirrors) assert.Equal(info.Buildpacks, testBuildpacks) assert.Equal(info.Order, expectedDetectionTestOrder) assert.Equal(info.BuildpackLayers, testLayers) @@ -221,7 +240,7 @@ func testInspect(t *testing.T, when spec.G, it spec.S) { it("sorts 
buildPacks by ID then Version", func() { metadata := builder.Metadata{ Description: testBuilderDescription, - Buildpacks: []dist.BuildpackInfo{ + Buildpacks: []dist.ModuleInfo{ testNestedBuildpack, testBuildpackVersionTwo, testBuildpack, @@ -238,14 +257,14 @@ func testInspect(t *testing.T, when spec.G, it spec.S) { info, err := inspector.Inspect(testBuilderName, true, pubbldr.OrderDetectionNone) assert.Nil(err) - assert.Equal(info.Buildpacks, []dist.BuildpackInfo{testBuildpack, testBuildpackVersionTwo, testNestedBuildpack}) + assert.Equal(info.Buildpacks, []dist.ModuleInfo{testBuildpack, testBuildpackVersionTwo, testNestedBuildpack}) }) when("there are duplicated buildpacks in metadata", func() { it("returns deduplicated buildpacks", func() { metadata := builder.Metadata{ Description: testBuilderDescription, - Buildpacks: []dist.BuildpackInfo{ + Buildpacks: []dist.ModuleInfo{ testTopNestedBuildpack, testNestedBuildpack, testTopNestedBuildpack, @@ -261,7 +280,7 @@ func testInspect(t *testing.T, when spec.G, it spec.S) { info, err := inspector.Inspect(testBuilderName, true, pubbldr.OrderDetectionNone) assert.Nil(err) - assert.Equal(info.Buildpacks, []dist.BuildpackInfo{testNestedBuildpack, testTopNestedBuildpack}) + assert.Equal(info.Buildpacks, []dist.ModuleInfo{testNestedBuildpack, testTopNestedBuildpack}) }) }) @@ -282,7 +301,7 @@ func testInspect(t *testing.T, when spec.G, it spec.S) { }) }) - when("label manager returns an error for `StackID`", func() { + when("label manager does not return an error for `StackID`", func() { it("returns the wrapped error", func() { expectedBaseError := errors.New("label not found") @@ -295,7 +314,7 @@ func testInspect(t *testing.T, when spec.G, it spec.S) { ) _, err := inspector.Inspect(testBuilderName, true, pubbldr.OrderDetectionNone) - assert.ErrorWithMessage(err, "reading image stack id: label not found") + assert.Nil(err) }) }) @@ -333,7 +352,24 @@ func testInspect(t *testing.T, when spec.G, it spec.S) { }) }) - when("label 
manager returns an error for `BuildpackLayers`", func() { + when("label manager returns an error for `OrderExtensions`", func() { + it("returns the wrapped error", func() { + expectedBaseError := errors.New("label not found") + + labelManager := newLabelManager(errorForOrderExtensions(expectedBaseError)) + + inspector := builder.NewInspector( + newDefaultInspectableFetcher(), + newLabelManagerFactory(labelManager), + newDefaultDetectionCalculator(), + ) + _, err := inspector.Inspect(testBuilderName, true, pubbldr.OrderDetectionNone) + + assert.ErrorWithMessage(err, "reading image order extensions: label not found") + }) + }) + + when("label manager returns an error for `ModuleLayers`", func() { it("returns the wrapped error", func() { expectedBaseError := errors.New("label not found") @@ -392,6 +428,7 @@ func newDefaultLabelManager() *fakes.FakeLabelManager { ReturnForMixins: testMixins, ReturnForOrder: testOrder, ReturnForBuildpackLayers: testLayers, + ReturnForOrderExtensions: testOrderExtensions, } } @@ -427,6 +464,12 @@ func errorForOrder(err error) labelManagerModifier { } } +func errorForOrderExtensions(err error) labelManagerModifier { + return func(manager *fakes.FakeLabelManager) { + manager.ErrorForOrderExtensions = err + } +} + func errorForBuildpackLayers(err error) labelManagerModifier { return func(manager *fakes.FakeLabelManager) { manager.ErrorForBuildpackLayers = err diff --git a/internal/builder/label_manager.go b/internal/builder/label_manager.go index cdbfe760fd..e3d6807780 100644 --- a/internal/builder/label_manager.go +++ b/internal/builder/label_manager.go @@ -39,8 +39,14 @@ func (m *LabelManager) Order() (dist.Order, error) { return parsedOrder, err } -func (m *LabelManager) BuildpackLayers() (dist.BuildpackLayers, error) { - parsedLayers := dist.BuildpackLayers{} +func (m *LabelManager) OrderExtensions() (dist.Order, error) { + parsedOrder := dist.Order{} + err := m.labelJSONDefaultEmpty(OrderExtensionsLabel, &parsedOrder) + return 
parsedOrder, err +} + +func (m *LabelManager) BuildpackLayers() (dist.ModuleLayers, error) { + parsedLayers := dist.ModuleLayers{} err := m.labelJSONDefaultEmpty(dist.BuildpackLayersLabel, &parsedLayers) return parsedLayers, err } diff --git a/internal/builder/label_manager_test.go b/internal/builder/label_manager_test.go index fda7eba4b9..18ebb6e76e 100644 --- a/internal/builder/label_manager_test.go +++ b/internal/builder/label_manager_test.go @@ -65,7 +65,7 @@ func testLabelManager(t *testing.T, when spec.G, it spec.S) { expectedDescription = "Test image description" expectedRunImage = "some/run-image" expectedRunImageMirror = "gcr.io/some/default" - expectedBuildpacks = []dist.BuildpackInfo{ + expectedBuildpacks = []dist.ModuleInfo{ { ID: "test.buildpack", Version: "test.buildpack.version", @@ -325,14 +325,14 @@ func testLabelManager(t *testing.T, when spec.G, it spec.S) { expectedOrder := dist.Order{ { - Group: []dist.BuildpackRef{ + Group: []dist.ModuleRef{ { - BuildpackInfo: dist.BuildpackInfo{ + ModuleInfo: dist.ModuleInfo{ ID: "buildpack-1-id", }, }, { - BuildpackInfo: dist.BuildpackInfo{ + ModuleInfo: dist.ModuleInfo{ ID: "buildpack-2-id", Version: "buildpack-2-version-1", }, @@ -398,7 +398,92 @@ func testLabelManager(t *testing.T, when spec.G, it spec.S) { }) }) - when("BuildpackLayers", func() { + when("OrderExtensions", func() { + var rawOrder = `[{"group": [{"id": "buildpack-1-id", "optional": false}, {"id": "buildpack-2-id", "version": "buildpack-2-version-1", "optional": true}]}]` + + it("returns the order", func() { + inspectable := newInspectable(returnForLabel(rawOrder)) + + labelManager := builder.NewLabelManager(inspectable) + mixins, err := labelManager.OrderExtensions() + assert.Nil(err) + + expectedOrder := dist.Order{ + { + Group: []dist.ModuleRef{ + { + ModuleInfo: dist.ModuleInfo{ + ID: "buildpack-1-id", + }, + }, + { + ModuleInfo: dist.ModuleInfo{ + ID: "buildpack-2-id", + Version: "buildpack-2-version-1", + }, + Optional: true, + }, + 
}, + }, + } + + assert.Equal(mixins, expectedOrder) + }) + + it("requests the expected label", func() { + inspectable := newInspectable(returnForLabel(rawOrder)) + + labelManager := builder.NewLabelManager(inspectable) + _, err := labelManager.OrderExtensions() + assert.Nil(err) + + assert.Equal(inspectable.ReceivedName, "io.buildpacks.buildpack.order-extensions") + }) + + when("inspectable return empty content for `Label`", func() { + it("returns an empty order object", func() { + inspectable := newInspectable(returnForLabel("")) + + labelManager := builder.NewLabelManager(inspectable) + order, err := labelManager.OrderExtensions() + assert.Nil(err) + + assert.Equal(order, dist.Order{}) + }) + }) + + when("inspectable returns an error for `Label`", func() { + it("returns a wrapped error", func() { + expectedError := errors.New("couldn't find label") + + inspectable := newInspectable(errorForLabel(expectedError)) + + labelManager := builder.NewLabelManager(inspectable) + _, err := labelManager.OrderExtensions() + + assert.ErrorWithMessage( + err, + "getting label io.buildpacks.buildpack.order-extensions: couldn't find label", + ) + }) + }) + + when("inspectable returns invalid json for `Label`", func() { + it("returns a wrapped error", func() { + inspectable := newInspectable(returnForLabel("{")) + + labelManager := builder.NewLabelManager(inspectable) + _, err := labelManager.OrderExtensions() + + assert.ErrorWithMessage( + err, + "parsing label content for io.buildpacks.buildpack.order-extensions: unexpected end of JSON input", + ) + }) + }) + }) + + when("ModuleLayers", func() { var rawLayers = ` { "buildpack-1-id": { @@ -421,13 +506,13 @@ func testLabelManager(t *testing.T, when spec.G, it spec.S) { layers, err := labelManager.BuildpackLayers() assert.Nil(err) - expectedLayers := dist.BuildpackLayers{ + expectedLayers := dist.ModuleLayers{ "buildpack-1-id": { - "buildpack-1-version-1": dist.BuildpackLayerInfo{ + "buildpack-1-version-1": dist.ModuleLayerInfo{ 
API: api.MustParse("0.1"), LayerDiffID: "sha256:buildpack-1-version-1-diff-id", }, - "buildpack-1-version-2": dist.BuildpackLayerInfo{ + "buildpack-1-version-2": dist.ModuleLayerInfo{ API: api.MustParse("0.2"), LayerDiffID: "sha256:buildpack-1-version-2-diff-id", }, @@ -455,7 +540,7 @@ func testLabelManager(t *testing.T, when spec.G, it spec.S) { layers, err := labelManager.BuildpackLayers() assert.Nil(err) - assert.Equal(layers, dist.BuildpackLayers{}) + assert.Equal(layers, dist.ModuleLayers{}) }) }) diff --git a/internal/builder/lifecycle.go b/internal/builder/lifecycle.go index 151dcc0bfb..be430523d4 100644 --- a/internal/builder/lifecycle.go +++ b/internal/builder/lifecycle.go @@ -12,10 +12,9 @@ import ( "github.com/buildpacks/pack/pkg/archive" ) -// A snapshot of the latest tested lifecycle version values +// DefaultLifecycleVersion A snapshot of the latest tested lifecycle version values const ( - DefaultLifecycleVersion = "0.13.3" - DefaultBuildpackAPIVersion = "0.2" + DefaultLifecycleVersion = "0.21.0" ) // Blob is an interface to wrap opening blobs @@ -24,6 +23,7 @@ type Blob interface { } // Lifecycle is an implementation of the CNB Lifecycle spec +// //go:generate mockgen -package testmocks -destination testmocks/mock_lifecycle.go github.com/buildpacks/pack/internal/builder Lifecycle type Lifecycle interface { Blob @@ -119,3 +119,8 @@ func (l *lifecycle) binaries() []string { } return binaries } + +// SupportedLinuxArchitecture returns true for each binary architecture available at https://github.com/buildpacks/lifecycle/releases/ +func SupportedLinuxArchitecture(arch string) bool { + return arch == "arm64" || arch == "ppc64le" || arch == "s390x" +} diff --git a/internal/builder/lifecycle_test.go b/internal/builder/lifecycle_test.go index d7d8b90dda..282f05e478 100644 --- a/internal/builder/lifecycle_test.go +++ b/internal/builder/lifecycle_test.go @@ -3,7 +3,6 @@ package builder_test import ( "archive/tar" "io" - "io/ioutil" "os" "path/filepath" 
"testing" @@ -58,10 +57,10 @@ func testLifecycle(t *testing.T, when spec.G, it spec.S) { it.Before(func() { var err error - tmpDir, err = ioutil.TempDir("", "lifecycle") + tmpDir, err = os.MkdirTemp("", "lifecycle") h.AssertNil(t, err) - h.AssertNil(t, ioutil.WriteFile(filepath.Join(tmpDir, "lifecycle.toml"), []byte(` + h.AssertNil(t, os.WriteFile(filepath.Join(tmpDir, "lifecycle.toml"), []byte(` [api] platform "0.1" `), 0711)) @@ -82,10 +81,10 @@ func testLifecycle(t *testing.T, when spec.G, it spec.S) { it.Before(func() { var err error - tmpDir, err = ioutil.TempDir("", "") + tmpDir, err = os.MkdirTemp("", "") h.AssertNil(t, err) - h.AssertNil(t, ioutil.WriteFile(filepath.Join(tmpDir, "lifecycle.toml"), []byte(` + h.AssertNil(t, os.WriteFile(filepath.Join(tmpDir, "lifecycle.toml"), []byte(` [api] platform = "0.2" buildpack = "0.3" @@ -95,9 +94,9 @@ func testLifecycle(t *testing.T, when spec.G, it spec.S) { `), os.ModePerm)) h.AssertNil(t, os.Mkdir(filepath.Join(tmpDir, "lifecycle"), os.ModePerm)) - h.AssertNil(t, ioutil.WriteFile(filepath.Join(tmpDir, "lifecycle", "analyzer"), []byte("content"), os.ModePerm)) - h.AssertNil(t, ioutil.WriteFile(filepath.Join(tmpDir, "lifecycle", "detector"), []byte("content"), os.ModePerm)) - h.AssertNil(t, ioutil.WriteFile(filepath.Join(tmpDir, "lifecycle", "builder"), []byte("content"), os.ModePerm)) + h.AssertNil(t, os.WriteFile(filepath.Join(tmpDir, "lifecycle", "analyzer"), []byte("content"), os.ModePerm)) + h.AssertNil(t, os.WriteFile(filepath.Join(tmpDir, "lifecycle", "detector"), []byte("content"), os.ModePerm)) + h.AssertNil(t, os.WriteFile(filepath.Join(tmpDir, "lifecycle", "builder"), []byte("content"), os.ModePerm)) }) it.After(func() { diff --git a/internal/builder/metadata.go b/internal/builder/metadata.go index 111ec4ef83..c8c7083c88 100644 --- a/internal/builder/metadata.go +++ b/internal/builder/metadata.go @@ -3,15 +3,19 @@ package builder import "github.com/buildpacks/pack/pkg/dist" const ( - OrderLabel = 
"io.buildpacks.buildpack.order" + OrderLabel = "io.buildpacks.buildpack.order" + OrderExtensionsLabel = "io.buildpacks.buildpack.order-extensions" + SystemLabel = "io.buildpacks.buildpack.system" ) type Metadata struct { - Description string `json:"description"` - Buildpacks []dist.BuildpackInfo `json:"buildpacks"` - Stack StackMetadata `json:"stack"` - Lifecycle LifecycleMetadata `json:"lifecycle"` - CreatedBy CreatorMetadata `json:"createdBy"` + Description string `json:"description"` + Buildpacks []dist.ModuleInfo `json:"buildpacks"` + Extensions []dist.ModuleInfo `json:"extensions"` + Stack StackMetadata `json:"stack"` + Lifecycle LifecycleMetadata `json:"lifecycle"` + CreatedBy CreatorMetadata `json:"createdBy"` + RunImages []RunImageMetadata `json:"images"` } type CreatorMetadata struct { @@ -30,6 +34,10 @@ type StackMetadata struct { RunImage RunImageMetadata `json:"runImage" toml:"run-image"` } +type RunImages struct { + Images []RunImageMetadata `json:"images" toml:"images"` +} + type RunImageMetadata struct { Image string `json:"image" toml:"image"` Mirrors []string `json:"mirrors" toml:"mirrors"` diff --git a/internal/builder/suggested_builder.go b/internal/builder/suggested_builder.go deleted file mode 100644 index 378ad72e0e..0000000000 --- a/internal/builder/suggested_builder.go +++ /dev/null @@ -1,35 +0,0 @@ -package builder - -type SuggestedBuilder struct { - Vendor string - Image string - DefaultDescription string -} - -var SuggestedBuilders = []SuggestedBuilder{ - { - Vendor: "Google", - Image: "gcr.io/buildpacks/builder:v1", - DefaultDescription: "GCP Builder for all runtimes", - }, - { - Vendor: "Heroku", - Image: "heroku/buildpacks:20", - DefaultDescription: "heroku-20 base image with buildpacks for Ruby, Java, Node.js, Python, Golang, & PHP", - }, - { - Vendor: "Paketo Buildpacks", - Image: "paketobuildpacks/builder:base", - DefaultDescription: "Small base image with buildpacks for Java, Node.js, Golang, & .NET Core", - }, - { - Vendor: 
"Paketo Buildpacks", - Image: "paketobuildpacks/builder:full", - DefaultDescription: "Larger base image with buildpacks for Java, Node.js, Golang, .NET Core, & PHP", - }, - { - Vendor: "Paketo Buildpacks", - Image: "paketobuildpacks/builder:tiny", - DefaultDescription: "Tiny base image (bionic build image, distroless run image) with buildpacks for Golang", - }, -} diff --git a/internal/builder/testdata/lifecycle/platform-0.4/lifecycle.toml b/internal/builder/testdata/lifecycle/platform-0.4/lifecycle.toml index 3fbda4fa10..bfd495a89d 100644 --- a/internal/builder/testdata/lifecycle/platform-0.4/lifecycle.toml +++ b/internal/builder/testdata/lifecycle/platform-0.4/lifecycle.toml @@ -4,7 +4,7 @@ version = "0.0.0" [apis] [apis.buildpack] deprecated = [] -supported = ["0.2", "0.3", "0.4"] +supported = ["0.2", "0.3", "0.4", "0.9"] [apis.platform] deprecated = ["0.2"] diff --git a/internal/builder/testmocks/mock_lifecycle.go b/internal/builder/testmocks/mock_lifecycle.go index c0753bd846..88aacd786f 100644 --- a/internal/builder/testmocks/mock_lifecycle.go +++ b/internal/builder/testmocks/mock_lifecycle.go @@ -13,30 +13,30 @@ import ( builder "github.com/buildpacks/pack/internal/builder" ) -// MockLifecycle is a mock of Lifecycle interface +// MockLifecycle is a mock of Lifecycle interface. type MockLifecycle struct { ctrl *gomock.Controller recorder *MockLifecycleMockRecorder } -// MockLifecycleMockRecorder is the mock recorder for MockLifecycle +// MockLifecycleMockRecorder is the mock recorder for MockLifecycle. type MockLifecycleMockRecorder struct { mock *MockLifecycle } -// NewMockLifecycle creates a new mock instance +// NewMockLifecycle creates a new mock instance. 
func NewMockLifecycle(ctrl *gomock.Controller) *MockLifecycle { mock := &MockLifecycle{ctrl: ctrl} mock.recorder = &MockLifecycleMockRecorder{mock} return mock } -// EXPECT returns an object that allows the caller to indicate expected use +// EXPECT returns an object that allows the caller to indicate expected use. func (m *MockLifecycle) EXPECT() *MockLifecycleMockRecorder { return m.recorder } -// Descriptor mocks base method +// Descriptor mocks base method. func (m *MockLifecycle) Descriptor() builder.LifecycleDescriptor { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "Descriptor") @@ -44,13 +44,13 @@ func (m *MockLifecycle) Descriptor() builder.LifecycleDescriptor { return ret0 } -// Descriptor indicates an expected call of Descriptor +// Descriptor indicates an expected call of Descriptor. func (mr *MockLifecycleMockRecorder) Descriptor() *gomock.Call { mr.mock.ctrl.T.Helper() return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Descriptor", reflect.TypeOf((*MockLifecycle)(nil).Descriptor)) } -// Open mocks base method +// Open mocks base method. func (m *MockLifecycle) Open() (io.ReadCloser, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "Open") @@ -59,7 +59,7 @@ func (m *MockLifecycle) Open() (io.ReadCloser, error) { return ret0, ret1 } -// Open indicates an expected call of Open +// Open indicates an expected call of Open. 
func (mr *MockLifecycleMockRecorder) Open() *gomock.Call { mr.mock.ctrl.T.Helper() return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Open", reflect.TypeOf((*MockLifecycle)(nil).Open)) diff --git a/internal/builder/trusted_builder.go b/internal/builder/trusted_builder.go new file mode 100644 index 0000000000..c164aaeeba --- /dev/null +++ b/internal/builder/trusted_builder.go @@ -0,0 +1,113 @@ +package builder + +import ( + "github.com/google/go-containerregistry/pkg/name" + + "github.com/buildpacks/pack/internal/config" +) + +type KnownBuilder struct { + Vendor string + Image string + DefaultDescription string + Suggested bool + Trusted bool +} + +var KnownBuilders = []KnownBuilder{ + { + Vendor: "Google", + Image: "gcr.io/buildpacks/builder:google-22", + DefaultDescription: "Ubuntu 22.04 base image with buildpacks for .NET, Dart, Go, Java, Node.js, PHP, Python, and Ruby", + Suggested: true, + Trusted: true, + }, + { + Vendor: "Heroku", + Image: "heroku/builder:24", + DefaultDescription: "Ubuntu 24.04 AMD64+ARM64 base image with buildpacks for Go, Java, Node.js, PHP, Python, Ruby & Scala.", + Suggested: true, + Trusted: true, + }, + { + Vendor: "Heroku", + Image: "heroku/builder:22", + DefaultDescription: "Ubuntu 22.04 AMD64 base image with buildpacks for Go, Java, Node.js, PHP, Python, Ruby & Scala.", + Suggested: false, + Trusted: true, + }, + { + Vendor: "Heroku", + Image: "heroku/builder:20", + DefaultDescription: "Ubuntu 20.04 AMD64 base image with buildpacks for Go, Java, Node.js, PHP, Python, Ruby & Scala.", + Suggested: false, + Trusted: true, + }, + { + Vendor: "Paketo Buildpacks", + Image: "paketobuildpacks/builder-jammy-base", + DefaultDescription: "Small base image with buildpacks for Java, Node.js, Golang, .NET Core, Python & Ruby", + Suggested: true, + Trusted: true, + }, + { + Vendor: "Paketo Buildpacks", + Image: "paketobuildpacks/builder-jammy-full", + DefaultDescription: "Larger base image with buildpacks for Java, Node.js, Golang, .NET Core, 
Python, Ruby, & PHP", + Suggested: true, + Trusted: true, + }, + { + Vendor: "Paketo Buildpacks", + Image: "paketobuildpacks/builder-jammy-tiny", + DefaultDescription: "Tiny base image (jammy build image, distroless run image) with buildpacks for Golang & Java", + Suggested: true, + Trusted: true, + }, + { + Vendor: "Paketo Buildpacks", + Image: "paketobuildpacks/builder-jammy-buildpackless-static", + DefaultDescription: "Static base image (jammy build image, distroless run image) suitable for static binaries like Go or Rust", + Suggested: true, + Trusted: true, + }, + { + Vendor: "Paketo Buildpacks", + Image: "paketobuildpacks/builder-ubi8-base", + DefaultDescription: "Universal Base Image (RHEL8) with buildpacks to build Node.js or Java runtimes. Support also the new extension feature (aka apply Dockerfile)", + Suggested: true, + Trusted: true, + }, +} + +func IsKnownTrustedBuilder(builderName string) bool { + for _, knownBuilder := range KnownBuilders { + if builderName == knownBuilder.Image && knownBuilder.Trusted { + return true + } + } + return false +} + +func IsTrustedBuilder(cfg config.Config, builderName string) (bool, error) { + builderReference, err := name.ParseReference(builderName, name.WithDefaultTag("")) + if err != nil { + return false, err + } + for _, trustedBuilder := range cfg.TrustedBuilders { + trustedBuilderReference, err := name.ParseReference(trustedBuilder.Name, name.WithDefaultTag("")) + if err != nil { + return false, err + } + if trustedBuilderReference.Identifier() != "" { + if builderReference.Name() == trustedBuilderReference.Name() { + return true, nil + } + } else { + if builderReference.Context().RepositoryStr() == trustedBuilderReference.Context().RepositoryStr() { + return true, nil + } + } + } + return false, nil +} diff --git a/internal/builder/trusted_builder_test.go b/internal/builder/trusted_builder_test.go new file mode 100644 index 0000000000..90e7599cd8 --- /dev/null +++ b/internal/builder/trusted_builder_test.go @@ 
-0,0 +1,102 @@ +package builder_test + +import ( + "testing" + + "github.com/heroku/color" + "github.com/sclevine/spec" + "github.com/sclevine/spec/report" + + bldr "github.com/buildpacks/pack/internal/builder" + "github.com/buildpacks/pack/internal/config" + + h "github.com/buildpacks/pack/testhelpers" +) + +func TestTrustedBuilder(t *testing.T) { + color.Disable(true) + defer color.Disable(false) + spec.Run(t, "Trusted Builder", trustedBuilder, spec.Parallel(), spec.Report(report.Terminal{})) +} + +func trustedBuilder(t *testing.T, when spec.G, it spec.S) { + when("IsKnownTrustedBuilder", func() { + it("matches exactly", func() { + h.AssertTrue(t, bldr.IsKnownTrustedBuilder("paketobuildpacks/builder-jammy-base")) + h.AssertFalse(t, bldr.IsKnownTrustedBuilder("paketobuildpacks/builder-jammy-base:latest")) + h.AssertFalse(t, bldr.IsKnownTrustedBuilder("paketobuildpacks/builder-jammy-base:1.2.3")) + h.AssertFalse(t, bldr.IsKnownTrustedBuilder("my/private/builder")) + }) + }) + + when("IsTrustedBuilder", func() { + it("trust image without tag", func() { + cfg := config.Config{ + TrustedBuilders: []config.TrustedBuilder{ + { + Name: "my/trusted/builder-jammy", + }, + }, + } + + trustedBuilders := []string{ + "my/trusted/builder-jammy", + "my/trusted/builder-jammy:latest", + "my/trusted/builder-jammy:1.2.3", + } + + untrustedBuilders := []string{ + "my/private/builder", // random builder + "my/trusted/builder-jammy-base", // shared prefix + } + + for _, builder := range trustedBuilders { + isTrusted, err := bldr.IsTrustedBuilder(cfg, builder) + h.AssertNil(t, err) + h.AssertTrue(t, isTrusted) + } + + for _, builder := range untrustedBuilders { + isTrusted, err := bldr.IsTrustedBuilder(cfg, builder) + h.AssertNil(t, err) + h.AssertFalse(t, isTrusted) + } + }) + it("trust image with tag", func() { + cfg := config.Config{ + TrustedBuilders: []config.TrustedBuilder{ + { + Name: "my/trusted/builder-jammy:1.2.3", + }, + { + Name: "my/trusted/builder-jammy:latest", + }, + }, 
+ } + + trustedBuilders := []string{ + "my/trusted/builder-jammy:1.2.3", + "my/trusted/builder-jammy:latest", + } + + untrustedBuilders := []string{ + "my/private/builder", + "my/trusted/builder-jammy", + "my/trusted/builder-jammy:2.0.0", + "my/trusted/builder-jammy-base", + } + + for _, builder := range trustedBuilders { + isTrusted, err := bldr.IsTrustedBuilder(cfg, builder) + h.AssertNil(t, err) + h.AssertTrue(t, isTrusted) + } + + for _, builder := range untrustedBuilders { + isTrusted, err := bldr.IsTrustedBuilder(cfg, builder) + h.AssertNil(t, err) + h.AssertFalse(t, isTrusted) + } + }) + }) +} diff --git a/internal/builder/version.go b/internal/builder/version.go index f926419bed..7ecbef6809 100644 --- a/internal/builder/version.go +++ b/internal/builder/version.go @@ -31,7 +31,7 @@ func (v *Version) Equal(other *Version) bool { // MarshalText makes Version satisfy the encoding.TextMarshaler interface. func (v *Version) MarshalText() ([]byte, error) { - return []byte(v.Version.Original()), nil + return []byte(v.Original()), nil } // UnmarshalText makes Version satisfy the encoding.TextUnmarshaler interface. 
diff --git a/internal/builder/writer/human_readable.go b/internal/builder/writer/human_readable.go index 6f649c6a28..dc88df881c 100644 --- a/internal/builder/writer/human_readable.go +++ b/internal/builder/writer/human_readable.go @@ -27,6 +27,7 @@ const ( writerMinWidth = 0 writerTabWidth = 0 buildpacksTabWidth = 8 + extensionsTabWidth = 8 defaultTabWidth = 4 writerPadChar = ' ' writerFlags = 0 @@ -46,8 +47,8 @@ Created By: Trusted: {{.Trusted}} -Stack: - ID: {{ .Info.Stack }} +{{ if ne .Info.Stack "" -}}Stack: + ID: {{ .Info.Stack }}{{ end -}} {{- if .Verbose}} {{- if ne (len .Info.Mixins) 0 }} Mixins: @@ -59,7 +60,13 @@ Stack: {{ .Lifecycle }} {{ .RunImages }} {{ .Buildpacks }} -{{ .Order }}` +{{ .Order }} +{{- if ne .Extensions "" }} +{{ .Extensions }} +{{- end }} +{{- if ne .OrderExtensions "" }} +{{ .OrderExtensions }} +{{- end }}` ) type HumanReadable struct{} @@ -118,7 +125,7 @@ func writeBuilderInfo( var warnings []string - runImagesString, runImagesWarnings, err := runImagesOutput(info.RunImage, localRunImages, info.RunImageMirrors, sharedInfo.Name) + runImagesString, runImagesWarnings, err := runImagesOutput(info.RunImages, localRunImages, sharedInfo.Name) if err != nil { return fmt.Errorf("compiling run images output: %w", err) } @@ -126,28 +133,54 @@ func writeBuilderInfo( if err != nil { return fmt.Errorf("compiling detection order output: %w", err) } + + var orderExtString string + var orderExtWarnings []string + + if info.Extensions != nil { + orderExtString, orderExtWarnings, err = detectionOrderExtOutput(info.OrderExtensions, sharedInfo.Name) + if err != nil { + return fmt.Errorf("compiling detection order extensions output: %w", err) + } + } buildpacksString, buildpacksWarnings, err := buildpacksOutput(info.Buildpacks, sharedInfo.Name) if err != nil { return fmt.Errorf("compiling buildpacks output: %w", err) } lifecycleString, lifecycleWarnings := lifecycleOutput(info.Lifecycle, sharedInfo.Name) + var extensionsString string + var 
extensionsWarnings []string + + if info.Extensions != nil { + extensionsString, extensionsWarnings, err = extensionsOutput(info.Extensions, sharedInfo.Name) + if err != nil { + return fmt.Errorf("compiling extensions output: %w", err) + } + } + warnings = append(warnings, runImagesWarnings...) warnings = append(warnings, orderWarnings...) warnings = append(warnings, buildpacksWarnings...) warnings = append(warnings, lifecycleWarnings...) - + if info.Extensions != nil { + warnings = append(warnings, extensionsWarnings...) + warnings = append(warnings, orderExtWarnings...) + } outputTemplate, _ := template.New("").Parse(outputTemplate) + err = outputTemplate.Execute( logger.Writer(), &struct { - Info client.BuilderInfo - Verbose bool - Buildpacks string - RunImages string - Order string - Trusted string - Lifecycle string + Info client.BuilderInfo + Verbose bool + Buildpacks string + RunImages string + Order string + Trusted string + Lifecycle string + Extensions string + OrderExtensions string }{ *info, logger.IsVerbose(), @@ -156,6 +189,8 @@ func writeBuilderInfo( orderString, stringFromBool(sharedInfo.Trusted), lifecycleString, + extensionsString, + orderExtString, }, ) @@ -208,9 +243,8 @@ func stringFromBool(subject bool) string { } func runImagesOutput( - runImage string, + runImages []pubbldr.RunImageConfig, localRunImages []config.RunImage, - buildRunImages []string, builderName string, ) (string, []string, error) { output := "Run Images:\n" @@ -218,36 +252,39 @@ func runImagesOutput( tabWriterBuf := bytes.Buffer{} localMirrorTabWriter := tabwriter.NewWriter(&tabWriterBuf, writerMinWidth, writerTabWidth, defaultTabWidth, writerPadChar, writerFlags) - err := writeLocalMirrors(localMirrorTabWriter, runImage, localRunImages) + err := writeLocalMirrors(localMirrorTabWriter, runImages, localRunImages) if err != nil { return "", []string{}, fmt.Errorf("writing local mirrors: %w", err) } var warnings []string - if runImage != "" { - _, err = 
fmt.Fprintf(localMirrorTabWriter, " %s\n", runImage) - if err != nil { - return "", []string{}, fmt.Errorf("writing to tabwriter: %w", err) - } - } else { + if len(runImages) == 0 { warnings = append( warnings, fmt.Sprintf("%s does not specify a run image", builderName), "Users must build with an explicitly specified run image", ) - } - for _, m := range buildRunImages { - _, err = fmt.Fprintf(localMirrorTabWriter, " %s\n", m) - if err != nil { - return "", []string{}, fmt.Errorf("writing to tab writer: %w", err) + } else { + for _, runImage := range runImages { + if runImage.Image != "" { + _, err = fmt.Fprintf(localMirrorTabWriter, " %s\n", runImage.Image) + if err != nil { + return "", []string{}, fmt.Errorf("writing to tabwriter: %w", err) + } + } + for _, m := range runImage.Mirrors { + _, err = fmt.Fprintf(localMirrorTabWriter, " %s\n", m) + if err != nil { + return "", []string{}, fmt.Errorf("writing to tab writer: %w", err) + } + } + err = localMirrorTabWriter.Flush() + if err != nil { + return "", []string{}, fmt.Errorf("flushing tab writer: %w", err) + } } } - err = localMirrorTabWriter.Flush() - if err != nil { - return "", []string{}, fmt.Errorf("flushing tab writer: %w", err) - } - runImageOutput := tabWriterBuf.String() if runImageOutput == "" { runImageOutput = fmt.Sprintf(" %s\n", none) @@ -258,13 +295,15 @@ func runImagesOutput( return output, warnings, nil } -func writeLocalMirrors(logWriter io.Writer, runImage string, localRunImages []config.RunImage) error { +func writeLocalMirrors(logWriter io.Writer, runImages []pubbldr.RunImageConfig, localRunImages []config.RunImage) error { for _, i := range localRunImages { - if i.Image == runImage { - for _, m := range i.Mirrors { - _, err := fmt.Fprintf(logWriter, " %s\t(user-configured)\n", m) - if err != nil { - return fmt.Errorf("writing local mirror: %s: %w", m, err) + for _, ri := range runImages { + if i.Image == ri.Image { + for _, m := range i.Mirrors { + _, err := fmt.Fprintf(logWriter, " 
%s\t(user-configured)\n", m) + if err != nil { + return fmt.Errorf("writing local mirror: %s: %w", m, err) + } } } } @@ -273,7 +312,43 @@ func writeLocalMirrors(logWriter io.Writer, runImage string, localRunImages []co return nil } -func buildpacksOutput(buildpacks []dist.BuildpackInfo, builderName string) (string, []string, error) { +func extensionsOutput(extensions []dist.ModuleInfo, builderName string) (string, []string, error) { + output := "Extensions:\n" + + if len(extensions) == 0 { + return fmt.Sprintf("%s %s\n", output, none), nil, nil + } + + var ( + tabWriterBuf = bytes.Buffer{} + spaceStrippingWriter = &trailingSpaceStrippingWriter{ + output: &tabWriterBuf, + } + extensionsTabWriter = tabwriter.NewWriter(spaceStrippingWriter, writerMinWidth, writerPadChar, extensionsTabWidth, writerPadChar, writerFlags) + ) + + _, err := fmt.Fprint(extensionsTabWriter, " ID\tNAME\tVERSION\tHOMEPAGE\n") + if err != nil { + return "", []string{}, fmt.Errorf("writing to tab writer: %w", err) + } + + for _, b := range extensions { + _, err = fmt.Fprintf(extensionsTabWriter, " %s\t%s\t%s\t%s\n", b.ID, strs.ValueOrDefault(b.Name, "-"), b.Version, strs.ValueOrDefault(b.Homepage, "-")) + if err != nil { + return "", []string{}, fmt.Errorf("writing to tab writer: %w", err) + } + } + + err = extensionsTabWriter.Flush() + if err != nil { + return "", []string{}, fmt.Errorf("flushing tab writer: %w", err) + } + + output += tabWriterBuf.String() + return output, []string{}, nil +} + +func buildpacksOutput(buildpacks []dist.ModuleInfo, builderName string) (string, []string, error) { output := "Buildpacks:\n" if len(buildpacks) == 0 { @@ -402,6 +477,32 @@ func detectionOrderOutput(order pubbldr.DetectionOrder, builderName string) (str return output, []string{}, nil } +func detectionOrderExtOutput(order pubbldr.DetectionOrder, builderName string) (string, []string, error) { + output := "Detection Order (Extensions):\n" + + if len(order) == 0 { + return fmt.Sprintf("%s %s\n", output, 
none), nil, nil + } + + tabWriterBuf := bytes.Buffer{} + spaceStrippingWriter := &trailingSpaceStrippingWriter{ + output: &tabWriterBuf, + } + + detectionOrderExtTabWriter := tabwriter.NewWriter(spaceStrippingWriter, writerMinWidth, writerTabWidth, defaultTabWidth, writerPadChar, writerFlags) + err := writeDetectionOrderGroup(detectionOrderExtTabWriter, order, "") + if err != nil { + return "", []string{}, fmt.Errorf("writing detection order group: %w", err) + } + err = detectionOrderExtTabWriter.Flush() + if err != nil { + return "", []string{}, fmt.Errorf("flushing tab writer: %w", err) + } + + output += tabWriterBuf.String() + return output, []string{}, nil +} + func writeDetectionOrderGroup(writer io.Writer, order pubbldr.DetectionOrder, prefix string) error { groupNumber := 0 diff --git a/internal/builder/writer/human_readable_test.go b/internal/builder/writer/human_readable_test.go index a9d5bb5671..ab4fe5f039 100644 --- a/internal/builder/writer/human_readable_test.go +++ b/internal/builder/writer/human_readable_test.go @@ -46,6 +46,70 @@ Created By: Trusted: No +Stack: + ID: test.stack.id + +Lifecycle: + Version: 6.7.8 + Buildpack APIs: + Deprecated: (none) + Supported: 1.2, 2.3 + Platform APIs: + Deprecated: 0.1, 1.2 + Supported: 4.5 + +Run Images: + first/local (user-configured) + second/local (user-configured) + some/run-image + first/default + second/default + +Buildpacks: + ID NAME VERSION HOMEPAGE + test.top.nested - test.top.nested.version - + test.nested - http://geocities.com/top-bp + test.bp.one - test.bp.one.version http://geocities.com/cool-bp + test.bp.two - test.bp.two.version - + test.bp.three - test.bp.three.version - + +Detection Order: + ├ Group #1: + │ ├ test.top.nested@test.top.nested.version + │ │ └ Group #1: + │ │ ├ test.nested + │ │ │ └ Group #1: + │ │ │ └ test.bp.one@test.bp.one.version (optional) + │ │ ├ test.bp.three@test.bp.three.version (optional) + │ │ └ test.nested.two@test.nested.two.version + │ │ └ Group #2: + │ │ └ 
test.bp.one@test.bp.one.version (optional)[cyclic] + │ └ test.bp.two@test.bp.two.version (optional) + └ test.bp.three@test.bp.three.version + +Extensions: + ID NAME VERSION HOMEPAGE + test.bp.one - test.bp.one.version http://geocities.com/cool-bp + test.bp.two - test.bp.two.version - + test.bp.three - test.bp.three.version - + +Detection Order (Extensions): + ├ test.top.nested@test.top.nested.version + ├ test.bp.one@test.bp.one.version (optional) + ├ test.bp.two@test.bp.two.version (optional) + └ test.bp.three@test.bp.three.version +` + expectedRemoteOutputWithoutExtensions = ` +REMOTE: + +Description: Some remote description + +Created By: + Name: Pack CLI + Version: 1.2.3 + +Trusted: No + Stack: ID: test.stack.id @@ -99,6 +163,71 @@ Created By: Trusted: No +Stack: + ID: test.stack.id + +Lifecycle: + Version: 4.5.6 + Buildpack APIs: + Deprecated: 4.5, 6.7 + Supported: 8.9, 10.11 + Platform APIs: + Deprecated: (none) + Supported: 7.8 + +Run Images: + first/local (user-configured) + second/local (user-configured) + some/run-image + first/local-default + second/local-default + +Buildpacks: + ID NAME VERSION HOMEPAGE + test.top.nested - test.top.nested.version - + test.nested - http://geocities.com/top-bp + test.bp.one - test.bp.one.version http://geocities.com/cool-bp + test.bp.two - test.bp.two.version - + test.bp.three - test.bp.three.version - + +Detection Order: + ├ Group #1: + │ ├ test.top.nested@test.top.nested.version + │ │ └ Group #1: + │ │ ├ test.nested + │ │ │ └ Group #1: + │ │ │ └ test.bp.one@test.bp.one.version (optional) + │ │ ├ test.bp.three@test.bp.three.version (optional) + │ │ └ test.nested.two@test.nested.two.version + │ │ └ Group #2: + │ │ └ test.bp.one@test.bp.one.version (optional)[cyclic] + │ └ test.bp.two@test.bp.two.version (optional) + └ test.bp.three@test.bp.three.version + +Extensions: + ID NAME VERSION HOMEPAGE + test.bp.one - test.bp.one.version http://geocities.com/cool-bp + test.bp.two - test.bp.two.version - + test.bp.three - 
test.bp.three.version - + +Detection Order (Extensions): + ├ test.top.nested@test.top.nested.version + ├ test.bp.one@test.bp.one.version (optional) + ├ test.bp.two@test.bp.two.version (optional) + └ test.bp.three@test.bp.three.version +` + + expectedLocalOutputWithoutExtensions = ` +LOCAL: + +Description: Some local description + +Created By: + Name: Pack CLI + Version: 4.5.6 + +Trusted: No + Stack: ID: test.stack.id @@ -140,6 +269,7 @@ Detection Order: │ └ test.bp.two@test.bp.two.version (optional) └ test.bp.three@test.bp.three.version ` + expectedVerboseStack = ` Stack: ID: test.stack.id @@ -164,6 +294,10 @@ Buildpacks: expectedEmptyOrder = ` Detection Order: (none) +` + expectedEmptyOrderExt = ` +Detection Order (Extensions): + (none) ` expectedMissingLocalInfo = ` LOCAL: @@ -181,11 +315,12 @@ REMOTE: Description: "Some remote description", Stack: "test.stack.id", Mixins: []string{"mixin1", "mixin2", "build:mixin3", "build:mixin4"}, - RunImage: "some/run-image", - RunImageMirrors: []string{"first/default", "second/default"}, + RunImages: []pubbldr.RunImageConfig{{Image: "some/run-image", Mirrors: []string{"first/default", "second/default"}}}, Buildpacks: buildpacks, Order: order, - BuildpackLayers: dist.BuildpackLayers{}, + Extensions: extensions, + OrderExtensions: orderExtensions, + BuildpackLayers: dist.ModuleLayers{}, Lifecycle: builder.LifecycleDescriptor{ Info: builder.LifecycleInfo{ Version: &builder.Version{ @@ -213,11 +348,12 @@ REMOTE: Description: "Some local description", Stack: "test.stack.id", Mixins: []string{"mixin1", "mixin2", "build:mixin3", "build:mixin4"}, - RunImage: "some/run-image", - RunImageMirrors: []string{"first/local-default", "second/local-default"}, + RunImages: []pubbldr.RunImageConfig{{Image: "some/run-image", Mirrors: []string{"first/local-default", "second/local-default"}}}, Buildpacks: buildpacks, Order: order, - BuildpackLayers: dist.BuildpackLayers{}, + Extensions: extensions, + OrderExtensions: orderExtensions, + 
BuildpackLayers: dist.ModuleLayers{}, Lifecycle: builder.LifecycleDescriptor{ Info: builder.LifecycleInfo{ Version: &builder.Version{ @@ -436,10 +572,8 @@ REMOTE: when("no run images are specified", func() { it("displays run images as (none) and warns about unset run image", func() { - localInfo.RunImage = "" - localInfo.RunImageMirrors = []string{} - remoteInfo.RunImage = "" - remoteInfo.RunImageMirrors = []string{} + localInfo.RunImages = []pubbldr.RunImageConfig{} + remoteInfo.RunImages = []pubbldr.RunImageConfig{} emptyLocalRunImages := []config.RunImage{} humanReadableWriter := writer.NewHumanReadable() @@ -456,8 +590,8 @@ REMOTE: when("no buildpacks are specified", func() { it("displays buildpacks as (none) and prints warnings", func() { - localInfo.Buildpacks = []dist.BuildpackInfo{} - remoteInfo.Buildpacks = []dist.BuildpackInfo{} + localInfo.Buildpacks = []dist.ModuleInfo{} + remoteInfo.Buildpacks = []dist.ModuleInfo{} humanReadableWriter := writer.NewHumanReadable() @@ -471,6 +605,23 @@ REMOTE: }) }) + when("no extensions are specified", func() { + it("displays no extensions as (none)", func() { + localInfo.Extensions = []dist.ModuleInfo{} + remoteInfo.Extensions = []dist.ModuleInfo{} + + humanReadableWriter := writer.NewHumanReadable() + + logger := logging.NewLogWithWriters(&outBuf, &outBuf) + err := humanReadableWriter.Print(logger, localRunImages, localInfo, remoteInfo, nil, nil, sharedBuilderInfo) + assert.Nil(err) + + assert.Contains(outBuf.String(), "Inspecting builder: 'test-builder'") + assert.Contains(outBuf.String(), expectedRemoteOutputWithoutExtensions) + assert.Contains(outBuf.String(), expectedLocalOutputWithoutExtensions) + }) + }) + when("multiple top level groups", func() { it("displays order correctly", func() { @@ -493,5 +644,20 @@ REMOTE: assert.Contains(outBuf.String(), "Users must build with explicitly specified buildpacks") }) }) + + when("no detection order for extension is specified", func() { + it("displays detection order for 
extensions as (none)", func() { + localInfo.OrderExtensions = pubbldr.DetectionOrder{} + remoteInfo.OrderExtensions = pubbldr.DetectionOrder{} + + humanReadableWriter := writer.NewHumanReadable() + + logger := logging.NewLogWithWriters(&outBuf, &outBuf) + err := humanReadableWriter.Print(logger, localRunImages, localInfo, remoteInfo, nil, nil, sharedBuilderInfo) + assert.Nil(err) + + assert.Contains(outBuf.String(), expectedEmptyOrderExt) + }) + }) }) } diff --git a/internal/builder/writer/json_test.go b/internal/builder/writer/json_test.go index e09b31361e..a7d2b597d9 100644 --- a/internal/builder/writer/json_test.go +++ b/internal/builder/writer/json_test.go @@ -93,6 +93,22 @@ func testJSON(t *testing.T, when spec.G, it spec.S) { "version": "test.bp.three.version" } ]` + + expectedExtensions = `"extensions": [ + { + "homepage": "http://geocities.com/cool-bp", + "id": "test.bp.one", + "version": "test.bp.one.version" + }, + { + "id": "test.bp.two", + "version": "test.bp.two.version" + }, + { + "id": "test.bp.three", + "version": "test.bp.three.version" + } + ]` expectedDetectionOrder = `"detection_order": [ { "buildpacks": [ @@ -143,6 +159,27 @@ func testJSON(t *testing.T, when spec.G, it spec.S) { "id": "test.bp.three", "version": "test.bp.three.version" } + ]` + expectedOrderExtensions = `"order_extensions": [ + { + "id": "test.top.nested", + "version": "test.top.nested.version" + }, + { + "homepage": "http://geocities.com/cool-bp", + "id": "test.bp.one", + "version": "test.bp.one.version", + "optional": true + }, + { + "id": "test.bp.two", + "version": "test.bp.two.version", + "optional": true + }, + { + "id": "test.bp.three", + "version": "test.bp.three.version" + } ]` expectedStackWithMixins = `"stack": { "id": "test.stack.id", @@ -192,8 +229,10 @@ func testJSON(t *testing.T, when spec.G, it spec.S) { }, %s, %s, - %s - }`, expectedRemoteRunImages, expectedBuildpacks, expectedDetectionOrder) + %s, + %s, + %s + }`, expectedRemoteRunImages, expectedBuildpacks, 
expectedDetectionOrder, expectedExtensions, expectedOrderExtensions) expectedLocalInfo = fmt.Sprintf(`"local_info": { "description": "Some local description", @@ -225,8 +264,10 @@ func testJSON(t *testing.T, when spec.G, it spec.S) { }, %s, %s, - %s - }`, expectedLocalRunImages, expectedBuildpacks, expectedDetectionOrder) + %s, + %s, + %s + }`, expectedLocalRunImages, expectedBuildpacks, expectedDetectionOrder, expectedExtensions, expectedOrderExtensions) expectedPrettifiedJSON = fmt.Sprintf(`{ "builder_name": "test-builder", @@ -244,11 +285,12 @@ func testJSON(t *testing.T, when spec.G, it spec.S) { Description: "Some remote description", Stack: "test.stack.id", Mixins: []string{"mixin1", "mixin2", "build:mixin3", "build:mixin4"}, - RunImage: "some/run-image", - RunImageMirrors: []string{"first/default", "second/default"}, + RunImages: []pubbldr.RunImageConfig{{Image: "some/run-image", Mirrors: []string{"first/default", "second/default"}}}, Buildpacks: buildpacks, Order: order, - BuildpackLayers: dist.BuildpackLayers{}, + Extensions: extensions, + OrderExtensions: orderExtensions, + BuildpackLayers: dist.ModuleLayers{}, Lifecycle: builder.LifecycleDescriptor{ Info: builder.LifecycleInfo{ Version: &builder.Version{ @@ -276,11 +318,12 @@ func testJSON(t *testing.T, when spec.G, it spec.S) { Description: "Some local description", Stack: "test.stack.id", Mixins: []string{"mixin1", "mixin2", "build:mixin3", "build:mixin4"}, - RunImage: "some/run-image", - RunImageMirrors: []string{"first/local-default", "second/local-default"}, + RunImages: []pubbldr.RunImageConfig{{Image: "some/run-image", Mirrors: []string{"first/local-default", "second/local-default"}}}, Buildpacks: buildpacks, Order: order, - BuildpackLayers: dist.BuildpackLayers{}, + Extensions: extensions, + OrderExtensions: orderExtensions, + BuildpackLayers: dist.ModuleLayers{}, Lifecycle: builder.LifecycleDescriptor{ Info: builder.LifecycleInfo{ Version: &builder.Version{ @@ -405,10 +448,8 @@ func testJSON(t 
*testing.T, when spec.G, it spec.S) { when("no run images are specified", func() { it("displays run images as empty list", func() { - localInfo.RunImage = "" - localInfo.RunImageMirrors = []string{} - remoteInfo.RunImage = "" - remoteInfo.RunImageMirrors = []string{} + localInfo.RunImages = []pubbldr.RunImageConfig{} + remoteInfo.RunImages = []pubbldr.RunImageConfig{} emptyLocalRunImages := []config.RunImage{} jsonWriter := writer.NewJSON() @@ -426,8 +467,8 @@ func testJSON(t *testing.T, when spec.G, it spec.S) { when("no buildpacks are specified", func() { it("displays buildpacks as empty list", func() { - localInfo.Buildpacks = []dist.BuildpackInfo{} - remoteInfo.Buildpacks = []dist.BuildpackInfo{} + localInfo.Buildpacks = []dist.ModuleInfo{} + remoteInfo.Buildpacks = []dist.ModuleInfo{} jsonWriter := writer.NewJSON() diff --git a/internal/builder/writer/shared_builder_test.go b/internal/builder/writer/shared_builder_test.go index 4e7629756d..b03f9715c2 100644 --- a/internal/builder/writer/shared_builder_test.go +++ b/internal/builder/writer/shared_builder_test.go @@ -8,33 +8,33 @@ import ( ) var ( - testTopNestedBuildpack = dist.BuildpackInfo{ + testTopNestedBuildpack = dist.ModuleInfo{ ID: "test.top.nested", Version: "test.top.nested.version", } - testNestedBuildpack = dist.BuildpackInfo{ + testNestedBuildpack = dist.ModuleInfo{ ID: "test.nested", Homepage: "http://geocities.com/top-bp", } - testBuildpackOne = dist.BuildpackInfo{ + testBuildpackOne = dist.ModuleInfo{ ID: "test.bp.one", Version: "test.bp.one.version", Homepage: "http://geocities.com/cool-bp", } - testBuildpackTwo = dist.BuildpackInfo{ + testBuildpackTwo = dist.ModuleInfo{ ID: "test.bp.two", Version: "test.bp.two.version", } - testBuildpackThree = dist.BuildpackInfo{ + testBuildpackThree = dist.ModuleInfo{ ID: "test.bp.three", Version: "test.bp.three.version", } - testNestedBuildpackTwo = dist.BuildpackInfo{ + testNestedBuildpackTwo = dist.ModuleInfo{ ID: "test.nested.two", Version: 
"test.nested.two.version", } - buildpacks = []dist.BuildpackInfo{ + buildpacks = []dist.ModuleInfo{ testTopNestedBuildpack, testNestedBuildpack, testBuildpackOne, @@ -46,34 +46,34 @@ var ( pubbldr.DetectionOrderEntry{ GroupDetectionOrder: pubbldr.DetectionOrder{ pubbldr.DetectionOrderEntry{ - BuildpackRef: dist.BuildpackRef{ - BuildpackInfo: testTopNestedBuildpack, + ModuleRef: dist.ModuleRef{ + ModuleInfo: testTopNestedBuildpack, }, GroupDetectionOrder: pubbldr.DetectionOrder{ pubbldr.DetectionOrderEntry{ - BuildpackRef: dist.BuildpackRef{BuildpackInfo: testNestedBuildpack}, + ModuleRef: dist.ModuleRef{ModuleInfo: testNestedBuildpack}, GroupDetectionOrder: pubbldr.DetectionOrder{ pubbldr.DetectionOrderEntry{ - BuildpackRef: dist.BuildpackRef{ - BuildpackInfo: testBuildpackOne, - Optional: true, + ModuleRef: dist.ModuleRef{ + ModuleInfo: testBuildpackOne, + Optional: true, }, }, }, }, pubbldr.DetectionOrderEntry{ - BuildpackRef: dist.BuildpackRef{ - BuildpackInfo: testBuildpackThree, - Optional: true, + ModuleRef: dist.ModuleRef{ + ModuleInfo: testBuildpackThree, + Optional: true, }, }, pubbldr.DetectionOrderEntry{ - BuildpackRef: dist.BuildpackRef{BuildpackInfo: testNestedBuildpackTwo}, + ModuleRef: dist.ModuleRef{ModuleInfo: testNestedBuildpackTwo}, GroupDetectionOrder: pubbldr.DetectionOrder{ pubbldr.DetectionOrderEntry{ - BuildpackRef: dist.BuildpackRef{ - BuildpackInfo: testBuildpackOne, - Optional: true, + ModuleRef: dist.ModuleRef{ + ModuleInfo: testBuildpackOne, + Optional: true, }, Cyclical: true, }, @@ -82,16 +82,47 @@ var ( }, }, pubbldr.DetectionOrderEntry{ - BuildpackRef: dist.BuildpackRef{ - BuildpackInfo: testBuildpackTwo, - Optional: true, + ModuleRef: dist.ModuleRef{ + ModuleInfo: testBuildpackTwo, + Optional: true, }, }, }, }, pubbldr.DetectionOrderEntry{ - BuildpackRef: dist.BuildpackRef{ - BuildpackInfo: testBuildpackThree, + ModuleRef: dist.ModuleRef{ + ModuleInfo: testBuildpackThree, + }, + }, + } + + extensions = []dist.ModuleInfo{ + 
testBuildpackOne, + testBuildpackTwo, + testBuildpackThree, + } + + orderExtensions = pubbldr.DetectionOrder{ + pubbldr.DetectionOrderEntry{ + ModuleRef: dist.ModuleRef{ + ModuleInfo: testTopNestedBuildpack, + }, + }, + pubbldr.DetectionOrderEntry{ + ModuleRef: dist.ModuleRef{ + ModuleInfo: testBuildpackOne, + Optional: true, + }, + }, + pubbldr.DetectionOrderEntry{ + ModuleRef: dist.ModuleRef{ + ModuleInfo: testBuildpackTwo, + Optional: true, + }, + }, + pubbldr.DetectionOrderEntry{ + ModuleRef: dist.ModuleRef{ + ModuleInfo: testBuildpackThree, }, }, } diff --git a/internal/builder/writer/structured_format.go b/internal/builder/writer/structured_format.go index 173d006d0b..66983d0807 100644 --- a/internal/builder/writer/structured_format.go +++ b/internal/builder/writer/structured_format.go @@ -38,11 +38,13 @@ type Stack struct { type BuilderInfo struct { Description string `json:"description,omitempty" yaml:"description,omitempty" toml:"description,omitempty"` CreatedBy builder.CreatorMetadata `json:"created_by" yaml:"created_by" toml:"created_by"` - Stack Stack `json:"stack" yaml:"stack" toml:"stack"` + Stack *Stack `json:"stack,omitempty" yaml:"stack,omitempty" toml:"stack,omitempty"` Lifecycle Lifecycle `json:"lifecycle" yaml:"lifecycle" toml:"lifecycle"` RunImages []RunImage `json:"run_images" yaml:"run_images" toml:"run_images"` - Buildpacks []dist.BuildpackInfo `json:"buildpacks" yaml:"buildpacks" toml:"buildpacks"` + Buildpacks []dist.ModuleInfo `json:"buildpacks" yaml:"buildpacks" toml:"buildpacks"` pubbldr.DetectionOrder `json:"detection_order" yaml:"detection_order" toml:"detection_order"` + Extensions []dist.ModuleInfo `json:"extensions,omitempty" yaml:"extensions,omitempty" toml:"extensions,omitempty"` + OrderExtensions pubbldr.DetectionOrder `json:"order_extensions,omitempty" yaml:"order_extensions,omitempty" toml:"order_extensions,omitempty"` } type StructuredFormat struct { @@ -67,7 +69,10 @@ func (w *StructuredFormat) Print( outputInfo := 
InspectOutput{SharedBuilderInfo: builderInfo} if local != nil { - stack := Stack{ID: local.Stack} + var stack *Stack + if local.Stack != "" { + stack = &Stack{ID: local.Stack} + } if logger.IsVerbose() { stack.Mixins = local.Mixins @@ -82,14 +87,19 @@ func (w *StructuredFormat) Print( BuildpackAPIs: local.Lifecycle.APIs.Buildpack, PlatformAPIs: local.Lifecycle.APIs.Platform, }, - RunImages: runImages(local.RunImage, localRunImages, local.RunImageMirrors), - Buildpacks: local.Buildpacks, - DetectionOrder: local.Order, + RunImages: runImages(local.RunImages, localRunImages), + Buildpacks: local.Buildpacks, + DetectionOrder: local.Order, + Extensions: local.Extensions, + OrderExtensions: local.OrderExtensions, } } if remote != nil { - stack := Stack{ID: remote.Stack} + var stack *Stack + if remote.Stack != "" { + stack = &Stack{ID: remote.Stack} + } if logger.IsVerbose() { stack.Mixins = remote.Mixins @@ -104,9 +114,11 @@ func (w *StructuredFormat) Print( BuildpackAPIs: remote.Lifecycle.APIs.Buildpack, PlatformAPIs: remote.Lifecycle.APIs.Platform, }, - RunImages: runImages(remote.RunImage, localRunImages, remote.RunImageMirrors), - Buildpacks: remote.Buildpacks, - DetectionOrder: remote.Order, + RunImages: runImages(remote.RunImages, localRunImages), + Buildpacks: remote.Buildpacks, + DetectionOrder: remote.Order, + Extensions: remote.Extensions, + OrderExtensions: remote.OrderExtensions, } } @@ -127,23 +139,24 @@ func (w *StructuredFormat) Print( return nil } -func runImages(runImage string, localRunImages []config.RunImage, buildRunImages []string) []RunImage { - var images = []RunImage{} +func runImages(runImages []pubbldr.RunImageConfig, localRunImages []config.RunImage) []RunImage { + images := []RunImage{} for _, i := range localRunImages { - if i.Image == runImage { - for _, m := range i.Mirrors { - images = append(images, RunImage{Name: m, UserConfigured: true}) + for _, runImage := range runImages { + if i.Image == runImage.Image { + for _, m := range 
i.Mirrors { + images = append(images, RunImage{Name: m, UserConfigured: true}) + } } } } - if runImage != "" { - images = append(images, RunImage{Name: runImage}) - } - - for _, m := range buildRunImages { - images = append(images, RunImage{Name: m}) + for _, runImage := range runImages { + images = append(images, RunImage{Name: runImage.Image}) + for _, m := range runImage.Mirrors { + images = append(images, RunImage{Name: m}) + } } return images diff --git a/internal/builder/writer/toml_test.go b/internal/builder/writer/toml_test.go index 89561432cf..92d6bdb933 100644 --- a/internal/builder/writer/toml_test.go +++ b/internal/builder/writer/toml_test.go @@ -271,11 +271,10 @@ default = false Description: "Some remote description", Stack: "test.stack.id", Mixins: []string{"mixin1", "mixin2", "build:mixin3", "build:mixin4"}, - RunImage: "some/run-image", - RunImageMirrors: []string{"first/default", "second/default"}, + RunImages: []pubbldr.RunImageConfig{{Image: "some/run-image", Mirrors: []string{"first/default", "second/default"}}}, Buildpacks: buildpacks, Order: order, - BuildpackLayers: dist.BuildpackLayers{}, + BuildpackLayers: dist.ModuleLayers{}, Lifecycle: builder.LifecycleDescriptor{ Info: builder.LifecycleInfo{ Version: &builder.Version{ @@ -303,11 +302,10 @@ default = false Description: "Some local description", Stack: "test.stack.id", Mixins: []string{"mixin1", "mixin2", "build:mixin3", "build:mixin4"}, - RunImage: "some/run-image", - RunImageMirrors: []string{"first/local-default", "second/local-default"}, + RunImages: []pubbldr.RunImageConfig{{Image: "some/run-image", Mirrors: []string{"first/local-default", "second/local-default"}}}, Buildpacks: buildpacks, Order: order, - BuildpackLayers: dist.BuildpackLayers{}, + BuildpackLayers: dist.ModuleLayers{}, Lifecycle: builder.LifecycleDescriptor{ Info: builder.LifecycleInfo{ Version: &builder.Version{ @@ -430,10 +428,8 @@ default = false when("no run images are specified", func() { it("omits run images from 
output", func() { - localInfo.RunImage = "" - localInfo.RunImageMirrors = []string{} - remoteInfo.RunImage = "" - remoteInfo.RunImageMirrors = []string{} + localInfo.RunImages = []pubbldr.RunImageConfig{} + remoteInfo.RunImages = []pubbldr.RunImageConfig{} emptyLocalRunImages := []config.RunImage{} tomlWriter := writer.NewTOML() @@ -450,8 +446,8 @@ default = false when("no buildpacks are specified", func() { it("omits buildpacks from output", func() { - localInfo.Buildpacks = []dist.BuildpackInfo{} - remoteInfo.Buildpacks = []dist.BuildpackInfo{} + localInfo.Buildpacks = []dist.ModuleInfo{} + remoteInfo.Buildpacks = []dist.ModuleInfo{} tomlWriter := writer.NewTOML() diff --git a/internal/builder/writer/yaml_test.go b/internal/builder/writer/yaml_test.go index 629fbea8c2..3ab2c91563 100644 --- a/internal/builder/writer/yaml_test.go +++ b/internal/builder/writer/yaml_test.go @@ -6,13 +6,12 @@ import ( "fmt" "testing" - "github.com/ghodss/yaml" - "github.com/Masterminds/semver" "github.com/buildpacks/lifecycle/api" "github.com/heroku/color" "github.com/sclevine/spec" "github.com/sclevine/spec/report" + yaml "gopkg.in/yaml.v3" pubbldr "github.com/buildpacks/pack/builder" "github.com/buildpacks/pack/internal/builder" @@ -63,6 +62,15 @@ func testYAML(t *testing.T, when spec.G, it spec.S) { - id: test.bp.three version: test.bp.three.version` + expectedExtensions = ` extensions: + - id: test.bp.one + version: test.bp.one.version + homepage: http://geocities.com/cool-bp + - id: test.bp.two + version: test.bp.two.version + - id: test.bp.three + version: test.bp.three.version` + expectedDetectionOrder = ` detection_order: - buildpacks: - id: test.top.nested @@ -91,6 +99,19 @@ func testYAML(t *testing.T, when spec.G, it spec.S) { optional: true - id: test.bp.three version: test.bp.three.version` + + expectedOrderExtensions = ` order_extensions: + - id: test.top.nested + version: test.top.nested.version + - id: test.bp.one + version: test.bp.one.version + homepage: 
http://geocities.com/cool-bp + optional: true + - id: test.bp.two + version: test.bp.two.version + optional: true + - id: test.bp.three + version: test.bp.three.version` expectedStackWithMixins = ` stack: id: test.stack.id mixins: @@ -129,7 +150,9 @@ func testYAML(t *testing.T, when spec.G, it spec.S) { - "4.5" %s %s -%s`, expectedRemoteRunImages, expectedBuildpacks, expectedDetectionOrder) +%s +%s +%s`, expectedRemoteRunImages, expectedBuildpacks, expectedDetectionOrder, expectedExtensions, expectedOrderExtensions) expectedLocalInfo = fmt.Sprintf(`local_info: description: Some local description @@ -153,7 +176,9 @@ func testYAML(t *testing.T, when spec.G, it spec.S) { - "7.8" %s %s -%s`, expectedLocalRunImages, expectedBuildpacks, expectedDetectionOrder) +%s +%s +%s`, expectedLocalRunImages, expectedBuildpacks, expectedDetectionOrder, expectedExtensions, expectedOrderExtensions) expectedPrettifiedYAML = fmt.Sprintf(` builder_name: test-builder trusted: false @@ -168,11 +193,12 @@ func testYAML(t *testing.T, when spec.G, it spec.S) { Description: "Some remote description", Stack: "test.stack.id", Mixins: []string{"mixin1", "mixin2", "build:mixin3", "build:mixin4"}, - RunImage: "some/run-image", - RunImageMirrors: []string{"first/default", "second/default"}, + RunImages: []pubbldr.RunImageConfig{{Image: "some/run-image", Mirrors: []string{"first/default", "second/default"}}}, Buildpacks: buildpacks, Order: order, - BuildpackLayers: dist.BuildpackLayers{}, + Extensions: extensions, + OrderExtensions: orderExtensions, + BuildpackLayers: dist.ModuleLayers{}, Lifecycle: builder.LifecycleDescriptor{ Info: builder.LifecycleInfo{ Version: &builder.Version{ @@ -200,11 +226,12 @@ func testYAML(t *testing.T, when spec.G, it spec.S) { Description: "Some local description", Stack: "test.stack.id", Mixins: []string{"mixin1", "mixin2", "build:mixin3", "build:mixin4"}, - RunImage: "some/run-image", - RunImageMirrors: []string{"first/local-default", "second/local-default"}, + 
RunImages: []pubbldr.RunImageConfig{{Image: "some/run-image", Mirrors: []string{"first/local-default", "second/local-default"}}}, Buildpacks: buildpacks, Order: order, - BuildpackLayers: dist.BuildpackLayers{}, + Extensions: extensions, + OrderExtensions: orderExtensions, + BuildpackLayers: dist.ModuleLayers{}, Lifecycle: builder.LifecycleDescriptor{ Info: builder.LifecycleInfo{ Version: &builder.Version{ @@ -329,10 +356,8 @@ func testYAML(t *testing.T, when spec.G, it spec.S) { when("no run images are specified", func() { it("displays run images as empty list", func() { - localInfo.RunImage = "" - localInfo.RunImageMirrors = []string{} - remoteInfo.RunImage = "" - remoteInfo.RunImageMirrors = []string{} + localInfo.RunImages = []pubbldr.RunImageConfig{} + remoteInfo.RunImages = []pubbldr.RunImageConfig{} emptyLocalRunImages := []config.RunImage{} yamlWriter := writer.NewYAML() @@ -350,8 +375,8 @@ func testYAML(t *testing.T, when spec.G, it spec.S) { when("no buildpacks are specified", func() { it("displays buildpacks as empty list", func() { - localInfo.Buildpacks = []dist.BuildpackInfo{} - remoteInfo.Buildpacks = []dist.BuildpackInfo{} + localInfo.Buildpacks = []dist.ModuleInfo{} + remoteInfo.Buildpacks = []dist.ModuleInfo{} yamlWriter := writer.NewYAML() diff --git a/internal/cache/image_cache.go b/internal/cache/image_cache.go deleted file mode 100644 index 158c00a027..0000000000 --- a/internal/cache/image_cache.go +++ /dev/null @@ -1,39 +0,0 @@ -package cache - -import ( - "context" - - "github.com/docker/docker/api/types" - "github.com/docker/docker/client" - "github.com/google/go-containerregistry/pkg/name" -) - -type ImageCache struct { - docker client.CommonAPIClient - image string -} - -func NewImageCache(imageRef name.Reference, dockerClient client.CommonAPIClient) *ImageCache { - return &ImageCache{ - image: imageRef.Name(), - docker: dockerClient, - } -} - -func (c *ImageCache) Name() string { - return c.image -} - -func (c *ImageCache) Clear(ctx 
context.Context) error { - _, err := c.docker.ImageRemove(ctx, c.Name(), types.ImageRemoveOptions{ - Force: true, - }) - if err != nil && !client.IsErrNotFound(err) { - return err - } - return nil -} - -func (c *ImageCache) Type() Type { - return Image -} diff --git a/internal/cache/volume_cache.go b/internal/cache/volume_cache.go deleted file mode 100644 index fa0124e50a..0000000000 --- a/internal/cache/volume_cache.go +++ /dev/null @@ -1,52 +0,0 @@ -package cache - -import ( - "context" - "crypto/sha256" - "fmt" - "strings" - - "github.com/docker/docker/client" - "github.com/google/go-containerregistry/pkg/name" - - "github.com/buildpacks/pack/internal/paths" -) - -type VolumeCache struct { - docker client.CommonAPIClient - volume string -} - -func NewVolumeCache(imageRef name.Reference, suffix string, dockerClient client.CommonAPIClient) *VolumeCache { - sum := sha256.Sum256([]byte(imageRef.Name())) - - vol := paths.FilterReservedNames(fmt.Sprintf("%s-%x", sanitizedRef(imageRef), sum[:6])) - return &VolumeCache{ - volume: fmt.Sprintf("pack-cache-%s.%s", vol, suffix), - docker: dockerClient, - } -} - -func (c *VolumeCache) Name() string { - return c.volume -} - -func (c *VolumeCache) Clear(ctx context.Context) error { - err := c.docker.VolumeRemove(ctx, c.Name(), true) - if err != nil && !client.IsErrNotFound(err) { - return err - } - return nil -} - -func (c *VolumeCache) Type() Type { - return Volume -} - -// note image names and volume names are validated using the same restrictions: -// see https://github.com/moby/moby/blob/f266f13965d5bfb1825afa181fe6c32f3a597fa3/daemon/names/names.go#L5 -func sanitizedRef(ref name.Reference) string { - result := strings.TrimPrefix(ref.Context().String(), ref.Context().RegistryStr()+"/") - result = strings.ReplaceAll(result, "/", "_") - return fmt.Sprintf("%s_%s", result, ref.Identifier()) -} diff --git a/internal/cache/volume_cache_test.go b/internal/cache/volume_cache_test.go deleted file mode 100644 index 
9a36489ce9..0000000000 --- a/internal/cache/volume_cache_test.go +++ /dev/null @@ -1,182 +0,0 @@ -package cache_test - -import ( - "context" - "math/rand" - "strings" - "testing" - "time" - - "github.com/docker/docker/api/types/filters" - "github.com/docker/docker/api/types/volume" - "github.com/docker/docker/client" - "github.com/docker/docker/daemon/names" - "github.com/google/go-containerregistry/pkg/name" - "github.com/heroku/color" - "github.com/sclevine/spec" - "github.com/sclevine/spec/report" - - "github.com/buildpacks/pack/internal/cache" - h "github.com/buildpacks/pack/testhelpers" -) - -func TestVolumeCache(t *testing.T) { - h.RequireDocker(t) - color.Disable(true) - defer color.Disable(false) - rand.Seed(time.Now().UTC().UnixNano()) - - spec.Run(t, "VolumeCache", testCache, spec.Parallel(), spec.Report(report.Terminal{})) -} - -func testCache(t *testing.T, when spec.G, it spec.S) { - var dockerClient client.CommonAPIClient - - it.Before(func() { - var err error - dockerClient, err = client.NewClientWithOpts(client.FromEnv, client.WithVersion("1.38")) - h.AssertNil(t, err) - }) - when("#NewVolumeCache", func() { - it("adds suffix to calculated name", func() { - ref, err := name.ParseReference("my/repo", name.WeakValidation) - h.AssertNil(t, err) - subject := cache.NewVolumeCache(ref, "some-suffix", dockerClient) - if !strings.HasSuffix(subject.Name(), ".some-suffix") { - t.Fatalf("Calculated volume name '%s' should end with '.some-suffix'", subject.Name()) - } - }) - - it("reusing the same cache for the same repo name", func() { - ref, err := name.ParseReference("my/repo", name.WeakValidation) - h.AssertNil(t, err) - - subject := cache.NewVolumeCache(ref, "some-suffix", dockerClient) - expected := cache.NewVolumeCache(ref, "some-suffix", dockerClient) - if subject.Name() != expected.Name() { - t.Fatalf("The same repo name should result in the same volume") - } - }) - - it("supplies different volumes for different tags", func() { - ref, err := 
name.ParseReference("my/repo:other-tag", name.WeakValidation) - h.AssertNil(t, err) - - subject := cache.NewVolumeCache(ref, "some-suffix", dockerClient) - - ref, err = name.ParseReference("my/repo", name.WeakValidation) - h.AssertNil(t, err) - notExpected := cache.NewVolumeCache(ref, "some-suffix", dockerClient) - if subject.Name() == notExpected.Name() { - t.Fatalf("Different image tags should result in different volumes") - } - }) - - it("supplies different volumes for different registries", func() { - ref, err := name.ParseReference("registry.com/my/repo:other-tag", name.WeakValidation) - h.AssertNil(t, err) - - subject := cache.NewVolumeCache(ref, "some-suffix", dockerClient) - - ref, err = name.ParseReference("my/repo", name.WeakValidation) - h.AssertNil(t, err) - notExpected := cache.NewVolumeCache(ref, "some-suffix", dockerClient) - if subject.Name() == notExpected.Name() { - t.Fatalf("Different image registries should result in different volumes") - } - }) - - it("resolves implied tag", func() { - ref, err := name.ParseReference("my/repo:latest", name.WeakValidation) - h.AssertNil(t, err) - - subject := cache.NewVolumeCache(ref, "some-suffix", dockerClient) - - ref, err = name.ParseReference("my/repo", name.WeakValidation) - h.AssertNil(t, err) - expected := cache.NewVolumeCache(ref, "some-suffix", dockerClient) - h.AssertEq(t, subject.Name(), expected.Name()) - }) - - it("resolves implied registry", func() { - ref, err := name.ParseReference("index.docker.io/my/repo", name.WeakValidation) - h.AssertNil(t, err) - - subject := cache.NewVolumeCache(ref, "some-suffix", dockerClient) - - ref, err = name.ParseReference("my/repo", name.WeakValidation) - h.AssertNil(t, err) - expected := cache.NewVolumeCache(ref, "some-suffix", dockerClient) - h.AssertEq(t, subject.Name(), expected.Name()) - }) - - it("includes human readable information", func() { - ref, err := name.ParseReference("myregistryhost:5000/fedora/httpd:version1.0", name.WeakValidation) - 
h.AssertNil(t, err) - - subject := cache.NewVolumeCache(ref, "some-suffix", dockerClient) - - h.AssertContains(t, subject.Name(), "fedora_httpd_version1.0") - h.AssertTrue(t, names.RestrictedNamePattern.MatchString(subject.Name())) - }) - }) - - when("#Clear", func() { - var ( - volumeName string - dockerClient client.CommonAPIClient - subject *cache.VolumeCache - ctx context.Context - ) - - it.Before(func() { - var err error - dockerClient, err = client.NewClientWithOpts(client.FromEnv, client.WithVersion("1.38")) - h.AssertNil(t, err) - ctx = context.TODO() - - ref, err := name.ParseReference(h.RandString(10), name.WeakValidation) - h.AssertNil(t, err) - - subject = cache.NewVolumeCache(ref, "some-suffix", dockerClient) - volumeName = subject.Name() - }) - - when("there is a cache volume", func() { - it.Before(func() { - dockerClient.VolumeCreate(context.TODO(), volume.VolumeCreateBody{ - Name: volumeName, - }) - }) - - it("removes the volume", func() { - err := subject.Clear(ctx) - h.AssertNil(t, err) - - volumes, err := dockerClient.VolumeList(context.TODO(), filters.NewArgs(filters.KeyValuePair{ - Key: "name", - Value: volumeName, - })) - h.AssertNil(t, err) - h.AssertEq(t, len(volumes.Volumes), 0) - }) - }) - - when("there is no cache volume", func() { - it("does not fail", func() { - err := subject.Clear(ctx) - h.AssertNil(t, err) - }) - }) - }) - - when("#Type", func() { - it("returns the cache type", func() { - ref, err := name.ParseReference("my/repo", name.WeakValidation) - h.AssertNil(t, err) - subject := cache.NewVolumeCache(ref, "some-suffix", dockerClient) - expected := cache.Volume - h.AssertEq(t, subject.Type(), expected) - }) - }) -} diff --git a/internal/commands/add_registry_test.go b/internal/commands/add_registry_test.go index 2583834fcc..6b0ed16948 100644 --- a/internal/commands/add_registry_test.go +++ b/internal/commands/add_registry_test.go @@ -2,7 +2,7 @@ package commands_test import ( "bytes" - "io/ioutil" + "io" "os" "path/filepath" 
"testing" @@ -39,7 +39,7 @@ func testAddRegistryCommand(t *testing.T, when spec.G, it spec.S) { it.Before(func() { var err error - tmpDir, err = ioutil.TempDir("", "pack-home-*") + tmpDir, err = os.MkdirTemp("", "pack-home-*") assert.Nil(err) configFile = filepath.Join(tmpDir, "config.toml") @@ -81,7 +81,7 @@ func testAddRegistryCommand(t *testing.T, when spec.G, it spec.S) { when("validation", func() { it("fails with missing args", func() { - command.SetOut(ioutil.Discard) + command.SetOut(io.Discard) command.SetArgs([]string{}) err := command.Execute() assert.ErrorContains(err, "accepts 2 arg") diff --git a/internal/commands/build.go b/internal/commands/build.go index ee80ee55c9..d3db7a69b9 100644 --- a/internal/commands/build.go +++ b/internal/commands/build.go @@ -1,17 +1,23 @@ package commands import ( - "io/ioutil" + "fmt" "os" "path/filepath" + "regexp" + "strconv" "strings" + "time" "github.com/google/go-containerregistry/pkg/name" "github.com/pkg/errors" "github.com/spf13/cobra" + bldr "github.com/buildpacks/pack/internal/builder" + "github.com/buildpacks/pack/internal/config" "github.com/buildpacks/pack/internal/style" + "github.com/buildpacks/pack/pkg/cache" "github.com/buildpacks/pack/pkg/client" "github.com/buildpacks/pack/pkg/image" "github.com/buildpacks/pack/pkg/logging" @@ -20,30 +26,44 @@ import ( ) type BuildFlags struct { - Publish bool - ClearCache bool - TrustBuilder bool - Interactive bool - DockerHost string - CacheImage string - AppPath string - Builder string - Registry string - RunImage string - Policy string - Network string - DescriptorPath string - DefaultProcessType string - LifecycleImage string - Env []string - EnvFiles []string - Buildpacks []string - Volumes []string - AdditionalTags []string - Workspace string - GID int - PreviousImage string - SBOMDestinationDir string + Publish bool + ClearCache bool + DisableSystemBuilpacks bool + TrustBuilder bool + TrustExtraBuildpacks bool + Interactive bool + Sparse bool + EnableUsernsHost 
bool + DockerHost string + CacheImage string + Cache cache.CacheOpts + AppPath string + Builder string + ExecutionEnv string + Registry string + RunImage string + Platform string + Policy string + Network string + DescriptorPath string + DefaultProcessType string + LifecycleImage string + Env []string + EnvFiles []string + Buildpacks []string + Extensions []string + Volumes []string + AdditionalTags []string + Workspace string + GID int + UID int + PreviousImage string + SBOMDestinationDir string + ReportDestinationDir string + DateTime string + PreBuildpacks []string + PostBuildpacks []string + InsecureRegistries []string } // Build an image from source code @@ -61,13 +81,14 @@ func Build(logger logging.Logger, cfg config.Config, packClient PackClient) *cob "be provided directly to build using `--builder`, or can be set using the `set-default-builder` command. For more " + "on how to use `pack build`, see: https://buildpacks.io/docs/app-developer-guide/build-an-app/.", RunE: logError(logger, func(cmd *cobra.Command, args []string) error { - if err := validateBuildFlags(&flags, cfg, packClient, logger); err != nil { + inputImageName := client.ParseInputImageReference(args[0]) + if err := validateBuildFlags(&flags, cfg, inputImageName, logger); err != nil { return err } - imageName := args[0] + inputPreviousImage := client.ParseInputImageReference(flags.PreviousImage) - descriptor, actualDescriptorPath, err := parseProjectToml(flags.AppPath, flags.DescriptorPath) + descriptor, actualDescriptorPath, err := parseProjectToml(flags.AppPath, flags.DescriptorPath, logger) if err != nil { return err } @@ -89,15 +110,23 @@ func Build(logger logging.Logger, cfg config.Config, packClient PackClient) *cob } buildpacks := flags.Buildpacks + extensions := flags.Extensions env, err := parseEnv(flags.EnvFiles, flags.Env) if err != nil { return err } - trustBuilder := isTrustedBuilder(cfg, builder) || flags.TrustBuilder + isTrusted, err := bldr.IsTrustedBuilder(cfg, builder) + if 
err != nil { + return err + } + trustBuilder := isTrusted || bldr.IsKnownTrustedBuilder(builder) || flags.TrustBuilder if trustBuilder { logger.Debugf("Builder %s is trusted", style.Symbol(builder)) + if flags.LifecycleImage != "" { + logger.Warn("Ignoring the provided lifecycle image as the builder is trusted, running the creator in a single container using the provided builder") + } } else { logger.Debugf("Builder %s is untrusted", style.Symbol(builder)) logger.Debug("As a result, the phases of the lifecycle which require root access will be run in separate trusted ephemeral containers.") @@ -116,6 +145,7 @@ func Build(logger logging.Logger, cfg config.Config, packClient PackClient) *cob if err != nil { return errors.Wrapf(err, "parsing pull policy %s", flags.Policy) } + var lifecycleImage string if flags.LifecycleImage != "" { ref, err := name.ParseReference(flags.LifecycleImage) @@ -124,10 +154,26 @@ func Build(logger logging.Logger, cfg config.Config, packClient PackClient) *cob } lifecycleImage = ref.Name() } + + err = isForbiddenTag(cfg, inputImageName.Name(), lifecycleImage, builder) + if err != nil { + return errors.Wrapf(err, "forbidden image name") + } + var gid = -1 if cmd.Flags().Changed("gid") { gid = flags.GID } + + var uid = -1 + if cmd.Flags().Changed("uid") { + uid = flags.UID + } + + dateTime, err := parseTime(flags.DateTime) + if err != nil { + return errors.Wrapf(err, "parsing creation time %s", flags.DateTime) + } if err := packClient.Build(cmd.Context(), client.BuildOptions{ AppPath: flags.AppPath, Builder: builder, @@ -136,15 +182,18 @@ func Build(logger logging.Logger, cfg config.Config, packClient PackClient) *cob AdditionalTags: flags.AdditionalTags, RunImage: flags.RunImage, Env: env, - Image: imageName, + Image: inputImageName.Name(), Publish: flags.Publish, DockerHost: flags.DockerHost, + Platform: flags.Platform, PullPolicy: pullPolicy, ClearCache: flags.ClearCache, TrustBuilder: func(string) bool { return trustBuilder }, - 
Buildpacks: buildpacks, + TrustExtraBuildpacks: flags.TrustExtraBuildpacks, + Buildpacks: buildpacks, + Extensions: extensions, ContainerConfig: client.ContainerConfig{ Network: flags.Network, Volumes: flags.Volumes, @@ -152,17 +201,33 @@ func Build(logger logging.Logger, cfg config.Config, packClient PackClient) *cob DefaultProcessType: flags.DefaultProcessType, ProjectDescriptorBaseDir: filepath.Dir(actualDescriptorPath), ProjectDescriptor: descriptor, + Cache: flags.Cache, CacheImage: flags.CacheImage, Workspace: flags.Workspace, LifecycleImage: lifecycleImage, GroupID: gid, - PreviousImage: flags.PreviousImage, + UserID: uid, + PreviousImage: inputPreviousImage.Name(), Interactive: flags.Interactive, SBOMDestinationDir: flags.SBOMDestinationDir, + ReportDestinationDir: flags.ReportDestinationDir, + CreationTime: dateTime, + PreBuildpacks: flags.PreBuildpacks, + PostBuildpacks: flags.PostBuildpacks, + DisableSystemBuildpacks: flags.DisableSystemBuilpacks, + EnableUsernsHost: flags.EnableUsernsHost, + LayoutConfig: &client.LayoutConfig{ + Sparse: flags.Sparse, + InputImage: inputImageName, + PreviousInputImage: inputPreviousImage, + LayoutRepoDir: cfg.LayoutRepositoryDir, + }, + CNBExecutionEnv: flags.ExecutionEnv, + InsecureRegistries: flags.InsecureRegistries, }); err != nil { return errors.Wrap(err, "failed to build") } - logger.Infof("Successfully built image %s", style.Symbol(imageName)) + logger.Infof("Successfully built image %s", style.Symbol(inputImageName.Name())) return nil }), } @@ -171,18 +236,48 @@ func Build(logger logging.Logger, cfg config.Config, packClient PackClient) *cob return cmd } +func parseTime(providedTime string) (*time.Time, error) { + var parsedTime time.Time + switch providedTime { + case "": + return nil, nil + case "now": + parsedTime = time.Now().UTC() + default: + intTime, err := strconv.ParseInt(providedTime, 10, 64) + if err != nil { + return nil, errors.Wrap(err, "parsing unix timestamp") + } + parsedTime = time.Unix(intTime, 
0).UTC() + } + return &parsedTime, nil +} + func buildCommandFlags(cmd *cobra.Command, buildFlags *BuildFlags, cfg config.Config) { cmd.Flags().StringVarP(&buildFlags.AppPath, "path", "p", "", "Path to app dir or zip-formatted file (defaults to current working directory)") cmd.Flags().StringSliceVarP(&buildFlags.Buildpacks, "buildpack", "b", nil, "Buildpack to use. One of:\n a buildpack by id and version in the form of '@',\n path to a buildpack directory (not supported on Windows),\n path/URL to a buildpack .tar or .tgz file, or\n a packaged buildpack image name in the form of '/[:]'"+stringSliceHelp("buildpack")) + cmd.Flags().StringSliceVarP(&buildFlags.Extensions, "extension", "", nil, "Extension to use. One of:\n an extension by id and version in the form of '@',\n path to an extension directory (not supported on Windows),\n path/URL to an extension .tar or .tgz file, or\n a packaged extension image name in the form of '/[:]'"+stringSliceHelp("extension")) + cmd.Flags().StringArrayVar(&buildFlags.InsecureRegistries, "insecure-registry", []string{}, "List of insecure registries (only available for API >= 0.13)") cmd.Flags().StringVarP(&buildFlags.Builder, "builder", "B", cfg.DefaultBuilder, "Builder image") + cmd.Flags().Var(&buildFlags.Cache, "cache", + `Cache options used to define cache techniques for build process. +- Cache as bind: 'type=;format=bind;source=' +- Cache as image (requires --publish): 'type=;format=image;name=' +- Cache as volume: 'type=;format=volume;[name=]' + - If no name is provided, a random name will be generated. +`) cmd.Flags().StringVar(&buildFlags.CacheImage, "cache-image", "", `Cache build layers in remote registry. Requires --publish`) cmd.Flags().BoolVar(&buildFlags.ClearCache, "clear-cache", false, "Clear image's associated cache before building") + cmd.Flags().StringVar(&buildFlags.DateTime, "creation-time", "", "Desired create time in the output image config. Accepted values are Unix timestamps (e.g., '1641013200'), or 'now'. 
Platform API version must be at least 0.9 to use this feature.") cmd.Flags().StringVarP(&buildFlags.DescriptorPath, "descriptor", "d", "", "Path to the project descriptor file") cmd.Flags().StringVarP(&buildFlags.DefaultProcessType, "default-process", "D", "", `Set the default process type. (default "web")`) + cmd.Flags().BoolVar(&buildFlags.DisableSystemBuilpacks, "disable-system-buildpacks", false, "Disable System Buildpacks") cmd.Flags().StringArrayVarP(&buildFlags.Env, "env", "e", []string{}, "Build-time environment variable, in the form 'VAR=VALUE' or 'VAR'.\nWhen using latter value-less form, value will be taken from current\n environment at the time this command is executed.\nThis flag may be specified multiple times and will override\n individual values defined by --env-file."+stringArrayHelp("env")+"\nNOTE: These are NOT available at image runtime.") cmd.Flags().StringArrayVar(&buildFlags.EnvFiles, "env-file", []string{}, "Build-time environment variables file\nOne variable per line, of the form 'VAR=VALUE' or 'VAR'\nWhen using latter value-less form, value will be taken from current\n environment at the time this command is executed\nNOTE: These are NOT available at image runtime.\"") cmd.Flags().StringVar(&buildFlags.Network, "network", "", "Connect detect and build containers to network") - cmd.Flags().BoolVar(&buildFlags.Publish, "publish", false, "Publish to registry") + cmd.Flags().StringArrayVar(&buildFlags.PreBuildpacks, "pre-buildpack", []string{}, "Buildpacks to prepend to the groups in the builder's order") + cmd.Flags().StringArrayVar(&buildFlags.PostBuildpacks, "post-buildpack", []string{}, "Buildpacks to append to the groups in the builder's order") + cmd.Flags().BoolVar(&buildFlags.Publish, "publish", false, "Publish the application image directly to the container registry specified in , instead of the daemon. 
The run image must also reside in the registry.") cmd.Flags().StringVar(&buildFlags.DockerHost, "docker-host", "", `Address to docker daemon that will be exposed to the build container. If not set (or set to empty string) the standard socket location will be used. @@ -190,27 +285,47 @@ Special value 'inherit' may be used in which case DOCKER_HOST environment variab This option may set DOCKER_HOST environment variable for the build container if needed. `) cmd.Flags().StringVar(&buildFlags.LifecycleImage, "lifecycle-image", cfg.LifecycleImage, `Custom lifecycle image to use for analysis, restore, and export when builder is untrusted.`) + cmd.Flags().StringVar(&buildFlags.Platform, "platform", "", `Platform to build on (e.g., "linux/amd64").`) cmd.Flags().StringVar(&buildFlags.Policy, "pull-policy", "", `Pull policy to use. Accepted values are always, never, and if-not-present. (default "always")`) + cmd.Flags().StringVar(&buildFlags.ExecutionEnv, "exec-env", "production", `Execution environment to use. 
(default "production"`) cmd.Flags().StringVarP(&buildFlags.Registry, "buildpack-registry", "r", cfg.DefaultRegistryName, "Buildpack Registry by name") cmd.Flags().StringVar(&buildFlags.RunImage, "run-image", "", "Run image (defaults to default stack's run image)") cmd.Flags().StringSliceVarP(&buildFlags.AdditionalTags, "tag", "t", nil, "Additional tags to push the output image to.\nTags should be in the format 'image:tag' or 'repository/image:tag'."+stringSliceHelp("tag")) - cmd.Flags().BoolVar(&buildFlags.TrustBuilder, "trust-builder", false, "Trust the provided builder\nAll lifecycle phases will be run in a single container (if supported by the lifecycle).") + cmd.Flags().BoolVar(&buildFlags.TrustBuilder, "trust-builder", false, "Trust the provided builder.\nAll lifecycle phases will be run in a single container.\nFor more on trusted builders, and when to trust or untrust a builder, check out our docs here: https://buildpacks.io/docs/tools/pack/concepts/trusted_builders") + cmd.Flags().BoolVar(&buildFlags.TrustExtraBuildpacks, "trust-extra-buildpacks", false, "Trust buildpacks that are provided in addition to the buildpacks on the builder") cmd.Flags().StringArrayVar(&buildFlags.Volumes, "volume", nil, "Mount host volume into the build container, in the form ':[:]'.\n- 'host path': Name of the volume or absolute directory path to mount.\n- 'target path': The path where the file or directory is available in the container.\n- 'options' (default \"ro\"): An optional comma separated list of mount options.\n - \"ro\", volume contents are read-only.\n - \"rw\", volume contents are readable and writeable.\n - \"volume-opt==\", can be specified more than once, takes a key-value pair consisting of the option name and its value."+stringArrayHelp("volume")) cmd.Flags().StringVar(&buildFlags.Workspace, "workspace", "", "Location at which to mount the app dir in the build image") cmd.Flags().IntVar(&buildFlags.GID, "gid", 0, `Override GID of user's group in the stack's build 
and run images. The provided value must be a positive number`) + cmd.Flags().IntVar(&buildFlags.UID, "uid", 0, `Override UID of user in the stack's build and run images. The provided value must be a positive number`) cmd.Flags().StringVar(&buildFlags.PreviousImage, "previous-image", "", "Set previous image to a particular tag reference, digest reference, or (when performing a daemon build) image ID") cmd.Flags().StringVar(&buildFlags.SBOMDestinationDir, "sbom-output-dir", "", "Path to export SBoM contents.\nOmitting the flag will yield no SBoM content.") + cmd.Flags().StringVar(&buildFlags.ReportDestinationDir, "report-output-dir", "", "Path to export build report.toml.\nOmitting the flag yield no report file.") cmd.Flags().BoolVar(&buildFlags.Interactive, "interactive", false, "Launch a terminal UI to depict the build process") + cmd.Flags().BoolVar(&buildFlags.Sparse, "sparse", false, "Use this flag to avoid saving on disk the run-image layers when the application image is exported to OCI layout format") + cmd.Flags().BoolVar(&buildFlags.EnableUsernsHost, "userns-host", false, "Enable user namespace isolation for the build containers") if !cfg.Experimental { cmd.Flags().MarkHidden("interactive") + cmd.Flags().MarkHidden("sparse") } } -func validateBuildFlags(flags *BuildFlags, cfg config.Config, packClient PackClient, logger logging.Logger) error { +func validateBuildFlags(flags *BuildFlags, cfg config.Config, inputImageRef client.InputImageReference, logger logging.Logger) error { if flags.Registry != "" && !cfg.Experimental { return client.NewExperimentError("Support for buildpack registries is currently experimental.") } + if flags.Cache.Launch.Format == cache.CacheImage { + logger.Warn("cache definition: 'launch' cache in format 'image' is not supported.") + } + + if flags.Cache.Build.Format == cache.CacheImage && flags.CacheImage != "" { + return errors.New("'cache' flag with 'image' format cannot be used with 'cache-image' flag.") + } + + if 
flags.Cache.Build.Format == cache.CacheImage && !flags.Publish { + return errors.New("image cache format requires the 'publish' flag") + } + if flags.CacheImage != "" && !flags.Publish { return errors.New("cache-image flag requires the publish flag") } @@ -219,10 +334,31 @@ func validateBuildFlags(flags *BuildFlags, cfg config.Config, packClient PackCli return errors.New("gid flag must be in the range of 0-2147483647") } + if flags.UID < 0 { + return errors.New("uid flag must be in the range of 0-2147483647") + } + if flags.Interactive && !cfg.Experimental { return client.NewExperimentError("Interactive mode is currently experimental.") } + if inputImageRef.Layout() && !cfg.Experimental { + return client.NewExperimentError("Exporting to OCI layout is currently experimental.") + } + + if _, err := os.Stat(inputImageRef.Name()); err == nil && flags.AppPath == "" { + logger.Warnf("You are building an image named '%s'. If you mean it as an app directory path, run 'pack build --path %s'", + inputImageRef.Name(), inputImageRef.Name()) + } + + if flags.ExecutionEnv != "" && flags.ExecutionEnv != "production" && flags.ExecutionEnv != "test" { + // RFC: the / character is reserved in case we need to introduce namespacing in the future. + var executionEnvRegex = regexp.MustCompile(`^[a-zA-Z0-9.-]+$`) + if ok := executionEnvRegex.MatchString(flags.ExecutionEnv); !ok { + return errors.New("exec-env MUST only contain numbers, letters, and the characters: . 
or -") + } + } + return nil } @@ -247,7 +383,7 @@ func parseEnv(envFiles []string, envVars []string) (map[string]string, error) { func parseEnvFile(filename string) (map[string]string, error) { out := make(map[string]string) - f, err := ioutil.ReadFile(filepath.Clean(filename)) + f, err := os.ReadFile(filepath.Clean(filename)) if err != nil { return nil, errors.Wrapf(err, "open %s", filename) } @@ -271,7 +407,7 @@ func addEnvVar(env map[string]string, item string) map[string]string { return env } -func parseProjectToml(appPath, descriptorPath string) (projectTypes.Descriptor, string, error) { +func parseProjectToml(appPath, descriptorPath string, logger logging.Logger) (projectTypes.Descriptor, string, error) { actualPath := descriptorPath computePath := descriptorPath == "" @@ -286,6 +422,65 @@ func parseProjectToml(appPath, descriptorPath string) (projectTypes.Descriptor, return projectTypes.Descriptor{}, "", errors.Wrap(err, "stat project descriptor") } - descriptor, err := project.ReadProjectDescriptor(actualPath) + descriptor, err := project.ReadProjectDescriptor(actualPath, logger) return descriptor, actualPath, err } + +func isForbiddenTag(cfg config.Config, input, lifecycle, builder string) error { + inputImage, err := name.ParseReference(input) + if err != nil { + return errors.Wrapf(err, "invalid image name %s", input) + } + + if builder != "" { + builderImage, err := name.ParseReference(builder) + if err != nil { + return errors.Wrapf(err, "parsing builder image %s", builder) + } + if inputImage.Context().RepositoryStr() == builderImage.Context().RepositoryStr() { + return fmt.Errorf("name must not match builder image name") + } + } + + if lifecycle != "" { + lifecycleImage, err := name.ParseReference(lifecycle) + if err != nil { + return errors.Wrapf(err, "parsing lifecycle image %s", lifecycle) + } + if inputImage.Context().RepositoryStr() == lifecycleImage.Context().RepositoryStr() { + return fmt.Errorf("name must not match lifecycle image name") + } 
+ } + + trustedBuilders := getTrustedBuilders(cfg) + for _, trustedBuilder := range trustedBuilders { + builder, err := name.ParseReference(trustedBuilder) + if err != nil { + return err + } + if inputImage.Context().RepositoryStr() == builder.Context().RepositoryStr() { + return fmt.Errorf("name must not match trusted builder name") + } + } + + defaultLifecycleImageRef, err := name.ParseReference(config.DefaultLifecycleImageRepo) + if err != nil { + return errors.Wrapf(err, "parsing default lifecycle image %s", config.DefaultLifecycleImageRepo) + } + + if inputImage.Context().RepositoryStr() == defaultLifecycleImageRef.Context().RepositoryStr() { + return fmt.Errorf("name must not match default lifecycle image name") + } + + if cfg.DefaultBuilder != "" { + defaultBuilderImage, err := name.ParseReference(cfg.DefaultBuilder) + if err != nil { + return errors.Wrapf(err, "parsing default builder %s", cfg.DefaultBuilder) + } + if inputImage.Context().RepositoryStr() == defaultBuilderImage.Context().RegistryStr() { + return fmt.Errorf("name must not match default builder image name") + } + } + + return nil +} diff --git a/internal/commands/build_test.go b/internal/commands/build_test.go index 7f9b17cc77..9a9d0ce248 100644 --- a/internal/commands/build_test.go +++ b/internal/commands/build_test.go @@ -3,11 +3,11 @@ package commands_test import ( "bytes" "fmt" - "io/ioutil" "os" "path/filepath" "reflect" "testing" + "time" "github.com/buildpacks/lifecycle/api" "github.com/golang/mock/gomock" @@ -20,6 +20,7 @@ import ( "github.com/buildpacks/pack/internal/commands" "github.com/buildpacks/pack/internal/commands/testmocks" "github.com/buildpacks/pack/internal/config" + "github.com/buildpacks/pack/internal/paths" "github.com/buildpacks/pack/pkg/client" "github.com/buildpacks/pack/pkg/image" "github.com/buildpacks/pack/pkg/logging" @@ -89,31 +90,99 @@ func testBuildCommand(t *testing.T, when spec.G, it spec.S) { }) when("the builder is trusted", func() { - it("sets the trust 
builder option", func() { + it.Before(func() { mockClient.EXPECT(). Build(gomock.Any(), EqBuildOptionsWithTrustedBuilder(true)). Return(nil) cfg := config.Config{TrustedBuilders: []config.TrustedBuilder{{Name: "my-builder"}}} - command := commands.Build(logger, cfg, mockClient) - + command = commands.Build(logger, cfg, mockClient) + }) + it("sets the trust builder option", func() { logger.WantVerbose(true) command.SetArgs([]string{"image", "--builder", "my-builder"}) h.AssertNil(t, command.Execute()) h.AssertContains(t, outBuf.String(), "Builder 'my-builder' is trusted") }) + when("a lifecycle-image is provided", func() { + it("ignoring the mentioned lifecycle image, going with default version", func() { + command.SetArgs([]string{"--builder", "my-builder", "image", "--lifecycle-image", "some-lifecycle-image"}) + h.AssertNil(t, command.Execute()) + h.AssertContains(t, outBuf.String(), "Warning: Ignoring the provided lifecycle image as the builder is trusted, running the creator in a single container using the provided builder") + }) + }) }) - when("the builder is suggested", func() { + when("the builder is known to be trusted and suggested", func() { it("sets the trust builder option", func() { mockClient.EXPECT(). Build(gomock.Any(), EqBuildOptionsWithTrustedBuilder(true)). Return(nil) logger.WantVerbose(true) - command.SetArgs([]string{"image", "--builder", "heroku/buildpacks:20"}) + command.SetArgs([]string{"image", "--builder", "heroku/builder:24"}) h.AssertNil(t, command.Execute()) - h.AssertContains(t, outBuf.String(), "Builder 'heroku/buildpacks:20' is trusted") + h.AssertContains(t, outBuf.String(), "Builder 'heroku/builder:24' is trusted") + }) + }) + + when("the builder is known to be trusted but not suggested", func() { + it("sets the trust builder option", func() { + mockClient.EXPECT(). + Build(gomock.Any(), EqBuildOptionsWithTrustedBuilder(true)). 
+ Return(nil) + + logger.WantVerbose(true) + command.SetArgs([]string{"image", "--builder", "heroku/builder:22"}) + h.AssertNil(t, command.Execute()) + h.AssertContains(t, outBuf.String(), "Builder 'heroku/builder:22' is trusted") + }) + }) + + when("the image name matches a builder name", func() { + it("refuses to build", func() { + logger.WantVerbose(true) + command.SetArgs([]string{"heroku/builder:test", "--builder", "heroku/builder:24"}) + h.AssertNotNil(t, command.Execute()) + h.AssertContains(t, outBuf.String(), "name must not match builder image name") + }) + }) + + when("the image name matches a trusted-builder name", func() { + it("refuses to build", func() { + logger.WantVerbose(true) + command.SetArgs([]string{"heroku/builder:test", "--builder", "test", "--trust-builder"}) + h.AssertNotNil(t, command.Execute()) + h.AssertContains(t, outBuf.String(), "name must not match trusted builder name") + }) + }) + + when("the image name matches a lifecycle image name", func() { + it("refuses to build", func() { + logger.WantVerbose(true) + command.SetArgs([]string{"buildpacksio/lifecycle:test", "--builder", "test", "--trust-builder"}) + h.AssertNotNil(t, command.Execute()) + h.AssertContains(t, outBuf.String(), "name must not match default lifecycle image name") + }) + + it("refuses to build when using fully qualified name", func() { + logger.WantVerbose(true) + command.SetArgs([]string{"docker.io/buildpacksio/lifecycle:test", "--builder", "test", "--trust-builder"}) + h.AssertNotNil(t, command.Execute()) + h.AssertContains(t, outBuf.String(), "name must not match default lifecycle image name") + }) + }) + + when("the builder is not trusted", func() { + it("warns the user that the builder is untrusted", func() { + mockClient.EXPECT(). + Build(gomock.Any(), EqBuildOptionsWithTrustedBuilder(false)). 
+ Return(nil) + + logger.WantVerbose(true) + command.SetArgs([]string{"image", "--builder", "org/builder:unknown"}) + h.AssertNil(t, command.Execute()) + h.AssertContains(t, outBuf.String(), "Builder 'org/builder:unknown' is untrusted") }) }) }) @@ -138,6 +207,17 @@ func testBuildCommand(t *testing.T, when spec.G, it spec.S) { }) }) + when("--platform", func() { + it("sets platform", func() { + mockClient.EXPECT(). + Build(gomock.Any(), EqBuildOptionsWithPlatform("linux/amd64")). + Return(nil) + + command.SetArgs([]string{"image", "--builder", "my-builder", "--platform", "linux/amd64"}) + h.AssertNil(t, command.Execute()) + }) + }) + when("--pull-policy", func() { it("sets pull-policy=never", func() { mockClient.EXPECT(). @@ -147,7 +227,6 @@ func testBuildCommand(t *testing.T, when spec.G, it spec.S) { command.SetArgs([]string{"image", "--builder", "my-builder", "--pull-policy", "never"}) h.AssertNil(t, command.Execute()) }) - it("returns error for unknown policy", func() { command.SetArgs([]string{"image", "--builder", "my-builder", "--pull-policy", "unknown-policy"}) h.AssertError(t, command.Execute(), "parsing pull policy") @@ -230,7 +309,7 @@ func testBuildCommand(t *testing.T, when spec.G, it spec.S) { var envPath string it.Before(func() { - envfile, err := ioutil.TempFile("", "envfile") + envfile, err := os.CreateTemp("", "envfile") h.AssertNil(t, err) defer envfile.Close() @@ -266,7 +345,7 @@ func testBuildCommand(t *testing.T, when spec.G, it spec.S) { var envPath string it.Before(func() { - envfile, err := ioutil.TempFile("", "envfile") + envfile, err := os.CreateTemp("", "envfile") h.AssertNil(t, err) defer envfile.Close() @@ -293,14 +372,14 @@ func testBuildCommand(t *testing.T, when spec.G, it spec.S) { var envPath2 string it.Before(func() { - envfile1, err := ioutil.TempFile("", "envfile") + envfile1, err := os.CreateTemp("", "envfile") h.AssertNil(t, err) defer envfile1.Close() envfile1.WriteString("KEY1=VALUE1\nKEY2=IGNORED") envPath1 = 
envfile1.Name() - envfile2, err := ioutil.TempFile("", "envfile") + envfile2, err := os.CreateTemp("", "envfile") h.AssertNil(t, err) defer envfile2.Close() @@ -347,6 +426,44 @@ func testBuildCommand(t *testing.T, when spec.G, it spec.S) { }) }) + when("cache flag with 'format=image' is passed", func() { + when("--publish is not used", func() { + it("errors", func() { + command.SetArgs([]string{"--builder", "my-builder", "image", "--cache", "type=build;format=image;name=myorg/myimage:cache"}) + err := command.Execute() + h.AssertError(t, err, "image cache format requires the 'publish' flag") + }) + }) + when("--publish is used", func() { + it("succeeds", func() { + mockClient.EXPECT(). + Build(gomock.Any(), EqBuildOptionsWithCacheFlags("type=build;format=image;name=myorg/myimage:cache;type=launch;format=volume;")). + Return(nil) + + command.SetArgs([]string{"--builder", "my-builder", "image", "--cache", "type=build;format=image;name=myorg/myimage:cache", "--publish"}) + h.AssertNil(t, command.Execute()) + }) + }) + when("used together with --cache-image", func() { + it("errors", func() { + command.SetArgs([]string{"--builder", "my-builder", "image", "--cache-image", "some-cache-image", "--cache", "type=build;format=image;name=myorg/myimage:cache"}) + err := command.Execute() + h.AssertError(t, err, "'cache' flag with 'image' format cannot be used with 'cache-image' flag") + }) + }) + when("'type=launch;format=image' is used", func() { + it("warns", func() { + mockClient.EXPECT(). + Build(gomock.Any(), EqBuildOptionsWithCacheFlags("type=build;format=volume;type=launch;format=image;name=myorg/myimage:cache;")). 
+ Return(nil) + + command.SetArgs([]string{"--builder", "my-builder", "image", "--cache", "type=launch;format=image;name=myorg/myimage:cache", "--publish"}) + h.AssertNil(t, command.Execute()) + h.AssertContains(t, outBuf.String(), "Warning: cache definition: 'launch' cache in format 'image' is not supported.") + }) + }) + }) + when("a valid lifecycle-image is provided", func() { when("only the image repo is provided", func() { it("uses the provided lifecycle-image and parses it correctly", func() { @@ -480,7 +597,7 @@ func testBuildCommand(t *testing.T, when spec.G, it spec.S) { var projectTomlPath string it.Before(func() { - projectToml, err := ioutil.TempFile("", "project.toml") + projectToml, err := os.CreateTemp("", "project.toml") h.AssertNil(t, err) defer projectToml.Close() @@ -519,11 +636,12 @@ version = "1.0" h.AssertNil(t, command.Execute()) }) }) + when("file has a builder specified", func() { var projectTomlPath string it.Before(func() { - projectToml, err := ioutil.TempFile("", "project.toml") + projectToml, err := os.CreateTemp("", "project.toml") h.AssertNil(t, err) defer projectToml.Close() @@ -561,11 +679,12 @@ builder = "my-builder" }) }) }) + when("file is invalid", func() { var projectTomlPath string it.Before(func() { - projectToml, err := ioutil.TempFile("", "project.toml") + projectToml, err := os.CreateTemp("", "project.toml") h.AssertNil(t, err) defer projectToml.Close() @@ -717,13 +836,9 @@ builder = "my-builder" when("previous-image flag is provided", func() { when("image is invalid", func() { it("error must be thrown", func() { - mockClient.EXPECT(). - Build(gomock.Any(), EqBuildOptionsWithPreviousImage("previous-image")). 
- Return(errors.New("")) - command.SetArgs([]string{"--builder", "my-builder", "/x@/y/?!z", "--previous-image", "previous-image"}) err := command.Execute() - h.AssertError(t, err, "failed to build") + h.AssertError(t, err, "forbidden image name") }) }) @@ -783,6 +898,253 @@ builder = "my-builder" h.AssertNil(t, command.Execute()) }) }) + + when("--creation-time", func() { + when("provided as 'now'", func() { + it("passes it to the builder", func() { + expectedTime := time.Now().UTC() + mockClient.EXPECT(). + Build(gomock.Any(), EqBuildOptionsWithDateTime(&expectedTime)). + Return(nil) + + command.SetArgs([]string{"image", "--builder", "my-builder", "--creation-time", "now"}) + h.AssertNil(t, command.Execute()) + }) + }) + + when("provided as unix timestamp", func() { + it("passes it to the builder", func() { + expectedTime, err := time.Parse("2006-01-02T03:04:05Z", "2019-08-19T00:00:01Z") + h.AssertNil(t, err) + mockClient.EXPECT(). + Build(gomock.Any(), EqBuildOptionsWithDateTime(&expectedTime)). + Return(nil) + + command.SetArgs([]string{"image", "--builder", "my-builder", "--creation-time", "1566172801"}) + h.AssertNil(t, command.Execute()) + }) + }) + + when("not provided", func() { + it("is nil", func() { + mockClient.EXPECT(). + Build(gomock.Any(), EqBuildOptionsWithDateTime(nil)). + Return(nil) + + command.SetArgs([]string{"image", "--builder", "my-builder"}) + h.AssertNil(t, command.Execute()) + }) + }) + }) + + when("path to app dir or zip-formatted file is provided", func() { + it("builds with the specified path", func() { + mockClient.EXPECT(). + Build(gomock.Any(), EqBuildOptionsWithPath("my-source")). 
+ Return(nil) + + command.SetArgs([]string{"image", "--builder", "my-builder", "--path", "my-source"}) + h.AssertNil(t, command.Execute()) + }) + }) + + when("a local path with the same string as the specified image name exists", func() { + when("an app path is specified", func() { + it("doesn't warn that the positional argument will not be treated as the source path", func() { + mockClient.EXPECT(). + Build(gomock.Any(), EqBuildOptionsWithImage("my-builder", "testdata")). + Return(nil) + + command.SetArgs([]string{"testdata", "--builder", "my-builder", "--path", "my-source"}) + h.AssertNil(t, command.Execute()) + h.AssertNotContainsMatch(t, outBuf.String(), `Warning: You are building an image named '([^']+)'\. If you mean it as an app directory path, run 'pack build --path ([^']+)'`) + }) + }) + + when("no app path is specified", func() { + it("warns that the positional argument will not be treated as the source path", func() { + mockClient.EXPECT(). + Build(gomock.Any(), EqBuildOptionsWithImage("my-builder", "testdata")). + Return(nil) + + command.SetArgs([]string{"testdata", "--builder", "my-builder"}) + h.AssertNil(t, command.Execute()) + h.AssertContains(t, outBuf.String(), "Warning: You are building an image named 'testdata'. If you mean it as an app directory path, run 'pack build --path testdata'") + }) + }) + }) + + when("export to OCI layout is expected but experimental isn't set in the config", func() { + it("errors with a descriptive message", func() { + command.SetArgs([]string{"oci:image", "--builder", "my-builder"}) + err := command.Execute() + h.AssertNotNil(t, err) + h.AssertError(t, err, "Exporting to OCI layout is currently experimental.") + }) + }) + + when("--exec-env", func() { + when("is not provided", func() { + it("set 'production' as default value", func() { + mockClient.EXPECT(). + Build(gomock.Any(), EqBuildOptionsWithExecEnv("production")). 
+ Return(nil) + + command.SetArgs([]string{"image", "--builder", "my-builder"}) + h.AssertNil(t, command.Execute()) + }) + }) + + when("is provided", func() { + when("contains valid characters", func() { + it("forwards the exec-value (only letters) into the client", func() { + mockClient.EXPECT(). + Build(gomock.Any(), EqBuildOptionsWithExecEnv("something")). + Return(nil) + + command.SetArgs([]string{"image", "--builder", "my-builder", "--exec-env", "something"}) + h.AssertNil(t, command.Execute()) + }) + + it("forwards the exec-value (only numbers) into the client", func() { + mockClient.EXPECT(). + Build(gomock.Any(), EqBuildOptionsWithExecEnv("1234")). + Return(nil) + + command.SetArgs([]string{"image", "--builder", "my-builder", "--exec-env", "1234"}) + h.AssertNil(t, command.Execute()) + }) + + it("forwards the exec-value (mix letters and numbers) into the client", func() { + mockClient.EXPECT(). + Build(gomock.Any(), EqBuildOptionsWithExecEnv("env1")). + Return(nil) + + command.SetArgs([]string{"image", "--builder", "my-builder", "--exec-env", "env1"}) + h.AssertNil(t, command.Execute()) + }) + + it("forwards the exec-value (mix letters, numbers and .) into the client", func() { + mockClient.EXPECT(). + Build(gomock.Any(), EqBuildOptionsWithExecEnv("env1.1")). + Return(nil) + + command.SetArgs([]string{"image", "--builder", "my-builder", "--exec-env", "env1.1"}) + h.AssertNil(t, command.Execute()) + }) + + it("forwards the exec-value (mix letters, numbers and -) into the client", func() { + mockClient.EXPECT(). + Build(gomock.Any(), EqBuildOptionsWithExecEnv("env-1")). + Return(nil) + + command.SetArgs([]string{"image", "--builder", "my-builder", "--exec-env", "env-1"}) + h.AssertNil(t, command.Execute()) + }) + + it("forwards the exec-value (mix letters, numbers, . and -) into the client", func() { + mockClient.EXPECT(). + Build(gomock.Any(), EqBuildOptionsWithExecEnv("env-1.1")). 
+ Return(nil) + + command.SetArgs([]string{"image", "--builder", "my-builder", "--exec-env", "env-1.1"}) + h.AssertNil(t, command.Execute()) + }) + }) + + when("contains invalid characters", func() { + it("errors with a descriptive message", func() { + command.SetArgs([]string{"image", "--builder", "my-builder", "--exec-env", "$production"}) + err := command.Execute() + h.AssertNotNil(t, err) + h.AssertError(t, err, "exec-env MUST only contain numbers, letters, and the characters: . or -") + }) + }) + }) + }) + + when("--insecure-registry is provided", func() { + it("sets one insecure registry", func() { + mockClient.EXPECT(). + Build(gomock.Any(), EqBuildOptionsWithInsecureRegistries([]string{ + "foo.bar", + })). + Return(nil) + + command.SetArgs([]string{"image", "--builder", "my-builder", "--insecure-registry", "foo.bar"}) + h.AssertNil(t, command.Execute()) + }) + + it("sets more than one insecure registry", func() { + mockClient.EXPECT(). + Build(gomock.Any(), EqBuildOptionsWithInsecureRegistries([]string{ + "foo.bar", + "foo.com", + })). + Return(nil) + + command.SetArgs([]string{"image", "--builder", "my-builder", "--insecure-registry", "foo.bar", "--insecure-registry", "foo.com"}) + h.AssertNil(t, command.Execute()) + }) + }) + }) + + when("export to OCI layout is expected", func() { + var ( + sparse bool + previousImage string + layoutDir string + ) + + it.Before(func() { + layoutDir = filepath.Join(paths.RootDir, "local", "repo") + previousImage = "" + cfg = config.Config{ + Experimental: true, + LayoutRepositoryDir: layoutDir, + } + command = commands.Build(logger, cfg, mockClient) + }) + + when("path to save the image is provided", func() { + it("build is called with oci layout configuration", func() { + sparse = false + mockClient.EXPECT(). + Build(gomock.Any(), EqBuildOptionsWithLayoutConfig("image", previousImage, sparse, layoutDir)). 
+ Return(nil) + + command.SetArgs([]string{"oci:image", "--builder", "my-builder"}) + err := command.Execute() + h.AssertNil(t, err) + }) + }) + + when("previous-image flag is provided", func() { + it("build is called with oci layout configuration", func() { + sparse = false + previousImage = "my-previous-image" + mockClient.EXPECT(). + Build(gomock.Any(), EqBuildOptionsWithLayoutConfig("image", previousImage, sparse, layoutDir)). + Return(nil) + + command.SetArgs([]string{"oci:image", "--previous-image", "oci:my-previous-image", "--builder", "my-builder"}) + err := command.Execute() + h.AssertNil(t, err) + }) + }) + + when("-sparse flag is provided", func() { + it("build is called with oci layout configuration and sparse true", func() { + sparse = true + mockClient.EXPECT(). + Build(gomock.Any(), EqBuildOptionsWithLayoutConfig("image", previousImage, sparse, layoutDir)). + Return(nil) + + command.SetArgs([]string{"oci:image", "--sparse", "--builder", "my-builder"}) + err := command.Execute() + h.AssertNil(t, err) + }) + }) }) } @@ -804,6 +1166,15 @@ func EqBuildOptionsDefaultProcess(defaultProc string) gomock.Matcher { } } +func EqBuildOptionsWithPlatform(platform string) gomock.Matcher { + return buildOptionsMatcher{ + description: fmt.Sprintf("Platform=%s", platform), + equals: func(o client.BuildOptions) bool { + return o.Platform == platform + }, + } +} + func EqBuildOptionsWithPullPolicy(policy image.PullPolicy) gomock.Matcher { return buildOptionsMatcher{ description: fmt.Sprintf("PullPolicy=%s", policy), @@ -822,6 +1193,15 @@ func EqBuildOptionsWithCacheImage(cacheImage string) gomock.Matcher { } } +func EqBuildOptionsWithCacheFlags(cacheFlags string) gomock.Matcher { + return buildOptionsMatcher{ + description: fmt.Sprintf("CacheFlags=%s", cacheFlags), + equals: func(o client.BuildOptions) bool { + return o.Cache.String() == cacheFlags + }, + } +} + func EqBuildOptionsWithLifecycleImage(lifecycleImage string) gomock.Matcher { return buildOptionsMatcher{ 
description: fmt.Sprintf("LifecycleImage=%s", lifecycleImage), @@ -853,7 +1233,7 @@ func EqBuildOptionsWithTrustedBuilder(trustBuilder bool) gomock.Matcher { return buildOptionsMatcher{ description: fmt.Sprintf("Trust Builder=%t", trustBuilder), equals: func(o client.BuildOptions) bool { - return o.TrustBuilder(o.Builder) + return o.TrustBuilder(o.Builder) == trustBuilder }, } } @@ -931,6 +1311,64 @@ func EqBuildOptionsWithSBOMOutputDir(s string) interface{} { } } +func EqBuildOptionsWithDateTime(t *time.Time) interface{} { + return buildOptionsMatcher{ + description: fmt.Sprintf("CreationTime=%s", t), + equals: func(o client.BuildOptions) bool { + if t == nil { + return o.CreationTime == nil + } + return o.CreationTime.Sub(*t) < 5*time.Second && t.Sub(*o.CreationTime) < 5*time.Second + }, + } +} + +func EqBuildOptionsWithPath(path string) interface{} { + return buildOptionsMatcher{ + description: fmt.Sprintf("AppPath=%s", path), + equals: func(o client.BuildOptions) bool { + return o.AppPath == path + }, + } +} + +func EqBuildOptionsWithLayoutConfig(image, previousImage string, sparse bool, layoutDir string) interface{} { + return buildOptionsMatcher{ + description: fmt.Sprintf("image=%s, previous-image=%s, sparse=%t, layout-dir=%s", image, previousImage, sparse, layoutDir), + equals: func(o client.BuildOptions) bool { + if o.Layout() { + result := o.Image == image + if previousImage != "" { + result = result && previousImage == o.PreviousImage + } + return result && o.LayoutConfig.Sparse == sparse && o.LayoutConfig.LayoutRepoDir == layoutDir + } + return false + }, + } +} + +func EqBuildOptionsWithExecEnv(s string) interface{} { + return buildOptionsMatcher{ + description: fmt.Sprintf("exec-env=%s", s), + equals: func(o client.BuildOptions) bool { + return o.CNBExecutionEnv == s + }, + } +} + +func EqBuildOptionsWithInsecureRegistries(insecureRegistries []string) gomock.Matcher { + return buildOptionsMatcher{ + description: fmt.Sprintf("Insercure Registries=%s", 
insecureRegistries), + equals: func(o client.BuildOptions) bool { + if len(o.InsecureRegistries) != len(insecureRegistries) { + return false + } + return reflect.DeepEqual(o.InsecureRegistries, insecureRegistries) + }, + } +} + type buildOptionsMatcher struct { equals func(client.BuildOptions) bool description string diff --git a/internal/commands/builder_create.go b/internal/commands/builder_create.go index 5ba06f03fc..064353e004 100644 --- a/internal/commands/builder_create.go +++ b/internal/commands/builder_create.go @@ -2,6 +2,7 @@ package commands import ( "fmt" + "os" "path/filepath" "github.com/pkg/errors" @@ -10,6 +11,7 @@ import ( "github.com/buildpacks/pack/builder" "github.com/buildpacks/pack/internal/config" "github.com/buildpacks/pack/internal/style" + "github.com/buildpacks/pack/pkg/buildpack" "github.com/buildpacks/pack/pkg/client" "github.com/buildpacks/pack/pkg/image" "github.com/buildpacks/pack/pkg/logging" @@ -17,10 +19,15 @@ import ( // BuilderCreateFlags define flags provided to the CreateBuilder command type BuilderCreateFlags struct { - BuilderTomlPath string - Publish bool - Registry string - Policy string + Publish bool + AppendImageNameSuffix bool + BuilderTomlPath string + Registry string + Policy string + Flatten []string + Targets []string + Label map[string]string + AdditionalTags []string } // CreateBuilder creates a builder image, based on a builder config @@ -65,14 +72,61 @@ Creating a custom builder allows you to control what buildpacks are used and wha return errors.Wrap(err, "getting absolute path for config") } + envMap, warnings, err := builder.ParseBuildConfigEnv(builderConfig.Build.Env, flags.BuilderTomlPath) + for _, v := range warnings { + logger.Warn(v) + } + if err != nil { + return err + } + + toFlatten, err := buildpack.ParseFlattenBuildModules(flags.Flatten) + if err != nil { + return err + } + + multiArchCfg, err := processMultiArchitectureConfig(logger, flags.Targets, builderConfig.Targets, !flags.Publish) + if err 
!= nil { + return err + } + + if len(multiArchCfg.Targets()) == 0 { + logger.Infof("Pro tip: use --targets flag OR [[targets]] in builder.toml to specify the desired platform") + } + + if !flags.Publish && flags.AppendImageNameSuffix { + logger.Warnf("--append-image-name-suffix will be ignored, use combined with --publish") + } + + // Create temporary directory for lifecycle downloads when using Docker images + var tempDir string + if hasDockerLifecycle(builderConfig) { + tempDir, err = os.MkdirTemp("", "pack-builder-*") + if err != nil { + return errors.Wrap(err, "creating temporary directory") + } + defer func() { + if cleanupErr := os.RemoveAll(tempDir); cleanupErr != nil { + logger.Debugf("Failed to clean up temporary directory %s: %v", tempDir, cleanupErr) + } + }() + } + imageName := args[0] if err := pack.CreateBuilder(cmd.Context(), client.CreateBuilderOptions{ - RelativeBaseDir: relativeBaseDir, - BuilderName: imageName, - Config: builderConfig, - Publish: flags.Publish, - Registry: flags.Registry, - PullPolicy: pullPolicy, + RelativeBaseDir: relativeBaseDir, + BuildConfigEnv: envMap, + BuilderName: imageName, + Config: builderConfig, + Publish: flags.Publish, + AppendImageNameSuffix: flags.AppendImageNameSuffix && flags.Publish, + Registry: flags.Registry, + PullPolicy: pullPolicy, + Flatten: toFlatten, + Labels: flags.Label, + Targets: multiArchCfg.Targets(), + TempDirectory: tempDir, + AdditionalTags: flags.AdditionalTags, }); err != nil { return err } @@ -87,13 +141,27 @@ Creating a custom builder allows you to control what buildpacks are used and wha cmd.Flags().MarkHidden("buildpack-registry") } cmd.Flags().StringVarP(&flags.BuilderTomlPath, "config", "c", "", "Path to builder TOML file (required)") - cmd.Flags().BoolVar(&flags.Publish, "publish", false, "Publish to registry") + cmd.Flags().BoolVar(&flags.Publish, "publish", false, "Publish the builder directly to the container registry specified in , instead of the daemon.") + 
cmd.Flags().BoolVar(&flags.AppendImageNameSuffix, "append-image-name-suffix", false, "Append an [os]-[arch] suffix to intermediate image tags when creating a multi-arch image; useful when publishing to a registry that doesn't allow overwriting existing tags") cmd.Flags().StringVar(&flags.Policy, "pull-policy", "", "Pull policy to use. Accepted values are always, never, and if-not-present. The default is always") + cmd.Flags().StringArrayVar(&flags.Flatten, "flatten", nil, "List of buildpacks to flatten together into a single layer (format: '@,@'") + cmd.Flags().StringToStringVarP(&flags.Label, "label", "l", nil, "Labels to add to the builder image, in the form of '='") + cmd.Flags().StringSliceVarP(&flags.Targets, "target", "t", nil, + `Target platforms to build for.\nTargets should be in the format '[os][/arch][/variant]:[distroname@osversion@anotherversion];[distroname@osversion]'. +- To specify two different architectures: '--target "linux/amd64" --target "linux/arm64"' +- To specify the distribution version: '--target "linux/arm/v6:ubuntu@14.04"' +- To specify multiple distribution versions: '--target "linux/arm/v6:ubuntu@14.04" --target "linux/arm/v6:ubuntu@16.04"' + `) + cmd.Flags().StringSliceVarP(&flags.AdditionalTags, "tag", "", nil, "Additional tags to push the output image to.\nTags should be in the format 'image:tag' or 'repository/image:tag'."+stringSliceHelp("tag")) AddHelpFlag(cmd, "create") return cmd } +func hasDockerLifecycle(builderConfig builder.Config) bool { + return buildpack.HasDockerLocator(builderConfig.Lifecycle.URI) +} + func validateCreateFlags(flags *BuilderCreateFlags, cfg config.Config) error { if flags.Publish && flags.Policy == image.PullNever.String() { return errors.Errorf("--publish and --pull-policy never cannot be used together. 
The --publish flag requires the use of remote images.") diff --git a/internal/commands/builder_create_test.go b/internal/commands/builder_create_test.go index e7392ca082..428e753be4 100644 --- a/internal/commands/builder_create_test.go +++ b/internal/commands/builder_create_test.go @@ -2,8 +2,11 @@ package commands_test import ( "bytes" - "io/ioutil" + "errors" + "fmt" + "os" "path/filepath" + "reflect" "testing" "github.com/golang/mock/gomock" @@ -12,9 +15,12 @@ import ( "github.com/sclevine/spec/report" "github.com/spf13/cobra" + "github.com/buildpacks/pack/builder" "github.com/buildpacks/pack/internal/commands" "github.com/buildpacks/pack/internal/commands/testmocks" "github.com/buildpacks/pack/internal/config" + "github.com/buildpacks/pack/pkg/client" + "github.com/buildpacks/pack/pkg/dist" "github.com/buildpacks/pack/pkg/logging" h "github.com/buildpacks/pack/testhelpers" ) @@ -29,6 +35,144 @@ const validConfig = ` ` +const validConfigWithTargets = ` +[[buildpacks]] +id = "some.buildpack" + +[[order]] +[[order.group]] +id = "some.buildpack" + +[[targets]] +os = "linux" +arch = "amd64" + +[[targets]] +os = "linux" +arch = "arm64" +` + +const validConfigWithExtensions = ` +[[buildpacks]] + id = "some.buildpack" + +[[extensions]] + id = "some.extension" + +[[order]] + [[order.group]] + id = "some.buildpack" + +[[order-extensions]] + [[order-extensions.group]] + id = "some.extension" + +` + +var BuildConfigEnvSuffixNone = builder.BuildConfigEnv{ + Name: "suffixNone", + Value: "suffixNoneValue", +} + +var BuildConfigEnvSuffixNoneWithEmptySuffix = builder.BuildConfigEnv{ + Name: "suffixNoneWithEmptySuffix", + Value: "suffixNoneWithEmptySuffixValue", + Suffix: "", +} + +var BuildConfigEnvSuffixDefault = builder.BuildConfigEnv{ + Name: "suffixDefault", + Value: "suffixDefaultValue", + Suffix: "default", +} + +var BuildConfigEnvSuffixOverride = builder.BuildConfigEnv{ + Name: "suffixOverride", + Value: "suffixOverrideValue", + Suffix: "override", +} + +var 
BuildConfigEnvSuffixAppend = builder.BuildConfigEnv{ + Name: "suffixAppend", + Value: "suffixAppendValue", + Suffix: "append", + Delim: ":", +} + +var BuildConfigEnvSuffixPrepend = builder.BuildConfigEnv{ + Name: "suffixPrepend", + Value: "suffixPrependValue", + Suffix: "prepend", + Delim: ":", +} + +var BuildConfigEnvDelimWithoutSuffix = builder.BuildConfigEnv{ + Name: "delimWithoutSuffix", + Delim: ":", +} + +var BuildConfigEnvSuffixUnknown = builder.BuildConfigEnv{ + Name: "suffixUnknown", + Value: "suffixUnknownValue", + Suffix: "unknown", +} + +var BuildConfigEnvSuffixMultiple = []builder.BuildConfigEnv{ + { + Name: "MY_VAR", + Value: "suffixAppendValueValue", + Suffix: "append", + Delim: ";", + }, + { + Name: "MY_VAR", + Value: "suffixDefaultValue", + Suffix: "default", + Delim: "%", + }, + { + Name: "MY_VAR", + Value: "suffixPrependValue", + Suffix: "prepend", + Delim: ":", + }, +} + +var BuildConfigEnvEmptyValue = builder.BuildConfigEnv{ + Name: "warning", + Value: "", +} + +var BuildConfigEnvEmptyName = builder.BuildConfigEnv{ + Name: "", + Value: "suffixUnknownValue", + Suffix: "default", +} + +var BuildConfigEnvSuffixPrependWithoutDelim = builder.BuildConfigEnv{ + Name: "suffixPrepend", + Value: "suffixPrependValue", + Suffix: "prepend", +} + +var BuildConfigEnvDelimWithoutSuffixAppendOrPrepend = builder.BuildConfigEnv{ + Name: "delimWithoutActionAppendOrPrepend", + Value: "some-value", + Delim: ":", +} + +var BuildConfigEnvDelimWithSameSuffixAndName = []builder.BuildConfigEnv{ + { + Name: "MY_VAR", + Value: "some-value", + Suffix: "", + }, + { + Name: "MY_VAR", + Value: "some-value", + }, +} + func TestCreateCommand(t *testing.T) { color.Disable(true) defer color.Disable(false) @@ -49,7 +193,7 @@ func testCreateCommand(t *testing.T, when spec.G, it spec.S) { it.Before(func() { var err error - tmpDir, err = ioutil.TempDir("", "create-builder-test") + tmpDir, err = os.MkdirTemp("", "create-builder-test") h.AssertNil(t, err) builderConfigPath = 
filepath.Join(tmpDir, "builder.toml") cfg = config.Config{} @@ -120,7 +264,7 @@ func testCreateCommand(t *testing.T, when spec.G, it spec.S) { when("warnings encountered in builder.toml", func() { it.Before(func() { - h.AssertNil(t, ioutil.WriteFile(builderConfigPath, []byte(` + h.AssertNil(t, os.WriteFile(builderConfigPath, []byte(` [[buildpacks]] id = "some.buildpack" `), 0666)) @@ -141,7 +285,7 @@ func testCreateCommand(t *testing.T, when spec.G, it spec.S) { when("uses --builder-config", func() { it.Before(func() { - h.AssertNil(t, ioutil.WriteFile(builderConfigPath, []byte(validConfig), 0666)) + h.AssertNil(t, os.WriteFile(builderConfigPath, []byte(validConfig), 0666)) }) it("errors with a descriptive message", func() { @@ -153,6 +297,115 @@ func testCreateCommand(t *testing.T, when spec.G, it spec.S) { }) }) + when("#ParseBuildpackConfigEnv", func() { + it("should create envMap as expected when suffix is omitted", func() { + envMap, warnings, err := builder.ParseBuildConfigEnv([]builder.BuildConfigEnv{BuildConfigEnvSuffixNone}, "") + h.AssertEq(t, envMap, map[string]string{ + BuildConfigEnvSuffixNone.Name: BuildConfigEnvSuffixNone.Value, + }) + h.AssertEq(t, len(warnings), 0) + h.AssertNil(t, err) + }) + it("should create envMap as expected when suffix is empty string", func() { + envMap, warnings, err := builder.ParseBuildConfigEnv([]builder.BuildConfigEnv{BuildConfigEnvSuffixNoneWithEmptySuffix}, "") + h.AssertEq(t, envMap, map[string]string{ + BuildConfigEnvSuffixNoneWithEmptySuffix.Name: BuildConfigEnvSuffixNoneWithEmptySuffix.Value, + }) + h.AssertEq(t, len(warnings), 0) + h.AssertNil(t, err) + }) + it("should create envMap as expected when suffix is `default`", func() { + envMap, warnings, err := builder.ParseBuildConfigEnv([]builder.BuildConfigEnv{BuildConfigEnvSuffixDefault}, "") + h.AssertEq(t, envMap, map[string]string{ + BuildConfigEnvSuffixDefault.Name + "." 
+ string(BuildConfigEnvSuffixDefault.Suffix): BuildConfigEnvSuffixDefault.Value, + }) + h.AssertEq(t, len(warnings), 0) + h.AssertNil(t, err) + }) + it("should create envMap as expected when suffix is `override`", func() { + envMap, warnings, err := builder.ParseBuildConfigEnv([]builder.BuildConfigEnv{BuildConfigEnvSuffixOverride}, "") + h.AssertEq(t, envMap, map[string]string{ + BuildConfigEnvSuffixOverride.Name + "." + string(BuildConfigEnvSuffixOverride.Suffix): BuildConfigEnvSuffixOverride.Value, + }) + h.AssertEq(t, len(warnings), 0) + h.AssertNil(t, err) + }) + it("should create envMap as expected when suffix is `append`", func() { + envMap, warnings, err := builder.ParseBuildConfigEnv([]builder.BuildConfigEnv{BuildConfigEnvSuffixAppend}, "") + h.AssertEq(t, envMap, map[string]string{ + BuildConfigEnvSuffixAppend.Name + "." + string(BuildConfigEnvSuffixAppend.Suffix): BuildConfigEnvSuffixAppend.Value, + BuildConfigEnvSuffixAppend.Name + ".delim": BuildConfigEnvSuffixAppend.Delim, + }) + h.AssertEq(t, len(warnings), 0) + h.AssertNil(t, err) + }) + it("should create envMap as expected when suffix is `prepend`", func() { + envMap, warnings, err := builder.ParseBuildConfigEnv([]builder.BuildConfigEnv{BuildConfigEnvSuffixPrepend}, "") + h.AssertEq(t, envMap, map[string]string{ + BuildConfigEnvSuffixPrepend.Name + "." 
+ string(BuildConfigEnvSuffixPrepend.Suffix): BuildConfigEnvSuffixPrepend.Value, + BuildConfigEnvSuffixPrepend.Name + ".delim": BuildConfigEnvSuffixPrepend.Delim, + }) + h.AssertEq(t, len(warnings), 0) + h.AssertNil(t, err) + }) + it("should create envMap as expected when delim is specified", func() { + envMap, warnings, err := builder.ParseBuildConfigEnv([]builder.BuildConfigEnv{BuildConfigEnvDelimWithoutSuffix}, "") + h.AssertEq(t, envMap, map[string]string{ + BuildConfigEnvDelimWithoutSuffix.Name: BuildConfigEnvDelimWithoutSuffix.Value, + BuildConfigEnvDelimWithoutSuffix.Name + ".delim": BuildConfigEnvDelimWithoutSuffix.Delim, + }) + h.AssertNotEq(t, len(warnings), 0) + h.AssertNil(t, err) + }) + it("should create envMap with a warning when `value` is empty", func() { + envMap, warnings, err := builder.ParseBuildConfigEnv([]builder.BuildConfigEnv{BuildConfigEnvEmptyValue}, "") + h.AssertEq(t, envMap, map[string]string{ + BuildConfigEnvEmptyValue.Name: BuildConfigEnvEmptyValue.Value, + }) + h.AssertNotEq(t, len(warnings), 0) + h.AssertNil(t, err) + }) + it("should return an error when `name` is empty", func() { + envMap, warnings, err := builder.ParseBuildConfigEnv([]builder.BuildConfigEnv{BuildConfigEnvEmptyName}, "") + h.AssertEq(t, envMap, map[string]string(nil)) + h.AssertEq(t, len(warnings), 0) + h.AssertNotNil(t, err) + }) + it("should return warnings when `apprend` or `prepend` is used without `delim`", func() { + envMap, warnings, err := builder.ParseBuildConfigEnv([]builder.BuildConfigEnv{BuildConfigEnvSuffixPrependWithoutDelim}, "") + h.AssertEq(t, envMap, map[string]string{ + BuildConfigEnvSuffixPrependWithoutDelim.Name + "." 
+ string(BuildConfigEnvSuffixPrependWithoutDelim.Suffix): BuildConfigEnvSuffixPrependWithoutDelim.Value, + }) + h.AssertNotEq(t, len(warnings), 0) + h.AssertNotNil(t, err) + }) + it("should return an error when unknown `suffix` is used", func() { + envMap, warnings, err := builder.ParseBuildConfigEnv([]builder.BuildConfigEnv{BuildConfigEnvSuffixUnknown}, "") + h.AssertEq(t, envMap, map[string]string{}) + h.AssertEq(t, len(warnings), 0) + h.AssertNotNil(t, err) + }) + it("should override with the last specified delim when `[[build.env]]` has multiple delims with same `name` with a `append` or `prepend` suffix", func() { + envMap, warnings, err := builder.ParseBuildConfigEnv(BuildConfigEnvSuffixMultiple, "") + h.AssertEq(t, envMap, map[string]string{ + BuildConfigEnvSuffixMultiple[0].Name + "." + string(BuildConfigEnvSuffixMultiple[0].Suffix): BuildConfigEnvSuffixMultiple[0].Value, + BuildConfigEnvSuffixMultiple[1].Name + "." + string(BuildConfigEnvSuffixMultiple[1].Suffix): BuildConfigEnvSuffixMultiple[1].Value, + BuildConfigEnvSuffixMultiple[2].Name + "." 
+ string(BuildConfigEnvSuffixMultiple[2].Suffix): BuildConfigEnvSuffixMultiple[2].Value, + BuildConfigEnvSuffixMultiple[2].Name + ".delim": BuildConfigEnvSuffixMultiple[2].Delim, + }) + h.AssertNotEq(t, len(warnings), 0) + h.AssertNil(t, err) + }) + it("should override `value` with the last read value when a `[[build.env]]` has same `name` with same `suffix`", func() { + envMap, warnings, err := builder.ParseBuildConfigEnv(BuildConfigEnvDelimWithSameSuffixAndName, "") + h.AssertEq(t, envMap, map[string]string{ + BuildConfigEnvDelimWithSameSuffixAndName[1].Name: BuildConfigEnvDelimWithSameSuffixAndName[1].Value, + }) + h.AssertNotEq(t, len(warnings), 0) + h.AssertNil(t, err) + }) + }) + when("no config provided", func() { it("errors with a descriptive message", func() { command.SetArgs([]string{ @@ -161,5 +414,177 @@ func testCreateCommand(t *testing.T, when spec.G, it spec.S) { h.AssertError(t, command.Execute(), "Please provide a builder config path") }) }) + + when("builder config has extensions but experimental isn't set in the config", func() { + it.Before(func() { + h.AssertNil(t, os.WriteFile(builderConfigPath, []byte(validConfigWithExtensions), 0666)) + }) + + it("errors", func() { + mockClient.EXPECT().CreateBuilder(gomock.Any(), gomock.Any()).Return(errors.New("builder config contains image extensions, but the lifecycle Platform API version (0.12) is older than 0.13; support for image extensions with Platform API < 0.13 is currently experimental")) + + command.SetArgs([]string{ + "some/builder", + "--config", builderConfigPath, + }) + h.AssertError(t, command.Execute(), "support for image extensions with Platform API < 0.13 is currently experimental") + }) + }) + + when("--flatten", func() { + it.Before(func() { + h.AssertNil(t, os.WriteFile(builderConfigPath, []byte(validConfig), 0666)) + }) + + when("requested buildpack doesn't have format @", func() { + it("errors with a descriptive message", func() { + command.SetArgs([]string{ + "some/builder", + 
"--config", builderConfigPath, + "--flatten", "some-buildpack", + }) + h.AssertError(t, command.Execute(), fmt.Sprintf("invalid format %s; please use '@' to add buildpacks to be flattened", "some-buildpack")) + }) + }) + }) + + when("--label", func() { + when("can not be parsed", func() { + it("errors with a descriptive message", func() { + cmd := packageCommand() + cmd.SetArgs([]string{ + "some/builder", + "--config", builderConfigPath, + "--label", "name+value", + }) + + err := cmd.Execute() + h.AssertNotNil(t, err) + h.AssertError(t, err, "invalid argument \"name+value\" for \"-l, --label\" flag: name+value must be formatted as key=value") + }) + }) + }) + + when("multi-platform builder is expected to be created", func() { + when("builder config has no targets defined", func() { + it.Before(func() { + h.AssertNil(t, os.WriteFile(builderConfigPath, []byte(validConfig), 0666)) + }) + when("daemon", func() { + it("errors when exporting to daemon", func() { + command.SetArgs([]string{ + "some/builder", + "--config", builderConfigPath, + "--target", "linux/amd64", + "--target", "windows/amd64", + }) + err := command.Execute() + h.AssertNotNil(t, err) + h.AssertError(t, err, "when exporting to daemon only one target is allowed") + }) + }) + + when("--publish", func() { + it.Before(func() { + mockClient.EXPECT().CreateBuilder(gomock.Any(), EqCreateBuilderOptionsTargets([]dist.Target{ + {OS: "linux", Arch: "amd64"}, + {OS: "windows", Arch: "amd64"}, + })).Return(nil) + }) + + it("creates a builder with the given targets", func() { + command.SetArgs([]string{ + "some/builder", + "--config", builderConfigPath, + "--target", "linux/amd64", + "--target", "windows/amd64", + "--publish", + }) + h.AssertNil(t, command.Execute()) + }) + }) + }) + + when("builder config has targets defined", func() { + it.Before(func() { + h.AssertNil(t, os.WriteFile(builderConfigPath, []byte(validConfigWithTargets), 0666)) + }) + + when("--publish", func() { + it.Before(func() { + 
mockClient.EXPECT().CreateBuilder(gomock.Any(), EqCreateBuilderOptionsTargets([]dist.Target{ + {OS: "linux", Arch: "amd64"}, + {OS: "linux", Arch: "arm64"}, + })).Return(nil) + }) + + it("creates a builder with the given targets", func() { + command.SetArgs([]string{ + "some/builder", + "--config", builderConfigPath, + "--publish", + }) + h.AssertNil(t, command.Execute()) + }) + }) + + when("invalid target flag is used", func() { + it("errors with a message when invalid target flag is used", func() { + command.SetArgs([]string{ + "some/builder", + "--config", builderConfigPath, + "--target", "something/wrong", + "--publish", + }) + h.AssertNotNil(t, command.Execute()) + }) + }) + + when("--targets", func() { + it.Before(func() { + mockClient.EXPECT().CreateBuilder(gomock.Any(), EqCreateBuilderOptionsTargets([]dist.Target{ + {OS: "linux", Arch: "amd64"}, + })).Return(nil) + }) + + it("creates a builder with the given targets", func() { + command.SetArgs([]string{ + "some/builder", + "--target", "linux/amd64", + "--config", builderConfigPath, + }) + h.AssertNil(t, command.Execute()) + }) + }) + }) + }) }) } + +func EqCreateBuilderOptionsTargets(targets []dist.Target) gomock.Matcher { + return createbuilderOptionsMatcher{ + description: fmt.Sprintf("Target=%v", targets), + equals: func(o client.CreateBuilderOptions) bool { + if len(o.Targets) != len(targets) { + return false + } + return reflect.DeepEqual(o.Targets, targets) + }, + } +} + +type createbuilderOptionsMatcher struct { + equals func(options client.CreateBuilderOptions) bool + description string +} + +func (m createbuilderOptionsMatcher) Matches(x interface{}) bool { + if b, ok := x.(client.CreateBuilderOptions); ok { + return m.equals(b) + } + return false +} + +func (m createbuilderOptionsMatcher) String() string { + return "is a CreateBuilderOption with " + m.description +} diff --git a/internal/commands/builder_inspect.go b/internal/commands/builder_inspect.go index e86f91defa..36fa93be93 100644 --- 
a/internal/commands/builder_inspect.go +++ b/internal/commands/builder_inspect.go @@ -8,6 +8,8 @@ import ( "github.com/buildpacks/pack/internal/config" "github.com/buildpacks/pack/pkg/client" "github.com/buildpacks/pack/pkg/logging" + + bldr "github.com/buildpacks/pack/internal/builder" ) type BuilderInspector interface { @@ -61,10 +63,15 @@ func inspectBuilder( inspector BuilderInspector, writerFactory writer.BuilderWriterFactory, ) error { + isTrusted, err := bldr.IsTrustedBuilder(cfg, imageName) + if err != nil { + return err + } + builderInfo := writer.SharedBuilderInfo{ Name: imageName, IsDefault: imageName == cfg.DefaultBuilder, - Trusted: isTrustedBuilder(cfg, imageName), + Trusted: isTrusted, } localInfo, localErr := inspector.InspectBuilder(imageName, true, client.WithDetectionOrderDepth(flags.Depth)) diff --git a/internal/commands/builder_inspect_test.go b/internal/commands/builder_inspect_test.go index a17552d10f..9e4189bf71 100644 --- a/internal/commands/builder_inspect_test.go +++ b/internal/commands/builder_inspect_test.go @@ -6,6 +6,8 @@ import ( "regexp" "testing" + pubbldr "github.com/buildpacks/pack/builder" + "github.com/buildpacks/lifecycle/api" "github.com/heroku/color" "github.com/sclevine/spec" @@ -36,13 +38,13 @@ var ( expectedLocalInfo = &client.BuilderInfo{ Description: "test-local-builder", Stack: "local-stack", - RunImage: "local/image", + RunImages: []pubbldr.RunImageConfig{{Image: "local/image"}}, Lifecycle: minimalLifecycleDescriptor, } expectedRemoteInfo = &client.BuilderInfo{ Description: "test-remote-builder", Stack: "remote-stack", - RunImage: "remote/image", + RunImages: []pubbldr.RunImageConfig{{Image: "remote/image"}}, Lifecycle: minimalLifecycleDescriptor, } expectedLocalDisplay = "Sample output for local builder" @@ -250,9 +252,9 @@ func testBuilderInspectCommand(t *testing.T, when spec.G, it spec.S) { pack config default-builder `) - assert.Matches(outBuf.String(), regexp.MustCompile(`Paketo 
Buildpacks:\s+'paketobuildpacks/builder:base'`)) - assert.Matches(outBuf.String(), regexp.MustCompile(`Paketo Buildpacks:\s+'paketobuildpacks/builder:full'`)) - assert.Matches(outBuf.String(), regexp.MustCompile(`Heroku:\s+'heroku/buildpacks:20'`)) + assert.Matches(outBuf.String(), regexp.MustCompile(`Paketo Buildpacks:\s+'paketobuildpacks/builder-jammy-base'`)) + assert.Matches(outBuf.String(), regexp.MustCompile(`Paketo Buildpacks:\s+'paketobuildpacks/builder-jammy-full'`)) + assert.Matches(outBuf.String(), regexp.MustCompile(`Heroku:\s+'heroku/builder:24'`)) }) }) diff --git a/internal/commands/builder_suggest_test.go b/internal/commands/builder_suggest_test.go index f12bb0b164..75a9db7aff 100644 --- a/internal/commands/builder_suggest_test.go +++ b/internal/commands/builder_suggest_test.go @@ -47,7 +47,7 @@ func testSuggestCommand(t *testing.T, when spec.G, it spec.S) { }) it("displays descriptions from metadata", func() { - commands.WriteSuggestedBuilder(logger, mockClient, []bldr.SuggestedBuilder{{ + commands.WriteSuggestedBuilder(logger, mockClient, []bldr.KnownBuilder{{ Vendor: "Builder", Image: "gcr.io/some/builder:latest", DefaultDescription: "Default description", @@ -65,7 +65,7 @@ func testSuggestCommand(t *testing.T, when spec.G, it spec.S) { }) it("displays default descriptions", func() { - commands.WriteSuggestedBuilder(logger, mockClient, []bldr.SuggestedBuilder{{ + commands.WriteSuggestedBuilder(logger, mockClient, []bldr.KnownBuilder{{ Vendor: "Builder", Image: "gcr.io/some/builder:latest", DefaultDescription: "Default description", @@ -81,7 +81,7 @@ func testSuggestCommand(t *testing.T, when spec.G, it spec.S) { }) it("displays default descriptions", func() { - commands.WriteSuggestedBuilder(logger, mockClient, []bldr.SuggestedBuilder{{ + commands.WriteSuggestedBuilder(logger, mockClient, []bldr.KnownBuilder{{ Vendor: "Builder", Image: "gcr.io/some/builder:latest", DefaultDescription: "Default description", diff --git 
a/internal/commands/buildpack_inspect.go b/internal/commands/buildpack_inspect.go index e75c479f89..2829967368 100644 --- a/internal/commands/buildpack_inspect.go +++ b/internal/commands/buildpack_inspect.go @@ -42,7 +42,7 @@ func BuildpackInspect(logger logging.Logger, cfg config.Config, client PackClien return cmd } -func buildpackInspect(logger logging.Logger, buildpackName, registryName string, flags BuildpackInspectFlags, cfg config.Config, pack PackClient) error { +func buildpackInspect(logger logging.Logger, buildpackName, registryName string, flags BuildpackInspectFlags, _ config.Config, pack PackClient) error { logger.Infof("Inspecting buildpack: %s\n", style.Symbol(buildpackName)) inspectedBuildpacksOutput, err := inspectAllBuildpacks( diff --git a/internal/commands/buildpack_inspect_test.go b/internal/commands/buildpack_inspect_test.go index db12752243..2e317aa839 100644 --- a/internal/commands/buildpack_inspect_test.go +++ b/internal/commands/buildpack_inspect_test.go @@ -139,7 +139,7 @@ func testBuildpackInspectCommand(t *testing.T, when spec.G, it spec.S) { complexInfo = &client.BuildpackInfo{ BuildpackMetadata: buildpack.Metadata{ - BuildpackInfo: dist.BuildpackInfo{ + ModuleInfo: dist.ModuleInfo{ ID: "some/top-buildpack", Version: "0.0.1", Homepage: "top-buildpack-homepage", @@ -150,7 +150,7 @@ func testBuildpackInspectCommand(t *testing.T, when spec.G, it spec.S) { {ID: "io.buildpacks.stacks.second-stack", Mixins: []string{"mixin1", "mixin2"}}, }, }, - Buildpacks: []dist.BuildpackInfo{ + Buildpacks: []dist.ModuleInfo{ { ID: "some/first-inner-buildpack", Version: "1.0.0", @@ -175,9 +175,9 @@ func testBuildpackInspectCommand(t *testing.T, when spec.G, it spec.S) { }, Order: dist.Order{ { - Group: []dist.BuildpackRef{ + Group: []dist.ModuleRef{ { - BuildpackInfo: dist.BuildpackInfo{ + ModuleInfo: dist.ModuleInfo{ ID: "some/top-buildpack", Version: "0.0.1", Homepage: "top-buildpack-homepage", @@ -188,7 +188,7 @@ func testBuildpackInspectCommand(t 
*testing.T, when spec.G, it spec.S) { }, }, }, - BuildpackLayers: dist.BuildpackLayers{ + BuildpackLayers: dist.ModuleLayers{ "some/first-inner-buildpack": { "1.0.0": { API: apiVersion, @@ -198,16 +198,16 @@ func testBuildpackInspectCommand(t *testing.T, when spec.G, it spec.S) { }, Order: dist.Order{ { - Group: []dist.BuildpackRef{ + Group: []dist.ModuleRef{ { - BuildpackInfo: dist.BuildpackInfo{ + ModuleInfo: dist.ModuleInfo{ ID: "some/first-inner-buildpack", Version: "1.0.0", }, Optional: false, }, { - BuildpackInfo: dist.BuildpackInfo{ + ModuleInfo: dist.ModuleInfo{ ID: "some/third-inner-buildpack", Version: "3.0.0", }, @@ -216,9 +216,9 @@ func testBuildpackInspectCommand(t *testing.T, when spec.G, it spec.S) { }, }, { - Group: []dist.BuildpackRef{ + Group: []dist.ModuleRef{ { - BuildpackInfo: dist.BuildpackInfo{ + ModuleInfo: dist.ModuleInfo{ ID: "some/third-inner-buildpack", Version: "3.0.0", }, @@ -258,16 +258,16 @@ func testBuildpackInspectCommand(t *testing.T, when spec.G, it spec.S) { API: apiVersion, Order: dist.Order{ { - Group: []dist.BuildpackRef{ + Group: []dist.ModuleRef{ { - BuildpackInfo: dist.BuildpackInfo{ + ModuleInfo: dist.ModuleInfo{ ID: "some/first-inner-buildpack", Version: "1.0.0", }, Optional: false, }, { - BuildpackInfo: dist.BuildpackInfo{ + ModuleInfo: dist.ModuleInfo{ ID: "some/second-inner-buildpack", Version: "2.0.0", }, @@ -276,9 +276,9 @@ func testBuildpackInspectCommand(t *testing.T, when spec.G, it spec.S) { }, }, { - Group: []dist.BuildpackRef{ + Group: []dist.ModuleRef{ { - BuildpackInfo: dist.BuildpackInfo{ + ModuleInfo: dist.ModuleInfo{ ID: "some/first-inner-buildpack", Version: "1.0.0", }, @@ -297,7 +297,7 @@ func testBuildpackInspectCommand(t *testing.T, when spec.G, it spec.S) { simpleInfo = &client.BuildpackInfo{ BuildpackMetadata: buildpack.Metadata{ - BuildpackInfo: dist.BuildpackInfo{ + ModuleInfo: dist.ModuleInfo{ ID: "some/single-buildpack", Version: "0.0.1", Homepage: "single-homepage-homepace", @@ -308,7 +308,7 @@ 
func testBuildpackInspectCommand(t *testing.T, when spec.G, it spec.S) { {ID: "io.buildpacks.stacks.second-stack", Mixins: []string{"mixin1", "mixin2"}}, }, }, - Buildpacks: []dist.BuildpackInfo{ + Buildpacks: []dist.ModuleInfo{ { ID: "some/single-buildpack", Version: "0.0.1", @@ -322,9 +322,9 @@ func testBuildpackInspectCommand(t *testing.T, when spec.G, it spec.S) { }, Order: dist.Order{ { - Group: []dist.BuildpackRef{ + Group: []dist.ModuleRef{ { - BuildpackInfo: dist.BuildpackInfo{ + ModuleInfo: dist.ModuleInfo{ ID: "some/single-buildpack", Version: "0.0.1", Homepage: "single-buildpack-homepage", @@ -334,7 +334,7 @@ func testBuildpackInspectCommand(t *testing.T, when spec.G, it spec.S) { }, }, }, - BuildpackLayers: dist.BuildpackLayers{ + BuildpackLayers: dist.ModuleLayers{ "some/single-buildpack": { "0.0.1": { API: apiVersion, @@ -481,7 +481,7 @@ func testBuildpackInspectCommand(t *testing.T, when spec.G, it spec.S) { }) }) - when("uri is a http or https location", func() { + when("uri is an http or https location", func() { it.Before(func() { simpleInfo.Location = buildpack.URILocator }) diff --git a/internal/commands/buildpack_new.go b/internal/commands/buildpack_new.go index f0f143e6bc..e52733ebc6 100644 --- a/internal/commands/buildpack_new.go +++ b/internal/commands/buildpack_new.go @@ -5,11 +5,13 @@ import ( "fmt" "os" "path/filepath" + "runtime" "strings" "github.com/spf13/cobra" "github.com/buildpacks/pack/internal/style" + "github.com/buildpacks/pack/internal/target" "github.com/buildpacks/pack/pkg/client" "github.com/buildpacks/pack/pkg/dist" "github.com/buildpacks/pack/pkg/logging" @@ -17,9 +19,11 @@ import ( // BuildpackNewFlags define flags provided to the BuildpackNew command type BuildpackNewFlags struct { - API string - Path string + API string + Path string + // Deprecated: Stacks are deprecated Stacks []string + Targets []string Version string } @@ -34,7 +38,7 @@ func BuildpackNew(logger logging.Logger, creator BuildpackCreator) *cobra.Comman 
cmd := &cobra.Command{ Use: "new ", Short: "Creates basic scaffolding of a buildpack.", - Args: cobra.ExactValidArgs(1), + Args: cobra.MatchAll(cobra.ExactArgs(1), cobra.OnlyValidArgs), Example: "pack buildpack new sample/my-buildpack", Long: "buildpack new generates the basic scaffolding of a buildpack repository. It creates a new directory `name` in the current directory (or at `path`, if passed as a flag), and initializes a buildpack.toml, and two executable bash scripts, `bin/detect` and `bin/build`. ", RunE: logError(logger, func(cmd *cobra.Command, args []string) error { @@ -66,11 +70,24 @@ func BuildpackNew(logger logging.Logger, creator BuildpackCreator) *cobra.Comman }) } + var targets []dist.Target + if len(flags.Targets) == 0 && len(flags.Stacks) == 0 { + targets = []dist.Target{{ + OS: runtime.GOOS, + Arch: runtime.GOARCH, + }} + } else { + if targets, err = target.ParseTargets(flags.Targets, logger); err != nil { + return err + } + } + if err := creator.NewBuildpack(cmd.Context(), client.NewBuildpackOptions{ API: flags.API, ID: id, Path: path, Stacks: stacks, + Targets: targets, Version: flags.Version, }); err != nil { return err @@ -81,10 +98,17 @@ func BuildpackNew(logger logging.Logger, creator BuildpackCreator) *cobra.Comman }), } - cmd.Flags().StringVarP(&flags.API, "api", "a", "0.5", "Buildpack API compatibility of the generated buildpack") + cmd.Flags().StringVarP(&flags.API, "api", "a", "0.8", "Buildpack API compatibility of the generated buildpack") cmd.Flags().StringVarP(&flags.Path, "path", "p", "", "Path to generate the buildpack") cmd.Flags().StringVarP(&flags.Version, "version", "V", "1.0.0", "Version of the generated buildpack") - cmd.Flags().StringSliceVarP(&flags.Stacks, "stacks", "s", []string{"io.buildpacks.stacks.bionic"}, "Stack(s) this buildpack will be compatible with"+stringSliceHelp("stack")) + cmd.Flags().StringSliceVarP(&flags.Stacks, "stacks", "s", nil, "Stack(s) this buildpack will be compatible 
with"+stringSliceHelp("stack")) + cmd.Flags().MarkDeprecated("stacks", "prefer `--targets` instead: https://github.com/buildpacks/rfcs/blob/main/text/0096-remove-stacks-mixins.md") + cmd.Flags().StringSliceVarP(&flags.Targets, "targets", "t", nil, + `Targets are of the form 'os/arch/variant', for example 'linux/amd64' or 'linux/arm64/v9'. The full format for targets follows the form [os][/arch][/variant]:[distroname@osversion@anotherversion];[distroname@osversion] + - Base case for two different architectures : '--targets "linux/amd64" --targets "linux/arm64"' + - case for distribution version: '--targets "windows/amd64:windows-nano@10.0.19041.1415"' + - case for different architecture with distributed versions : '--targets "linux/arm/v6:ubuntu@14.04" --targets "linux/arm/v6:ubuntu@16.04"' + `) AddHelpFlag(cmd, "new") return cmd diff --git a/internal/commands/buildpack_new_test.go b/internal/commands/buildpack_new_test.go index 218cba9b48..05a76f42b4 100644 --- a/internal/commands/buildpack_new_test.go +++ b/internal/commands/buildpack_new_test.go @@ -2,9 +2,9 @@ package commands_test import ( "bytes" - "io/ioutil" "os" "path/filepath" + "runtime" "testing" "github.com/buildpacks/pack/pkg/client" @@ -37,10 +37,14 @@ func testBuildpackNewCommand(t *testing.T, when spec.G, it spec.S) { mockClient *testmocks.MockPackClient tmpDir string ) + targets := []dist.Target{{ + OS: runtime.GOOS, + Arch: runtime.GOARCH, + }} it.Before(func() { var err error - tmpDir, err = ioutil.TempDir("", "build-test") + tmpDir, err = os.MkdirTemp("", "build-test") h.AssertNil(t, err) logger = logging.NewLogWithWriters(&outBuf, &outBuf) @@ -57,14 +61,11 @@ func testBuildpackNewCommand(t *testing.T, when spec.G, it spec.S) { when("BuildpackNew#Execute", func() { it("uses the args to generate artifacts", func() { mockClient.EXPECT().NewBuildpack(gomock.Any(), client.NewBuildpackOptions{ - API: "0.5", + API: "0.8", ID: "example/some-cnb", Path: filepath.Join(tmpDir, "some-cnb"), Version: 
"1.0.0", - Stacks: []dist.Stack{{ - ID: "io.buildpacks.stacks.bionic", - Mixins: []string{}, - }}, + Targets: targets, }).Return(nil).MaxTimes(1) path := filepath.Join(tmpDir, "some-cnb") @@ -83,5 +84,124 @@ func testBuildpackNewCommand(t *testing.T, when spec.G, it spec.S) { h.AssertNotNil(t, err) h.AssertContains(t, outBuf.String(), "ERROR: directory") }) + + when("target flag is specified, ", func() { + it("it uses target to generate artifacts", func() { + mockClient.EXPECT().NewBuildpack(gomock.Any(), client.NewBuildpackOptions{ + API: "0.8", + ID: "example/targets", + Path: filepath.Join(tmpDir, "targets"), + Version: "1.0.0", + Targets: []dist.Target{{ + OS: "linux", + Arch: "arm", + ArchVariant: "v6", + Distributions: []dist.Distribution{{ + Name: "ubuntu", + Version: "14.04", + }}, + }}, + }).Return(nil).MaxTimes(1) + + path := filepath.Join(tmpDir, "targets") + command.SetArgs([]string{"--path", path, "example/targets", "--targets", "linux/arm/v6:ubuntu@14.04"}) + + err := command.Execute() + h.AssertNil(t, err) + }) + it("it should show error when invalid [os]/[arch] passed", func() { + mockClient.EXPECT().NewBuildpack(gomock.Any(), client.NewBuildpackOptions{ + API: "0.8", + ID: "example/targets", + Path: filepath.Join(tmpDir, "targets"), + Version: "1.0.0", + Targets: []dist.Target{{ + OS: "os", + Arch: "arm", + ArchVariant: "v6", + Distributions: []dist.Distribution{{ + Name: "ubuntu", + Version: "14.04", + }, { + Name: "ubuntu", + Version: "16.04", + }}, + }}, + }).Return(nil).MaxTimes(1) + + path := filepath.Join(tmpDir, "targets") + command.SetArgs([]string{"--path", path, "example/targets", "--targets", "os/arm/v6:ubuntu@14.04@16.04"}) + + err := command.Execute() + h.AssertNotNil(t, err) + }) + when("it should", func() { + it("support format [os][/arch][/variant]:[name@version];[some-name@version]", func() { + mockClient.EXPECT().NewBuildpack(gomock.Any(), client.NewBuildpackOptions{ + API: "0.8", + ID: "example/targets", + Path: 
filepath.Join(tmpDir, "targets"), + Version: "1.0.0", + Targets: []dist.Target{ + { + OS: "linux", + Arch: "arm", + ArchVariant: "v6", + Distributions: []dist.Distribution{ + { + Name: "ubuntu", + Version: "14.04", + }, + { + Name: "debian", + Version: "8.10", + }, + }, + }, + { + OS: "windows", + Arch: "amd64", + Distributions: []dist.Distribution{ + { + Name: "windows-nano", + Version: "10.0.19041.1415", + }, + }, + }, + }, + }).Return(nil).MaxTimes(1) + + path := filepath.Join(tmpDir, "targets") + command.SetArgs([]string{"--path", path, "example/targets", "--targets", "linux/arm/v6:ubuntu@14.04;debian@8.10", "-t", "windows/amd64:windows-nano@10.0.19041.1415"}) + + err := command.Execute() + h.AssertNil(t, err) + }) + }) + when("stacks ", func() { + it("flag should show deprecated message when used", func() { + mockClient.EXPECT().NewBuildpack(gomock.Any(), client.NewBuildpackOptions{ + API: "0.8", + ID: "example/stacks", + Path: filepath.Join(tmpDir, "stacks"), + Version: "1.0.0", + Stacks: []dist.Stack{{ + ID: "io.buildpacks.stacks.jammy", + Mixins: []string{}, + }}, + }).Return(nil).MaxTimes(1) + + path := filepath.Join(tmpDir, "stacks") + output := new(bytes.Buffer) + command.SetOut(output) + command.SetErr(output) + command.SetArgs([]string{"--path", path, "example/stacks", "--stacks", "io.buildpacks.stacks.jammy"}) + + err := command.Execute() + h.AssertNil(t, err) + h.AssertContains(t, output.String(), "Flag --stacks has been deprecated,") + }) + }) + }) }) } diff --git a/internal/commands/buildpack_package.go b/internal/commands/buildpack_package.go index 13aea0c97a..4216470d85 100644 --- a/internal/commands/buildpack_package.go +++ b/internal/commands/buildpack_package.go @@ -2,7 +2,9 @@ package commands import ( "context" + "os" "path/filepath" + "strings" "github.com/pkg/errors" "github.com/spf13/cobra" @@ -11,18 +13,25 @@ import ( "github.com/buildpacks/pack/internal/config" "github.com/buildpacks/pack/internal/style" 
"github.com/buildpacks/pack/pkg/client" + "github.com/buildpacks/pack/pkg/dist" "github.com/buildpacks/pack/pkg/image" "github.com/buildpacks/pack/pkg/logging" ) // BuildpackPackageFlags define flags provided to the BuildpackPackage command type BuildpackPackageFlags struct { - PackageTomlPath string - Format string - Publish bool - Policy string - BuildpackRegistry string - Path string + PackageTomlPath string + Format string + Policy string + BuildpackRegistry string + Path string + FlattenExclude []string + Targets []string + Label map[string]string + Publish bool + Flatten bool + AppendImageNameSuffix bool + AdditionalTags []string } // BuildpackPackager packages buildpacks @@ -33,6 +42,7 @@ type BuildpackPackager interface { // PackageConfigReader reads BuildpackPackage configs type PackageConfigReader interface { Read(path string) (pubbldpkg.Config, error) + ReadBuildpackDescriptor(path string) (dist.BuildpackDescriptor, error) } // BuildpackPackage packages (a) buildpack(s) into OCI format, based on a package config @@ -41,7 +51,7 @@ func BuildpackPackage(logger logging.Logger, cfg config.Config, packager Buildpa cmd := &cobra.Command{ Use: "package --config ", Short: "Package a buildpack in OCI format.", - Args: cobra.ExactValidArgs(1), + Args: cobra.MatchAll(cobra.ExactArgs(1), cobra.OnlyValidArgs), Example: "pack buildpack package my-buildpack --config ./package.toml\npack buildpack package my-buildpack.cnb --config ./package.toml --f file", Long: "buildpack package allows users to package (a) buildpack(s) into OCI format, which can then to be hosted in " + "image repositories or persisted on disk as a '.cnb' file. You can also package a number of buildpacks " + @@ -50,7 +60,7 @@ func BuildpackPackage(logger logging.Logger, cfg config.Config, packager Buildpa "and they can be included in the configs used in `pack builder create` and `pack buildpack package`. 
For more " + "on how to package a buildpack, see: https://buildpacks.io/docs/buildpack-author-guide/package-a-buildpack/.", RunE: logError(logger, func(cmd *cobra.Command, args []string) error { - if err := validateBuildpackPackageFlags(&flags); err != nil { + if err := validateBuildpackPackageFlags(cfg, &flags); err != nil { return err } @@ -92,40 +102,99 @@ func BuildpackPackage(logger logging.Logger, cfg config.Config, packager Buildpa logger.Warnf("%s is not a valid extension for a packaged buildpack. Packaged buildpacks must have a %s extension", style.Symbol(ext), style.Symbol(client.CNBExtension)) } } + if flags.Flatten { + logger.Warn("Flattening a buildpack package could break the distribution specification. Please use it with caution.") + } + + targets, isCompositeBP, err := processBuildpackPackageTargets(flags.Path, packageConfigReader, bpPackageCfg) + if err != nil { + return err + } + + daemon := !flags.Publish && flags.Format == "" + multiArchCfg, err := processMultiArchitectureConfig(logger, flags.Targets, targets, daemon) + if err != nil { + return err + } + + if len(multiArchCfg.Targets()) == 0 { + if isCompositeBP { + logger.Infof("Pro tip: use --target flag OR [[targets]] in package.toml to specify the desired platform (os/arch/variant); using os %s", style.Symbol(bpPackageCfg.Platform.OS)) + } else { + logger.Infof("Pro tip: use --target flag OR [[targets]] in buildpack.toml to specify the desired platform (os/arch/variant); using os %s", style.Symbol(bpPackageCfg.Platform.OS)) + } + } else if !isCompositeBP { + // FIXME: Check if we can copy the config files during layers creation. 
+ filesToClean, err := multiArchCfg.CopyConfigFiles(bpPath, "buildpack") + if err != nil { + return err + } + defer clean(filesToClean) + } + + if !flags.Publish && flags.AppendImageNameSuffix { + logger.Warnf("--append-image-name-suffix will be ignored, use combined with --publish") + } + if err := packager.PackageBuildpack(cmd.Context(), client.PackageBuildpackOptions{ - RelativeBaseDir: relativeBaseDir, - Name: name, - Format: flags.Format, - Config: bpPackageCfg, - Publish: flags.Publish, - PullPolicy: pullPolicy, - Registry: flags.BuildpackRegistry, + RelativeBaseDir: relativeBaseDir, + Name: name, + Format: flags.Format, + Config: bpPackageCfg, + Publish: flags.Publish, + AppendImageNameSuffix: flags.AppendImageNameSuffix && flags.Publish, + PullPolicy: pullPolicy, + Registry: flags.BuildpackRegistry, + Flatten: flags.Flatten, + FlattenExclude: flags.FlattenExclude, + Labels: flags.Label, + Targets: multiArchCfg.Targets(), + AdditionalTags: flags.AdditionalTags, }); err != nil { return err } action := "created" + location := "docker daemon" if flags.Publish { action = "published" + location = "registry" } - - logger.Infof("Successfully %s package %s", action, style.Symbol(name)) + if flags.Format == client.FormatFile { + location = "file" + } + logger.Infof("Successfully %s package %s and saved to %s", action, style.Symbol(name), location) return nil }), } cmd.Flags().StringVarP(&flags.PackageTomlPath, "config", "c", "", "Path to package TOML config") cmd.Flags().StringVarP(&flags.Format, "format", "f", "", `Format to save package as ("image" or "file")`) - cmd.Flags().BoolVar(&flags.Publish, "publish", false, `Publish to registry (applies to "--format=image" only)`) + cmd.Flags().BoolVar(&flags.Publish, "publish", false, `Publish the buildpack directly to the container registry specified in , instead of the daemon (applies to "--format=image" only).`) + cmd.Flags().BoolVar(&flags.AppendImageNameSuffix, "append-image-name-suffix", false, "When publishing to a 
registry that doesn't allow overwrite existing tags use this flag to append a [os]-[arch] suffix to package ") cmd.Flags().StringVar(&flags.Policy, "pull-policy", "", "Pull policy to use. Accepted values are always, never, and if-not-present. The default is always") cmd.Flags().StringVarP(&flags.Path, "path", "p", "", "Path to the Buildpack that needs to be packaged") cmd.Flags().StringVarP(&flags.BuildpackRegistry, "buildpack-registry", "r", "", "Buildpack Registry name") - + cmd.Flags().BoolVar(&flags.Flatten, "flatten", false, "Flatten the buildpack into a single layer") + cmd.Flags().StringSliceVarP(&flags.FlattenExclude, "flatten-exclude", "e", nil, "Buildpacks to exclude from flattening, in the form of '@'") + cmd.Flags().StringToStringVarP(&flags.Label, "label", "l", nil, "Labels to add to packaged Buildpack, in the form of '='") + cmd.Flags().StringSliceVarP(&flags.Targets, "target", "t", nil, + `Target platforms to build for. +Targets should be in the format '[os][/arch][/variant]:[distroname@osversion@anotherversion];[distroname@osversion]'. 
+- To specify two different architectures: '--target "linux/amd64" --target "linux/arm64"' +- To specify the distribution version: '--target "linux/arm/v6:ubuntu@14.04"' +- To specify multiple distribution versions: '--target "linux/arm/v6:ubuntu@14.04" --target "linux/arm/v6:ubuntu@16.04"' + `) + cmd.Flags().StringSliceVarP(&flags.AdditionalTags, "tag", "", nil, "Additional tags to push the output image to.\nTags should be in the format 'image:tag' or 'repository/image:tag'."+stringSliceHelp("tag")) + if !cfg.Experimental { + cmd.Flags().MarkHidden("flatten") + cmd.Flags().MarkHidden("flatten-exclude") + } AddHelpFlag(cmd, "package") return cmd } -func validateBuildpackPackageFlags(p *BuildpackPackageFlags) error { +func validateBuildpackPackageFlags(cfg config.Config, p *BuildpackPackageFlags) error { if p.Publish && p.Policy == image.PullNever.String() { return errors.Errorf("--publish and --pull-policy never cannot be used together. The --publish flag requires the use of remote images.") } @@ -133,5 +202,56 @@ func validateBuildpackPackageFlags(p *BuildpackPackageFlags) error { return errors.Errorf("--config and --path cannot be used together. 
Please specify the relative path to the Buildpack directory in the package config file.") } + if p.Flatten { + if !cfg.Experimental { + return client.NewExperimentError("Flattening a buildpack package is currently experimental.") + } + + if len(p.FlattenExclude) > 0 { + for _, exclude := range p.FlattenExclude { + if strings.Count(exclude, "@") != 1 { + return errors.Errorf("invalid format %s; please use '@' to exclude buildpack from flattening", exclude) + } + } + } + } + return nil +} + +// processBuildpackPackageTargets returns the list of targets defined in the configuration file; it could be the buildpack.toml or +// the package.toml if the buildpack is a composite buildpack +func processBuildpackPackageTargets(path string, packageConfigReader PackageConfigReader, bpPackageCfg pubbldpkg.Config) ([]dist.Target, bool, error) { + var ( + targets []dist.Target + order dist.Order + isCompositeBP bool + ) + + // Read targets from buildpack.toml + pathToBuildpackToml := filepath.Join(path, "buildpack.toml") + if _, err := os.Stat(pathToBuildpackToml); err == nil { + buildpackCfg, err := packageConfigReader.ReadBuildpackDescriptor(pathToBuildpackToml) + if err != nil { + return nil, false, err + } + targets = buildpackCfg.Targets() + order = buildpackCfg.Order() + isCompositeBP = len(order) > 0 + } + + // When composite buildpack, targets are defined in package.toml - See RFC-0128 + if isCompositeBP { + targets = bpPackageCfg.Targets + } + return targets, isCompositeBP, nil +} + +func clean(paths []string) error { + // we need to clean the buildpack.toml for each place where we copied to + if len(paths) > 0 { + for _, path := range paths { + os.Remove(path) + } + } return nil } diff --git a/internal/commands/buildpack_package_test.go b/internal/commands/buildpack_package_test.go index 9fc93a4abb..bd101ebe60 100644 --- a/internal/commands/buildpack_package_test.go +++ b/internal/commands/buildpack_package_test.go @@ -120,6 +120,46 @@ func testPackageCommand(t 
*testing.T, when spec.G, it spec.S) { h.AssertContains(t, outBuf.String(), "'.gz' is not a valid extension for a packaged buildpack. Packaged buildpacks must have a '.cnb' extension") }) }) + when("flatten is set to true", func() { + when("experimental is true", func() { + when("flatten exclude doesn't have format @", func() { + it("errors with a descriptive message", func() { + cmd := packageCommand(withClientConfig(config.Config{Experimental: true}), withBuildpackPackager(fakeBuildpackPackager)) + cmd.SetArgs([]string{"test", "-f", "file", "--flatten", "--flatten-exclude", "some-buildpack"}) + + err := cmd.Execute() + h.AssertError(t, err, fmt.Sprintf("invalid format %s; please use '@' to exclude buildpack from flattening", "some-buildpack")) + }) + }) + + when("no exclusions", func() { + it("creates package with correct image name and warns flatten is being used", func() { + cmd := packageCommand( + withClientConfig(config.Config{Experimental: true}), + withBuildpackPackager(fakeBuildpackPackager), + withLogger(logger), + ) + cmd.SetArgs([]string{"my-flatten-image", "-f", "file", "--flatten"}) + err := cmd.Execute() + h.AssertNil(t, err) + + receivedOptions := fakeBuildpackPackager.CreateCalledWithOptions + h.AssertEq(t, receivedOptions.Name, "my-flatten-image.cnb") + h.AssertContains(t, outBuf.String(), "Flattening a buildpack package could break the distribution specification. 
Please use it with caution.") + }) + }) + }) + + when("experimental is false", func() { + it("errors with a descriptive message", func() { + cmd := packageCommand(withClientConfig(config.Config{Experimental: false}), withBuildpackPackager(fakeBuildpackPackager)) + cmd.SetArgs([]string{"test", "-f", "file", "--flatten"}) + + err := cmd.Execute() + h.AssertError(t, err, "Flattening a buildpack package is currently experimental.") + }) + }) + }) }) when("there is a path flag", func() { @@ -163,6 +203,7 @@ func testPackageCommand(t *testing.T, when spec.G, it spec.S) { h.AssertEq(t, receivedOptions.PullPolicy, image.PullAlways) }) }) + when("no --pull-policy", func() { var pullPolicyArgs = []string{ "some-image-name", @@ -195,6 +236,54 @@ func testPackageCommand(t *testing.T, when spec.G, it spec.S) { h.AssertEq(t, receivedOptions.PullPolicy, image.PullNever) }) }) + + when("composite buildpack", func() { + when("multi-platform", func() { + var ( + targets []dist.Target + descriptor dist.BuildpackDescriptor + config pubbldpkg.Config + path string + ) + + it.Before(func() { + targets = []dist.Target{ + {OS: "linux", Arch: "amd64"}, + {OS: "windows", Arch: "amd64"}, + } + config = pubbldpkg.Config{Buildpack: dist.BuildpackURI{URI: "test"}} + descriptor = dist.BuildpackDescriptor{WithTargets: targets} + path = "testdata" + }) + + it("creates a multi-platform buildpack package", func() { + cmd := packageCommand(withBuildpackPackager(fakeBuildpackPackager), withPackageConfigReader(fakes.NewFakePackageConfigReader(whereReadReturns(config, nil), whereReadBuildpackDescriptor(descriptor, nil)))) + cmd.SetArgs([]string{"some-name", "-p", path}) + + h.AssertNil(t, cmd.Execute()) + h.AssertEq(t, fakeBuildpackPackager.CreateCalledWithOptions.Targets, targets) + }) + }) + }) + + when("additional tags are specified", func() { + it("forwards additional tags to buildpackPackager", func() { + expectedTags := []string{"additional-tag-1", "additional-tag-2"} + cmd := packageCommand( + 
withBuildpackPackager(fakeBuildpackPackager), + ) + cmd.SetArgs([]string{ + "my-specific-image", + "--tag", expectedTags[0], "--tag", expectedTags[1], + }) + err := cmd.Execute() + h.AssertNil(t, err) + + receivedOptions := fakeBuildpackPackager.CreateCalledWithOptions + h.AssertEq(t, receivedOptions.AdditionalTags[0], expectedTags[0]) + h.AssertEq(t, receivedOptions.AdditionalTags[1], expectedTags[1]) + }) + }) }) when("no config path is specified", func() { @@ -209,13 +298,43 @@ func testPackageCommand(t *testing.T, when spec.G, it spec.S) { }) }) when("a path is specified", func() { - it("creates a default config with the appropriate path", func() { - cmd := packageCommand(withBuildpackPackager(fakeBuildpackPackager)) - cmd.SetArgs([]string{"some-name", "-p", ".."}) - h.AssertNil(t, cmd.Execute()) - bpPath, _ := filepath.Abs("..") - receivedOptions := fakeBuildpackPackager.CreateCalledWithOptions - h.AssertEq(t, receivedOptions.Config.Buildpack.URI, bpPath) + when("not multi-platform", func() { + it("creates a default config with the appropriate path", func() { + cmd := packageCommand(withBuildpackPackager(fakeBuildpackPackager)) + cmd.SetArgs([]string{"some-name", "-p", ".."}) + h.AssertNil(t, cmd.Execute()) + bpPath, _ := filepath.Abs("..") + receivedOptions := fakeBuildpackPackager.CreateCalledWithOptions + h.AssertEq(t, receivedOptions.Config.Buildpack.URI, bpPath) + }) + }) + + when("multi-platform", func() { + var ( + targets []dist.Target + descriptor dist.BuildpackDescriptor + path string + ) + + when("single buildpack", func() { + it.Before(func() { + targets = []dist.Target{ + {OS: "linux", Arch: "amd64"}, + {OS: "windows", Arch: "amd64"}, + } + + descriptor = dist.BuildpackDescriptor{WithTargets: targets} + path = "testdata" + }) + + it("creates a multi-platform buildpack package", func() { + cmd := packageCommand(withBuildpackPackager(fakeBuildpackPackager), 
withPackageConfigReader(fakes.NewFakePackageConfigReader(whereReadBuildpackDescriptor(descriptor, nil)))) + cmd.SetArgs([]string{"some-name", "-p", path}) + + h.AssertNil(t, cmd.Execute()) + h.AssertEq(t, fakeBuildpackPackager.CreateCalledWithOptions.Targets, targets) + }) + }) }) }) }) @@ -276,6 +395,34 @@ func testPackageCommand(t *testing.T, when spec.G, it spec.S) { h.AssertError(t, cmd.Execute(), "parsing pull policy") }) }) + + when("--label cannot be parsed", func() { + it("errors with a descriptive message", func() { + cmd := packageCommand() + cmd.SetArgs([]string{ + "some-image-name", "--config", "/path/to/some/file", + "--label", "name+value", + }) + + err := cmd.Execute() + h.AssertNotNil(t, err) + h.AssertError(t, err, "invalid argument \"name+value\" for \"-l, --label\" flag: name+value must be formatted as key=value") + }) + }) + + when("--target cannot be parsed", func() { + it("errors with a descriptive message", func() { + cmd := packageCommand() + cmd.SetArgs([]string{ + "some-image-name", "--config", "/path/to/some/file", + "--target", "something/wrong", "--publish", + }) + + err := cmd.Execute() + h.AssertNotNil(t, err) + h.AssertError(t, err, "unknown target: 'something/wrong'") + }) + }) }) } @@ -359,3 +506,10 @@ func whereReadReturns(config pubbldpkg.Config, err error) func(*fakes.FakePackag r.ReadReturnError = err } } + +func whereReadBuildpackDescriptor(descriptor dist.BuildpackDescriptor, err error) func(*fakes.FakePackageConfigReader) { + return func(r *fakes.FakePackageConfigReader) { + r.ReadBuildpackDescriptorReturn = descriptor + r.ReadBuildpackDescriptorReturnError = err + } +} diff --git a/internal/commands/buildpack_yank.go b/internal/commands/buildpack_yank.go index ee2e118493..e17ecac7af 100644 --- a/internal/commands/buildpack_yank.go +++ b/internal/commands/buildpack_yank.go @@ -23,7 +23,7 @@ func BuildpackYank(logger logging.Logger, cfg config.Config, pack PackClient) *c cmd := &cobra.Command{ Use: "yank ", Args: 
cobra.ExactArgs(1), - Short: "Yank a buildpack from a registry", + Short: "Mark a buildpack on a Buildpack registry as unusable", Example: "pack yank my-buildpack@0.0.1", RunE: logError(logger, func(cmd *cobra.Command, args []string) error { buildpackIDVersion := args[0] diff --git a/internal/commands/commands.go b/internal/commands/commands.go index 6066b89dd5..8a3a37d5a2 100644 --- a/internal/commands/commands.go +++ b/internal/commands/commands.go @@ -7,12 +7,16 @@ import ( "os/signal" "syscall" + "github.com/google/go-containerregistry/pkg/v1/types" "github.com/pkg/errors" "github.com/spf13/cobra" "github.com/buildpacks/pack/internal/config" "github.com/buildpacks/pack/internal/style" + "github.com/buildpacks/pack/internal/target" + "github.com/buildpacks/pack/pkg/buildpack" "github.com/buildpacks/pack/pkg/client" + "github.com/buildpacks/pack/pkg/dist" "github.com/buildpacks/pack/pkg/logging" ) @@ -24,12 +28,21 @@ type PackClient interface { CreateBuilder(context.Context, client.CreateBuilderOptions) error NewBuildpack(context.Context, client.NewBuildpackOptions) error PackageBuildpack(ctx context.Context, opts client.PackageBuildpackOptions) error + PackageExtension(ctx context.Context, opts client.PackageBuildpackOptions) error Build(context.Context, client.BuildOptions) error RegisterBuildpack(context.Context, client.RegisterBuildpackOptions) error YankBuildpack(client.YankBuildpackOptions) error InspectBuildpack(client.InspectBuildpackOptions) (*client.BuildpackInfo, error) + InspectExtension(client.InspectExtensionOptions) (*client.ExtensionInfo, error) PullBuildpack(context.Context, client.PullBuildpackOptions) error DownloadSBOM(name string, options client.DownloadSBOMOptions) error + CreateManifest(ctx context.Context, opts client.CreateManifestOptions) error + AnnotateManifest(ctx context.Context, opts client.ManifestAnnotateOptions) error + AddManifest(ctx context.Context, opts client.ManifestAddOptions) error + DeleteManifest(name []string) error + 
RemoveManifest(name string, images []string) error + PushManifest(client.PushManifestOptions) error + InspectManifest(string) error } func AddHelpFlag(cmd *cobra.Command, commandName string) { @@ -92,16 +105,43 @@ func getMirrors(config config.Config) map[string][]string { return mirrors } -func isTrustedBuilder(cfg config.Config, builder string) bool { - for _, trustedBuilder := range cfg.TrustedBuilders { - if builder == trustedBuilder.Name { - return true - } - } +func deprecationWarning(logger logging.Logger, oldCmd, replacementCmd string) { + logger.Warnf("Command %s has been deprecated, please use %s instead", style.Symbol("pack "+oldCmd), style.Symbol("pack "+replacementCmd)) +} - return isSuggestedBuilder(builder) +func parseFormatFlag(value string) (types.MediaType, error) { + var format types.MediaType + switch value { + case "oci": + format = types.OCIImageIndex + case "docker": + format = types.DockerManifestList + default: + return format, errors.Errorf("%s invalid media type format", value) + } + return format, nil } -func deprecationWarning(logger logging.Logger, oldCmd, replacementCmd string) { - logger.Warnf("Command %s has been deprecated, please use %s instead", style.Symbol("pack "+oldCmd), style.Symbol("pack "+replacementCmd)) +// processMultiArchitectureConfig takes an array of targets with format: [os][/arch][/variant]:[distroname@osversion@anotherversion];[distroname@osversion] +// and a list of targets defined in a configuration file (buildpack.toml or package.toml) and creates a multi-architecture configuration +func processMultiArchitectureConfig(logger logging.Logger, userTargets []string, configTargets []dist.Target, daemon bool) (*buildpack.MultiArchConfig, error) { + var ( + expectedTargets []dist.Target + err error + ) + if len(userTargets) > 0 { + if expectedTargets, err = target.ParseTargets(userTargets, logger); err != nil { + return &buildpack.MultiArchConfig{}, err + } + if len(expectedTargets) > 1 && daemon { + // when we are 
exporting to daemon, only 1 target is allow + return &buildpack.MultiArchConfig{}, errors.Errorf("when exporting to daemon only one target is allowed") + } + } + + multiArchCfg, err := buildpack.NewMultiArchConfig(configTargets, expectedTargets, logger) + if err != nil { + return &buildpack.MultiArchConfig{}, err + } + return multiArchCfg, nil } diff --git a/internal/commands/completion_test.go b/internal/commands/completion_test.go index b4b5a679d3..5fb135babf 100644 --- a/internal/commands/completion_test.go +++ b/internal/commands/completion_test.go @@ -2,7 +2,6 @@ package commands_test import ( "bytes" - "io/ioutil" "os" "path/filepath" "testing" @@ -32,7 +31,7 @@ func testCompletionCommand(t *testing.T, when spec.G, it spec.S) { it.Before(func() { logger = logging.NewLogWithWriters(&outBuf, &outBuf) var err error - packHome, err = ioutil.TempDir("", "") + packHome, err = os.MkdirTemp("", "") assert.Nil(err) // the CompletionCommand calls a method on its Parent(), so it needs to have diff --git a/internal/commands/config_default_builder_test.go b/internal/commands/config_default_builder_test.go index a34500d99d..e06cde74b7 100644 --- a/internal/commands/config_default_builder_test.go +++ b/internal/commands/config_default_builder_test.go @@ -3,7 +3,6 @@ package commands_test import ( "bytes" "fmt" - "io/ioutil" "os" "path/filepath" "testing" @@ -46,7 +45,7 @@ func testConfigDefaultBuilder(t *testing.T, when spec.G, it spec.S) { mockController = gomock.NewController(t) mockClient = testmocks.NewMockPackClient(mockController) logger = logging.NewLogWithWriters(&outBuf, &outBuf) - tempPackHome, err = ioutil.TempDir("", "pack-home") + tempPackHome, err = os.MkdirTemp("", "pack-home") h.AssertNil(t, err) configPath = filepath.Join(tempPackHome, "config.toml") cmd = commands.ConfigDefaultBuilder(logger, config.Config{}, configPath, mockClient) @@ -95,7 +94,7 @@ func testConfigDefaultBuilder(t *testing.T, when spec.G, it spec.S) { }) it("gives clear error if unable to 
write to config", func() { - h.AssertNil(t, ioutil.WriteFile(configPath, []byte("some-data"), 0001)) + h.AssertNil(t, os.WriteFile(configPath, []byte("some-data"), 0001)) cmd = commands.ConfigDefaultBuilder(logger, config.Config{DefaultBuilder: "some/builder"}, configPath, mockClient) cmd.SetArgs([]string{"--unset"}) err := cmd.Execute() @@ -123,7 +122,7 @@ func testConfigDefaultBuilder(t *testing.T, when spec.G, it spec.S) { }) it("gives clear error if unable to write to config", func() { - h.AssertNil(t, ioutil.WriteFile(configPath, []byte("some-data"), 0001)) + h.AssertNil(t, os.WriteFile(configPath, []byte("some-data"), 0001)) mockClient.EXPECT().InspectBuilder(imageName, true).Return(&client.BuilderInfo{ Stack: "test.stack.id", }, nil) diff --git a/internal/commands/config_experimental.go b/internal/commands/config_experimental.go index 0476b515e2..67008c4e3f 100644 --- a/internal/commands/config_experimental.go +++ b/internal/commands/config_experimental.go @@ -1,6 +1,7 @@ package commands import ( + "path/filepath" "strconv" "github.com/pkg/errors" @@ -33,6 +34,11 @@ func ConfigExperimental(logger logging.Logger, cfg config.Config, cfgPath string return errors.Wrapf(err, "invalid value %s provided", style.Symbol(args[0])) } cfg.Experimental = val + if cfg.Experimental { + cfg.LayoutRepositoryDir = filepath.Join(filepath.Dir(cfgPath), "layout-repo") + } else { + cfg.LayoutRepositoryDir = "" + } if err = config.Write(cfg, cfgPath); err != nil { return errors.Wrap(err, "writing to config") diff --git a/internal/commands/config_experimental_test.go b/internal/commands/config_experimental_test.go index 2d49cb43ff..2a1f269d45 100644 --- a/internal/commands/config_experimental_test.go +++ b/internal/commands/config_experimental_test.go @@ -3,7 +3,6 @@ package commands_test import ( "bytes" "fmt" - "io/ioutil" "os" "path/filepath" "testing" @@ -39,7 +38,7 @@ func testConfigExperimental(t *testing.T, when spec.G, it spec.S) { var err error logger = 
logging.NewLogWithWriters(&outBuf, &outBuf) - tempPackHome, err = ioutil.TempDir("", "pack-home") + tempPackHome, err = os.MkdirTemp("", "pack-home") h.AssertNil(t, err) configPath = filepath.Join(tempPackHome, "config.toml") @@ -78,6 +77,10 @@ func testConfigExperimental(t *testing.T, when spec.G, it spec.S) { cfg, err := config.Read(configPath) h.AssertNil(t, err) h.AssertEq(t, cfg.Experimental, true) + + // oci layout repo is configured + layoutDir := filepath.Join(filepath.Dir(configPath), "layout-repo") + h.AssertEq(t, cfg.LayoutRepositoryDir, layoutDir) }) it("sets false if provided", func() { @@ -88,6 +91,9 @@ func testConfigExperimental(t *testing.T, when spec.G, it spec.S) { cfg, err := config.Read(configPath) h.AssertNil(t, err) h.AssertEq(t, cfg.Experimental, false) + + // oci layout repo is cleaned + h.AssertEq(t, cfg.LayoutRepositoryDir, "") }) it("returns error if invalid value provided", func() { diff --git a/internal/commands/config_lifecycle_image_test.go b/internal/commands/config_lifecycle_image_test.go index 71aba269fa..588e040bdc 100644 --- a/internal/commands/config_lifecycle_image_test.go +++ b/internal/commands/config_lifecycle_image_test.go @@ -2,7 +2,6 @@ package commands_test import ( "bytes" - "io/ioutil" "os" "path/filepath" "strings" @@ -39,7 +38,7 @@ func testConfigLifecycleImageCommand(t *testing.T, when spec.G, it spec.S) { it.Before(func() { var err error logger = logging.NewLogWithWriters(&outBuf, &outBuf) - tempPackHome, err = ioutil.TempDir("", "pack-home") + tempPackHome, err = os.MkdirTemp("", "pack-home") h.AssertNil(t, err) configFile = filepath.Join(tempPackHome, "config.toml") @@ -115,7 +114,7 @@ func testConfigLifecycleImageCommand(t *testing.T, when spec.G, it spec.S) { assert.Equal(readCfg.LifecycleImage, "custom-lifecycle/image:v1") }) it("returns clear error if fails to write", func() { - assert.Nil(ioutil.WriteFile(configFile, []byte("something"), 0001)) + assert.Nil(os.WriteFile(configFile, []byte("something"), 
0001)) command := commands.ConfigLifecycleImage(logger, cfg, configFile) command.SetArgs([]string{"custom-lifecycle/image:v1"}) assert.ErrorContains(command.Execute(), "failed to write to config at") @@ -128,7 +127,7 @@ func testConfigLifecycleImageCommand(t *testing.T, when spec.G, it spec.S) { h.AssertError(t, err, `Invalid image name`) }) it("returns clear error if fails to write", func() { - assert.Nil(ioutil.WriteFile(configFile, []byte("something"), 0001)) + assert.Nil(os.WriteFile(configFile, []byte("something"), 0001)) command := commands.ConfigLifecycleImage(logger, cfg, configFile) command.SetArgs([]string{"custom-lifecycle/image:v1"}) assert.ErrorContains(command.Execute(), "failed to write to config at") @@ -155,7 +154,7 @@ func testConfigLifecycleImageCommand(t *testing.T, when spec.G, it spec.S) { assert.Equal(readCfg.LifecycleImage, "") }) it("returns clear error if fails to write", func() { - assert.Nil(ioutil.WriteFile(configFile, []byte("something"), 0001)) + assert.Nil(os.WriteFile(configFile, []byte("something"), 0001)) command := commands.ConfigLifecycleImage(logger, config.Config{LifecycleImage: "custom-lifecycle/image:v1"}, configFile) command.SetArgs([]string{"--unset"}) assert.ErrorContains(command.Execute(), "failed to write to config at") diff --git a/internal/commands/config_pull_policy_test.go b/internal/commands/config_pull_policy_test.go index 640ef6dee2..7674ab2ab9 100644 --- a/internal/commands/config_pull_policy_test.go +++ b/internal/commands/config_pull_policy_test.go @@ -2,7 +2,6 @@ package commands_test import ( "bytes" - "io/ioutil" "os" "path/filepath" "strings" @@ -39,7 +38,7 @@ func testConfigPullPolicyCommand(t *testing.T, when spec.G, it spec.S) { it.Before(func() { var err error logger = logging.NewLogWithWriters(&outBuf, &outBuf) - tempPackHome, err = ioutil.TempDir("", "pack-home") + tempPackHome, err = os.MkdirTemp("", "pack-home") h.AssertNil(t, err) configFile = filepath.Join(tempPackHome, "config.toml") @@ -154,7 
+153,7 @@ func testConfigPullPolicyCommand(t *testing.T, when spec.G, it spec.S) { assert.Equal(readCfg.PullPolicy, "never") }) it("returns clear error if fails to write", func() { - assert.Nil(ioutil.WriteFile(configFile, []byte("something"), 0001)) + assert.Nil(os.WriteFile(configFile, []byte("something"), 0001)) command := commands.ConfigPullPolicy(logger, cfg, configFile) command.SetArgs([]string{"if-not-present"}) assert.ErrorContains(command.Execute(), "writing config to") @@ -174,7 +173,7 @@ func testConfigPullPolicyCommand(t *testing.T, when spec.G, it spec.S) { assert.Equal(cfg.PullPolicy, "") }) it("returns clear error if fails to write", func() { - assert.Nil(ioutil.WriteFile(configFile, []byte("something"), 0001)) + assert.Nil(os.WriteFile(configFile, []byte("something"), 0001)) command := commands.ConfigPullPolicy(logger, config.Config{PullPolicy: "never"}, configFile) command.SetArgs([]string{"--unset"}) assert.ErrorContains(command.Execute(), "writing config to") diff --git a/internal/commands/config_registries_default_test.go b/internal/commands/config_registries_default_test.go index 4bcaee743f..d990794599 100644 --- a/internal/commands/config_registries_default_test.go +++ b/internal/commands/config_registries_default_test.go @@ -3,7 +3,6 @@ package commands_test import ( "bytes" "fmt" - "io/ioutil" "os" "path/filepath" "testing" @@ -38,7 +37,7 @@ func testConfigRegistriesDefaultCommand(t *testing.T, when spec.G, it spec.S) { it.Before(func() { var err error - tmpDir, err = ioutil.TempDir("", "pack-home-*") + tmpDir, err = os.MkdirTemp("", "pack-home-*") assert.Nil(err) configFile = filepath.Join(tmpDir, "config.toml") @@ -97,7 +96,7 @@ func testConfigRegistriesDefaultCommand(t *testing.T, when spec.G, it spec.S) { }) it("returns clear error if fails to write", func() { - assert.Nil(ioutil.WriteFile(configFile, []byte("something"), 0001)) + assert.Nil(os.WriteFile(configFile, []byte("something"), 0001)) cfg := config.Config{ Registries: 
[]config.Registry{ { @@ -138,7 +137,7 @@ func testConfigRegistriesDefaultCommand(t *testing.T, when spec.G, it spec.S) { }) it("returns clear error if fails to write", func() { - assert.Nil(ioutil.WriteFile(configFile, []byte("something"), 0001)) + assert.Nil(os.WriteFile(configFile, []byte("something"), 0001)) command := commands.ConfigRegistriesDefault(logger, config.Config{DefaultRegistryName: "some-registry"}, configFile) command.SetArgs([]string{"--unset"}) assert.ErrorContains(command.Execute(), "writing config to") diff --git a/internal/commands/config_registries_test.go b/internal/commands/config_registries_test.go index 0c1aba1d7d..e19f433129 100644 --- a/internal/commands/config_registries_test.go +++ b/internal/commands/config_registries_test.go @@ -2,7 +2,7 @@ package commands_test import ( "bytes" - "io/ioutil" + "io" "os" "path/filepath" "testing" @@ -39,7 +39,7 @@ func testConfigRegistries(t *testing.T, when spec.G, it spec.S) { var err error logger = logging.NewLogWithWriters(&outBuf, &outBuf) - tempPackHome, err = ioutil.TempDir("", "pack-home") + tempPackHome, err = os.MkdirTemp("", "pack-home") assert.Nil(err) configPath = filepath.Join(tempPackHome, "config.toml") @@ -210,7 +210,7 @@ func testConfigRegistries(t *testing.T, when spec.G, it spec.S) { when("validation", func() { it("fails with missing args", func() { - cmd.SetOut(ioutil.Discard) + cmd.SetOut(io.Discard) cmd.SetArgs([]string{"add"}) err := cmd.Execute() assert.ErrorContains(err, "accepts 2 arg") @@ -253,7 +253,7 @@ func testConfigRegistries(t *testing.T, when spec.G, it spec.S) { }) it("returns clear error if fails to write", func() { - assert.Nil(ioutil.WriteFile(configPath, []byte("something"), 0001)) + assert.Nil(os.WriteFile(configPath, []byte("something"), 0001)) cmd.SetArgs(args) assert.ErrorContains(cmd.Execute(), "writing config to") }) @@ -330,7 +330,7 @@ func testConfigRegistries(t *testing.T, when spec.G, it spec.S) { }) it("returns clear error if fails to write", func() 
{ - assert.Nil(ioutil.WriteFile(configPath, []byte("something"), 0001)) + assert.Nil(os.WriteFile(configPath, []byte("something"), 0001)) cmd.SetArgs([]string{"remove", "public registry"}) assert.ErrorContains(cmd.Execute(), "writing config to") }) diff --git a/internal/commands/config_registry_mirrors_test.go b/internal/commands/config_registry_mirrors_test.go index d882e85f3b..ed7a487019 100644 --- a/internal/commands/config_registry_mirrors_test.go +++ b/internal/commands/config_registry_mirrors_test.go @@ -3,7 +3,6 @@ package commands_test import ( "bytes" "fmt" - "io/ioutil" "os" "path/filepath" "strings" @@ -49,7 +48,7 @@ func testConfigRegistryMirrorsCommand(t *testing.T, when spec.G, it spec.S) { it.Before(func() { var err error logger = logging.NewLogWithWriters(&outBuf, &outBuf) - tempPackHome, err = ioutil.TempDir("", "pack-home") + tempPackHome, err = os.MkdirTemp("", "pack-home") h.AssertNil(t, err) configPath = filepath.Join(tempPackHome, "config.toml") @@ -96,7 +95,7 @@ func testConfigRegistryMirrorsCommand(t *testing.T, when spec.G, it spec.S) { when("config path doesn't exist", func() { it("fails to run", func() { fakePath := filepath.Join(tempPackHome, "not-exist.toml") - h.AssertNil(t, ioutil.WriteFile(fakePath, []byte("something"), 0001)) + h.AssertNil(t, os.WriteFile(fakePath, []byte("something"), 0001)) cmd = commands.ConfigRegistryMirrors(logger, config.Config{}, fakePath) cmd.SetArgs([]string{"add", registry1, "-m", testMirror1}) @@ -165,7 +164,7 @@ func testConfigRegistryMirrorsCommand(t *testing.T, when spec.G, it spec.S) { when("config path doesn't exist", func() { it("fails to run", func() { fakePath := filepath.Join(tempPackHome, "not-exist.toml") - h.AssertNil(t, ioutil.WriteFile(fakePath, []byte("something"), 0001)) + h.AssertNil(t, os.WriteFile(fakePath, []byte("something"), 0001)) cmd = commands.ConfigRegistryMirrors(logger, testCfg, fakePath) cmd.SetArgs([]string{"remove", registry1}) diff --git 
a/internal/commands/config_run_image_mirrors_test.go b/internal/commands/config_run_image_mirrors_test.go index d6fa9517ee..f6cb5c148d 100644 --- a/internal/commands/config_run_image_mirrors_test.go +++ b/internal/commands/config_run_image_mirrors_test.go @@ -3,7 +3,6 @@ package commands_test import ( "bytes" "fmt" - "io/ioutil" "os" "path/filepath" "strings" @@ -56,7 +55,7 @@ func testConfigRunImageMirrorsCommand(t *testing.T, when spec.G, it spec.S) { it.Before(func() { var err error logger = logging.NewLogWithWriters(&outBuf, &outBuf) - tempPackHome, err = ioutil.TempDir("", "pack-home") + tempPackHome, err = os.MkdirTemp("", "pack-home") h.AssertNil(t, err) configPath = filepath.Join(tempPackHome, "config.toml") @@ -104,7 +103,7 @@ func testConfigRunImageMirrorsCommand(t *testing.T, when spec.G, it spec.S) { when("config path doesn't exist", func() { it("fails to run", func() { fakePath := filepath.Join(tempPackHome, "not-exist.toml") - h.AssertNil(t, ioutil.WriteFile(fakePath, []byte("something"), 0001)) + h.AssertNil(t, os.WriteFile(fakePath, []byte("something"), 0001)) cmd = commands.ConfigRunImagesMirrors(logger, config.Config{}, fakePath) cmd.SetArgs([]string{"add", runImage, "-m", testMirror1}) @@ -156,7 +155,7 @@ func testConfigRunImageMirrorsCommand(t *testing.T, when spec.G, it spec.S) { when("config path doesn't exist", func() { it("fails to run", func() { fakePath := filepath.Join(tempPackHome, "not-exist.toml") - h.AssertNil(t, ioutil.WriteFile(fakePath, []byte("something"), 0001)) + h.AssertNil(t, os.WriteFile(fakePath, []byte("something"), 0001)) cmd = commands.ConfigRunImagesMirrors(logger, testCfg, fakePath) cmd.SetArgs([]string{"remove", runImage, "-m", testMirror1}) diff --git a/internal/commands/config_test.go b/internal/commands/config_test.go index 1f4ce9dc23..15f1bae8ad 100644 --- a/internal/commands/config_test.go +++ b/internal/commands/config_test.go @@ -2,7 +2,6 @@ package commands_test import ( "bytes" - "io/ioutil" "os" 
"path/filepath" "testing" @@ -43,7 +42,7 @@ func testConfigCommand(t *testing.T, when spec.G, it spec.S) { mockClient = testmocks.NewMockPackClient(mockController) logger = logging.NewLogWithWriters(&outBuf, &outBuf) - tempPackHome, err = ioutil.TempDir("", "pack-home") + tempPackHome, err = os.MkdirTemp("", "pack-home") h.AssertNil(t, err) configPath = filepath.Join(tempPackHome, "config.toml") diff --git a/internal/commands/config_trusted_builder.go b/internal/commands/config_trusted_builder.go index d281a4fde4..36bcfb6018 100644 --- a/internal/commands/config_trusted_builder.go +++ b/internal/commands/config_trusted_builder.go @@ -18,7 +18,7 @@ func ConfigTrustedBuilder(logger logging.Logger, cfg config.Config, cfgPath stri Short: "List, add and remove trusted builders", Long: "When pack considers a builder to be trusted, `pack build` operations will use a single lifecycle binary " + "called the creator. This is more efficient than using an untrusted builder, where pack will execute " + - "five separate lifecycle binaries: detect, analyze, restore, build and export.\n\n" + + "five separate lifecycle binaries, each in its own container: analyze, detect, restore, build and export.\n\n" + "For more on trusted builders, and when to trust or untrust a builder, " + "check out our docs here: https://buildpacks.io/docs/tools/pack/concepts/trusted_builders/", Aliases: []string{"trusted-builder", "trust-builder", "trust-builders"}, @@ -51,7 +51,11 @@ func addTrustedBuilder(args []string, logger logging.Logger, cfg config.Config, imageName := args[0] builderToTrust := config.TrustedBuilder{Name: imageName} - if isTrustedBuilder(cfg, imageName) { + isTrusted, err := bldr.IsTrustedBuilder(cfg, imageName) + if err != nil { + return err + } + if isTrusted || bldr.IsKnownTrustedBuilder(imageName) { logger.Infof("Builder %s is already trusted", style.Symbol(imageName)) return nil } @@ -80,9 +84,9 @@ func removeTrustedBuilder(args []string, logger logging.Logger, cfg config.Confi 
// Builder is not in the trusted builder list if len(existingTrustedBuilders) == len(cfg.TrustedBuilders) { - if isSuggestedBuilder(builder) { - // Attempted to untrust a suggested builder - return errors.Errorf("Builder %s is a suggested builder, and is trusted by default. Currently pack doesn't support making these builders untrusted", style.Symbol(builder)) + if bldr.IsKnownTrustedBuilder(builder) { + // Attempted to untrust a known trusted builder + return errors.Errorf("Builder %s is a known trusted builder. Currently pack doesn't support making these builders untrusted", style.Symbol(builder)) } logger.Infof("Builder %s wasn't trusted", style.Symbol(builder)) @@ -98,12 +102,12 @@ func removeTrustedBuilder(args []string, logger logging.Logger, cfg config.Confi return nil } -func listTrustedBuilders(args []string, logger logging.Logger, cfg config.Config) { - logger.Info("Trusted Builders:") - +func getTrustedBuilders(cfg config.Config) []string { var trustedBuilders []string - for _, builder := range bldr.SuggestedBuilders { - trustedBuilders = append(trustedBuilders, builder.Image) + for _, knownBuilder := range bldr.KnownBuilders { + if knownBuilder.Trusted { + trustedBuilders = append(trustedBuilders, knownBuilder.Image) + } } for _, builder := range cfg.TrustedBuilders { @@ -111,7 +115,13 @@ func listTrustedBuilders(args []string, logger logging.Logger, cfg config.Config } sort.Strings(trustedBuilders) + return trustedBuilders +} + +func listTrustedBuilders(args []string, logger logging.Logger, cfg config.Config) { + logger.Info("Trusted Builders:") + trustedBuilders := getTrustedBuilders(cfg) for _, builder := range trustedBuilders { logger.Infof(" %s", builder) } diff --git a/internal/commands/config_trusted_builder_test.go b/internal/commands/config_trusted_builder_test.go index dc589db979..2def80e7e5 100644 --- a/internal/commands/config_trusted_builder_test.go +++ b/internal/commands/config_trusted_builder_test.go @@ -3,7 +3,6 @@ package commands_test 
import ( "bytes" "fmt" - "io/ioutil" "os" "path/filepath" "testing" @@ -39,7 +38,7 @@ func testTrustedBuilderCommand(t *testing.T, when spec.G, it spec.S) { var err error logger = logging.NewLogWithWriters(&outBuf, &outBuf) - tempPackHome, err = ioutil.TempDir("", "pack-home") + tempPackHome, err = os.MkdirTemp("", "pack-home") h.AssertNil(t, err) configPath = filepath.Join(tempPackHome, "config.toml") @@ -57,11 +56,13 @@ func testTrustedBuilderCommand(t *testing.T, when spec.G, it spec.S) { h.AssertNil(t, command.Execute()) h.AssertContainsAllInOrder(t, outBuf, - "gcr.io/buildpacks/builder:v1", - "heroku/buildpacks:20", - "paketobuildpacks/builder:base", - "paketobuildpacks/builder:full", - "paketobuildpacks/builder:tiny", + "gcr.io/buildpacks/builder:google-22", + "heroku/builder:20", + "heroku/builder:22", + "heroku/builder:24", + "paketobuildpacks/builder-jammy-base", + "paketobuildpacks/builder-jammy-full", + "paketobuildpacks/builder-jammy-tiny", ) }) @@ -70,11 +71,13 @@ func testTrustedBuilderCommand(t *testing.T, when spec.G, it spec.S) { h.AssertNil(t, command.Execute()) h.AssertContainsAllInOrder(t, outBuf, - "gcr.io/buildpacks/builder:v1", - "heroku/buildpacks:20", - "paketobuildpacks/builder:base", - "paketobuildpacks/builder:full", - "paketobuildpacks/builder:tiny", + "gcr.io/buildpacks/builder:google-22", + "heroku/builder:20", + "heroku/builder:22", + "heroku/builder:24", + "paketobuildpacks/builder-jammy-base", + "paketobuildpacks/builder-jammy-full", + "paketobuildpacks/builder-jammy-tiny", ) }) }) @@ -90,11 +93,13 @@ func testTrustedBuilderCommand(t *testing.T, when spec.G, it spec.S) { h.AssertNotContains(t, outBuf.String(), builderName) h.AssertContainsAllInOrder(t, outBuf, - "gcr.io/buildpacks/builder:v1", - "heroku/buildpacks:20", - "paketobuildpacks/builder:base", - "paketobuildpacks/builder:full", - "paketobuildpacks/builder:tiny", + "gcr.io/buildpacks/builder:google-22", + "heroku/builder:20", + "heroku/builder:22", + "heroku/builder:24", + 
"paketobuildpacks/builder-jammy-base", + "paketobuildpacks/builder-jammy-full", + "paketobuildpacks/builder-jammy-tiny", ) outBuf.Reset() @@ -105,12 +110,14 @@ func testTrustedBuilderCommand(t *testing.T, when spec.G, it spec.S) { h.AssertContainsAllInOrder(t, outBuf, - "gcr.io/buildpacks/builder:v1", + "gcr.io/buildpacks/builder:google-22", builderName, - "heroku/buildpacks:20", - "paketobuildpacks/builder:base", - "paketobuildpacks/builder:full", - "paketobuildpacks/builder:tiny", + "heroku/builder:20", + "heroku/builder:22", + "heroku/builder:24", + "paketobuildpacks/builder-jammy-base", + "paketobuildpacks/builder-jammy-full", + "paketobuildpacks/builder-jammy-tiny", ) }) }) @@ -127,7 +134,7 @@ func testTrustedBuilderCommand(t *testing.T, when spec.G, it spec.S) { when("can't write to config path", func() { it("fails", func() { tempPath := filepath.Join(tempPackHome, "non-existent-file.toml") - h.AssertNil(t, ioutil.WriteFile(tempPath, []byte("something"), 0111)) + h.AssertNil(t, os.WriteFile(tempPath, []byte("something"), 0111)) command = commands.ConfigTrustedBuilder(logger, config.Config{}, tempPath) command.SetOut(logging.GetWriterForLevel(logger, logging.InfoLevel)) command.SetArgs(append(args, "some-builder")) @@ -141,7 +148,7 @@ func testTrustedBuilderCommand(t *testing.T, when spec.G, it spec.S) { command.SetArgs(append(args, "some-builder")) h.AssertNil(t, command.Execute()) - b, err := ioutil.ReadFile(configPath) + b, err := os.ReadFile(configPath) h.AssertNil(t, err) h.AssertContains(t, string(b), `[[trusted-builders]] name = "some-builder"`) @@ -152,13 +159,13 @@ func testTrustedBuilderCommand(t *testing.T, when spec.G, it spec.S) { it("does nothing", func() { command.SetArgs(append(args, "some-already-trusted-builder")) h.AssertNil(t, command.Execute()) - oldContents, err := ioutil.ReadFile(configPath) + oldContents, err := os.ReadFile(configPath) h.AssertNil(t, err) command.SetArgs(append(args, "some-already-trusted-builder")) h.AssertNil(t, 
command.Execute()) - newContents, err := ioutil.ReadFile(configPath) + newContents, err := os.ReadFile(configPath) h.AssertNil(t, err) h.AssertEq(t, newContents, oldContents) }) @@ -166,11 +173,11 @@ func testTrustedBuilderCommand(t *testing.T, when spec.G, it spec.S) { when("builder is a suggested builder", func() { it("does nothing", func() { - h.AssertNil(t, ioutil.WriteFile(configPath, []byte(""), os.ModePerm)) + h.AssertNil(t, os.WriteFile(configPath, []byte(""), os.ModePerm)) - command.SetArgs(append(args, "paketobuildpacks/builder:base")) + command.SetArgs(append(args, "paketobuildpacks/builder-jammy-base")) h.AssertNil(t, command.Execute()) - oldContents, err := ioutil.ReadFile(configPath) + oldContents, err := os.ReadFile(configPath) h.AssertNil(t, err) h.AssertEq(t, string(oldContents), "") }) @@ -211,7 +218,7 @@ func testTrustedBuilderCommand(t *testing.T, when spec.G, it spec.S) { h.AssertNil(t, command.Execute()) - b, err := ioutil.ReadFile(configPath) + b, err := os.ReadFile(configPath) h.AssertNil(t, err) h.AssertNotContains(t, string(b), builderName) @@ -231,7 +238,7 @@ func testTrustedBuilderCommand(t *testing.T, when spec.G, it spec.S) { h.AssertNil(t, command.Execute()) - b, err := ioutil.ReadFile(configPath) + b, err := os.ReadFile(configPath) h.AssertNil(t, err) h.AssertContains(t, string(b), stillTrustedBuilder) h.AssertNotContains(t, string(b), untrustBuilder) @@ -249,7 +256,7 @@ func testTrustedBuilderCommand(t *testing.T, when spec.G, it spec.S) { h.AssertNil(t, command.Execute()) - b, err := ioutil.ReadFile(configPath) + b, err := os.ReadFile(configPath) h.AssertNil(t, err) h.AssertContains(t, string(b), stillTrustedBuilder) h.AssertNotContains(t, string(b), neverTrustedBuilder) @@ -263,12 +270,12 @@ func testTrustedBuilderCommand(t *testing.T, when spec.G, it spec.S) { when("builder is a suggested builder", func() { it("does nothing and reports that ", func() { - builder := "paketobuildpacks/builder:base" + builder := 
"paketobuildpacks/builder-jammy-base" command := commands.ConfigTrustedBuilder(logger, config.Config{}, configPath) command.SetArgs(append(args, builder)) err := command.Execute() - h.AssertError(t, err, fmt.Sprintf("Builder %s is a suggested builder, and is trusted by default", style.Symbol(builder))) + h.AssertError(t, err, fmt.Sprintf("Builder %s is a known trusted builder. Currently pack doesn't support making these builders untrusted", style.Symbol(builder))) }) }) }) diff --git a/internal/commands/create_builder.go b/internal/commands/create_builder.go index a713c32ea1..c5f5f3a28e 100644 --- a/internal/commands/create_builder.go +++ b/internal/commands/create_builder.go @@ -83,7 +83,7 @@ Creating a custom builder allows you to control what buildpacks are used and wha cmd.Flags().MarkHidden("buildpack-registry") } cmd.Flags().StringVarP(&flags.BuilderTomlPath, "config", "c", "", "Path to builder TOML file (required)") - cmd.Flags().BoolVar(&flags.Publish, "publish", false, "Publish to registry") + cmd.Flags().BoolVar(&flags.Publish, "publish", false, "Publish the builder directly to the container registry specified in , instead of the daemon.") cmd.Flags().StringVar(&flags.Policy, "pull-policy", "", "Pull policy to use. Accepted values are always, never, and if-not-present. 
The default is always") return cmd } diff --git a/internal/commands/create_builder_test.go b/internal/commands/create_builder_test.go index 9701bac94e..025ee959f6 100644 --- a/internal/commands/create_builder_test.go +++ b/internal/commands/create_builder_test.go @@ -2,7 +2,8 @@ package commands_test import ( "bytes" - "io/ioutil" + "errors" + "os" "path/filepath" "testing" @@ -39,7 +40,7 @@ func testCreateBuilderCommand(t *testing.T, when spec.G, it spec.S) { it.Before(func() { var err error - tmpDir, err = ioutil.TempDir("", "create-builder-test") + tmpDir, err = os.MkdirTemp("", "create-builder-test") h.AssertNil(t, err) builderConfigPath = filepath.Join(tmpDir, "builder.toml") cfg = config.Config{} @@ -56,7 +57,7 @@ func testCreateBuilderCommand(t *testing.T, when spec.G, it spec.S) { when("#CreateBuilder", func() { it("gives deprecation warning", func() { - h.AssertNil(t, ioutil.WriteFile(builderConfigPath, []byte(validConfig), 0666)) + h.AssertNil(t, os.WriteFile(builderConfigPath, []byte(validConfig), 0666)) mockClient.EXPECT().CreateBuilder(gomock.Any(), gomock.Any()).Return(nil) command.SetArgs([]string{ "some/builder", @@ -123,7 +124,7 @@ func testCreateBuilderCommand(t *testing.T, when spec.G, it spec.S) { when("warnings encountered in builder.toml", func() { it.Before(func() { - h.AssertNil(t, ioutil.WriteFile(builderConfigPath, []byte(` + h.AssertNil(t, os.WriteFile(builderConfigPath, []byte(` [[buildpacks]] id = "some.buildpack" `), 0666)) @@ -144,7 +145,7 @@ func testCreateBuilderCommand(t *testing.T, when spec.G, it spec.S) { when("uses --builder-config", func() { it.Before(func() { - h.AssertNil(t, ioutil.WriteFile(builderConfigPath, []byte(validConfig), 0666)) + h.AssertNil(t, os.WriteFile(builderConfigPath, []byte(validConfig), 0666)) }) it("errors with a descriptive message", func() { @@ -164,5 +165,21 @@ func testCreateBuilderCommand(t *testing.T, when spec.G, it spec.S) { h.AssertError(t, command.Execute(), "Please provide a builder config 
path") }) }) + + when("builder config has extensions but experimental isn't set in the config", func() { + it.Before(func() { + h.AssertNil(t, os.WriteFile(builderConfigPath, []byte(validConfigWithExtensions), 0666)) + }) + + it("errors", func() { + mockClient.EXPECT().CreateBuilder(gomock.Any(), gomock.Any()).Return(errors.New("builder config contains image extensions, but the lifecycle Platform API version (0.12) is older than 0.13; support for image extensions with Platform API < 0.13 is currently experimental")) + + command.SetArgs([]string{ + "some/builder", + "--config", builderConfigPath, + }) + h.AssertError(t, command.Execute(), "support for image extensions with Platform API < 0.13 is currently experimental") + }) + }) }) } diff --git a/internal/commands/download_sbom.go b/internal/commands/download_sbom.go index 06797ba019..1a2860716e 100644 --- a/internal/commands/download_sbom.go +++ b/internal/commands/download_sbom.go @@ -21,7 +21,7 @@ func DownloadSBOM( Use: "download ", Args: cobra.ExactArgs(1), Short: "Download SBoM from specified image", - Long: "Download layer containing Structured Bill of Materials (SBoM) from specified image", + Long: "Download layer containing structured Software Bill of Materials (SBoM) from specified image", Example: "pack sbom download buildpacksio/pack", RunE: logError(logger, func(cmd *cobra.Command, args []string) error { img := args[0] diff --git a/internal/commands/extension.go b/internal/commands/extension.go new file mode 100644 index 0000000000..bc4ab36476 --- /dev/null +++ b/internal/commands/extension.go @@ -0,0 +1,29 @@ +package commands + +import ( + "github.com/spf13/cobra" + + "github.com/buildpacks/pack/internal/config" + "github.com/buildpacks/pack/pkg/logging" +) + +func NewExtensionCommand(logger logging.Logger, cfg config.Config, client PackClient, packageConfigReader PackageConfigReader) *cobra.Command { + cmd := &cobra.Command{ + Use: "extension", + Aliases: []string{"extensions"}, + Short: "Interact 
with extensions", + RunE: nil, + } + + cmd.AddCommand(ExtensionInspect(logger, cfg, client)) + // client and packageConfigReader to be passed later on + cmd.AddCommand(ExtensionPackage(logger, cfg, client, packageConfigReader)) + // client to be passed later on + cmd.AddCommand(ExtensionNew(logger)) + cmd.AddCommand(ExtensionPull(logger, cfg, client)) + cmd.AddCommand(ExtensionRegister(logger, cfg, client)) + cmd.AddCommand(ExtensionYank(logger, cfg, client)) + + AddHelpFlag(cmd, "extension") + return cmd +} diff --git a/internal/commands/extension_inspect.go b/internal/commands/extension_inspect.go new file mode 100644 index 0000000000..42863f78bf --- /dev/null +++ b/internal/commands/extension_inspect.go @@ -0,0 +1,48 @@ +package commands + +import ( + "fmt" + + "github.com/spf13/cobra" + + "github.com/buildpacks/pack/internal/config" + "github.com/buildpacks/pack/internal/style" + "github.com/buildpacks/pack/pkg/client" + "github.com/buildpacks/pack/pkg/logging" +) + +func ExtensionInspect(logger logging.Logger, cfg config.Config, client PackClient) *cobra.Command { + cmd := &cobra.Command{ + Use: "inspect ", + Args: cobra.ExactArgs(1), + Short: "Show information about an extension", + Example: "pack extension inspect ", + RunE: logError(logger, func(cmd *cobra.Command, args []string) error { + extensionName := args[0] + return extensionInspect(logger, extensionName, cfg, client) + }), + } + AddHelpFlag(cmd, "inspect") + return cmd +} + +func extensionInspect(logger logging.Logger, extensionName string, _ config.Config, pack PackClient) error { + logger.Infof("Inspecting extension: %s\n", style.Symbol(extensionName)) + + inspectedExtensionsOutput, err := inspectAllExtensions( + pack, + client.InspectExtensionOptions{ + ExtensionName: extensionName, + Daemon: true, + }, + client.InspectExtensionOptions{ + ExtensionName: extensionName, + Daemon: false, + }) + if err != nil { + return fmt.Errorf("error writing extension output: %q", err) + } + + 
logger.Info(inspectedExtensionsOutput) + return nil +} diff --git a/internal/commands/extension_inspect_test.go b/internal/commands/extension_inspect_test.go new file mode 100644 index 0000000000..df2866e46a --- /dev/null +++ b/internal/commands/extension_inspect_test.go @@ -0,0 +1,186 @@ +package commands_test + +import ( + "bytes" + "fmt" + "testing" + + "github.com/golang/mock/gomock" + "github.com/heroku/color" + "github.com/pkg/errors" + "github.com/sclevine/spec" + "github.com/sclevine/spec/report" + "github.com/spf13/cobra" + + "github.com/buildpacks/pack/internal/commands" + "github.com/buildpacks/pack/internal/commands/testmocks" + "github.com/buildpacks/pack/internal/config" + "github.com/buildpacks/pack/pkg/buildpack" + "github.com/buildpacks/pack/pkg/client" + "github.com/buildpacks/pack/pkg/dist" + "github.com/buildpacks/pack/pkg/image" + "github.com/buildpacks/pack/pkg/logging" + h "github.com/buildpacks/pack/testhelpers" +) + +const extensionOutputSection = `Extension: + ID NAME VERSION HOMEPAGE + some/single-extension some 0.0.1 single-extension-homepage` + +const inspectExtensionOutputTemplate = `Inspecting extension: '%s' + +%s + +%s +` + +func TestExtensionInspectCommand(t *testing.T) { + color.Disable(true) + defer color.Disable(false) + spec.Run(t, "ExtensionInspectCommand", testExtensionInspectCommand, spec.Sequential(), spec.Report(report.Terminal{})) +} + +func testExtensionInspectCommand(t *testing.T, when spec.G, it spec.S) { + var ( + command *cobra.Command + logger logging.Logger + outBuf bytes.Buffer + mockController *gomock.Controller + mockClient *testmocks.MockPackClient + cfg config.Config + info *client.ExtensionInfo + assert = h.NewAssertionManager(t) + ) + + it.Before(func() { + mockController = gomock.NewController(t) + mockClient = testmocks.NewMockPackClient(mockController) + logger = logging.NewLogWithWriters(&outBuf, &outBuf) + + info = &client.ExtensionInfo{ + Extension: dist.ModuleInfo{ + ID: "some/single-extension", + 
Version: "0.0.1", + Name: "some", + Homepage: "single-extension-homepage", + }, + } + + command = commands.ExtensionInspect(logger, cfg, mockClient) + }) + + when("ExtensionInspect", func() { + when("inspecting an image", func() { + when("both remote and local image are present", func() { + it.Before(func() { + info.Location = buildpack.PackageLocator + + mockClient.EXPECT().InspectExtension(client.InspectExtensionOptions{ + ExtensionName: "test/extension", + Daemon: true, + }).Return(info, nil) + + mockClient.EXPECT().InspectExtension(client.InspectExtensionOptions{ + ExtensionName: "test/extension", + Daemon: false, + }).Return(info, nil) + }) + + it("succeeds", func() { + command.SetArgs([]string{"test/extension"}) + assert.Nil(command.Execute()) + + localOutputSection := fmt.Sprintf(inspectExtensionOutputTemplate, + "test/extension", + "LOCAL IMAGE:", + extensionOutputSection) + + remoteOutputSection := fmt.Sprintf("%s\n\n%s", + "REMOTE IMAGE:", + extensionOutputSection) + + assert.AssertTrimmedContains(outBuf.String(), localOutputSection) + assert.AssertTrimmedContains(outBuf.String(), remoteOutputSection) + }) + }) + + when("only a local image is present", func() { + it.Before(func() { + info.Location = buildpack.PackageLocator + + mockClient.EXPECT().InspectExtension(client.InspectExtensionOptions{ + ExtensionName: "only-local-test/extension", + Daemon: true, + }).Return(info, nil) + + mockClient.EXPECT().InspectExtension(client.InspectExtensionOptions{ + ExtensionName: "only-local-test/extension", + Daemon: false, + }).Return(nil, errors.Wrap(image.ErrNotFound, "remote image not found!")) + }) + + it("displays output for local image", func() { + command.SetArgs([]string{"only-local-test/extension"}) + assert.Nil(command.Execute()) + + expectedOutput := fmt.Sprintf(inspectExtensionOutputTemplate, + "only-local-test/extension", + "LOCAL IMAGE:", + extensionOutputSection) + + assert.AssertTrimmedContains(outBuf.String(), expectedOutput) + }) + }) + + 
when("only a remote image is present", func() { + it.Before(func() { + info.Location = buildpack.PackageLocator + + mockClient.EXPECT().InspectExtension(client.InspectExtensionOptions{ + ExtensionName: "only-remote-test/extension", + Daemon: false, + }).Return(info, nil) + + mockClient.EXPECT().InspectExtension(client.InspectExtensionOptions{ + ExtensionName: "only-remote-test/extension", + Daemon: true, + }).Return(nil, errors.Wrap(image.ErrNotFound, "local image not found!")) + }) + + it("displays output for remote image", func() { + command.SetArgs([]string{"only-remote-test/extension"}) + assert.Nil(command.Execute()) + + expectedOutput := fmt.Sprintf(inspectExtensionOutputTemplate, + "only-remote-test/extension", + "REMOTE IMAGE:", + extensionOutputSection) + + assert.AssertTrimmedContains(outBuf.String(), expectedOutput) + }) + }) + }) + }) + + when("failure cases", func() { + when("unable to inspect extension image", func() { + it.Before(func() { + mockClient.EXPECT().InspectExtension(client.InspectExtensionOptions{ + ExtensionName: "failure-case/extension", + Daemon: true, + }).Return(&client.ExtensionInfo{}, errors.Wrap(image.ErrNotFound, "unable to inspect local failure-case/extension")) + + mockClient.EXPECT().InspectExtension(client.InspectExtensionOptions{ + ExtensionName: "failure-case/extension", + Daemon: false, + }).Return(&client.ExtensionInfo{}, errors.Wrap(image.ErrNotFound, "unable to inspect remote failure-case/extension")) + }) + + it("errors", func() { + command.SetArgs([]string{"failure-case/extension"}) + err := command.Execute() + assert.Error(err) + }) + }) + }) +} diff --git a/internal/commands/extension_new.go b/internal/commands/extension_new.go new file mode 100644 index 0000000000..d91e424cb0 --- /dev/null +++ b/internal/commands/extension_new.go @@ -0,0 +1,36 @@ +package commands + +import ( + "github.com/spf13/cobra" + + "github.com/buildpacks/pack/pkg/logging" +) + +// ExtensionNewFlags define flags provided to the ExtensionNew 
command +type ExtensionNewFlags struct { + API string + Path string + Stacks []string + Version string +} + +// extensioncreator type to be added here and argument also to be added in the function + +// ExtensionNew generates the scaffolding of an extension +func ExtensionNew(logger logging.Logger) *cobra.Command { + cmd := &cobra.Command{ + Use: "new ", + Short: "Creates basic scaffolding of an extension", + Args: cobra.MatchAll(cobra.ExactArgs(1), cobra.OnlyValidArgs), + Example: "pack extension new ", + RunE: logError(logger, func(cmd *cobra.Command, args []string) error { + // logic will go here + return nil + }), + } + + // flags will go here + + AddHelpFlag(cmd, "new") + return cmd +} diff --git a/internal/commands/extension_package.go b/internal/commands/extension_package.go new file mode 100644 index 0000000000..57395c9f1f --- /dev/null +++ b/internal/commands/extension_package.go @@ -0,0 +1,184 @@ +package commands + +import ( + "context" + "os" + "path/filepath" + + "github.com/pkg/errors" + "github.com/spf13/cobra" + + pubbldpkg "github.com/buildpacks/pack/buildpackage" + "github.com/buildpacks/pack/internal/config" + "github.com/buildpacks/pack/internal/style" + "github.com/buildpacks/pack/pkg/client" + "github.com/buildpacks/pack/pkg/dist" + "github.com/buildpacks/pack/pkg/image" + "github.com/buildpacks/pack/pkg/logging" +) + +// ExtensionPackageFlags define flags provided to the ExtensionPackage command +type ExtensionPackageFlags struct { + PackageTomlPath string + Format string + Targets []string + Publish bool + Policy string + Path string + AdditionalTags []string +} + +// ExtensionPackager packages extensions +type ExtensionPackager interface { + PackageExtension(ctx context.Context, options client.PackageBuildpackOptions) error +} + +// ExtensionPackage packages (a) extension(s) into OCI format, based on a package config +func ExtensionPackage(logger logging.Logger, cfg config.Config, packager ExtensionPackager, packageConfigReader 
PackageConfigReader) *cobra.Command { + var flags ExtensionPackageFlags + cmd := &cobra.Command{ + Use: "package --config ", + Short: "Package an extension in OCI format", + Args: cobra.MatchAll(cobra.ExactArgs(1), cobra.OnlyValidArgs), + Example: "pack extension package /output/file.cnb --path /extracted/from/tgz/folder --format file\npack extension package registry/image-name --path /extracted/from/tgz/folder --format image --publish", + Long: "extension package allows users to package (an) extension(s) into OCI format, which can then to be hosted in " + + "image repositories or persisted on disk as a '.cnb' file." + + "Packaged extensions can be used as inputs to `pack build` (using the `--extension` flag), " + + "and they can be included in the configs used in `pack builder create` and `pack extension package`. For more " + + "on how to package an extension, see: https://buildpacks.io/docs/buildpack-author-guide/package-a-buildpack/.", + RunE: logError(logger, func(cmd *cobra.Command, args []string) error { + if err := validateExtensionPackageFlags(&flags); err != nil { + return err + } + + stringPolicy := flags.Policy + if stringPolicy == "" { + stringPolicy = cfg.PullPolicy + } + + pullPolicy, err := image.ParsePullPolicy(stringPolicy) + if err != nil { + return errors.Wrap(err, "parsing pull policy") + } + + exPackageCfg := pubbldpkg.DefaultExtensionConfig() + var exPath string + if flags.Path != "" { + if exPath, err = filepath.Abs(flags.Path); err != nil { + return errors.Wrap(err, "resolving extension path") + } + exPackageCfg.Extension.URI = exPath + } + relativeBaseDir := "" + if flags.PackageTomlPath != "" { + exPackageCfg, err = packageConfigReader.Read(flags.PackageTomlPath) + if err != nil { + return errors.Wrap(err, "reading config") + } + + relativeBaseDir, err = filepath.Abs(filepath.Dir(flags.PackageTomlPath)) + if err != nil { + return errors.Wrap(err, "getting absolute path for config") + } + } + name := args[0] + if flags.Format == 
client.FormatFile { + switch ext := filepath.Ext(name); ext { + case client.CNBExtension: + case "": + name += client.CNBExtension + default: + logger.Warnf("%s is not a valid extension for a packaged extension. Packaged extensions must have a %s extension", style.Symbol(ext), style.Symbol(client.CNBExtension)) + } + } + + targets, err := processExtensionPackageTargets(flags.Path, packageConfigReader, exPackageCfg) + if err != nil { + return err + } + + daemon := !flags.Publish && flags.Format == "" + multiArchCfg, err := processMultiArchitectureConfig(logger, flags.Targets, targets, daemon) + if err != nil { + return err + } + + if len(multiArchCfg.Targets()) == 0 { + logger.Infof("Pro tip: use --target flag OR [[targets]] in buildpack.toml to specify the desired platform (os/arch/variant); using os %s", style.Symbol(exPackageCfg.Platform.OS)) + } else { + // FIXME: Check if we can copy the config files during layers creation. + filesToClean, err := multiArchCfg.CopyConfigFiles(exPath, "extension") + if err != nil { + return err + } + defer clean(filesToClean) + } + + if err := packager.PackageExtension(cmd.Context(), client.PackageBuildpackOptions{ + RelativeBaseDir: relativeBaseDir, + Name: name, + Format: flags.Format, + Config: exPackageCfg, + Publish: flags.Publish, + PullPolicy: pullPolicy, + Targets: multiArchCfg.Targets(), + AdditionalTags: flags.AdditionalTags, + }); err != nil { + return err + } + + action := "created" + location := "docker daemon" + if flags.Publish { + action = "published" + location = "registry" + } + if flags.Format == client.FormatFile { + location = "file" + } + logger.Infof("Successfully %s package %s and saved to %s", action, style.Symbol(name), location) + return nil + }), + } + + // flags will be added here + cmd.Flags().StringVarP(&flags.PackageTomlPath, "config", "c", "", "Path to package TOML config") + cmd.Flags().StringVarP(&flags.Format, "format", "f", "", `Format to save package as ("image" or "file")`) + 
cmd.Flags().BoolVar(&flags.Publish, "publish", false, `Publish the extension directly to the container registry specified in , instead of the daemon (applies to "--format=image" only).`) + cmd.Flags().StringVar(&flags.Policy, "pull-policy", "", "Pull policy to use. Accepted values are always, never, and if-not-present. The default is always") + cmd.Flags().StringVarP(&flags.Path, "path", "p", "", "Path to the Extension that needs to be packaged") + cmd.Flags().StringSliceVarP(&flags.Targets, "target", "t", nil, + `Target platforms to build for. +Targets should be in the format '[os][/arch][/variant]:[distroname@osversion@anotherversion];[distroname@osversion]'. +- To specify two different architectures: '--target "linux/amd64" --target "linux/arm64"' +- To specify the distribution version: '--target "linux/arm/v6:ubuntu@14.04"' +- To specify multiple distribution versions: '--target "linux/arm/v6:ubuntu@14.04" --target "linux/arm/v6:ubuntu@16.04"' + `) + cmd.Flags().StringSliceVarP(&flags.AdditionalTags, "tag", "", nil, "Additional tags to push the output image to.\nTags should be in the format 'image:tag' or 'repository/image:tag'."+stringSliceHelp("tag")) + AddHelpFlag(cmd, "package") + return cmd +} + +func validateExtensionPackageFlags(p *ExtensionPackageFlags) error { + if p.Publish && p.Policy == image.PullNever.String() { + return errors.Errorf("--publish and --pull-policy=never cannot be used together. 
The --publish flag requires the use of remote images.") + } + return nil +} + +// processExtensionPackageTargets returns the list of targets defined on the extension.toml +func processExtensionPackageTargets(path string, packageConfigReader PackageConfigReader, bpPackageCfg pubbldpkg.Config) ([]dist.Target, error) { + var targets []dist.Target + + // Read targets from extension.toml + pathToExtensionToml := filepath.Join(path, "extension.toml") + if _, err := os.Stat(pathToExtensionToml); err == nil { + buildpackCfg, err := packageConfigReader.ReadBuildpackDescriptor(pathToExtensionToml) + if err != nil { + return nil, err + } + targets = buildpackCfg.Targets() + } + + return targets, nil +} diff --git a/internal/commands/extension_package_test.go b/internal/commands/extension_package_test.go new file mode 100644 index 0000000000..241c4dfc08 --- /dev/null +++ b/internal/commands/extension_package_test.go @@ -0,0 +1,387 @@ +package commands_test + +import ( + "bytes" + "fmt" + "path/filepath" + "testing" + + "github.com/heroku/color" + "github.com/pkg/errors" + "github.com/sclevine/spec" + "github.com/sclevine/spec/report" + "github.com/spf13/cobra" + + pubbldpkg "github.com/buildpacks/pack/buildpackage" + "github.com/buildpacks/pack/internal/commands" + "github.com/buildpacks/pack/internal/commands/fakes" + "github.com/buildpacks/pack/internal/config" + "github.com/buildpacks/pack/pkg/dist" + "github.com/buildpacks/pack/pkg/image" + "github.com/buildpacks/pack/pkg/logging" + h "github.com/buildpacks/pack/testhelpers" +) + +func TestExtensionPackageCommand(t *testing.T) { + color.Disable(true) + defer color.Disable(false) + spec.Run(t, "ExtensionPackageCommand", testExtensionPackageCommand, spec.Parallel(), spec.Report(report.Terminal{})) +} + +func testExtensionPackageCommand(t *testing.T, when spec.G, it spec.S) { + var ( + logger *logging.LogWithWriters + outBuf bytes.Buffer + ) + + it.Before(func() { + logger = logging.NewLogWithWriters(&outBuf, &outBuf) + }) + 
+ when("Package#Execute", func() { + var fakeExtensionPackager *fakes.FakeBuildpackPackager + + it.Before(func() { + fakeExtensionPackager = &fakes.FakeBuildpackPackager{} + }) + + when("valid package config", func() { + it("reads package config from the configured path", func() { + fakePackageConfigReader := fakes.NewFakePackageConfigReader() + expectedPackageConfigPath := "/path/to/some/file" + + cmd := packageExtensionCommand( + withExtensionPackageConfigReader(fakePackageConfigReader), + withExtensionPackageConfigPath(expectedPackageConfigPath), + ) + err := cmd.Execute() + h.AssertNil(t, err) + + h.AssertEq(t, fakePackageConfigReader.ReadCalledWithArg, expectedPackageConfigPath) + }) + + it("creates package with correct image name", func() { + cmd := packageExtensionCommand( + withExtensionImageName("my-specific-image"), + withExtensionPackager(fakeExtensionPackager), + ) + err := cmd.Execute() + h.AssertNil(t, err) + + receivedOptions := fakeExtensionPackager.CreateCalledWithOptions + h.AssertEq(t, receivedOptions.Name, "my-specific-image") + }) + + it("creates package with config returned by the reader", func() { + myConfig := pubbldpkg.Config{ + Extension: dist.BuildpackURI{URI: "test"}, + } + + cmd := packageExtensionCommand( + withExtensionPackager(fakeExtensionPackager), + withExtensionPackageConfigReader(fakes.NewFakePackageConfigReader(whereReadReturns(myConfig, nil))), + ) + err := cmd.Execute() + h.AssertNil(t, err) + + receivedOptions := fakeExtensionPackager.CreateCalledWithOptions + h.AssertEq(t, receivedOptions.Config, myConfig) + }) + + when("file format", func() { + when("extension is .cnb", func() { + it("does not modify the name", func() { + cmd := packageExtensionCommand(withExtensionPackager(fakeExtensionPackager)) + cmd.SetArgs([]string{"test.cnb", "-f", "file"}) + h.AssertNil(t, cmd.Execute()) + + receivedOptions := fakeExtensionPackager.CreateCalledWithOptions + h.AssertEq(t, receivedOptions.Name, "test.cnb") + }) + }) + when("extension 
is empty", func() { + it("appends .cnb to the name", func() { + cmd := packageExtensionCommand(withExtensionPackager(fakeExtensionPackager)) + cmd.SetArgs([]string{"test", "-f", "file"}) + h.AssertNil(t, cmd.Execute()) + + receivedOptions := fakeExtensionPackager.CreateCalledWithOptions + h.AssertEq(t, receivedOptions.Name, "test.cnb") + }) + }) + when("extension is something other than .cnb", func() { + it("does not modify the name but shows a warning", func() { + cmd := packageExtensionCommand(withExtensionPackager(fakeExtensionPackager), withExtensionLogger(logger)) + cmd.SetArgs([]string{"test.tar.gz", "-f", "file"}) + h.AssertNil(t, cmd.Execute()) + + receivedOptions := fakeExtensionPackager.CreateCalledWithOptions + h.AssertEq(t, receivedOptions.Name, "test.tar.gz") + h.AssertContains(t, outBuf.String(), "'.gz' is not a valid extension for a packaged extension. Packaged extensions must have a '.cnb' extension") + }) + }) + }) + + when("pull-policy", func() { + var pullPolicyArgs = []string{ + "some-image-name", + "--config", "/path/to/some/file", + "--pull-policy", + } + + it("pull-policy=never sets policy", func() { + cmd := packageExtensionCommand(withExtensionPackager(fakeExtensionPackager)) + cmd.SetArgs(append(pullPolicyArgs, "never")) + h.AssertNil(t, cmd.Execute()) + + receivedOptions := fakeExtensionPackager.CreateCalledWithOptions + h.AssertEq(t, receivedOptions.PullPolicy, image.PullNever) + }) + + it("pull-policy=always sets policy", func() { + cmd := packageExtensionCommand(withExtensionPackager(fakeExtensionPackager)) + cmd.SetArgs(append(pullPolicyArgs, "always")) + h.AssertNil(t, cmd.Execute()) + + receivedOptions := fakeExtensionPackager.CreateCalledWithOptions + h.AssertEq(t, receivedOptions.PullPolicy, image.PullAlways) + }) + }) + when("no --pull-policy", func() { + var pullPolicyArgs = []string{ + "some-image-name", + "--config", "/path/to/some/file", + } + + it("uses the default policy when no policy configured", func() { + cmd := 
packageExtensionCommand(withExtensionPackager(fakeExtensionPackager)) + cmd.SetArgs(pullPolicyArgs) + h.AssertNil(t, cmd.Execute()) + + receivedOptions := fakeExtensionPackager.CreateCalledWithOptions + h.AssertEq(t, receivedOptions.PullPolicy, image.PullAlways) + }) + it("uses the configured pull policy when policy configured", func() { + cmd := packageExtensionCommand( + withExtensionPackager(fakeExtensionPackager), + withExtensionClientConfig(config.Config{PullPolicy: "never"}), + ) + + cmd.SetArgs([]string{ + "some-image-name", + "--config", "/path/to/some/file", + }) + + err := cmd.Execute() + h.AssertNil(t, err) + + receivedOptions := fakeExtensionPackager.CreateCalledWithOptions + h.AssertEq(t, receivedOptions.PullPolicy, image.PullNever) + }) + }) + }) + + when("no config path is specified", func() { + when("no path is specified", func() { + it("creates a default config with the uri set to the current working directory", func() { + cmd := packageExtensionCommand(withExtensionPackager(fakeExtensionPackager)) + cmd.SetArgs([]string{"some-name"}) + h.AssertNil(t, cmd.Execute()) + + receivedOptions := fakeExtensionPackager.CreateCalledWithOptions + h.AssertEq(t, receivedOptions.Config.Extension.URI, ".") + }) + }) + }) + + when("a path is specified", func() { + when("no multi-platform", func() { + it("creates a default config with the appropriate path", func() { + cmd := packageExtensionCommand(withExtensionPackager(fakeExtensionPackager)) + cmd.SetArgs([]string{"some-name", "-p", ".."}) + h.AssertNil(t, cmd.Execute()) + bpPath, _ := filepath.Abs("..") + receivedOptions := fakeExtensionPackager.CreateCalledWithOptions + h.AssertEq(t, receivedOptions.Config.Extension.URI, bpPath) + }) + }) + + when("multi-platform", func() { + var targets []dist.Target + + when("single extension", func() { + it.Before(func() { + targets = []dist.Target{ + {OS: "linux", Arch: "amd64"}, + {OS: "windows", Arch: "amd64"}, + } + }) + + it("creates a multi-platform extension package", 
func() { + cmd := packageExtensionCommand(withExtensionPackager(fakeExtensionPackager)) + cmd.SetArgs([]string{"some-name", "-p", "some-path", "--target", "linux/amd64", "--target", "windows/amd64", "--format", "image", "--publish"}) + h.AssertNil(t, cmd.Execute()) + h.AssertEq(t, fakeExtensionPackager.CreateCalledWithOptions.Targets, targets) + }) + }) + }) + }) + + when("additional tags are specified", func() { + it("forwards additional tags to PackageExtension", func() { + expectedTags := []string{"additional-tag-1", "additional-tag-2"} + cmd := packageExtensionCommand( + withExtensionPackager(fakeExtensionPackager), + ) + cmd.SetArgs([]string{ + "my-specific-image", + "--tag", expectedTags[0], "--tag", expectedTags[1], + }) + err := cmd.Execute() + h.AssertNil(t, err) + + receivedOptions := fakeExtensionPackager.CreateCalledWithOptions + h.AssertEq(t, receivedOptions.AdditionalTags[0], expectedTags[0]) + h.AssertEq(t, receivedOptions.AdditionalTags[1], expectedTags[1]) + }) + }) + }) + + when("invalid flags", func() { + when("both --publish and --pull-policy never flags are specified", func() { + it("errors with a descriptive message", func() { + cmd := packageExtensionCommand() + cmd.SetArgs([]string{ + "some-image-name", "--config", "/path/to/some/file", + "--publish", + "--pull-policy", "never", + }) + + err := cmd.Execute() + h.AssertNotNil(t, err) + h.AssertError(t, err, "--publish and --pull-policy=never cannot be used together. 
The --publish flag requires the use of remote images.") + }) + }) + + it("logs an error and exits when package toml is invalid", func() { + expectedErr := errors.New("it went wrong") + + cmd := packageExtensionCommand( + withExtensionLogger(logger), + withExtensionPackageConfigReader( + fakes.NewFakePackageConfigReader(whereReadReturns(pubbldpkg.Config{}, expectedErr)), + ), + ) + + err := cmd.Execute() + h.AssertNotNil(t, err) + + h.AssertContains(t, outBuf.String(), fmt.Sprintf("ERROR: reading config: %s", expectedErr)) + }) + + when("package-config is specified", func() { + it("errors with a descriptive message", func() { + cmd := packageExtensionCommand() + cmd.SetArgs([]string{"some-name", "--package-config", "some-path"}) + + err := cmd.Execute() + h.AssertError(t, err, "unknown flag: --package-config") + }) + }) + + when("--pull-policy unknown-policy", func() { + it("fails to run", func() { + cmd := packageExtensionCommand() + cmd.SetArgs([]string{ + "some-image-name", + "--config", "/path/to/some/file", + "--pull-policy", + "unknown-policy", + }) + + h.AssertError(t, cmd.Execute(), "parsing pull policy") + }) + }) + + when("--target cannot be parsed", func() { + it("errors with a descriptive message", func() { + cmd := packageCommand() + cmd.SetArgs([]string{ + "some-image-name", "--config", "/path/to/some/file", + "--target", "something/wrong", "--publish", + }) + + err := cmd.Execute() + h.AssertNotNil(t, err) + h.AssertError(t, err, "unknown target: 'something/wrong'") + }) + }) + }) +} + +type packageExtensionCommandConfig struct { + logger *logging.LogWithWriters + packageConfigReader *fakes.FakePackageConfigReader + extensionPackager *fakes.FakeBuildpackPackager + clientConfig config.Config + imageName string + configPath string +} + +type packageExtensionCommandOption func(config *packageExtensionCommandConfig) + +func packageExtensionCommand(ops ...packageExtensionCommandOption) *cobra.Command { + config := &packageExtensionCommandConfig{ + logger: 
logging.NewLogWithWriters(&bytes.Buffer{}, &bytes.Buffer{}), + packageConfigReader: fakes.NewFakePackageConfigReader(), + extensionPackager: &fakes.FakeBuildpackPackager{}, + clientConfig: config.Config{}, + imageName: "some-image-name", + configPath: "/path/to/some/file", + } + + for _, op := range ops { + op(config) + } + + cmd := commands.ExtensionPackage(config.logger, config.clientConfig, config.extensionPackager, config.packageConfigReader) + cmd.SetArgs([]string{config.imageName, "--config", config.configPath}) + + return cmd +} + +func withExtensionLogger(logger *logging.LogWithWriters) packageExtensionCommandOption { + return func(config *packageExtensionCommandConfig) { + config.logger = logger + } +} + +func withExtensionPackageConfigReader(reader *fakes.FakePackageConfigReader) packageExtensionCommandOption { + return func(config *packageExtensionCommandConfig) { + config.packageConfigReader = reader + } +} + +func withExtensionPackager(creator *fakes.FakeBuildpackPackager) packageExtensionCommandOption { + return func(config *packageExtensionCommandConfig) { + config.extensionPackager = creator + } +} + +func withExtensionImageName(name string) packageExtensionCommandOption { + return func(config *packageExtensionCommandConfig) { + config.imageName = name + } +} + +func withExtensionPackageConfigPath(path string) packageExtensionCommandOption { + return func(config *packageExtensionCommandConfig) { + config.configPath = path + } +} + +func withExtensionClientConfig(clientCfg config.Config) packageExtensionCommandOption { + return func(config *packageExtensionCommandConfig) { + config.clientConfig = clientCfg + } +} diff --git a/internal/commands/extension_pull.go b/internal/commands/extension_pull.go new file mode 100644 index 0000000000..cbfe5d91b1 --- /dev/null +++ b/internal/commands/extension_pull.go @@ -0,0 +1,31 @@ +package commands + +import ( + "github.com/spf13/cobra" + + "github.com/buildpacks/pack/internal/config" + 
"github.com/buildpacks/pack/pkg/logging" +) + +// ExtensionPullFlags consist of flags applicable to the `extension pull` command +type ExtensionPullFlags struct { + // ExtensionRegistry is the name of the extension registry to use to search for + ExtensionRegistry string +} + +// ExtensionPull pulls an extension and stores it locally +func ExtensionPull(logger logging.Logger, cfg config.Config, pack PackClient) *cobra.Command { + cmd := &cobra.Command{ + Use: "pull ", + Args: cobra.ExactArgs(1), + Short: "Pull an extension from a registry and store it locally", + Example: "pack extension pull ", + RunE: logError(logger, func(cmd *cobra.Command, args []string) error { + // logic will be added here + return nil + }), + } + // flags will be added here + AddHelpFlag(cmd, "pull") + return cmd +} diff --git a/internal/commands/extension_register.go b/internal/commands/extension_register.go new file mode 100644 index 0000000000..5e0eef0818 --- /dev/null +++ b/internal/commands/extension_register.go @@ -0,0 +1,28 @@ +package commands + +import ( + "github.com/spf13/cobra" + + "github.com/buildpacks/pack/internal/config" + "github.com/buildpacks/pack/pkg/logging" +) + +type ExtensionRegisterFlags struct { + ExtensionRegistry string +} + +func ExtensionRegister(logger logging.Logger, cfg config.Config, pack PackClient) *cobra.Command { + cmd := &cobra.Command{ + Use: "register ", + Args: cobra.ExactArgs(1), + Short: "Register an extension to a registry", + Example: "pack extension register ", + RunE: logError(logger, func(cmd *cobra.Command, args []string) error { + // logic will be added here + return nil + }), + } + // flags will be added here + AddHelpFlag(cmd, "register") + return cmd +} diff --git a/internal/commands/extension_test.go b/internal/commands/extension_test.go new file mode 100644 index 0000000000..396d772905 --- /dev/null +++ b/internal/commands/extension_test.go @@ -0,0 +1,51 @@ +package commands_test + +import ( + "bytes" + "testing" + + 
"github.com/golang/mock/gomock" + "github.com/sclevine/spec" + "github.com/sclevine/spec/report" + "github.com/spf13/cobra" + + "github.com/buildpacks/pack/internal/commands" + "github.com/buildpacks/pack/internal/commands/fakes" + "github.com/buildpacks/pack/internal/commands/testmocks" + "github.com/buildpacks/pack/internal/config" + "github.com/buildpacks/pack/pkg/logging" + h "github.com/buildpacks/pack/testhelpers" +) + +func TestExtensionCommand(t *testing.T) { + spec.Run(t, "ExtensionCommand", testExtensionCommand, spec.Parallel(), spec.Report(report.Terminal{})) +} + +func testExtensionCommand(t *testing.T, when spec.G, it spec.S) { + var ( + cmd *cobra.Command + logger logging.Logger + outBuf bytes.Buffer + mockClient *testmocks.MockPackClient + ) + + it.Before(func() { + logger = logging.NewLogWithWriters(&outBuf, &outBuf) + mockController := gomock.NewController(t) + mockClient = testmocks.NewMockPackClient(mockController) + cmd = commands.NewExtensionCommand(logger, config.Config{}, mockClient, fakes.NewFakePackageConfigReader()) + cmd.SetOut(logging.GetWriterForLevel(logger, logging.InfoLevel)) + }) + + when("extension", func() { + it("prints help text", func() { + cmd.SetArgs([]string{}) + h.AssertNil(t, cmd.Execute()) + output := outBuf.String() + h.AssertContains(t, output, "Interact with extensions") + for _, command := range []string{"Usage", "package", "register", "yank", "pull", "inspect"} { + h.AssertContains(t, output, command) + } + }) + }) +} diff --git a/internal/commands/extension_yank.go b/internal/commands/extension_yank.go new file mode 100644 index 0000000000..cc6e8947b0 --- /dev/null +++ b/internal/commands/extension_yank.go @@ -0,0 +1,30 @@ +package commands + +import ( + "github.com/spf13/cobra" + + "github.com/buildpacks/pack/internal/config" + "github.com/buildpacks/pack/pkg/logging" +) + +type ExtensionYankFlags struct { + ExtensionRegistry string + Undo bool +} + +func ExtensionYank(logger logging.Logger, cfg config.Config, pack 
PackClient) *cobra.Command { + cmd := &cobra.Command{ + Use: "yank <extension-id-and-version>", + Args: cobra.ExactArgs(1), + Short: "Yank an extension from a registry", + Example: "pack yank <extension-example>", + RunE: logError(logger, func(cmd *cobra.Command, args []string) error { + // logic will be added here + return nil + }), + } + // flags will be added here + AddHelpFlag(cmd, "yank") + + return cmd +} diff --git a/internal/commands/fakes/fake_extension_packager.go b/internal/commands/fakes/fake_extension_packager.go new file mode 100644 index 0000000000..8cbb5c5eae --- /dev/null +++ b/internal/commands/fakes/fake_extension_packager.go @@ -0,0 +1,13 @@ +package fakes + +import ( + "context", + + "github.com/buildpacks/pack/pkg/client" +) + +func (c *FakeBuildpackPackager) PackageExtension(ctx context.Context, opts client.PackageBuildpackOptions) error { + c.CreateCalledWithOptions = opts + + return nil +} diff --git a/internal/commands/fakes/fake_package_config_reader.go b/internal/commands/fakes/fake_package_config_reader.go index e0610a33da..d5386dad3f 100644 --- a/internal/commands/fakes/fake_package_config_reader.go +++ b/internal/commands/fakes/fake_package_config_reader.go @@ -2,12 +2,18 @@ package fakes import ( pubbldpkg "github.com/buildpacks/pack/buildpackage" + "github.com/buildpacks/pack/pkg/dist" ) type FakePackageConfigReader struct { ReadCalledWithArg string ReadReturnConfig pubbldpkg.Config ReadReturnError error + + ReadBuildpackDescriptorCalledWithArg string + ReadBuildpackDescriptorReturn dist.BuildpackDescriptor + ReadExtensionDescriptorReturn dist.ExtensionDescriptor + ReadBuildpackDescriptorReturnError error } func (r *FakePackageConfigReader) Read(path string) (pubbldpkg.Config, error) { @@ -16,6 +22,12 @@ func (r *FakePackageConfigReader) Read(path string) (pubbldpkg.Config, error) { return r.ReadReturnConfig, r.ReadReturnError } +func (r *FakePackageConfigReader) ReadBuildpackDescriptor(path string) (dist.BuildpackDescriptor, error) { + r.ReadBuildpackDescriptorCalledWithArg =
path + + return r.ReadBuildpackDescriptorReturn, r.ReadBuildpackDescriptorReturnError +} + func NewFakePackageConfigReader(ops ...func(*FakePackageConfigReader)) *FakePackageConfigReader { fakePackageConfigReader := &FakePackageConfigReader{ ReadReturnConfig: pubbldpkg.Config{}, diff --git a/internal/commands/inspect_builder_test.go b/internal/commands/inspect_builder_test.go index ffa5a86b7a..f30ae975d6 100644 --- a/internal/commands/inspect_builder_test.go +++ b/internal/commands/inspect_builder_test.go @@ -213,9 +213,9 @@ func testInspectBuilderCommand(t *testing.T, when spec.G, it spec.S) { pack config default-builder `) - assert.Matches(outBuf.String(), regexp.MustCompile(`Paketo Buildpacks:\s+'paketobuildpacks/builder:base'`)) - assert.Matches(outBuf.String(), regexp.MustCompile(`Paketo Buildpacks:\s+'paketobuildpacks/builder:full'`)) - assert.Matches(outBuf.String(), regexp.MustCompile(`Heroku:\s+'heroku/buildpacks:20'`)) + assert.Matches(outBuf.String(), regexp.MustCompile(`Paketo Buildpacks:\s+'paketobuildpacks/builder-jammy-base'`)) + assert.Matches(outBuf.String(), regexp.MustCompile(`Paketo Buildpacks:\s+'paketobuildpacks/builder-jammy-full'`)) + assert.Matches(outBuf.String(), regexp.MustCompile(`Heroku:\s+'heroku/builder:24'`)) }) }) diff --git a/internal/commands/inspect_buildpack.go b/internal/commands/inspect_buildpack.go index 91fb480267..4a2213cf62 100644 --- a/internal/commands/inspect_buildpack.go +++ b/internal/commands/inspect_buildpack.go @@ -167,7 +167,7 @@ func determinePrefix(name string, locator buildpack.LocatorType, daemon bool) st return "UNKNOWN SOURCE" } -func buildpacksOutput(bps []dist.BuildpackInfo) (string, error) { +func buildpacksOutput(bps []dist.ModuleInfo) (string, error) { buf := &bytes.Buffer{} tabWriter := new(tabwriter.Writer).Init(buf, writerMinWidth, writerPadChar, buildpacksTabWidth, writerPadChar, writerFlags) @@ -189,7 +189,7 @@ func buildpacksOutput(bps []dist.BuildpackInfo) (string, error) { } // Unable to easily 
convert format makes this feel like a poor solution... -func detectionOrderOutput(order dist.Order, layers dist.BuildpackLayers, maxDepth int) (string, error) { +func detectionOrderOutput(order dist.Order, layers dist.ModuleLayers, maxDepth int) (string, error) { buf := strings.Builder{} tabWriter := new(tabwriter.Writer).Init(&buf, writerMinWidth, writerTabWidth, defaultTabWidth, writerPadChar, writerFlags) buildpackSet := map[client.BuildpackInfoKey]bool{} @@ -204,7 +204,7 @@ func detectionOrderOutput(order dist.Order, layers dist.BuildpackLayers, maxDept } // Recursively generate output for every buildpack in an order. -func orderOutputRecurrence(w io.Writer, prefix string, order dist.Order, layers dist.BuildpackLayers, buildpackSet map[client.BuildpackInfoKey]bool, curDepth, maxDepth int) error { +func orderOutputRecurrence(w io.Writer, prefix string, order dist.Order, layers dist.ModuleLayers, buildpackSet map[client.BuildpackInfoKey]bool, curDepth, maxDepth int) error { // exit if maxDepth is exceeded if validMaxDepth(maxDepth) && maxDepth <= curDepth { return nil @@ -276,7 +276,7 @@ func displayGroup(w io.Writer, prefix string, groupCount int, last bool) error { return err } -func displayBuildpack(w io.Writer, prefix string, entry dist.BuildpackRef, visited bool, last bool) error { +func displayBuildpack(w io.Writer, prefix string, entry dist.ModuleRef, visited bool, last bool) error { var optional string if entry.Optional { optional = "(optional)" diff --git a/internal/commands/inspect_buildpack_test.go b/internal/commands/inspect_buildpack_test.go index 200290d0f6..5485758455 100644 --- a/internal/commands/inspect_buildpack_test.go +++ b/internal/commands/inspect_buildpack_test.go @@ -59,7 +59,7 @@ func testInspectBuildpackCommand(t *testing.T, when spec.G, it spec.S) { complexInfo = &client.BuildpackInfo{ BuildpackMetadata: buildpack.Metadata{ - BuildpackInfo: dist.BuildpackInfo{ + ModuleInfo: dist.ModuleInfo{ ID: "some/top-buildpack", Version: "0.0.1", 
Homepage: "top-buildpack-homepage", @@ -69,7 +69,7 @@ func testInspectBuildpackCommand(t *testing.T, when spec.G, it spec.S) { {ID: "io.buildpacks.stacks.second-stack", Mixins: []string{"mixin1", "mixin2"}}, }, }, - Buildpacks: []dist.BuildpackInfo{ + Buildpacks: []dist.ModuleInfo{ { ID: "some/first-inner-buildpack", Version: "1.0.0", @@ -94,9 +94,9 @@ func testInspectBuildpackCommand(t *testing.T, when spec.G, it spec.S) { }, Order: dist.Order{ { - Group: []dist.BuildpackRef{ + Group: []dist.ModuleRef{ { - BuildpackInfo: dist.BuildpackInfo{ + ModuleInfo: dist.ModuleInfo{ ID: "some/top-buildpack", Version: "0.0.1", Homepage: "top-buildpack-homepage", @@ -106,7 +106,7 @@ func testInspectBuildpackCommand(t *testing.T, when spec.G, it spec.S) { }, }, }, - BuildpackLayers: dist.BuildpackLayers{ + BuildpackLayers: dist.ModuleLayers{ "some/first-inner-buildpack": { "1.0.0": { API: apiVersion, @@ -116,16 +116,16 @@ func testInspectBuildpackCommand(t *testing.T, when spec.G, it spec.S) { }, Order: dist.Order{ { - Group: []dist.BuildpackRef{ + Group: []dist.ModuleRef{ { - BuildpackInfo: dist.BuildpackInfo{ + ModuleInfo: dist.ModuleInfo{ ID: "some/first-inner-buildpack", Version: "1.0.0", }, Optional: false, }, { - BuildpackInfo: dist.BuildpackInfo{ + ModuleInfo: dist.ModuleInfo{ ID: "some/third-inner-buildpack", Version: "3.0.0", }, @@ -134,9 +134,9 @@ func testInspectBuildpackCommand(t *testing.T, when spec.G, it spec.S) { }, }, { - Group: []dist.BuildpackRef{ + Group: []dist.ModuleRef{ { - BuildpackInfo: dist.BuildpackInfo{ + ModuleInfo: dist.ModuleInfo{ ID: "some/third-inner-buildpack", Version: "3.0.0", }, @@ -176,16 +176,16 @@ func testInspectBuildpackCommand(t *testing.T, when spec.G, it spec.S) { API: apiVersion, Order: dist.Order{ { - Group: []dist.BuildpackRef{ + Group: []dist.ModuleRef{ { - BuildpackInfo: dist.BuildpackInfo{ + ModuleInfo: dist.ModuleInfo{ ID: "some/first-inner-buildpack", Version: "1.0.0", }, Optional: false, }, { - BuildpackInfo: 
dist.BuildpackInfo{ + ModuleInfo: dist.ModuleInfo{ ID: "some/second-inner-buildpack", Version: "2.0.0", }, @@ -194,9 +194,9 @@ func testInspectBuildpackCommand(t *testing.T, when spec.G, it spec.S) { }, }, { - Group: []dist.BuildpackRef{ + Group: []dist.ModuleRef{ { - BuildpackInfo: dist.BuildpackInfo{ + ModuleInfo: dist.ModuleInfo{ ID: "some/first-inner-buildpack", Version: "1.0.0", }, @@ -214,7 +214,7 @@ func testInspectBuildpackCommand(t *testing.T, when spec.G, it spec.S) { simpleInfo = &client.BuildpackInfo{ BuildpackMetadata: buildpack.Metadata{ - BuildpackInfo: dist.BuildpackInfo{ + ModuleInfo: dist.ModuleInfo{ ID: "some/single-buildpack", Version: "0.0.1", Homepage: "single-homepage-homepace", @@ -224,7 +224,7 @@ func testInspectBuildpackCommand(t *testing.T, when spec.G, it spec.S) { {ID: "io.buildpacks.stacks.second-stack", Mixins: []string{"mixin1", "mixin2"}}, }, }, - Buildpacks: []dist.BuildpackInfo{ + Buildpacks: []dist.ModuleInfo{ { ID: "some/single-buildpack", Version: "0.0.1", @@ -238,9 +238,9 @@ func testInspectBuildpackCommand(t *testing.T, when spec.G, it spec.S) { }, Order: dist.Order{ { - Group: []dist.BuildpackRef{ + Group: []dist.ModuleRef{ { - BuildpackInfo: dist.BuildpackInfo{ + ModuleInfo: dist.ModuleInfo{ ID: "some/single-buildpack", Version: "0.0.1", Homepage: "single-buildpack-homepage", @@ -250,7 +250,7 @@ func testInspectBuildpackCommand(t *testing.T, when spec.G, it spec.S) { }, }, }, - BuildpackLayers: dist.BuildpackLayers{ + BuildpackLayers: dist.ModuleLayers{ "some/single-buildpack": { "0.0.1": { API: apiVersion, @@ -421,7 +421,7 @@ func testInspectBuildpackCommand(t *testing.T, when spec.G, it spec.S) { assert.TrimmedEq(outBuf.String(), expectedOutput) }) }) - when("uri is a http or https location", func() { + when("uri is an http or https location", func() { it.Before(func() { simpleInfo.Location = buildpack.URILocator }) diff --git a/internal/commands/inspect_extension.go b/internal/commands/inspect_extension.go new file mode 
100644 index 0000000000..7963802128 --- /dev/null +++ b/internal/commands/inspect_extension.go @@ -0,0 +1,93 @@ +package commands + +import ( + "bytes" + "fmt" + "strings" + "text/tabwriter" + "text/template" + + strs "github.com/buildpacks/pack/internal/strings" + "github.com/buildpacks/pack/pkg/buildpack" + "github.com/buildpacks/pack/pkg/client" + "github.com/buildpacks/pack/pkg/dist" +) + +const inspectExtensionTemplate = ` +{{ .Location -}}: + +Extension: +{{ .Extension }} +` + +func inspectAllExtensions(client PackClient, options ...client.InspectExtensionOptions) (string, error) { + buf := bytes.NewBuffer(nil) + errArray := []error{} + for _, option := range options { + nextResult, err := client.InspectExtension(option) + if err != nil { + errArray = append(errArray, err) + continue + } + + prefix := determinePrefix(option.ExtensionName, nextResult.Location, option.Daemon) + + output, err := inspectExtensionOutput(nextResult, prefix) + if err != nil { + return "", err + } + + if _, err := buf.Write(output); err != nil { + return "", err + } + + if nextResult.Location != buildpack.PackageLocator { + return buf.String(), nil + } + } + if len(errArray) == len(options) { + return "", joinErrors(errArray) + } + return buf.String(), nil +} + +func inspectExtensionOutput(info *client.ExtensionInfo, prefix string) (output []byte, err error) { + tpl := template.Must(template.New("inspect-extension").Parse(inspectExtensionTemplate)) + exOutput, err := extensionsOutput(info.Extension) + if err != nil { + return []byte{}, fmt.Errorf("error writing extension output: %q", err) + } + + buf := bytes.NewBuffer(nil) + err = tpl.Execute(buf, &struct { + Location string + Extension string + }{ + Location: prefix, + Extension: exOutput, + }) + + if err != nil { + return []byte{}, fmt.Errorf("error templating extension output template: %q", err) + } + return buf.Bytes(), nil +} + +func extensionsOutput(ex dist.ModuleInfo) (string, error) { + buf := &bytes.Buffer{} + + tabWriter 
:= new(tabwriter.Writer).Init(buf, writerMinWidth, writerPadChar, buildpacksTabWidth, writerPadChar, writerFlags) + if _, err := fmt.Fprint(tabWriter, " ID\tNAME\tVERSION\tHOMEPAGE\n"); err != nil { + return "", err + } + + if _, err := fmt.Fprintf(tabWriter, " %s\t%s\t%s\t%s\n", ex.ID, strs.ValueOrDefault(ex.Name, "-"), ex.Version, strs.ValueOrDefault(ex.Homepage, "-")); err != nil { + return "", err + } + + if err := tabWriter.Flush(); err != nil { + return "", err + } + + return strings.TrimSuffix(buf.String(), "\n"), nil +} diff --git a/internal/commands/inspect_image_test.go b/internal/commands/inspect_image_test.go index 983b112a50..a50189f18a 100644 --- a/internal/commands/inspect_image_test.go +++ b/internal/commands/inspect_image_test.go @@ -5,7 +5,7 @@ import ( "errors" "testing" - "github.com/buildpacks/lifecycle/platform" + "github.com/buildpacks/lifecycle/platform/files" "github.com/golang/mock/gomock" "github.com/heroku/color" "github.com/sclevine/spec" @@ -32,18 +32,38 @@ var ( expectedLocalImageInfo = &client.ImageInfo{ StackID: "local.image.stack", Buildpacks: nil, - Base: platform.RunImageMetadata{}, + Base: files.RunImageForRebase{}, BOM: nil, - Stack: platform.StackMetadata{}, + Stack: files.Stack{}, Processes: client.ProcessDetails{}, } expectedRemoteImageInfo = &client.ImageInfo{ StackID: "remote.image.stack", Buildpacks: nil, - Base: platform.RunImageMetadata{}, + Base: files.RunImageForRebase{}, BOM: nil, - Stack: platform.StackMetadata{}, + Stack: files.Stack{}, + Processes: client.ProcessDetails{}, + } + + expectedLocalImageWithExtensionInfo = &client.ImageInfo{ + StackID: "local.image.stack", + Buildpacks: nil, + Extensions: nil, + Base: files.RunImageForRebase{}, + BOM: nil, + Stack: files.Stack{}, + Processes: client.ProcessDetails{}, + } + + expectedRemoteImageWithExtensionInfo = &client.ImageInfo{ + StackID: "remote.image.stack", + Buildpacks: nil, + Extensions: nil, + Base: files.RunImageForRebase{}, + BOM: nil, + Stack: 
files.Stack{}, Processes: client.ProcessDetails{}, } ) @@ -83,7 +103,6 @@ func testInspectImageCommand(t *testing.T, when spec.G, it spec.S) { mockClient.EXPECT().InspectImage("some/image", true).Return(expectedLocalImageInfo, nil) mockClient.EXPECT().InspectImage("some/image", false).Return(expectedRemoteImageInfo, nil) - command := commands.InspectImage(logger, inspectImageWriterFactory, cfg, mockClient) command.SetArgs([]string{"some/image"}) err := command.Execute() @@ -100,6 +119,28 @@ func testInspectImageCommand(t *testing.T, when spec.G, it spec.S) { assert.ContainsF(outBuf.String(), "REMOTE:\n%s", expectedRemoteImageDisplay) }) + it("passes output of local and remote builders to correct writer for extension", func() { + inspectImageWriter := newDefaultInspectImageWriter() + inspectImageWriterFactory := newImageWriterFactory(inspectImageWriter) + + mockClient.EXPECT().InspectImage("some/image", true).Return(expectedLocalImageWithExtensionInfo, nil) + mockClient.EXPECT().InspectImage("some/image", false).Return(expectedRemoteImageWithExtensionInfo, nil) + command := commands.InspectImage(logger, inspectImageWriterFactory, cfg, mockClient) + command.SetArgs([]string{"some/image"}) + err := command.Execute() + assert.Nil(err) + + assert.Equal(inspectImageWriter.ReceivedInfoForLocal, expectedLocalImageWithExtensionInfo) + assert.Equal(inspectImageWriter.ReceivedInfoForRemote, expectedRemoteImageWithExtensionInfo) + assert.Equal(inspectImageWriter.RecievedGeneralInfo, expectedSharedInfo) + assert.Equal(inspectImageWriter.ReceivedErrorForLocal, nil) + assert.Equal(inspectImageWriter.ReceivedErrorForRemote, nil) + assert.Equal(inspectImageWriterFactory.ReceivedForKind, "human-readable") + + assert.ContainsF(outBuf.String(), "LOCAL:\n%s", expectedLocalImageDisplay) + assert.ContainsF(outBuf.String(), "REMOTE:\n%s", expectedRemoteImageDisplay) + }) + it("passes configured run image mirrors to the writer", func() { cfg = config.Config{ RunImages: []config.RunImage{{ 
@@ -129,6 +170,35 @@ func testInspectImageCommand(t *testing.T, when spec.G, it spec.S) { assert.Equal(inspectImageWriter.RecievedGeneralInfo.RunImageMirrors, cfg.RunImages) }) + it("passes configured run image mirrors to the writer", func() { + cfg = config.Config{ + RunImages: []config.RunImage{{ + Image: "image-name", + Mirrors: []string{"first-mirror", "second-mirror2"}, + }, + { + Image: "image-name2", + Mirrors: []string{"other-mirror"}, + }, + }, + TrustedBuilders: nil, + Registries: nil, + } + + inspectImageWriter := newDefaultInspectImageWriter() + inspectImageWriterFactory := newImageWriterFactory(inspectImageWriter) + + mockClient.EXPECT().InspectImage("some/image", true).Return(expectedLocalImageWithExtensionInfo, nil) + mockClient.EXPECT().InspectImage("some/image", false).Return(expectedRemoteImageWithExtensionInfo, nil) + + command := commands.InspectImage(logger, inspectImageWriterFactory, cfg, mockClient) + command.SetArgs([]string{"some/image"}) + err := command.Execute() + assert.Nil(err) + + assert.Equal(inspectImageWriter.RecievedGeneralInfo.RunImageMirrors, cfg.RunImages) + }) + when("error cases", func() { when("client returns an error when inspecting", func() { it("passes errors to the Writer", func() { @@ -183,6 +253,24 @@ func testInspectImageCommand(t *testing.T, when spec.G, it spec.S) { assert.ErrorWithMessage(err, "unable to print") }) }) + + when("Print returns fails for extension", func() { + it("returns the error", func() { + printError := errors.New("unable to print") + inspectImageWriter := &fakes.FakeInspectImageWriter{ + ErrorForPrint: printError, + } + inspectImageWriterFactory := newImageWriterFactory(inspectImageWriter) + + mockClient.EXPECT().InspectImage("some/image", true).Return(expectedLocalImageWithExtensionInfo, nil) + mockClient.EXPECT().InspectImage("some/image", false).Return(expectedRemoteImageWithExtensionInfo, nil) + + command := commands.InspectImage(logger, inspectImageWriterFactory, cfg, mockClient) + 
command.SetArgs([]string{"some/image"}) + err := command.Execute() + assert.ErrorWithMessage(err, "unable to print") + }) + }) }) }) } diff --git a/internal/commands/list_trusted_builders_test.go b/internal/commands/list_trusted_builders_test.go index 7844c214ba..f07b447e73 100644 --- a/internal/commands/list_trusted_builders_test.go +++ b/internal/commands/list_trusted_builders_test.go @@ -2,7 +2,6 @@ package commands_test import ( "bytes" - "io/ioutil" "os" "testing" @@ -37,7 +36,7 @@ func testListTrustedBuildersCommand(t *testing.T, when spec.G, it spec.S) { logger = logging.NewLogWithWriters(&outBuf, &outBuf) command = commands.ListTrustedBuilders(logger, config.Config{}) - tempPackHome, err = ioutil.TempDir("", "pack-home") + tempPackHome, err = os.MkdirTemp("", "pack-home") h.AssertNil(t, err) h.AssertNil(t, os.Setenv("PACK_HOME", tempPackHome)) }) @@ -65,11 +64,13 @@ func testListTrustedBuildersCommand(t *testing.T, when spec.G, it spec.S) { h.AssertNotContains(t, outBuf.String(), builderName) h.AssertContainsAllInOrder(t, outBuf, - "gcr.io/buildpacks/builder:v1", - "heroku/buildpacks:20", - "paketobuildpacks/builder:base", - "paketobuildpacks/builder:full", - "paketobuildpacks/builder:tiny", + "gcr.io/buildpacks/builder:google-22", + "heroku/builder:20", + "heroku/builder:22", + "heroku/builder:24", + "paketobuildpacks/builder-jammy-base", + "paketobuildpacks/builder-jammy-full", + "paketobuildpacks/builder-jammy-tiny", ) listTrustedBuildersCommand := commands.ListTrustedBuilders( @@ -85,12 +86,14 @@ func testListTrustedBuildersCommand(t *testing.T, when spec.G, it spec.S) { h.AssertContainsAllInOrder(t, outBuf, - "gcr.io/buildpacks/builder:v1", + "gcr.io/buildpacks/builder:google-22", builderName, - "heroku/buildpacks:20", - "paketobuildpacks/builder:base", - "paketobuildpacks/builder:full", - "paketobuildpacks/builder:tiny", + "heroku/builder:20", + "heroku/builder:22", + "heroku/builder:24", + "paketobuildpacks/builder-jammy-base", + 
"paketobuildpacks/builder-jammy-full", + "paketobuildpacks/builder-jammy-tiny", ) }) }) diff --git a/internal/commands/manifest.go b/internal/commands/manifest.go new file mode 100644 index 0000000000..e4cb5f8a71 --- /dev/null +++ b/internal/commands/manifest.go @@ -0,0 +1,33 @@ +package commands + +import ( + "github.com/spf13/cobra" + + "github.com/buildpacks/pack/pkg/logging" +) + +func NewManifestCommand(logger logging.Logger, client PackClient) *cobra.Command { + cmd := &cobra.Command{ + Use: "manifest", + Short: "Interact with OCI image indexes", + Long: `An image index is a higher-level manifest which points to specific image manifests and is ideal for one or more platforms; see: https://github.com/opencontainers/image-spec/ for more details + +'pack manifest' commands provide tooling to create, update, or delete images indexes or push them to a remote registry. +'pack' will save a local copy of the image index at '$PACK_HOME/manifests'; the environment variable 'XDG_RUNTIME_DIR' +can be set to override the location, allowing manifests to be edited locally before being pushed to a registry. + +These commands are experimental. 
For more information, consult the RFC which can be found at https://github.com/buildpacks/rfcs/blob/main/text/0124-pack-manifest-list-commands.md`, + RunE: nil, + } + + cmd.AddCommand(ManifestCreate(logger, client)) + cmd.AddCommand(ManifestAdd(logger, client)) + cmd.AddCommand(ManifestAnnotate(logger, client)) + cmd.AddCommand(ManifestDelete(logger, client)) + cmd.AddCommand(ManifestInspect(logger, client)) + cmd.AddCommand(ManifestPush(logger, client)) + cmd.AddCommand(ManifestRemove(logger, client)) + + AddHelpFlag(cmd, "manifest") + return cmd +} diff --git a/internal/commands/manifest_add.go b/internal/commands/manifest_add.go new file mode 100644 index 0000000000..e5fefe6250 --- /dev/null +++ b/internal/commands/manifest_add.go @@ -0,0 +1,27 @@ +package commands + +import ( + "github.com/spf13/cobra" + + "github.com/buildpacks/pack/pkg/client" + "github.com/buildpacks/pack/pkg/logging" +) + +// ManifestAdd adds a new image to a manifest list (image index). +func ManifestAdd(logger logging.Logger, pack PackClient) *cobra.Command { + cmd := &cobra.Command{ + Use: "add [OPTIONS] <manifest-list> <manifest> [flags]", + Args: cobra.MatchAll(cobra.ExactArgs(2), cobra.OnlyValidArgs), + Short: "Add an image to a manifest list.", + Example: `pack manifest add my-image-index my-image:some-arch`, + RunE: logError(logger, func(cmd *cobra.Command, args []string) (err error) { + return pack.AddManifest(cmd.Context(), client.ManifestAddOptions{ + IndexRepoName: args[0], + RepoName: args[1], + }) + }), + } + + AddHelpFlag(cmd, "add") + return cmd +} diff --git a/internal/commands/manifest_add_test.go b/internal/commands/manifest_add_test.go new file mode 100644 index 0000000000..fcb352f7e1 --- /dev/null +++ b/internal/commands/manifest_add_test.go @@ -0,0 +1,85 @@ +package commands_test + +import ( + "bytes" + "testing" + + "github.com/golang/mock/gomock" + "github.com/heroku/color" + "github.com/sclevine/spec" + "github.com/sclevine/spec/report" + "github.com/spf13/cobra" + + 
"github.com/buildpacks/pack/internal/commands" + "github.com/buildpacks/pack/internal/commands/testmocks" + "github.com/buildpacks/pack/pkg/client" + "github.com/buildpacks/pack/pkg/logging" + h "github.com/buildpacks/pack/testhelpers" +) + +func TestManifestAddCommand(t *testing.T) { + color.Disable(true) + defer color.Disable(false) + + spec.Run(t, "Commands", testManifestAddCommand, spec.Parallel(), spec.Report(report.Terminal{})) +} + +func testManifestAddCommand(t *testing.T, when spec.G, it spec.S) { + var ( + command *cobra.Command + logger *logging.LogWithWriters + outBuf bytes.Buffer + mockController *gomock.Controller + mockClient *testmocks.MockPackClient + ) + + it.Before(func() { + logger = logging.NewLogWithWriters(&outBuf, &outBuf) + mockController = gomock.NewController(t) + mockClient = testmocks.NewMockPackClient(mockController) + command = commands.ManifestAdd(logger, mockClient) + }) + + when("args are valid", func() { + var indexRepoName string + it.Before(func() { + indexRepoName = h.NewRandomIndexRepoName() + }) + + when("index exists", func() { + when("no extra flag is provided", func() { + it.Before(func() { + mockClient.EXPECT().AddManifest( + gomock.Any(), + gomock.Eq(client.ManifestAddOptions{ + IndexRepoName: indexRepoName, + RepoName: "busybox:1.36-musl", + }), + ).Return(nil) + }) + + it("should call add manifest operation with the given arguments", func() { + command.SetArgs([]string{indexRepoName, "busybox:1.36-musl"}) + err := command.Execute() + h.AssertNil(t, err) + }) + }) + + when("--help", func() { + it("should have help flag", func() { + command.SetArgs([]string{"--help"}) + h.AssertNil(t, command.Execute()) + }) + }) + }) + }) + + when("args are invalid", func() { + it("error when missing mandatory arguments", func() { + command.SetArgs([]string{"some-index"}) + err := command.Execute() + h.AssertNotNil(t, err) + h.AssertError(t, err, "accepts 2 arg(s), received 1") + }) + }) +} diff --git 
a/internal/commands/manifest_annotate.go b/internal/commands/manifest_annotate.go new file mode 100644 index 0000000000..2a9346edab --- /dev/null +++ b/internal/commands/manifest_annotate.go @@ -0,0 +1,55 @@ +package commands + +import ( + "fmt" + + "github.com/spf13/cobra" + + "github.com/buildpacks/pack/pkg/client" + "github.com/buildpacks/pack/pkg/logging" +) + +// ManifestAnnotateFlags define flags provided to the ManifestAnnotate +type ManifestAnnotateFlags struct { + os, arch, variant string + annotations map[string]string +} + +// ManifestAnnotate modifies a manifest list and updates the platform information within the index for an image in the list. +func ManifestAnnotate(logger logging.Logger, pack PackClient) *cobra.Command { + var flags ManifestAnnotateFlags + cmd := &cobra.Command{ + Use: "annotate [OPTIONS] <manifest-list> <manifest> [flags]", + Args: cobra.MatchAll(cobra.ExactArgs(2), cobra.OnlyValidArgs), + Short: "Add or update information about an entry in a manifest list.", + Example: `pack manifest annotate my-image-index my-image:some-arch --arch some-other-arch`, + RunE: logError(logger, func(cmd *cobra.Command, args []string) (err error) { + if err = validateManifestAnnotateFlags(flags); err != nil { + return err + } + return pack.AnnotateManifest(cmd.Context(), client.ManifestAnnotateOptions{ + IndexRepoName: args[0], + RepoName: args[1], + OS: flags.os, + OSArch: flags.arch, + OSVariant: flags.variant, + Annotations: flags.annotations, + }) + }), + } + + cmd.Flags().StringVar(&flags.os, "os", "", "Set the OS") + cmd.Flags().StringVar(&flags.arch, "arch", "", "Set the architecture") + cmd.Flags().StringVar(&flags.variant, "variant", "", "Set the architecture variant") + cmd.Flags().StringToStringVar(&flags.annotations, "annotations", make(map[string]string, 0), "Set an `annotation` for the specified image") + + AddHelpFlag(cmd, "annotate") + return cmd +} + +func validateManifestAnnotateFlags(flags ManifestAnnotateFlags) error { + if flags.os == "" && flags.arch == "" 
&& flags.variant == "" && len(flags.annotations) == 0 { + return fmt.Errorf("one of --os, --arch, or --variant is required") + } + return nil +} diff --git a/internal/commands/manifest_annotate_test.go b/internal/commands/manifest_annotate_test.go new file mode 100644 index 0000000000..e461edf1cb --- /dev/null +++ b/internal/commands/manifest_annotate_test.go @@ -0,0 +1,166 @@ +package commands_test + +import ( + "bytes" + "testing" + + "github.com/golang/mock/gomock" + "github.com/heroku/color" + "github.com/sclevine/spec" + "github.com/sclevine/spec/report" + "github.com/spf13/cobra" + + "github.com/buildpacks/pack/internal/commands" + "github.com/buildpacks/pack/internal/commands/testmocks" + "github.com/buildpacks/pack/pkg/client" + "github.com/buildpacks/pack/pkg/logging" + h "github.com/buildpacks/pack/testhelpers" +) + +func TestManifestAnnotationsCommand(t *testing.T) { + color.Disable(true) + defer color.Disable(false) + + spec.Run(t, "Commands", testManifestAnnotateCommand, spec.Random(), spec.Report(report.Terminal{})) +} + +func testManifestAnnotateCommand(t *testing.T, when spec.G, it spec.S) { + var ( + command *cobra.Command + logger *logging.LogWithWriters + outBuf bytes.Buffer + mockController *gomock.Controller + mockClient *testmocks.MockPackClient + ) + + it.Before(func() { + logger = logging.NewLogWithWriters(&outBuf, &outBuf) + mockController = gomock.NewController(t) + mockClient = testmocks.NewMockPackClient(mockController) + + command = commands.ManifestAnnotate(logger, mockClient) + }) + + when("args are valid", func() { + var ( + indexRepoName string + repoName string + ) + it.Before(func() { + indexRepoName = h.NewRandomIndexRepoName() + repoName = "busybox@sha256:6457d53fb065d6f250e1504b9bc42d5b6c65941d57532c072d929dd0628977d0" + }) + + when("index exists", func() { + when("--os is provided", func() { + it.Before(func() { + mockClient.EXPECT(). 
+ AnnotateManifest( + gomock.Any(), + gomock.Eq(client.ManifestAnnotateOptions{ + IndexRepoName: indexRepoName, + RepoName: repoName, + OS: "linux", + Annotations: map[string]string{}, + }), + ). + Return(nil) + }) + + it("should annotate images with given flags", func() { + command.SetArgs([]string{indexRepoName, repoName, "--os", "linux"}) + h.AssertNilE(t, command.Execute()) + }) + }) + + when("--arch is provided", func() { + it.Before(func() { + mockClient.EXPECT(). + AnnotateManifest( + gomock.Any(), + gomock.Eq(client.ManifestAnnotateOptions{ + IndexRepoName: indexRepoName, + RepoName: repoName, + OSArch: "amd64", + Annotations: map[string]string{}, + }), + ). + Return(nil) + }) + + it("should annotate images with given flags", func() { + command.SetArgs([]string{indexRepoName, repoName, "--arch", "amd64"}) + h.AssertNilE(t, command.Execute()) + }) + }) + + when("--variant is provided", func() { + it.Before(func() { + mockClient.EXPECT(). + AnnotateManifest( + gomock.Any(), + gomock.Eq(client.ManifestAnnotateOptions{ + IndexRepoName: indexRepoName, + RepoName: repoName, + OSVariant: "V6", + Annotations: map[string]string{}, + }), + ). + Return(nil) + }) + + it("should annotate images with given flags", func() { + command.SetArgs([]string{indexRepoName, repoName, "--variant", "V6"}) + h.AssertNilE(t, command.Execute()) + }) + }) + + when("--annotations are provided", func() { + it.Before(func() { + mockClient.EXPECT(). + AnnotateManifest( + gomock.Any(), + gomock.Eq(client.ManifestAnnotateOptions{ + IndexRepoName: indexRepoName, + RepoName: repoName, + Annotations: map[string]string{"foo": "bar"}, + }), + ). 
+ Return(nil) + }) + + it("should annotate images with given flags", func() { + command.SetArgs([]string{indexRepoName, repoName, "--annotations", "foo=bar"}) + h.AssertNilE(t, command.Execute()) + }) + }) + + when("--help", func() { + it("should have help flag", func() { + command.SetArgs([]string{"--help"}) + h.AssertNilE(t, command.Execute()) + }) + }) + }) + }) + + when("args are invalid", func() { + it("errors a message when no options are provided", func() { + command.SetArgs([]string{"foo", "bar"}) + h.AssertError(t, command.Execute(), "one of --os, --arch, or --variant is required") + }) + + it("errors when missing mandatory arguments", func() { + command.SetArgs([]string{"some-index"}) + err := command.Execute() + h.AssertNotNil(t, err) + h.AssertError(t, err, "accepts 2 arg(s), received 1") + }) + + it("errors when annotations are invalid", func() { + command.SetArgs([]string{"some-index", "some-manifest", "--annotations", "some-key"}) + err := command.Execute() + h.AssertEq(t, err.Error(), `invalid argument "some-key" for "--annotations" flag: some-key must be formatted as key=value`) + }) + }) +} diff --git a/internal/commands/manifest_create.go b/internal/commands/manifest_create.go new file mode 100644 index 0000000000..6162388fa4 --- /dev/null +++ b/internal/commands/manifest_create.go @@ -0,0 +1,67 @@ +package commands + +import ( + "fmt" + "strings" + + "github.com/spf13/cobra" + + "github.com/buildpacks/pack/pkg/client" + "github.com/buildpacks/pack/pkg/logging" +) + +// ManifestCreateFlags define flags provided to the ManifestCreate +type ManifestCreateFlags struct { + format string + insecure, publish bool +} + +// ManifestCreate creates an image index for a multi-arch image +func ManifestCreate(logger logging.Logger, pack PackClient) *cobra.Command { + var flags ManifestCreateFlags + + cmd := &cobra.Command{ + Use: "create [ ... 
] [flags]", + Args: cobra.MatchAll(cobra.MinimumNArgs(2), cobra.OnlyValidArgs), + Short: "Create a new manifest list.", + Example: `pack manifest create my-image-index my-image:some-arch my-image:some-other-arch`, + Long: `Create a new manifest list (e.g., for multi-arch images) which will be stored locally for manipulating images within the index`, + RunE: logError(logger, func(cmd *cobra.Command, args []string) error { + format, err := parseFormatFlag(strings.ToLower(flags.format)) + if err != nil { + return err + } + + if err = validateCreateManifestFlags(flags); err != nil { + return err + } + + return pack.CreateManifest( + cmd.Context(), + client.CreateManifestOptions{ + IndexRepoName: args[0], + RepoNames: args[1:], + Format: format, + Insecure: flags.insecure, + Publish: flags.publish, + }, + ) + }), + } + + cmdFlags := cmd.Flags() + + cmdFlags.StringVarP(&flags.format, "format", "f", "oci", "Media type to use when saving the image index. Accepted values are: oci, docker") + cmdFlags.BoolVar(&flags.insecure, "insecure", false, "When pushing the index to a registry, do not use TLS encryption or certificate verification; use with --publish") + cmdFlags.BoolVar(&flags.publish, "publish", false, "Publish directly to a registry without saving a local copy") + + AddHelpFlag(cmd, "create") + return cmd +} + +func validateCreateManifestFlags(flags ManifestCreateFlags) error { + if flags.insecure && !flags.publish { + return fmt.Errorf("insecure flag requires the publish flag") + } + return nil +} diff --git a/internal/commands/manifest_create_test.go b/internal/commands/manifest_create_test.go new file mode 100644 index 0000000000..03713e98fe --- /dev/null +++ b/internal/commands/manifest_create_test.go @@ -0,0 +1,168 @@ +package commands_test + +import ( + "bytes" + "testing" + + "github.com/golang/mock/gomock" + "github.com/google/go-containerregistry/pkg/v1/types" + "github.com/heroku/color" + "github.com/sclevine/spec" + "github.com/sclevine/spec/report" + 
"github.com/spf13/cobra" + + "github.com/buildpacks/pack/internal/commands" + "github.com/buildpacks/pack/internal/commands/testmocks" + "github.com/buildpacks/pack/pkg/client" + "github.com/buildpacks/pack/pkg/logging" + h "github.com/buildpacks/pack/testhelpers" +) + +func TestManifestCreateCommand(t *testing.T) { + color.Disable(true) + defer color.Disable(false) + + spec.Run(t, "Commands", testManifestCreateCommand, spec.Parallel(), spec.Report(report.Terminal{})) +} + +func testManifestCreateCommand(t *testing.T, when spec.G, it spec.S) { + var ( + command *cobra.Command + logger *logging.LogWithWriters + outBuf bytes.Buffer + mockController *gomock.Controller + mockClient *testmocks.MockPackClient + ) + + it.Before(func() { + logger = logging.NewLogWithWriters(&outBuf, &outBuf) + mockController = gomock.NewController(t) + mockClient = testmocks.NewMockPackClient(mockController) + + command = commands.ManifestCreate(logger, mockClient) + }) + + when("args are valid", func() { + var indexRepoName string + it.Before(func() { + indexRepoName = h.NewRandomIndexRepoName() + }) + + when("index exists", func() { + when("no extra flags are provided", func() { + it.Before(func() { + mockClient. + EXPECT(). + CreateManifest(gomock.Any(), + client.CreateManifestOptions{ + IndexRepoName: indexRepoName, + RepoNames: []string{"some-manifest"}, + Format: types.OCIImageIndex, + Insecure: false, + Publish: false, + }, + ).Return(nil) + }) + + it("should call create operation with default configuration", func() { + command.SetArgs([]string{indexRepoName, "some-manifest"}) + h.AssertNil(t, command.Execute()) + }) + }) + + when("--format is docker", func() { + it.Before(func() { + mockClient. + EXPECT(). 
+ CreateManifest(gomock.Any(), + client.CreateManifestOptions{ + IndexRepoName: indexRepoName, + RepoNames: []string{"some-manifest"}, + Format: types.DockerManifestList, + Insecure: false, + Publish: false, + }, + ).Return(nil) + }) + + it("should call create operation with docker media type", func() { + command.SetArgs([]string{indexRepoName, "some-manifest", "-f", "docker"}) + h.AssertNil(t, command.Execute()) + }) + }) + + when("--publish", func() { + when("--insecure", func() { + it.Before(func() { + mockClient. + EXPECT(). + CreateManifest(gomock.Any(), + client.CreateManifestOptions{ + IndexRepoName: indexRepoName, + RepoNames: []string{"some-manifest"}, + Format: types.OCIImageIndex, + Insecure: true, + Publish: true, + }, + ).Return(nil) + }) + + it("should call create operation with publish and insecure", func() { + command.SetArgs([]string{indexRepoName, "some-manifest", "--publish", "--insecure"}) + h.AssertNil(t, command.Execute()) + }) + }) + + when("no --insecure", func() { + it.Before(func() { + mockClient. + EXPECT(). 
+ CreateManifest(gomock.Any(), + client.CreateManifestOptions{ + IndexRepoName: indexRepoName, + RepoNames: []string{"some-manifest"}, + Format: types.OCIImageIndex, + Insecure: false, + Publish: true, + }, + ).Return(nil) + }) + + it("should call create operation with publish", func() { + command.SetArgs([]string{indexRepoName, "some-manifest", "--publish"}) + h.AssertNil(t, command.Execute()) + }) + }) + }) + + when("--help", func() { + it("should have help flag", func() { + command.SetArgs([]string{"--help"}) + h.AssertNilE(t, command.Execute()) + h.AssertEq(t, outBuf.String(), "") + }) + }) + }) + }) + + when("invalid arguments", func() { + when("--insecure is used without publish", func() { + it("errors a message", func() { + command.SetArgs([]string{"something", "some-manifest", "--insecure"}) + h.AssertError(t, command.Execute(), "insecure flag requires the publish flag") + }) + }) + + when("invalid media type", func() { + var format string + it.Before(func() { + format = "invalid" + }) + + it("errors a message", func() { + command.SetArgs([]string{"some-index", "some-manifest", "--format", format}) + h.AssertNotNil(t, command.Execute()) + }) + }) + }) +} diff --git a/internal/commands/manifest_inspect.go b/internal/commands/manifest_inspect.go new file mode 100644 index 0000000000..aaa61710f9 --- /dev/null +++ b/internal/commands/manifest_inspect.go @@ -0,0 +1,28 @@ +package commands + +import ( + "errors" + + "github.com/spf13/cobra" + + "github.com/buildpacks/pack/pkg/logging" +) + +// ManifestInspect shows the manifest information stored locally +func ManifestInspect(logger logging.Logger, pack PackClient) *cobra.Command { + cmd := &cobra.Command{ + Use: "inspect ", + Args: cobra.MatchAll(cobra.ExactArgs(1), cobra.OnlyValidArgs), + Short: "Display information about a manifest list.", + Example: `pack manifest inspect my-image-index`, + RunE: logError(logger, func(cmd *cobra.Command, args []string) error { + if args[0] == "" { + return errors.New("'' is 
required") + } + return pack.InspectManifest(args[0]) + }), + } + + AddHelpFlag(cmd, "inspect") + return cmd +} diff --git a/internal/commands/manifest_inspect_test.go b/internal/commands/manifest_inspect_test.go new file mode 100644 index 0000000000..ddb63f783d --- /dev/null +++ b/internal/commands/manifest_inspect_test.go @@ -0,0 +1,69 @@ +package commands_test + +import ( + "bytes" + "testing" + + "github.com/golang/mock/gomock" + "github.com/heroku/color" + "github.com/sclevine/spec" + "github.com/sclevine/spec/report" + "github.com/spf13/cobra" + + "github.com/buildpacks/pack/internal/commands" + "github.com/buildpacks/pack/internal/commands/testmocks" + "github.com/buildpacks/pack/pkg/logging" + h "github.com/buildpacks/pack/testhelpers" +) + +func TestManifestInspectCommand(t *testing.T) { + color.Disable(true) + defer color.Disable(false) + + spec.Run(t, "Commands", testManifestInspectCommand, spec.Random(), spec.Report(report.Terminal{})) +} + +func testManifestInspectCommand(t *testing.T, when spec.G, it spec.S) { + var ( + command *cobra.Command + logger *logging.LogWithWriters + outBuf bytes.Buffer + mockController *gomock.Controller + mockClient *testmocks.MockPackClient + ) + + it.Before(func() { + logger = logging.NewLogWithWriters(&outBuf, &outBuf) + mockController = gomock.NewController(t) + mockClient = testmocks.NewMockPackClient(mockController) + command = commands.ManifestInspect(logger, mockClient) + }) + + when("args are valid", func() { + var indexRepoName string + it.Before(func() { + indexRepoName = h.NewRandomIndexRepoName() + }) + + when("index exists", func() { + when("no extra flags are provided", func() { + it.Before(func() { + mockClient.EXPECT().InspectManifest(indexRepoName).Return(nil) + }) + + it("should call inspect operation with the given index repo name", func() { + command.SetArgs([]string{indexRepoName}) + h.AssertNil(t, command.Execute()) + }) + }) + + when("--help", func() { + it("should have help flag", func() { + 
command.SetArgs([]string{"--help"}) + h.AssertNilE(t, command.Execute()) + h.AssertEq(t, outBuf.String(), "") + }) + }) + }) + }) +} diff --git a/internal/commands/manifest_push.go b/internal/commands/manifest_push.go new file mode 100644 index 0000000000..1b5b1e1309 --- /dev/null +++ b/internal/commands/manifest_push.go @@ -0,0 +1,50 @@ +package commands + +import ( + "strings" + + "github.com/spf13/cobra" + + "github.com/buildpacks/pack/pkg/client" + "github.com/buildpacks/pack/pkg/logging" +) + +// ManifestPushFlags define flags provided to the ManifestPush +type ManifestPushFlags struct { + format string + insecure, purge bool +} + +// ManifestPush pushes a manifest list to a remote registry. +func ManifestPush(logger logging.Logger, pack PackClient) *cobra.Command { + var flags ManifestPushFlags + + cmd := &cobra.Command{ + Use: "push [OPTIONS] [flags]", + Args: cobra.MatchAll(cobra.ExactArgs(1), cobra.OnlyValidArgs), + Short: "Push a manifest list to a registry.", + Example: `pack manifest push my-image-index`, + Long: `manifest push' pushes a manifest list to a registry. +Use other 'pack manifest' commands to prepare the manifest list locally, then use the push command.`, + RunE: logError(logger, func(cmd *cobra.Command, args []string) error { + format, err := parseFormatFlag(strings.ToLower(flags.format)) + if err != nil { + return err + } + + return pack.PushManifest(client.PushManifestOptions{ + IndexRepoName: args[0], + Format: format, + Insecure: flags.insecure, + Purge: flags.purge, + }) + }), + } + + cmd.Flags().StringVarP(&flags.format, "format", "f", "oci", "Media type to use when saving the image index. 
Accepted values are: oci, docker") + cmd.Flags().BoolVar(&flags.insecure, "insecure", false, "When pushing the index to a registry, do not use TLS encryption or certificate verification") + cmd.Flags().BoolVar(&flags.purge, "purge", false, "Delete the manifest list from local storage if pushing succeeds") + + AddHelpFlag(cmd, "push") + return cmd +} diff --git a/internal/commands/manifest_push_test.go b/internal/commands/manifest_push_test.go new file mode 100644 index 0000000000..d82d67c777 --- /dev/null +++ b/internal/commands/manifest_push_test.go @@ -0,0 +1,159 @@ +package commands_test + +import ( + "bytes" + "testing" + + "github.com/golang/mock/gomock" + "github.com/google/go-containerregistry/pkg/v1/types" + "github.com/heroku/color" + "github.com/pkg/errors" + "github.com/sclevine/spec" + "github.com/sclevine/spec/report" + "github.com/spf13/cobra" + + "github.com/buildpacks/pack/internal/commands" + "github.com/buildpacks/pack/internal/commands/testmocks" + "github.com/buildpacks/pack/pkg/client" + "github.com/buildpacks/pack/pkg/logging" + h "github.com/buildpacks/pack/testhelpers" +) + +func TestManifestPushCommand(t *testing.T) { + color.Disable(true) + defer color.Disable(false) + + spec.Run(t, "Commands", testManifestPushCommand, spec.Random(), spec.Report(report.Terminal{})) +} + +func testManifestPushCommand(t *testing.T, when spec.G, it spec.S) { + var ( + command *cobra.Command + logger *logging.LogWithWriters + outBuf bytes.Buffer + mockController *gomock.Controller + mockClient *testmocks.MockPackClient + ) + + it.Before(func() { + logger = logging.NewLogWithWriters(&outBuf, &outBuf) + mockController = gomock.NewController(t) + mockClient = testmocks.NewMockPackClient(mockController) + + command = commands.ManifestPush(logger, mockClient) + }) + + when("args are valid", func() { + var indexRepoName string + it.Before(func() { + indexRepoName = h.NewRandomIndexRepoName() + }) + + when("index exists", func() { + when("no extra flag is provided", 
func() { + it.Before(func() { + mockClient.EXPECT(). + PushManifest(gomock.Eq(client.PushManifestOptions{ + IndexRepoName: indexRepoName, + Format: types.OCIImageIndex, + Insecure: false, + Purge: false, + })).Return(nil) + }) + + it("should call push operation with default configuration", func() { + command.SetArgs([]string{indexRepoName}) + h.AssertNil(t, command.Execute()) + }) + }) + + when("--format is docker", func() { + it.Before(func() { + mockClient.EXPECT(). + PushManifest(gomock.Eq(client.PushManifestOptions{ + IndexRepoName: indexRepoName, + Format: types.DockerManifestList, + Insecure: false, + Purge: false, + })).Return(nil) + }) + + it("should call push operation with docker media type", func() { + command.SetArgs([]string{indexRepoName, "-f", "docker"}) + h.AssertNil(t, command.Execute()) + }) + }) + + when("--purge", func() { + it.Before(func() { + mockClient.EXPECT(). + PushManifest(gomock.Eq(client.PushManifestOptions{ + IndexRepoName: indexRepoName, + Format: types.OCIImageIndex, + Insecure: false, + Purge: true, + })).Return(nil) + }) + + it("should call push operation with purge enabled", func() { + command.SetArgs([]string{indexRepoName, "--purge"}) + h.AssertNil(t, command.Execute()) + }) + }) + + when("--insecure", func() { + it.Before(func() { + mockClient.EXPECT(). + PushManifest(gomock.Eq(client.PushManifestOptions{ + IndexRepoName: indexRepoName, + Format: types.OCIImageIndex, + Insecure: true, + Purge: false, + })).Return(nil) + }) + + it("should call push operation with insecure enabled", func() { + command.SetArgs([]string{indexRepoName, "--insecure"}) + h.AssertNil(t, command.Execute()) + }) + }) + + when("--help", func() { + it("should have help flag", func() { + command.SetArgs([]string{"--help"}) + h.AssertNilE(t, command.Execute()) + h.AssertEq(t, outBuf.String(), "") + }) + }) + }) + + when("index doesn't exist", func() { + it.Before(func() { + mockClient. + EXPECT(). + PushManifest( + gomock.Any(), + ). + AnyTimes(). 
+ Return(errors.New("unable to push Image")) + }) + + it("should return an error when index not exists locally", func() { + command.SetArgs([]string{"some-index"}) + err := command.Execute() + h.AssertNotNil(t, err) + }) + }) + }) + + when("args are invalid", func() { + when("--format is invalid", func() { + it("should return an error when index not exists locally", func() { + command.SetArgs([]string{"some-index", "-f", "bad-media-type"}) + err := command.Execute() + h.AssertNotNil(t, err) + h.AssertError(t, err, "invalid media type format") + }) + }) + }) +} diff --git a/internal/commands/manifest_remove.go b/internal/commands/manifest_remove.go new file mode 100644 index 0000000000..590bb84e33 --- /dev/null +++ b/internal/commands/manifest_remove.go @@ -0,0 +1,26 @@ +package commands + +import ( + "github.com/spf13/cobra" + + "github.com/buildpacks/pack/pkg/logging" +) + +// ManifestDelete deletes one or more manifest lists from local storage +func ManifestDelete(logger logging.Logger, pack PackClient) *cobra.Command { + cmd := &cobra.Command{ + Use: "remove [manifest-list] [manifest-list...]", + Args: cobra.MatchAll(cobra.MinimumNArgs(1), cobra.OnlyValidArgs), + Short: "Remove one or more manifest lists from local storage", + Example: `pack manifest remove my-image-index`, + RunE: logError(logger, func(cmd *cobra.Command, args []string) error { + if err := pack.DeleteManifest(args); err != nil { + return err + } + return nil + }), + } + + AddHelpFlag(cmd, "remove") + return cmd +} diff --git a/internal/commands/manifest_remove_test.go b/internal/commands/manifest_remove_test.go new file mode 100644 index 0000000000..4b0bd61b51 --- /dev/null +++ b/internal/commands/manifest_remove_test.go @@ -0,0 +1,84 @@ +package commands_test + +import ( + "bytes" + "testing" + + "github.com/golang/mock/gomock" + "github.com/heroku/color" + "github.com/pkg/errors" + "github.com/sclevine/spec" + "github.com/sclevine/spec/report" + "github.com/spf13/cobra" + + 
"github.com/buildpacks/pack/internal/commands" + "github.com/buildpacks/pack/internal/commands/testmocks" + "github.com/buildpacks/pack/pkg/logging" + h "github.com/buildpacks/pack/testhelpers" +) + +func TestManifestDeleteCommand(t *testing.T) { + color.Disable(true) + defer color.Disable(false) + + spec.Run(t, "Commands", testManifestDeleteCommand, spec.Random(), spec.Report(report.Terminal{})) +} + +func testManifestDeleteCommand(t *testing.T, when spec.G, it spec.S) { + var ( + command *cobra.Command + logger *logging.LogWithWriters + outBuf bytes.Buffer + mockController *gomock.Controller + mockClient *testmocks.MockPackClient + ) + + it.Before(func() { + logger = logging.NewLogWithWriters(&outBuf, &outBuf) + mockController = gomock.NewController(t) + mockClient = testmocks.NewMockPackClient(mockController) + command = commands.ManifestDelete(logger, mockClient) + }) + + when("args are valid", func() { + var indexRepoName string + it.Before(func() { + indexRepoName = h.NewRandomIndexRepoName() + }) + + when("index exists", func() { + when("no extra flags are provided", func() { + it.Before(func() { + mockClient.EXPECT().DeleteManifest( + gomock.Eq([]string{indexRepoName}), + ).Return(nil) + }) + + it("should delete index", func() { + command.SetArgs([]string{indexRepoName}) + h.AssertNil(t, command.Execute()) + }) + }) + + when("--help", func() { + it("should have help flag", func() { + command.SetArgs([]string{"--help"}) + h.AssertNil(t, command.Execute()) + }) + }) + }) + + when("index does not exist", func() { + it.Before(func() { + mockClient.EXPECT().DeleteManifest( + gomock.Eq([]string{"some-none-existent-index"}), + ).Return(errors.New("image index doesn't exists")) + }) + + it("should return an error", func() { + command.SetArgs([]string{"some-none-existent-index"}) + h.AssertNotNil(t, command.Execute()) + }) + }) + }) +} diff --git a/internal/commands/manifest_rm.go b/internal/commands/manifest_rm.go new file mode 100644 index 0000000000..115d123a79 
--- /dev/null +++ b/internal/commands/manifest_rm.go @@ -0,0 +1,29 @@ +package commands + +import ( + "github.com/spf13/cobra" + + "github.com/buildpacks/pack/pkg/logging" +) + +// ManifestRemove will remove the specified image manifest if it is already referenced in the index +func ManifestRemove(logger logging.Logger, pack PackClient) *cobra.Command { + cmd := &cobra.Command{ + Use: "rm [manifest-list] [manifest] [manifest...] [flags]", + Args: cobra.MatchAll(cobra.MinimumNArgs(2), cobra.OnlyValidArgs), + Short: "Remove an image manifest from a manifest list.", + Example: `pack manifest rm my-image-index my-image@sha256:`, + Long: `'manifest rm' will remove the specified image manifest if it is already referenced in the index. +Users must pass the digest of the image in order to delete it from the index. +To discard __all__ the images in an index and the index itself, use 'manifest delete'.`, + RunE: logError(logger, func(cmd *cobra.Command, args []string) error { + if err := pack.RemoveManifest(args[0], args[1:]); err != nil { + return err + } + return nil + }), + } + + AddHelpFlag(cmd, "rm") + return cmd +} diff --git a/internal/commands/manifest_rm_test.go b/internal/commands/manifest_rm_test.go new file mode 100644 index 0000000000..615f3daed6 --- /dev/null +++ b/internal/commands/manifest_rm_test.go @@ -0,0 +1,93 @@ +package commands_test + +import ( + "bytes" + "errors" + "testing" + + "github.com/golang/mock/gomock" + "github.com/heroku/color" + "github.com/sclevine/spec" + "github.com/sclevine/spec/report" + "github.com/spf13/cobra" + + "github.com/buildpacks/pack/internal/commands" + "github.com/buildpacks/pack/internal/commands/testmocks" + "github.com/buildpacks/pack/pkg/logging" + h "github.com/buildpacks/pack/testhelpers" +) + +func TestManifestRemoveCommand(t *testing.T) { + color.Disable(true) + defer color.Disable(false) + + spec.Run(t, "Commands", testManifestRemoveCommand, spec.Random(), spec.Report(report.Terminal{})) +} + +func 
testManifestRemoveCommand(t *testing.T, when spec.G, it spec.S) { + var ( + command *cobra.Command + logger *logging.LogWithWriters + outBuf bytes.Buffer + mockController *gomock.Controller + mockClient *testmocks.MockPackClient + ) + + it.Before(func() { + logger = logging.NewLogWithWriters(&outBuf, &outBuf) + mockController = gomock.NewController(t) + mockClient = testmocks.NewMockPackClient(mockController) + command = commands.ManifestRemove(logger, mockClient) + }) + when("args are valid", func() { + var indexRepoName string + it.Before(func() { + indexRepoName = h.NewRandomIndexRepoName() + }) + + when("index exists", func() { + when("no extra flags are provided", func() { + it.Before(func() { + mockClient.EXPECT().RemoveManifest( + gomock.Eq(indexRepoName), + gomock.Eq([]string{"some-image"}), + ).Return(nil) + }) + it("should remove index", func() { + command.SetArgs([]string{indexRepoName, "some-image"}) + h.AssertNil(t, command.Execute()) + }) + }) + + when("--help", func() { + it("should have help flag", func() { + command.SetArgs([]string{"--help"}) + h.AssertNil(t, command.Execute()) + h.AssertEq(t, outBuf.String(), "") + }) + }) + }) + + when("index does not exist", func() { + it.Before(func() { + mockClient.EXPECT().RemoveManifest( + gomock.Eq(indexRepoName), + gomock.Eq([]string{"some-image"}), + ).Return(errors.New("image index doesn't exists")) + }) + it("should return an error", func() { + command.SetArgs([]string{indexRepoName, "some-image"}) + h.AssertNotNil(t, command.Execute()) + }) + }) + }) + + when("args are invalid", func() { + it("errors when missing mandatory arguments", func() { + command.SetArgs([]string{"some-index"}) + err := command.Execute() + h.AssertNotNil(t, err) + h.AssertError(t, err, "requires at least 2 arg(s), only received 1") + }) + }) +} diff --git a/internal/commands/manifest_test.go b/internal/commands/manifest_test.go new file mode 100644 index 0000000000..0cd300a492 --- /dev/null +++ 
b/internal/commands/manifest_test.go @@ -0,0 +1,54 @@ +package commands_test + +import ( + "bytes" + "testing" + + "github.com/golang/mock/gomock" + "github.com/heroku/color" + "github.com/sclevine/spec" + "github.com/sclevine/spec/report" + "github.com/spf13/cobra" + + "github.com/buildpacks/pack/internal/commands" + "github.com/buildpacks/pack/internal/commands/testmocks" + "github.com/buildpacks/pack/pkg/logging" + h "github.com/buildpacks/pack/testhelpers" +) + +func TestNewManifestCommand(t *testing.T) { + color.Disable(true) + defer color.Disable(false) + + spec.Run(t, "Commands", testNewManifestCommand, spec.Random(), spec.Report(report.Terminal{})) +} + +func testNewManifestCommand(t *testing.T, when spec.G, it spec.S) { + var ( + command *cobra.Command + logger *logging.LogWithWriters + outBuf bytes.Buffer + mockController *gomock.Controller + mockClient *testmocks.MockPackClient + ) + + it.Before(func() { + logger = logging.NewLogWithWriters(&outBuf, &outBuf) + mockController = gomock.NewController(t) + mockClient = testmocks.NewMockPackClient(mockController) + + command = commands.NewManifestCommand(logger, mockClient) + command.SetOut(logging.GetWriterForLevel(logger, logging.InfoLevel)) + }) + it("should have help flag", func() { + command.SetArgs([]string{}) + err := command.Execute() + h.AssertNilE(t, err) + + output := outBuf.String() + h.AssertContains(t, output, "Usage:") + for _, command := range []string{"create", "add", "annotate", "inspect", "remove", "rm"} { + h.AssertContains(t, output, command) + } + }) +} diff --git a/internal/commands/package_buildpack.go b/internal/commands/package_buildpack.go index 5fa89e9a0b..1bd17c4561 100644 --- a/internal/commands/package_buildpack.go +++ b/internal/commands/package_buildpack.go @@ -22,7 +22,7 @@ func PackageBuildpack(logger logging.Logger, cfg config.Config, packager Buildpa cmd := &cobra.Command{ Use: `package-buildpack --config `, Hidden: true, - Args: cobra.ExactValidArgs(1), + Args: 
cobra.MatchAll(cobra.ExactArgs(1), cobra.OnlyValidArgs), Short: "Package buildpack in OCI format.", Example: "pack package-buildpack my-buildpack --config ./package.toml", Long: "package-buildpack allows users to package (a) buildpack(s) into OCI format, which can then to be hosted in " + @@ -33,7 +33,7 @@ func PackageBuildpack(logger logging.Logger, cfg config.Config, packager Buildpa RunE: logError(logger, func(cmd *cobra.Command, args []string) error { deprecationWarning(logger, "package-buildpack", "buildpack package") - if err := validateBuildpackPackageFlags(&flags); err != nil { + if err := validateBuildpackPackageFlags(cfg, &flags); err != nil { return err } @@ -85,7 +85,7 @@ func PackageBuildpack(logger logging.Logger, cfg config.Config, packager Buildpa cmd.Flags().StringVarP(&flags.PackageTomlPath, "config", "c", "", "Path to package TOML config (required)") cmd.Flags().StringVarP(&flags.Format, "format", "f", "", `Format to save package as ("image" or "file")`) - cmd.Flags().BoolVar(&flags.Publish, "publish", false, `Publish to registry (applies to "--format=image" only)`) + cmd.Flags().BoolVar(&flags.Publish, "publish", false, `Publish the buildpack directly to the container registry specified in , instead of the daemon (applies to "--format=image" only).`) cmd.Flags().StringVar(&flags.Policy, "pull-policy", "", "Pull policy to use. Accepted values are always, never, and if-not-present. 
The default is always") cmd.Flags().StringVarP(&flags.BuildpackRegistry, "buildpack-registry", "r", "", "Buildpack Registry name") diff --git a/internal/commands/rebase.go b/internal/commands/rebase.go index 21c56a1c7f..312383b904 100644 --- a/internal/commands/rebase.go +++ b/internal/commands/rebase.go @@ -46,10 +46,13 @@ func Rebase(logger logging.Logger, cfg config.Config, pack PackClient) *cobra.Co }), } - cmd.Flags().BoolVar(&opts.Publish, "publish", false, "Publish to registry") + cmd.Flags().BoolVar(&opts.Publish, "publish", false, "Publish the rebased application image directly to the container registry specified in , instead of the daemon. The previous application image must also reside in the registry.") cmd.Flags().StringVar(&opts.RunImage, "run-image", "", "Run image to use for rebasing") cmd.Flags().StringVar(&policy, "pull-policy", "", "Pull policy to use. Accepted values are always, never, and if-not-present. The default is always") - + cmd.Flags().StringVar(&opts.PreviousImage, "previous-image", "", "Image to rebase. Set to a particular tag reference, digest reference, or (when performing a daemon build) image ID. 
Use this flag in combination with to avoid replacing the original image.") + cmd.Flags().StringVar(&opts.ReportDestinationDir, "report-output-dir", "", "Path to export build report.toml.\nOmitting the flag yield no report file.") + cmd.Flags().BoolVar(&opts.Force, "force", false, "Perform rebase operation without target validation (only available for API >= 0.12)") + cmd.Flags().StringArrayVar(&opts.InsecureRegistries, "insecure-registry", []string{}, "List of insecure registries (only available for API >= 0.13)") AddHelpFlag(cmd, "rebase") return cmd } diff --git a/internal/commands/rebase_test.go b/internal/commands/rebase_test.go index b3a9b2a5cf..9d52c7b0ee 100644 --- a/internal/commands/rebase_test.go +++ b/internal/commands/rebase_test.go @@ -50,6 +50,7 @@ func testRebaseCommand(t *testing.T, when spec.G, it spec.S) { when("#RebaseCommand", func() { when("no image is provided", func() { it("fails to run", func() { + command.SetArgs([]string{}) err := command.Execute() h.AssertError(t, err, "accepts 1 arg") }) @@ -80,6 +81,7 @@ func testRebaseCommand(t *testing.T, when spec.G, it spec.S) { AdditionalMirrors: map[string][]string{ runImage: {testMirror1, testMirror2}, }, + InsecureRegistries: []string{}, } }) @@ -122,6 +124,7 @@ func testRebaseCommand(t *testing.T, when spec.G, it spec.S) { h.AssertError(t, command.Execute(), "parsing pull policy") }) }) + when("--pull-policy not set", func() { when("no policy set in config", func() { it("uses the default policy", func() { @@ -148,6 +151,84 @@ func testRebaseCommand(t *testing.T, when spec.G, it spec.S) { h.AssertNil(t, command.Execute()) }) }) + when("rebase is true", func() { + it("passes it through", func() { + opts.Force = true + mockClient.EXPECT().Rebase(gomock.Any(), opts).Return(nil) + command = commands.Rebase(logger, cfg, mockClient) + command.SetArgs([]string{repoName, "--force"}) + h.AssertNil(t, command.Execute()) + }) + }) + }) + + when("image name and previous image are provided", func() { + var 
expectedOpts client.RebaseOptions + + it.Before(func() { + runImage := "test/image" + testMirror1 := "example.com/some/run1" + testMirror2 := "example.com/some/run2" + + cfg.RunImages = []config.RunImage{{ + Image: runImage, + Mirrors: []string{testMirror1, testMirror2}, + }} + command = commands.Rebase(logger, cfg, mockClient) + + repoName = "test/repo-image" + previousImage := "example.com/previous-image:tag" // Example of previous image with tag + opts := client.RebaseOptions{ + RepoName: repoName, + Publish: false, + PullPolicy: image.PullAlways, + RunImage: "", + AdditionalMirrors: map[string][]string{ + runImage: {testMirror1, testMirror2}, + }, + PreviousImage: previousImage, + InsecureRegistries: []string{}, + } + expectedOpts = opts + }) + + it("works", func() { + mockClient.EXPECT(). + Rebase(gomock.Any(), gomock.Eq(expectedOpts)). + Return(nil) + + command.SetArgs([]string{repoName, "--previous-image", "example.com/previous-image:tag"}) + h.AssertNil(t, command.Execute()) + }) + }) + + when("--insecure-registry is provided", func() { + it("sets one insecure registry", func() { + opts.PullPolicy = image.PullAlways + opts.InsecureRegistries = []string{ + "foo.bar", + } + mockClient.EXPECT(). + Rebase(gomock.Any(), opts). + Return(nil) + + command.SetArgs([]string{repoName, "--insecure-registry", "foo.bar"}) + h.AssertNil(t, command.Execute()) + }) + + it("sets more than one insecure registry", func() { + opts.PullPolicy = image.PullAlways + opts.InsecureRegistries = []string{ + "foo.bar", + "foo.com", + } + mockClient.EXPECT(). + Rebase(gomock.Any(), opts). 
+ Return(nil) + + command.SetArgs([]string{repoName, "--insecure-registry", "foo.bar", "--insecure-registry", "foo.com"}) + h.AssertNil(t, command.Execute()) + }) }) }) }) diff --git a/internal/commands/remove_registry_test.go b/internal/commands/remove_registry_test.go index 7d185809f7..4d1a3291ac 100644 --- a/internal/commands/remove_registry_test.go +++ b/internal/commands/remove_registry_test.go @@ -2,7 +2,6 @@ package commands_test import ( "bytes" - "io/ioutil" "os" "path/filepath" "testing" @@ -37,7 +36,7 @@ func testRemoveRegistryCommand(t *testing.T, when spec.G, it spec.S) { it.Before(func() { var err error - tmpDir, err = ioutil.TempDir("", "pack-home-*") + tmpDir, err = os.MkdirTemp("", "pack-home-*") assert.Nil(err) cfg = config.Config{ diff --git a/internal/commands/report.go b/internal/commands/report.go index 535a34e211..64cc24d4a6 100644 --- a/internal/commands/report.go +++ b/internal/commands/report.go @@ -4,7 +4,7 @@ import ( "bytes" "fmt" "io" - "io/ioutil" + "os" "path/filepath" "regexp" "runtime" @@ -57,7 +57,7 @@ Config: {{ .Config -}}`)) configData := "" - if data, err := ioutil.ReadFile(filepath.Clean(cfgPath)); err != nil { + if data, err := os.ReadFile(filepath.Clean(cfgPath)); err != nil { configData = fmt.Sprintf("(no config file found at %s)", cfgPath) } else { var padded strings.Builder diff --git a/internal/commands/report_test.go b/internal/commands/report_test.go index e69ad87596..eb6c911f10 100644 --- a/internal/commands/report_test.go +++ b/internal/commands/report_test.go @@ -3,7 +3,6 @@ package commands_test import ( "bytes" "fmt" - "io/ioutil" "os" "path/filepath" "testing" @@ -36,13 +35,13 @@ func testReportCommand(t *testing.T, when spec.G, it spec.S) { var err error logger = logging.NewLogWithWriters(&outBuf, &outBuf) - tempPackHome, err = ioutil.TempDir("", "pack-home") + tempPackHome, err = os.MkdirTemp("", "pack-home") h.AssertNil(t, err) packConfigPath = filepath.Join(tempPackHome, "config.toml") command = 
commands.Report(logger, testVersion, packConfigPath) command.SetArgs([]string{}) - h.AssertNil(t, ioutil.WriteFile(packConfigPath, []byte(` + h.AssertNil(t, os.WriteFile(packConfigPath, []byte(` default-builder-image = "some/image" experimental = true @@ -59,7 +58,7 @@ experimental = true url = "https://github.com/super-secret-project/registry" `), 0666)) - tempPackEmptyHome, err = ioutil.TempDir("", "") + tempPackEmptyHome, err = os.MkdirTemp("", "") h.AssertNil(t, err) }) diff --git a/internal/commands/set_default_builder_test.go b/internal/commands/set_default_builder_test.go index b3606e660a..db0f12e7a4 100644 --- a/internal/commands/set_default_builder_test.go +++ b/internal/commands/set_default_builder_test.go @@ -3,7 +3,6 @@ package commands_test import ( "bytes" "fmt" - "io/ioutil" "os" "path/filepath" "testing" @@ -44,7 +43,7 @@ func testSetDefaultBuilderCommand(t *testing.T, when spec.G, it spec.S) { logger = logging.NewLogWithWriters(&outBuf, &outBuf) var err error - tempPackHome, err = ioutil.TempDir("", "pack-home") + tempPackHome, err = os.MkdirTemp("", "pack-home") h.AssertNil(t, err) command = commands.SetDefaultBuilder(logger, config.Config{}, filepath.Join(tempPackHome, "config.toml"), mockClient) }) diff --git a/internal/commands/set_default_registry_test.go b/internal/commands/set_default_registry_test.go index 74cb44f982..e946329040 100644 --- a/internal/commands/set_default_registry_test.go +++ b/internal/commands/set_default_registry_test.go @@ -2,7 +2,6 @@ package commands_test import ( "bytes" - "io/ioutil" "os" "path/filepath" "testing" @@ -36,7 +35,7 @@ func testSetDefaultRegistryCommand(t *testing.T, when spec.G, it spec.S) { it.Before(func() { var err error - tmpDir, err = ioutil.TempDir("", "pack-home-*") + tmpDir, err = os.MkdirTemp("", "pack-home-*") assert.Nil(err) configFile = filepath.Join(tmpDir, "config.toml") diff --git a/internal/commands/set_run_image_mirrors_test.go b/internal/commands/set_run_image_mirrors_test.go index 
da20ca042d..10ae7b94a6 100644 --- a/internal/commands/set_run_image_mirrors_test.go +++ b/internal/commands/set_run_image_mirrors_test.go @@ -2,7 +2,6 @@ package commands_test import ( "bytes" - "io/ioutil" "os" "path/filepath" "testing" @@ -35,7 +34,7 @@ func testSetRunImageMirrorsCommand(t *testing.T, when spec.G, it spec.S) { logger = logging.NewLogWithWriters(&outBuf, &outBuf) cfg = config.Config{} var err error - tempPackHome, err = ioutil.TempDir("", "pack-home") + tempPackHome, err = os.MkdirTemp("", "pack-home") h.AssertNil(t, err) cfgPath = filepath.Join(tempPackHome, "config.toml") diff --git a/internal/commands/stack.go b/internal/commands/stack.go index fcfdfda05e..482cd69418 100644 --- a/internal/commands/stack.go +++ b/internal/commands/stack.go @@ -9,7 +9,8 @@ import ( func NewStackCommand(logger logging.Logger) *cobra.Command { command := cobra.Command{ Use: "stack", - Short: "Interact with stacks", + Short: "(deprecated) Interact with stacks", + Long: "(Deprecated)\nStacks are deprecated in favor of using BuildImages and RunImages directly, but will continue to be supported throughout all of 2023 and '24 if not longer. Please see our docs for more details- https://buildpacks.io/docs/concepts/components/stack", RunE: nil, } diff --git a/internal/commands/stack_suggest.go b/internal/commands/stack_suggest.go index a96063c2c4..84afe38270 100644 --- a/internal/commands/stack_suggest.go +++ b/internal/commands/stack_suggest.go @@ -19,33 +19,45 @@ type suggestedStack struct { } var suggestedStacks = []suggestedStack{ + { + ID: "Deprecation Notice", + Description: "Stacks are deprecated in favor of using BuildImages and RunImages directly, but will continue to be supported throughout all of 2023 and 2024 if not longer. 
Please see our docs for more details- https://buildpacks.io/docs/concepts/components/stack", + Maintainer: "CNB", + }, { ID: "heroku-20", Description: "The official Heroku stack based on Ubuntu 20.04", Maintainer: "Heroku", - BuildImage: "heroku/pack:20-build", - RunImage: "heroku/pack:20", + BuildImage: "heroku/heroku:20-cnb-build", + RunImage: "heroku/heroku:20-cnb", + }, + { + ID: "io.buildpacks.stacks.jammy", + Description: "A minimal Paketo stack based on Ubuntu 22.04", + Maintainer: "Paketo Project", + BuildImage: "paketobuildpacks/build-jammy-base", + RunImage: "paketobuildpacks/run-jammy-base", }, { - ID: "io.buildpacks.stacks.bionic", - Description: "A minimal Paketo stack based on Ubuntu 18.04", + ID: "io.buildpacks.stacks.jammy", + Description: "A large Paketo stack based on Ubuntu 22.04", Maintainer: "Paketo Project", - BuildImage: "paketobuildpacks/build:base-cnb", - RunImage: "paketobuildpacks/run:base-cnb", + BuildImage: "paketobuildpacks/build-jammy-full", + RunImage: "paketobuildpacks/run-jammy-full", }, { - ID: "io.buildpacks.stacks.bionic", - Description: "A large Paketo stack based on Ubuntu 18.04", + ID: "io.buildpacks.stacks.jammy.tiny", + Description: "A tiny Paketo stack based on Ubuntu 22.04, similar to distroless", Maintainer: "Paketo Project", - BuildImage: "paketobuildpacks/build:full-cnb", - RunImage: "paketobuildpacks/run:full-cnb", + BuildImage: "paketobuildpacks/build-jammy-tiny", + RunImage: "paketobuildpacks/run-jammy-tiny", }, { - ID: "io.paketo.stacks.tiny", - Description: "A tiny Paketo stack based on Ubuntu 18.04, similar to distroless", + ID: "io.buildpacks.stacks.jammy.static", + Description: "A static Paketo stack based on Ubuntu 22.04, similar to distroless", Maintainer: "Paketo Project", - BuildImage: "paketobuildpacks/build:tiny-cnb", - RunImage: "paketobuildpacks/run:tiny-cnb", + BuildImage: "paketobuildpacks/build-jammy-static", + RunImage: "paketobuildpacks/run-jammy-static", }, } @@ -53,8 +65,8 @@ func 
stackSuggest(logger logging.Logger) *cobra.Command { cmd := &cobra.Command{ Use: "suggest", Args: cobra.NoArgs, - Short: "List the recommended stacks", - Example: "pack stacks suggest", + Short: "(deprecated) List the recommended stacks", + Example: "pack stack suggest", RunE: logError(logger, func(*cobra.Command, []string) error { Suggest(logger) return nil diff --git a/internal/commands/stack_suggest_test.go b/internal/commands/stack_suggest_test.go index 47688b2cb8..0aad10f3c7 100644 --- a/internal/commands/stack_suggest_test.go +++ b/internal/commands/stack_suggest_test.go @@ -32,29 +32,41 @@ func testStacksSuggestCommand(t *testing.T, when spec.G, it spec.S) { h.AssertNil(t, command.Execute()) h.AssertEq(t, outBuf.String(), `Stacks maintained by the community: + Stack ID: Deprecation Notice + Description: Stacks are deprecated in favor of using BuildImages and RunImages directly, but will continue to be supported throughout all of 2023 and 2024 if not longer. Please see our docs for more details- https://buildpacks.io/docs/concepts/components/stack + Maintainer: CNB + Build Image: + Run Image: + Stack ID: heroku-20 Description: The official Heroku stack based on Ubuntu 20.04 Maintainer: Heroku - Build Image: heroku/pack:20-build - Run Image: heroku/pack:20 + Build Image: heroku/heroku:20-cnb-build + Run Image: heroku/heroku:20-cnb + + Stack ID: io.buildpacks.stacks.jammy + Description: A minimal Paketo stack based on Ubuntu 22.04 + Maintainer: Paketo Project + Build Image: paketobuildpacks/build-jammy-base + Run Image: paketobuildpacks/run-jammy-base - Stack ID: io.buildpacks.stacks.bionic - Description: A minimal Paketo stack based on Ubuntu 18.04 + Stack ID: io.buildpacks.stacks.jammy + Description: A large Paketo stack based on Ubuntu 22.04 Maintainer: Paketo Project - Build Image: paketobuildpacks/build:base-cnb - Run Image: paketobuildpacks/run:base-cnb + Build Image: paketobuildpacks/build-jammy-full + Run Image: paketobuildpacks/run-jammy-full - Stack 
ID: io.buildpacks.stacks.bionic - Description: A large Paketo stack based on Ubuntu 18.04 + Stack ID: io.buildpacks.stacks.jammy.static + Description: A static Paketo stack based on Ubuntu 22.04, similar to distroless Maintainer: Paketo Project - Build Image: paketobuildpacks/build:full-cnb - Run Image: paketobuildpacks/run:full-cnb + Build Image: paketobuildpacks/build-jammy-static + Run Image: paketobuildpacks/run-jammy-static - Stack ID: io.paketo.stacks.tiny - Description: A tiny Paketo stack based on Ubuntu 18.04, similar to distroless + Stack ID: io.buildpacks.stacks.jammy.tiny + Description: A tiny Paketo stack based on Ubuntu 22.04, similar to distroless Maintainer: Paketo Project - Build Image: paketobuildpacks/build:tiny-cnb - Run Image: paketobuildpacks/run:tiny-cnb + Build Image: paketobuildpacks/build-jammy-tiny + Run Image: paketobuildpacks/run-jammy-tiny `) }) }) diff --git a/internal/commands/stack_test.go b/internal/commands/stack_test.go new file mode 100644 index 0000000000..6c2b571b93 --- /dev/null +++ b/internal/commands/stack_test.go @@ -0,0 +1,53 @@ +package commands + +import ( + "bytes" + "testing" + + "github.com/sclevine/spec" + "github.com/sclevine/spec/report" + "github.com/spf13/cobra" + + "github.com/buildpacks/pack/pkg/logging" + h "github.com/buildpacks/pack/testhelpers" +) + +func TestStackCommand(t *testing.T) { + spec.Run(t, "StackCommand", testStackCommand, spec.Parallel(), spec.Report(report.Terminal{})) +} + +func testStackCommand(t *testing.T, when spec.G, it spec.S) { + var ( + command *cobra.Command + outBuf bytes.Buffer + ) + + it.Before(func() { + command = NewStackCommand(logging.NewLogWithWriters(&outBuf, &outBuf)) + }) + + when("#Stack", func() { + it("displays stack information", func() { + command.SetArgs([]string{}) + bb := bytes.NewBufferString("") // In most tests we don't seem to need to this, not sure why it's necessary here. 
+ command.SetOut(bb) + h.AssertNil(t, command.Execute()) + h.AssertEq(t, bb.String(), `(Deprecated) +Stacks are deprecated in favor of using BuildImages and RunImages directly, but will continue to be supported throughout all of 2023 and '24 if not longer. Please see our docs for more details- https://buildpacks.io/docs/concepts/components/stack + +Usage: + stack [command] + +Available Commands: + completion Generate the autocompletion script for the specified shell + help Help about any command + suggest (deprecated) List the recommended stacks + +Flags: + -h, --help help for stack + +Use "stack [command] --help" for more information about a command. +`) + }) + }) +} diff --git a/internal/commands/suggest_builders.go b/internal/commands/suggest_builders.go index 0d8772ed92..b028086160 100644 --- a/internal/commands/suggest_builders.go +++ b/internal/commands/suggest_builders.go @@ -39,10 +39,16 @@ func suggestSettingBuilder(logger logging.Logger, inspector BuilderInspector) { } func suggestBuilders(logger logging.Logger, client BuilderInspector) { - WriteSuggestedBuilder(logger, client, bldr.SuggestedBuilders) + suggestedBuilders := []bldr.KnownBuilder{} + for _, knownBuilder := range bldr.KnownBuilders { + if knownBuilder.Suggested { + suggestedBuilders = append(suggestedBuilders, knownBuilder) + } + } + WriteSuggestedBuilder(logger, client, suggestedBuilders) } -func WriteSuggestedBuilder(logger logging.Logger, inspector BuilderInspector, builders []bldr.SuggestedBuilder) { +func WriteSuggestedBuilder(logger logging.Logger, inspector BuilderInspector, builders []bldr.KnownBuilder) { sort.Slice(builders, func(i, j int) bool { if builders[i].Vendor == builders[j].Vendor { return builders[i].Image < builders[j].Image @@ -60,7 +66,7 @@ func WriteSuggestedBuilder(logger logging.Logger, inspector BuilderInspector, bu wg.Add(len(builders)) for i, builder := range builders { - go func(w *sync.WaitGroup, i int, builder bldr.SuggestedBuilder) { + go func(w 
*sync.WaitGroup, i int, builder bldr.KnownBuilder) { descriptions[i] = getBuilderDescription(builder, inspector) w.Done() }(&wg, i, builder) @@ -78,7 +84,7 @@ func WriteSuggestedBuilder(logger logging.Logger, inspector BuilderInspector, bu logger.Info("\tpack builder inspect ") } -func getBuilderDescription(builder bldr.SuggestedBuilder, inspector BuilderInspector) string { +func getBuilderDescription(builder bldr.KnownBuilder, inspector BuilderInspector) string { info, err := inspector.InspectBuilder(builder.Image, false) if err == nil && info != nil && info.Description != "" { return info.Description @@ -86,13 +92,3 @@ func getBuilderDescription(builder bldr.SuggestedBuilder, inspector BuilderInspe return builder.DefaultDescription } - -func isSuggestedBuilder(builder string) bool { - for _, sugBuilder := range bldr.SuggestedBuilders { - if builder == sugBuilder.Image { - return true - } - } - - return false -} diff --git a/internal/commands/suggest_builders_test.go b/internal/commands/suggest_builders_test.go index 6759e21de1..12d38c16e2 100644 --- a/internal/commands/suggest_builders_test.go +++ b/internal/commands/suggest_builders_test.go @@ -47,7 +47,7 @@ func testSuggestBuildersCommand(t *testing.T, when spec.G, it spec.S) { }) it("displays descriptions from metadata", func() { - commands.WriteSuggestedBuilder(logger, mockClient, []bldr.SuggestedBuilder{{ + commands.WriteSuggestedBuilder(logger, mockClient, []bldr.KnownBuilder{{ Vendor: "Builder", Image: "gcr.io/some/builder:latest", DefaultDescription: "Default description", @@ -65,7 +65,7 @@ func testSuggestBuildersCommand(t *testing.T, when spec.G, it spec.S) { }) it("displays default descriptions", func() { - commands.WriteSuggestedBuilder(logger, mockClient, []bldr.SuggestedBuilder{{ + commands.WriteSuggestedBuilder(logger, mockClient, []bldr.KnownBuilder{{ Vendor: "Builder", Image: "gcr.io/some/builder:latest", DefaultDescription: "Default description", @@ -81,7 +81,7 @@ func 
testSuggestBuildersCommand(t *testing.T, when spec.G, it spec.S) { }) it("displays default descriptions", func() { - commands.WriteSuggestedBuilder(logger, mockClient, []bldr.SuggestedBuilder{{ + commands.WriteSuggestedBuilder(logger, mockClient, []bldr.KnownBuilder{{ Vendor: "Builder", Image: "gcr.io/some/builder:latest", DefaultDescription: "Default description", diff --git a/internal/commands/suggest_stacks.go b/internal/commands/suggest_stacks.go deleted file mode 100644 index a2c6d0b65c..0000000000 --- a/internal/commands/suggest_stacks.go +++ /dev/null @@ -1,26 +0,0 @@ -package commands - -import ( - "github.com/spf13/cobra" - - "github.com/buildpacks/pack/pkg/logging" -) - -// Deprecated: Use `stack suggest` instead -func SuggestStacks(logger logging.Logger) *cobra.Command { - cmd := &cobra.Command{ - Use: "suggest-stacks", - Args: cobra.NoArgs, - Short: "Display list of recommended stacks", - Example: "pack suggest-stacks", - RunE: logError(logger, func(cmd *cobra.Command, args []string) error { - deprecationWarning(logger, "suggest-stacks", "stack suggest") - Suggest(logger) - return nil - }), - Hidden: true, - } - - AddHelpFlag(cmd, "suggest-stacks") - return cmd -} diff --git a/internal/commands/suggest_stacks_test.go b/internal/commands/suggest_stacks_test.go deleted file mode 100644 index a208bd69f1..0000000000 --- a/internal/commands/suggest_stacks_test.go +++ /dev/null @@ -1,66 +0,0 @@ -package commands_test - -import ( - "bytes" - "testing" - - "github.com/heroku/color" - "github.com/sclevine/spec" - "github.com/sclevine/spec/report" - "github.com/spf13/cobra" - - "github.com/buildpacks/pack/internal/commands" - "github.com/buildpacks/pack/pkg/logging" - h "github.com/buildpacks/pack/testhelpers" -) - -func TestSuggestStacksCommand(t *testing.T) { - color.Disable(true) - defer color.Disable(false) - spec.Run(t, "Commands", testSuggestStacksCommand, spec.Parallel(), spec.Report(report.Terminal{})) -} - -func testSuggestStacksCommand(t *testing.T, 
when spec.G, it spec.S) { - var ( - command *cobra.Command - outBuf bytes.Buffer - ) - - it.Before(func() { - command = commands.SuggestStacks(logging.NewLogWithWriters(&outBuf, &outBuf)) - }) - - when("#SuggestStacks", func() { - it("displays stack information", func() { - command.SetArgs([]string{}) - h.AssertNil(t, command.Execute()) - h.AssertEq(t, outBuf.String(), `Warning: Command 'pack suggest-stacks' has been deprecated, please use 'pack stack suggest' instead -Stacks maintained by the community: - - Stack ID: heroku-20 - Description: The official Heroku stack based on Ubuntu 20.04 - Maintainer: Heroku - Build Image: heroku/pack:20-build - Run Image: heroku/pack:20 - - Stack ID: io.buildpacks.stacks.bionic - Description: A minimal Paketo stack based on Ubuntu 18.04 - Maintainer: Paketo Project - Build Image: paketobuildpacks/build:base-cnb - Run Image: paketobuildpacks/run:base-cnb - - Stack ID: io.buildpacks.stacks.bionic - Description: A large Paketo stack based on Ubuntu 18.04 - Maintainer: Paketo Project - Build Image: paketobuildpacks/build:full-cnb - Run Image: paketobuildpacks/run:full-cnb - - Stack ID: io.paketo.stacks.tiny - Description: A tiny Paketo stack based on Ubuntu 18.04, similar to distroless - Maintainer: Paketo Project - Build Image: paketobuildpacks/build:tiny-cnb - Run Image: paketobuildpacks/run:tiny-cnb -`) - }) - }) -} diff --git a/internal/commands/testdata/buildpack.toml b/internal/commands/testdata/buildpack.toml new file mode 100644 index 0000000000..e69de29bb2 diff --git a/internal/commands/testmocks/mock_inspect_image_writer_factory.go b/internal/commands/testmocks/mock_inspect_image_writer_factory.go index 2b931aa84c..9f5a65c546 100644 --- a/internal/commands/testmocks/mock_inspect_image_writer_factory.go +++ b/internal/commands/testmocks/mock_inspect_image_writer_factory.go @@ -12,30 +12,30 @@ import ( writer "github.com/buildpacks/pack/internal/inspectimage/writer" ) -// MockInspectImageWriterFactory is a mock of 
InspectImageWriterFactory interface +// MockInspectImageWriterFactory is a mock of InspectImageWriterFactory interface. type MockInspectImageWriterFactory struct { ctrl *gomock.Controller recorder *MockInspectImageWriterFactoryMockRecorder } -// MockInspectImageWriterFactoryMockRecorder is the mock recorder for MockInspectImageWriterFactory +// MockInspectImageWriterFactoryMockRecorder is the mock recorder for MockInspectImageWriterFactory. type MockInspectImageWriterFactoryMockRecorder struct { mock *MockInspectImageWriterFactory } -// NewMockInspectImageWriterFactory creates a new mock instance +// NewMockInspectImageWriterFactory creates a new mock instance. func NewMockInspectImageWriterFactory(ctrl *gomock.Controller) *MockInspectImageWriterFactory { mock := &MockInspectImageWriterFactory{ctrl: ctrl} mock.recorder = &MockInspectImageWriterFactoryMockRecorder{mock} return mock } -// EXPECT returns an object that allows the caller to indicate expected use +// EXPECT returns an object that allows the caller to indicate expected use. func (m *MockInspectImageWriterFactory) EXPECT() *MockInspectImageWriterFactoryMockRecorder { return m.recorder } -// Writer mocks base method +// Writer mocks base method. func (m *MockInspectImageWriterFactory) Writer(arg0 string, arg1 bool) (writer.InspectImageWriter, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "Writer", arg0, arg1) @@ -44,7 +44,7 @@ func (m *MockInspectImageWriterFactory) Writer(arg0 string, arg1 bool) (writer.I return ret0, ret1 } -// Writer indicates an expected call of Writer +// Writer indicates an expected call of Writer. 
func (mr *MockInspectImageWriterFactoryMockRecorder) Writer(arg0, arg1 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Writer", reflect.TypeOf((*MockInspectImageWriterFactory)(nil).Writer), arg0, arg1) diff --git a/internal/commands/testmocks/mock_pack_client.go b/internal/commands/testmocks/mock_pack_client.go index 28338a006a..981704c090 100644 --- a/internal/commands/testmocks/mock_pack_client.go +++ b/internal/commands/testmocks/mock_pack_client.go @@ -36,6 +36,34 @@ func (m *MockPackClient) EXPECT() *MockPackClientMockRecorder { return m.recorder } +// AddManifest mocks base method. +func (m *MockPackClient) AddManifest(arg0 context.Context, arg1 client.ManifestAddOptions) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "AddManifest", arg0, arg1) + ret0, _ := ret[0].(error) + return ret0 +} + +// AddManifest indicates an expected call of AddManifest. +func (mr *MockPackClientMockRecorder) AddManifest(arg0, arg1 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "AddManifest", reflect.TypeOf((*MockPackClient)(nil).AddManifest), arg0, arg1) +} + +// AnnotateManifest mocks base method. +func (m *MockPackClient) AnnotateManifest(arg0 context.Context, arg1 client.ManifestAnnotateOptions) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "AnnotateManifest", arg0, arg1) + ret0, _ := ret[0].(error) + return ret0 +} + +// AnnotateManifest indicates an expected call of AnnotateManifest. +func (mr *MockPackClientMockRecorder) AnnotateManifest(arg0, arg1 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "AnnotateManifest", reflect.TypeOf((*MockPackClient)(nil).AnnotateManifest), arg0, arg1) +} + // Build mocks base method. 
func (m *MockPackClient) Build(arg0 context.Context, arg1 client.BuildOptions) error { m.ctrl.T.Helper() @@ -64,6 +92,34 @@ func (mr *MockPackClientMockRecorder) CreateBuilder(arg0, arg1 interface{}) *gom return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateBuilder", reflect.TypeOf((*MockPackClient)(nil).CreateBuilder), arg0, arg1) } +// CreateManifest mocks base method. +func (m *MockPackClient) CreateManifest(arg0 context.Context, arg1 client.CreateManifestOptions) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CreateManifest", arg0, arg1) + ret0, _ := ret[0].(error) + return ret0 +} + +// CreateManifest indicates an expected call of CreateManifest. +func (mr *MockPackClientMockRecorder) CreateManifest(arg0, arg1 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateManifest", reflect.TypeOf((*MockPackClient)(nil).CreateManifest), arg0, arg1) +} + +// DeleteManifest mocks base method. +func (m *MockPackClient) DeleteManifest(arg0 []string) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "DeleteManifest", arg0) + ret0, _ := ret[0].(error) + return ret0 +} + +// DeleteManifest indicates an expected call of DeleteManifest. +func (mr *MockPackClientMockRecorder) DeleteManifest(arg0 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteManifest", reflect.TypeOf((*MockPackClient)(nil).DeleteManifest), arg0) +} + // DownloadSBOM mocks base method. func (m *MockPackClient) DownloadSBOM(arg0 string, arg1 client.DownloadSBOMOptions) error { m.ctrl.T.Helper() @@ -113,6 +169,21 @@ func (mr *MockPackClientMockRecorder) InspectBuildpack(arg0 interface{}) *gomock return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "InspectBuildpack", reflect.TypeOf((*MockPackClient)(nil).InspectBuildpack), arg0) } +// InspectExtension mocks base method. 
+func (m *MockPackClient) InspectExtension(arg0 client.InspectExtensionOptions) (*client.ExtensionInfo, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "InspectExtension", arg0) + ret0, _ := ret[0].(*client.ExtensionInfo) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// InspectExtension indicates an expected call of InspectExtension. +func (mr *MockPackClientMockRecorder) InspectExtension(arg0 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "InspectExtension", reflect.TypeOf((*MockPackClient)(nil).InspectExtension), arg0) +} + // InspectImage mocks base method. func (m *MockPackClient) InspectImage(arg0 string, arg1 bool) (*client.ImageInfo, error) { m.ctrl.T.Helper() @@ -128,6 +199,20 @@ func (mr *MockPackClientMockRecorder) InspectImage(arg0, arg1 interface{}) *gomo return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "InspectImage", reflect.TypeOf((*MockPackClient)(nil).InspectImage), arg0, arg1) } +// InspectManifest mocks base method. +func (m *MockPackClient) InspectManifest(arg0 string) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "InspectManifest", arg0) + ret0, _ := ret[0].(error) + return ret0 +} + +// InspectManifest indicates an expected call of InspectManifest. +func (mr *MockPackClientMockRecorder) InspectManifest(arg0 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "InspectManifest", reflect.TypeOf((*MockPackClient)(nil).InspectManifest), arg0) +} + // NewBuildpack mocks base method. func (m *MockPackClient) NewBuildpack(arg0 context.Context, arg1 client.NewBuildpackOptions) error { m.ctrl.T.Helper() @@ -156,6 +241,20 @@ func (mr *MockPackClientMockRecorder) PackageBuildpack(arg0, arg1 interface{}) * return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "PackageBuildpack", reflect.TypeOf((*MockPackClient)(nil).PackageBuildpack), arg0, arg1) } +// PackageExtension mocks base method. 
+func (m *MockPackClient) PackageExtension(arg0 context.Context, arg1 client.PackageBuildpackOptions) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "PackageExtension", arg0, arg1) + ret0, _ := ret[0].(error) + return ret0 +} + +// PackageExtension indicates an expected call of PackageExtension. +func (mr *MockPackClientMockRecorder) PackageExtension(arg0, arg1 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "PackageExtension", reflect.TypeOf((*MockPackClient)(nil).PackageExtension), arg0, arg1) +} + // PullBuildpack mocks base method. func (m *MockPackClient) PullBuildpack(arg0 context.Context, arg1 client.PullBuildpackOptions) error { m.ctrl.T.Helper() @@ -170,6 +269,20 @@ func (mr *MockPackClientMockRecorder) PullBuildpack(arg0, arg1 interface{}) *gom return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "PullBuildpack", reflect.TypeOf((*MockPackClient)(nil).PullBuildpack), arg0, arg1) } +// PushManifest mocks base method. +func (m *MockPackClient) PushManifest(arg0 client.PushManifestOptions) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "PushManifest", arg0) + ret0, _ := ret[0].(error) + return ret0 +} + +// PushManifest indicates an expected call of PushManifest. +func (mr *MockPackClientMockRecorder) PushManifest(arg0 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "PushManifest", reflect.TypeOf((*MockPackClient)(nil).PushManifest), arg0) +} + // Rebase mocks base method. func (m *MockPackClient) Rebase(arg0 context.Context, arg1 client.RebaseOptions) error { m.ctrl.T.Helper() @@ -198,6 +311,20 @@ func (mr *MockPackClientMockRecorder) RegisterBuildpack(arg0, arg1 interface{}) return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RegisterBuildpack", reflect.TypeOf((*MockPackClient)(nil).RegisterBuildpack), arg0, arg1) } +// RemoveManifest mocks base method. 
+func (m *MockPackClient) RemoveManifest(arg0 string, arg1 []string) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "RemoveManifest", arg0, arg1) + ret0, _ := ret[0].(error) + return ret0 +} + +// RemoveManifest indicates an expected call of RemoveManifest. +func (mr *MockPackClientMockRecorder) RemoveManifest(arg0, arg1 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RemoveManifest", reflect.TypeOf((*MockPackClient)(nil).RemoveManifest), arg0, arg1) +} + // YankBuildpack mocks base method. func (m *MockPackClient) YankBuildpack(arg0 client.YankBuildpackOptions) error { m.ctrl.T.Helper() diff --git a/internal/commands/trust_builder_test.go b/internal/commands/trust_builder_test.go index 993bde831a..747099956b 100644 --- a/internal/commands/trust_builder_test.go +++ b/internal/commands/trust_builder_test.go @@ -2,7 +2,6 @@ package commands_test import ( "bytes" - "io/ioutil" "os" "path/filepath" "testing" @@ -37,7 +36,7 @@ func testTrustBuilderCommand(t *testing.T, when spec.G, it spec.S) { var err error logger = logging.NewLogWithWriters(&outBuf, &outBuf) - tempPackHome, err = ioutil.TempDir("", "pack-home") + tempPackHome, err = os.MkdirTemp("", "pack-home") h.AssertNil(t, err) configPath = filepath.Join(tempPackHome, "config.toml") command = commands.TrustBuilder(logger, config.Config{}, configPath) @@ -61,7 +60,7 @@ func testTrustBuilderCommand(t *testing.T, when spec.G, it spec.S) { command.SetArgs([]string{"some-builder"}) h.AssertNil(t, command.Execute()) - b, err := ioutil.ReadFile(configPath) + b, err := os.ReadFile(configPath) h.AssertNil(t, err) h.AssertContains(t, string(b), `[[trusted-builders]] name = "some-builder"`) @@ -72,13 +71,13 @@ func testTrustBuilderCommand(t *testing.T, when spec.G, it spec.S) { it("does nothing", func() { command.SetArgs([]string{"some-already-trusted-builder"}) h.AssertNil(t, command.Execute()) - oldContents, err := ioutil.ReadFile(configPath) + 
oldContents, err := os.ReadFile(configPath) h.AssertNil(t, err) command.SetArgs([]string{"some-already-trusted-builder"}) h.AssertNil(t, command.Execute()) - newContents, err := ioutil.ReadFile(configPath) + newContents, err := os.ReadFile(configPath) h.AssertNil(t, err) h.AssertEq(t, newContents, oldContents) }) @@ -86,11 +85,11 @@ func testTrustBuilderCommand(t *testing.T, when spec.G, it spec.S) { when("builder is a suggested builder", func() { it("does nothing", func() { - h.AssertNil(t, ioutil.WriteFile(configPath, []byte(""), os.ModePerm)) + h.AssertNil(t, os.WriteFile(configPath, []byte(""), os.ModePerm)) - command.SetArgs([]string{"paketobuildpacks/builder:base"}) + command.SetArgs([]string{"paketobuildpacks/builder-jammy-base"}) h.AssertNil(t, command.Execute()) - oldContents, err := ioutil.ReadFile(configPath) + oldContents, err := os.ReadFile(configPath) h.AssertNil(t, err) h.AssertEq(t, string(oldContents), "") }) diff --git a/internal/commands/untrust_builder_test.go b/internal/commands/untrust_builder_test.go index 5a04c02390..f903b8b606 100644 --- a/internal/commands/untrust_builder_test.go +++ b/internal/commands/untrust_builder_test.go @@ -3,7 +3,6 @@ package commands_test import ( "bytes" "fmt" - "io/ioutil" "os" "path/filepath" "testing" @@ -39,7 +38,7 @@ func testUntrustBuilderCommand(t *testing.T, when spec.G, it spec.S) { logger = logging.NewLogWithWriters(&outBuf, &outBuf) - tempPackHome, err = ioutil.TempDir("", "pack-home") + tempPackHome, err = os.MkdirTemp("", "pack-home") h.AssertNil(t, err) configPath = filepath.Join(tempPackHome, "config.toml") configManager = newConfigManager(t, configPath) @@ -73,7 +72,7 @@ func testUntrustBuilderCommand(t *testing.T, when spec.G, it spec.S) { h.AssertNil(t, command.Execute()) - b, err := ioutil.ReadFile(configPath) + b, err := os.ReadFile(configPath) h.AssertNil(t, err) h.AssertNotContains(t, string(b), builderName) @@ -93,7 +92,7 @@ func testUntrustBuilderCommand(t *testing.T, when spec.G, it 
spec.S) { h.AssertNil(t, command.Execute()) - b, err := ioutil.ReadFile(configPath) + b, err := os.ReadFile(configPath) h.AssertNil(t, err) h.AssertContains(t, string(b), stillTrustedBuilder) h.AssertNotContains(t, string(b), untrustBuilder) @@ -111,7 +110,7 @@ func testUntrustBuilderCommand(t *testing.T, when spec.G, it spec.S) { h.AssertNil(t, command.Execute()) - b, err := ioutil.ReadFile(configPath) + b, err := os.ReadFile(configPath) h.AssertNil(t, err) h.AssertContains(t, string(b), stillTrustedBuilder) h.AssertNotContains(t, string(b), neverTrustedBuilder) @@ -125,12 +124,12 @@ func testUntrustBuilderCommand(t *testing.T, when spec.G, it spec.S) { when("builder is a suggested builder", func() { it("does nothing and reports that ", func() { - builder := "paketobuildpacks/builder:base" + builder := "paketobuildpacks/builder-jammy-base" command := commands.UntrustBuilder(logger, config.Config{}, configPath) command.SetArgs([]string{builder}) err := command.Execute() - h.AssertError(t, err, fmt.Sprintf("Builder %s is a suggested builder, and is trusted by default", style.Symbol(builder))) + h.AssertError(t, err, fmt.Sprintf("Builder %s is a known trusted builder. 
Currently pack doesn't support making these builders untrusted", style.Symbol(builder))) }) }) }) diff --git a/internal/config/config.go b/internal/config/config.go index 5af064f5f9..465c231a18 100644 --- a/internal/config/config.go +++ b/internal/config/config.go @@ -22,6 +22,11 @@ type Config struct { Registries []Registry `toml:"registries,omitempty"` LifecycleImage string `toml:"lifecycle-image,omitempty"` RegistryMirrors map[string]string `toml:"registry-mirrors,omitempty"` + LayoutRepositoryDir string `toml:"layout-repo-dir,omitempty"` +} + +type VolumeConfig struct { + VolumeKeys map[string]string `toml:"volume-keys,omitempty"` } type Registry struct { @@ -57,6 +62,14 @@ func DefaultConfigPath() (string, error) { return filepath.Join(home, "config.toml"), nil } +func DefaultVolumeKeysPath() (string, error) { + home, err := PackHome() + if err != nil { + return "", errors.Wrap(err, "getting pack home") + } + return filepath.Join(home, "volume-keys.toml"), nil +} + func PackHome() (string, error) { packHome := os.Getenv("PACK_HOME") if packHome == "" { @@ -75,11 +88,19 @@ func Read(path string) (Config, error) { if err != nil && !os.IsNotExist(err) { return Config{}, errors.Wrapf(err, "failed to read config file at path %s", path) } + return cfg, nil +} +func ReadVolumeKeys(path string) (VolumeConfig, error) { + cfg := VolumeConfig{} + _, err := toml.DecodeFile(path, &cfg) + if err != nil && !os.IsNotExist(err) { + return VolumeConfig{}, errors.Wrapf(err, "failed to read config file at path %s", path) + } return cfg, nil } -func Write(cfg Config, path string) error { +func Write(cfg interface{}, path string) error { if err := MkdirAll(filepath.Dir(path)); err != nil { return err } @@ -128,4 +149,4 @@ func GetRegistry(cfg Config, registryName string) (Registry, error) { return Registry{}, errors.Errorf("registry %s is not defined in your config file", style.Symbol(registryName)) } -const DefaultLifecycleImageRepo = "buildpacksio/lifecycle" +const 
DefaultLifecycleImageRepo = "docker.io/buildpacksio/lifecycle" diff --git a/internal/config/config_test.go b/internal/config/config_test.go index 0f58e0ba22..25b5b4d0a5 100644 --- a/internal/config/config_test.go +++ b/internal/config/config_test.go @@ -1,7 +1,6 @@ package config_test import ( - "io/ioutil" "os" "path/filepath" "testing" @@ -28,7 +27,7 @@ func testConfig(t *testing.T, when spec.G, it spec.S) { it.Before(func() { var err error - tmpDir, err = ioutil.TempDir("", "pack.config.test.") + tmpDir, err = os.MkdirTemp("", "pack.config.test.") h.AssertNil(t, err) configPath = filepath.Join(tmpDir, "config.toml") }) @@ -47,6 +46,7 @@ func testConfig(t *testing.T, when spec.G, it spec.S) { h.AssertEq(t, len(subject.RunImages), 0) h.AssertEq(t, subject.Experimental, false) h.AssertEq(t, len(subject.RegistryMirrors), 0) + h.AssertEq(t, subject.LayoutRepositoryDir, "") }) }) }) @@ -74,7 +74,7 @@ func testConfig(t *testing.T, when spec.G, it spec.S) { }, }, configPath)) - b, err := ioutil.ReadFile(configPath) + b, err := os.ReadFile(configPath) h.AssertNil(t, err) h.AssertContains(t, string(b), `default-builder-image = "some/builder"`) h.AssertContains(t, string(b), `[[run-images]] @@ -95,14 +95,14 @@ func testConfig(t *testing.T, when spec.G, it spec.S) { when("config on disk", func() { it.Before(func() { - h.AssertNil(t, ioutil.WriteFile(configPath, []byte("some-old-contents"), 0777)) + h.AssertNil(t, os.WriteFile(configPath, []byte("some-old-contents"), 0777)) }) it("replaces the file", func() { h.AssertNil(t, config.Write(config.Config{ DefaultBuilder: "some/builder", }, configPath)) - b, err := ioutil.ReadFile(configPath) + b, err := os.ReadFile(configPath) h.AssertNil(t, err) h.AssertContains(t, string(b), `default-builder-image = "some/builder"`) h.AssertNotContains(t, string(b), "some-old-contents") @@ -116,7 +116,7 @@ func testConfig(t *testing.T, when spec.G, it spec.S) { DefaultBuilder: "some/builder", }, missingDirConfigPath)) - b, err := 
ioutil.ReadFile(missingDirConfigPath) + b, err := os.ReadFile(missingDirConfigPath) h.AssertNil(t, err) h.AssertContains(t, string(b), `default-builder-image = "some/builder"`) }) diff --git a/internal/container/run.go b/internal/container/run.go index e39f8fbb23..eeeda7d2c2 100644 --- a/internal/container/run.go +++ b/internal/container/run.go @@ -5,19 +5,48 @@ import ( "fmt" "io" - "github.com/docker/docker/api/types" - dcontainer "github.com/docker/docker/api/types/container" - "github.com/docker/docker/client" "github.com/docker/docker/pkg/stdcopy" + dcontainer "github.com/moby/moby/api/types/container" + dockerClient "github.com/moby/moby/client" "github.com/pkg/errors" ) -type Handler func(bodyChan <-chan dcontainer.ContainerWaitOKBody, errChan <-chan error, reader io.Reader) error +type Handler func(bodyChan <-chan dcontainer.WaitResponse, errChan <-chan error, reader io.Reader) error -func RunWithHandler(ctx context.Context, docker client.CommonAPIClient, ctrID string, handler Handler) error { - bodyChan, errChan := docker.ContainerWait(ctx, ctrID, dcontainer.WaitConditionNextExit) +type DockerClient interface { + ContainerWait(ctx context.Context, containerID string, options dockerClient.ContainerWaitOptions) dockerClient.ContainerWaitResult + ContainerAttach(ctx context.Context, container string, options dockerClient.ContainerAttachOptions) (dockerClient.ContainerAttachResult, error) + ContainerStart(ctx context.Context, container string, options dockerClient.ContainerStartOptions) (dockerClient.ContainerStartResult, error) +} + +func ContainerWaitWrapper(ctx context.Context, docker DockerClient, container string, condition dcontainer.WaitCondition) (<-chan dcontainer.WaitResponse, <-chan error) { + bodyChan := make(chan dcontainer.WaitResponse) + errChan := make(chan error) + + go func() { + defer close(bodyChan) + defer close(errChan) + + result := docker.ContainerWait(ctx, container, dockerClient.ContainerWaitOptions{Condition: 
dcontainer.WaitConditionNextExit}) + for { + select { + case body := <-result.Result: + bodyChan <- body + return + case err := <-result.Error: + errChan <- err + return + } + } + }() + + return bodyChan, errChan +} + +func RunWithHandler(ctx context.Context, docker DockerClient, ctrID string, handler Handler) error { + bodyChan, errChan := ContainerWaitWrapper(ctx, docker, ctrID, dcontainer.WaitConditionNextExit) - resp, err := docker.ContainerAttach(ctx, ctrID, types.ContainerAttachOptions{ + resp, err := docker.ContainerAttach(ctx, ctrID, dockerClient.ContainerAttachOptions{ Stream: true, Stdout: true, Stderr: true, @@ -27,7 +56,7 @@ func RunWithHandler(ctx context.Context, docker client.CommonAPIClient, ctrID st } defer resp.Close() - if err := docker.ContainerStart(ctx, ctrID, types.ContainerStartOptions{}); err != nil { + if _, err := docker.ContainerStart(ctx, ctrID, dockerClient.ContainerStartOptions{}); err != nil { return errors.Wrap(err, "container start") } @@ -35,7 +64,7 @@ func RunWithHandler(ctx context.Context, docker client.CommonAPIClient, ctrID st } func DefaultHandler(out, errOut io.Writer) Handler { - return func(bodyChan <-chan dcontainer.ContainerWaitOKBody, errChan <-chan error, reader io.Reader) error { + return func(bodyChan <-chan dcontainer.WaitResponse, errChan <-chan error, reader io.Reader) error { copyErr := make(chan error) go func() { _, err := stdcopy.StdCopy(out, errOut, reader) diff --git a/internal/fakes/fake_buildpack.go b/internal/fakes/fake_buildpack.go index ce4ce28c80..3a798c586e 100644 --- a/internal/fakes/fake_buildpack.go +++ b/internal/fakes/fake_buildpack.go @@ -37,23 +37,23 @@ func WithExtraBuildpackContents(filename, contents string) FakeBuildpackOption { } } -func WithOpenError(err error) FakeBuildpackOption { +func WithBpOpenError(err error) FakeBuildpackOption { return func(f *fakeBuildpackConfig) { f.OpenError = err } } -// NewFakeBuildpack creates a fake buildpacks with contents: +// NewFakeBuildpack creates a 
fake buildpack with contents: // -// \_ /cnbs/buildpacks/{ID} -// \_ /cnbs/buildpacks/{ID}/{version} -// \_ /cnbs/buildpacks/{ID}/{version}/buildpack.toml -// \_ /cnbs/buildpacks/{ID}/{version}/bin -// \_ /cnbs/buildpacks/{ID}/{version}/bin/build -// build-contents -// \_ /cnbs/buildpacks/{ID}/{version}/bin/detect -// detect-contents -func NewFakeBuildpack(descriptor dist.BuildpackDescriptor, chmod int64, options ...FakeBuildpackOption) (buildpack.Buildpack, error) { +// \_ /cnb/buildpacks/{ID} +// \_ /cnb/buildpacks/{ID}/{version} +// \_ /cnb/buildpacks/{ID}/{version}/buildpack.toml +// \_ /cnb/buildpacks/{ID}/{version}/bin +// \_ /cnb/buildpacks/{ID}/{version}/bin/build +// build-contents +// \_ /cnb/buildpacks/{ID}/{version}/bin/detect +// detect-contents +func NewFakeBuildpack(descriptor dist.BuildpackDescriptor, chmod int64, options ...FakeBuildpackOption) (buildpack.BuildModule, error) { return &fakeBuildpack{ descriptor: descriptor, chmod: chmod, @@ -61,8 +61,8 @@ func NewFakeBuildpack(descriptor dist.BuildpackDescriptor, chmod int64, options }, nil } -func (b *fakeBuildpack) Descriptor() dist.BuildpackDescriptor { - return b.descriptor +func (b *fakeBuildpack) Descriptor() buildpack.Descriptor { + return &b.descriptor } func (b *fakeBuildpack) Open() (io.ReadCloser, error) { @@ -83,11 +83,11 @@ func (b *fakeBuildpack) Open() (io.ReadCloser, error) { tarBuilder := archive.TarBuilder{} ts := archive.NormalizedDateTime tarBuilder.AddDir(fmt.Sprintf("/cnb/buildpacks/%s", b.descriptor.EscapedID()), b.chmod, ts) - bpDir := fmt.Sprintf("/cnb/buildpacks/%s/%s", b.descriptor.EscapedID(), b.descriptor.Info.Version) + bpDir := fmt.Sprintf("/cnb/buildpacks/%s/%s", b.descriptor.EscapedID(), b.descriptor.Info().Version) tarBuilder.AddDir(bpDir, b.chmod, ts) tarBuilder.AddFile(bpDir+"/buildpack.toml", b.chmod, ts, buf.Bytes()) - if len(b.descriptor.Order) == 0 { + if len(b.descriptor.Order()) == 0 { tarBuilder.AddDir(bpDir+"/bin", b.chmod, ts) 
tarBuilder.AddFile(bpDir+"/bin/build", b.chmod, ts, []byte("build-contents")) tarBuilder.AddFile(bpDir+"/bin/detect", b.chmod, ts, []byte("detect-contents")) diff --git a/internal/fakes/fake_buildpack_blob.go b/internal/fakes/fake_buildpack_blob.go index b15c792b76..d5b6a4aede 100644 --- a/internal/fakes/fake_buildpack_blob.go +++ b/internal/fakes/fake_buildpack_blob.go @@ -9,23 +9,23 @@ import ( "github.com/buildpacks/pack/pkg/archive" "github.com/buildpacks/pack/pkg/blob" - "github.com/buildpacks/pack/pkg/dist" + "github.com/buildpacks/pack/pkg/buildpack" ) type fakeBuildpackBlob struct { - descriptor dist.BuildpackDescriptor + descriptor buildpack.Descriptor chmod int64 } // NewFakeBuildpackBlob creates a fake blob with contents: // -// \_ buildpack.toml -// \_ bin -// \_ bin/build -// build-contents -// \_ bin/detect -// detect-contents -func NewFakeBuildpackBlob(descriptor dist.BuildpackDescriptor, chmod int64) (blob.Blob, error) { +// \_ buildpack.toml +// \_ bin +// \_ bin/build +// build-contents +// \_ bin/detect +// detect-contents +func NewFakeBuildpackBlob(descriptor buildpack.Descriptor, chmod int64) (blob.Blob, error) { return &fakeBuildpackBlob{ descriptor: descriptor, chmod: chmod, diff --git a/internal/fakes/fake_buildpack_tar.go b/internal/fakes/fake_buildpack_tar.go index aa6b1bf2d6..7745018a34 100644 --- a/internal/fakes/fake_buildpack_tar.go +++ b/internal/fakes/fake_buildpack_tar.go @@ -2,7 +2,7 @@ package fakes import ( "io" - "io/ioutil" + "os" "testing" "github.com/buildpacks/pack/pkg/dist" @@ -10,10 +10,10 @@ import ( ) func CreateBuildpackTar(t *testing.T, tmpDir string, descriptor dist.BuildpackDescriptor) string { - buildpack, err := NewFakeBuildpackBlob(descriptor, 0777) + buildpack, err := NewFakeBuildpackBlob(&descriptor, 0777) h.AssertNil(t, err) - tempFile, err := ioutil.TempFile(tmpDir, "bp-*.tar") + tempFile, err := os.CreateTemp(tmpDir, "bp-*.tar") h.AssertNil(t, err) defer tempFile.Close() diff --git 
a/internal/fakes/fake_extension.go b/internal/fakes/fake_extension.go new file mode 100644 index 0000000000..1063f3391b --- /dev/null +++ b/internal/fakes/fake_extension.go @@ -0,0 +1,99 @@ +package fakes + +import ( + "bytes" + "fmt" + "io" + "path/filepath" + + "github.com/BurntSushi/toml" + + "github.com/buildpacks/pack/pkg/archive" + "github.com/buildpacks/pack/pkg/buildpack" + "github.com/buildpacks/pack/pkg/dist" +) + +type fakeExtension struct { + descriptor dist.ExtensionDescriptor + chmod int64 + options []FakeExtensionOption +} + +type fakeExtensionConfig struct { + // maping of extrafilename to stringified contents + ExtraFiles map[string]string + OpenError error +} + +func newFakeExtensionConfig() *fakeExtensionConfig { + return &fakeExtensionConfig{ExtraFiles: map[string]string{}} +} + +type FakeExtensionOption func(*fakeExtensionConfig) + +func WithExtraExtensionContents(filename, contents string) FakeExtensionOption { + return func(f *fakeExtensionConfig) { + f.ExtraFiles[filename] = contents + } +} + +func WithExtOpenError(err error) FakeExtensionOption { + return func(f *fakeExtensionConfig) { + f.OpenError = err + } +} + +// NewFakeExtension creates a fake extension with contents: +// +// \_ /cnb/extensions/{ID} +// \_ /cnb/extensions/{ID}/{version} +// \_ /cnb/extensions/{ID}/{version}/extension.toml +// \_ /cnb/extensions/{ID}/{version}/bin +// \_ /cnb/extensions/{ID}/{version}/bin/generate +// generate-contents +// \_ /cnb/extensions/{ID}/{version}/bin/detect +// detect-contents +func NewFakeExtension(descriptor dist.ExtensionDescriptor, chmod int64, options ...FakeExtensionOption) (buildpack.BuildModule, error) { + return &fakeExtension{ + descriptor: descriptor, + chmod: chmod, + options: options, + }, nil +} + +func (b *fakeExtension) Descriptor() buildpack.Descriptor { + return &b.descriptor +} + +func (b *fakeExtension) Open() (io.ReadCloser, error) { + fConfig := newFakeExtensionConfig() + for _, option := range b.options { + 
option(fConfig) + } + + if fConfig.OpenError != nil { + return nil, fConfig.OpenError + } + + buf := &bytes.Buffer{} + if err := toml.NewEncoder(buf).Encode(b.descriptor); err != nil { + return nil, err + } + + tarBuilder := archive.TarBuilder{} + ts := archive.NormalizedDateTime + tarBuilder.AddDir(fmt.Sprintf("/cnb/extensions/%s", b.descriptor.EscapedID()), b.chmod, ts) + extDir := fmt.Sprintf("/cnb/extensions/%s/%s", b.descriptor.EscapedID(), b.descriptor.Info().Version) + tarBuilder.AddDir(extDir, b.chmod, ts) + tarBuilder.AddFile(extDir+"/extension.toml", b.chmod, ts, buf.Bytes()) + + tarBuilder.AddDir(extDir+"/bin", b.chmod, ts) + tarBuilder.AddFile(extDir+"/bin/generate", b.chmod, ts, []byte("generate-contents")) + tarBuilder.AddFile(extDir+"/bin/detect", b.chmod, ts, []byte("detect-contents")) + + for extraFilename, extraContents := range fConfig.ExtraFiles { + tarBuilder.AddFile(filepath.Join(extDir, extraFilename), b.chmod, ts, []byte(extraContents)) + } + + return tarBuilder.Reader(archive.DefaultTarWriterFactory()), nil +} diff --git a/internal/fakes/fake_extension_blob.go b/internal/fakes/fake_extension_blob.go new file mode 100644 index 0000000000..f243ff6f70 --- /dev/null +++ b/internal/fakes/fake_extension_blob.go @@ -0,0 +1,40 @@ +package fakes + +import ( + "bytes" + "io" + "time" + + "github.com/BurntSushi/toml" + + "github.com/buildpacks/pack/pkg/archive" + "github.com/buildpacks/pack/pkg/blob" + "github.com/buildpacks/pack/pkg/buildpack" +) + +type fakeExtensionBlob struct { + descriptor buildpack.Descriptor + chmod int64 +} + +func NewFakeExtensionBlob(descriptor buildpack.Descriptor, chmod int64) (blob.Blob, error) { + return &fakeExtensionBlob{ + descriptor: descriptor, + chmod: chmod, + }, nil +} + +func (b *fakeExtensionBlob) Open() (reader io.ReadCloser, err error) { + buf := &bytes.Buffer{} + if err = toml.NewEncoder(buf).Encode(b.descriptor); err != nil { + return nil, err + } + + tarBuilder := archive.TarBuilder{} + 
tarBuilder.AddFile("extension.toml", b.chmod, time.Now(), buf.Bytes()) + tarBuilder.AddDir("bin", b.chmod, time.Now()) + tarBuilder.AddFile("bin/build", b.chmod, time.Now(), []byte("build-contents")) + tarBuilder.AddFile("bin/detect", b.chmod, time.Now(), []byte("detect-contents")) + + return tarBuilder.Reader(archive.DefaultTarWriterFactory()), err +} diff --git a/internal/fakes/fake_extension_tar.go b/internal/fakes/fake_extension_tar.go new file mode 100644 index 0000000000..8ae46a2c9d --- /dev/null +++ b/internal/fakes/fake_extension_tar.go @@ -0,0 +1,27 @@ +package fakes + +import ( + "io" + "os" + "testing" + + "github.com/buildpacks/pack/pkg/dist" + h "github.com/buildpacks/pack/testhelpers" +) + +func CreateExtensionTar(t *testing.T, tmpDir string, descriptor dist.ExtensionDescriptor) string { + extension, err := NewFakeExtensionBlob(&descriptor, 0777) + h.AssertNil(t, err) + + tempFile, err := os.CreateTemp(tmpDir, "ex-*.tar") + h.AssertNil(t, err) + defer tempFile.Close() + + reader, err := extension.Open() + h.AssertNil(t, err) + + _, err = io.Copy(tempFile, reader) + h.AssertNil(t, err) + + return tempFile.Name() +} diff --git a/internal/fakes/fake_image_fetcher.go b/internal/fakes/fake_image_fetcher.go index c0a331f616..d65a4914cf 100644 --- a/internal/fakes/fake_image_fetcher.go +++ b/internal/fakes/fake_image_fetcher.go @@ -6,13 +6,15 @@ import ( "github.com/buildpacks/imgutil" "github.com/pkg/errors" + "github.com/buildpacks/pack/pkg/dist" "github.com/buildpacks/pack/pkg/image" ) type FetchArgs struct { - Daemon bool - PullPolicy image.PullPolicy - Platform string + Daemon bool + PullPolicy image.PullPolicy + LayoutOption image.LayoutOption + Target *dist.Target } type FakeImageFetcher struct { @@ -30,7 +32,7 @@ func NewFakeImageFetcher() *FakeImageFetcher { } func (f *FakeImageFetcher) Fetch(ctx context.Context, name string, options image.FetchOptions) (imgutil.Image, error) { - f.FetchCalls[name] = &FetchArgs{Daemon: options.Daemon, PullPolicy: 
options.PullPolicy, Platform: options.Platform} + f.FetchCalls[name] = &FetchArgs{Daemon: options.Daemon, PullPolicy: options.PullPolicy, Target: options.Target, LayoutOption: options.LayoutOption} ri, remoteFound := f.RemoteImages[name] @@ -54,6 +56,16 @@ func (f *FakeImageFetcher) Fetch(ctx context.Context, name string, options image return ri, nil } +func (f *FakeImageFetcher) CheckReadAccess(_ string, _ image.FetchOptions) bool { + return true +} + +func (f *FakeImageFetcher) FetchForPlatform(ctx context.Context, name string, options image.FetchOptions) (imgutil.Image, error) { + // For the fake implementation, FetchForPlatform behaves the same as Fetch + // since we don't need to simulate the platform-specific digest resolution + return f.Fetch(ctx, name, options) +} + func shouldPull(localFound, remoteFound bool, policy image.PullPolicy) bool { if remoteFound && !localFound && policy == image.PullIfNotPresent { return true diff --git a/internal/fakes/fake_images.go b/internal/fakes/fake_images.go index 5057968c45..46ab8b4dae 100644 --- a/internal/fakes/fake_images.go +++ b/internal/fakes/fake_images.go @@ -18,7 +18,7 @@ import ( type FakeImageCreator func(name string, topLayerSha string, identifier imgutil.Identifier) *fakes.Image -func NewFakeBuilderImage(t *testing.T, tmpDir, name string, stackID, uid, gid string, metadata builder.Metadata, bpLayers dist.BuildpackLayers, order dist.Order, creator FakeImageCreator) *fakes.Image { +func NewFakeBuilderImage(t *testing.T, tmpDir, name string, stackID, uid, gid string, metadata builder.Metadata, bpLayers dist.ModuleLayers, order dist.Order, exLayers dist.ModuleLayers, orderExtensions dist.Order, system dist.System, creator FakeImageCreator) *fakes.Image { fakeBuilderImage := creator(name, "", nil) h.AssertNil(t, fakeBuilderImage.SetLabel("io.buildpacks.stack.id", stackID)) @@ -30,16 +30,16 @@ func NewFakeBuilderImage(t *testing.T, tmpDir, name string, stackID, uid, gid st for bpID, v := range bpLayers { for 
bpVersion, bpLayerInfo := range v { - bpInfo := dist.BuildpackInfo{ + bpInfo := dist.ModuleInfo{ ID: bpID, Version: bpVersion, } buildpackDescriptor := dist.BuildpackDescriptor{ - API: bpLayerInfo.API, - Info: bpInfo, - Stacks: bpLayerInfo.Stacks, - Order: bpLayerInfo.Order, + WithAPI: bpLayerInfo.API, + WithInfo: bpInfo, + WithStacks: bpLayerInfo.Stacks, + WithOrder: bpLayerInfo.Order, } buildpackTar := CreateBuildpackTar(t, tmpDir, buildpackDescriptor) @@ -48,20 +48,57 @@ func NewFakeBuilderImage(t *testing.T, tmpDir, name string, stackID, uid, gid st } } + for exID, v := range exLayers { + for exVersion, exLayerInfo := range v { + exInfo := dist.ModuleInfo{ + ID: exID, + Version: exVersion, + } + + extensionDescriptor := dist.ExtensionDescriptor{ + WithAPI: exLayerInfo.API, + WithInfo: exInfo, + } + + extensionTar := CreateExtensionTar(t, tmpDir, extensionDescriptor) + err := fakeBuilderImage.AddLayer(extensionTar) + h.AssertNil(t, err) + } + } + h.AssertNil(t, dist.SetLabel(fakeBuilderImage, "io.buildpacks.buildpack.order", order)) + h.AssertNil(t, dist.SetLabel(fakeBuilderImage, "io.buildpacks.extension.order", orderExtensions)) tarBuilder := archive.TarBuilder{} orderTomlBytes := &bytes.Buffer{} - h.AssertNil(t, toml.NewEncoder(orderTomlBytes).Encode(orderTOML{Order: order})) + h.AssertNil(t, toml.NewEncoder(orderTomlBytes).Encode(orderTOML{Order: order, OrderExtensions: orderExtensions})) tarBuilder.AddFile("/cnb/order.toml", 0777, archive.NormalizedDateTime, orderTomlBytes.Bytes()) orderTar := filepath.Join(tmpDir, fmt.Sprintf("order.%s.toml", h.RandString(8))) h.AssertNil(t, tarBuilder.WriteToPath(orderTar, archive.DefaultTarWriterFactory())) h.AssertNil(t, fakeBuilderImage.AddLayer(orderTar)) + if len(system.Pre.Buildpacks) > 0 || len(system.Post.Buildpacks) > 0 { + h.AssertNil(t, dist.SetLabel(fakeBuilderImage, "io.buildpacks.buildpack.system", system)) + systemTarBuilder := archive.TarBuilder{} + systemTomlBytes := &bytes.Buffer{} + h.AssertNil(t, 
toml.NewEncoder(systemTomlBytes).Encode(systemTOML{System: system})) + + systemTarBuilder.AddFile("/cnb/system.toml", 0777, archive.NormalizedDateTime, systemTomlBytes.Bytes()) + + systemTar := filepath.Join(tmpDir, fmt.Sprintf("system.%s.toml", h.RandString(8))) + h.AssertNil(t, systemTarBuilder.WriteToPath(systemTar, archive.DefaultTarWriterFactory())) + h.AssertNil(t, fakeBuilderImage.AddLayer(systemTar)) + } + return fakeBuilderImage } type orderTOML struct { - Order dist.Order `toml:"order"` + Order dist.Order `toml:"order"` + OrderExtensions dist.Order `toml:"orderExtensions"` +} + +type systemTOML struct { + System dist.System `toml:"system"` } diff --git a/internal/fakes/fake_package.go b/internal/fakes/fake_package.go index 0a139098ad..4713d2ffd6 100644 --- a/internal/fakes/fake_package.go +++ b/internal/fakes/fake_package.go @@ -14,7 +14,7 @@ import ( type Package interface { Name() string - BuildpackLayers() dist.BuildpackLayers + BuildpackLayers() dist.ModuleLayers GetLayer(diffID string) (io.ReadCloser, error) } @@ -23,11 +23,11 @@ var _ Package = (*fakePackage)(nil) type fakePackage struct { name string bpTarFiles map[string]string - bpLayers dist.BuildpackLayers + bpLayers dist.ModuleLayers } -func NewPackage(tmpDir string, name string, buildpacks []buildpack.Buildpack) (Package, error) { - processBuildpack := func(bp buildpack.Buildpack) (tarFile string, diffID string, err error) { +func NewPackage(tmpDir string, name string, buildpacks []buildpack.BuildModule) (Package, error) { + processBuildpack := func(bp buildpack.BuildModule) (tarFile string, diffID string, err error) { tarFile, err = buildpack.ToLayerTar(tmpDir, bp) if err != nil { return "", "", err @@ -46,7 +46,7 @@ func NewPackage(tmpDir string, name string, buildpacks []buildpack.Buildpack) (P return tarFile, hash.String(), nil } - bpLayers := dist.BuildpackLayers{} + bpLayers := dist.ModuleLayers{} bpTarFiles := map[string]string{} for _, bp := range buildpacks { tarFile, diffID, err := 
processBuildpack(bp) @@ -54,7 +54,7 @@ func NewPackage(tmpDir string, name string, buildpacks []buildpack.Buildpack) (P return nil, err } bpTarFiles[diffID] = tarFile - dist.AddBuildpackToLayersMD(bpLayers, bp.Descriptor(), diffID) + dist.AddToLayersMD(bpLayers, bp.Descriptor(), diffID) } return &fakePackage{ @@ -68,7 +68,7 @@ func (f *fakePackage) Name() string { return f.name } -func (f *fakePackage) BuildpackLayers() dist.BuildpackLayers { +func (f *fakePackage) BuildpackLayers() dist.ModuleLayers { return f.bpLayers } diff --git a/internal/inspectimage/bom_display.go b/internal/inspectimage/bom_display.go index 9a438ae916..28e273288d 100644 --- a/internal/inspectimage/bom_display.go +++ b/internal/inspectimage/bom_display.go @@ -18,13 +18,16 @@ type BOMEntryDisplay struct { Name string `toml:"name" json:"name" yaml:"name"` Version string `toml:"version,omitempty" json:"version,omitempty" yaml:"version,omitempty"` Metadata map[string]interface{} `toml:"metadata" json:"metadata" yaml:"metadata"` - Buildpack dist.BuildpackRef `json:"buildpacks" yaml:"buildpacks" toml:"buildpacks"` + Buildpack dist.ModuleRef `json:"buildpacks" yaml:"buildpacks" toml:"buildpacks"` } func NewBOMDisplay(info *client.ImageInfo) []BOMEntryDisplay { if info == nil { return nil } + if info != nil && info.Extensions != nil { + return displayBOMWithExtension(info.BOM) + } return displayBOM(info.BOM) } @@ -36,8 +39,29 @@ func displayBOM(bom []buildpack.BOMEntry) []BOMEntryDisplay { Version: entry.Version, Metadata: entry.Metadata, - Buildpack: dist.BuildpackRef{ - BuildpackInfo: dist.BuildpackInfo{ + Buildpack: dist.ModuleRef{ + ModuleInfo: dist.ModuleInfo{ + ID: entry.Buildpack.ID, + Version: entry.Buildpack.Version, + }, + Optional: entry.Buildpack.Optional, + }, + }) + } + + return result +} + +func displayBOMWithExtension(bom []buildpack.BOMEntry) []BOMEntryDisplay { + var result []BOMEntryDisplay + for _, entry := range bom { + result = append(result, BOMEntryDisplay{ + Name: 
entry.Name, + Version: entry.Version, + Metadata: entry.Metadata, + + Buildpack: dist.ModuleRef{ + ModuleInfo: dist.ModuleInfo{ ID: entry.Buildpack.ID, Version: entry.Buildpack.Version, }, diff --git a/internal/inspectimage/info_display.go b/internal/inspectimage/info_display.go index ca6fb15c77..af03dcc9f4 100644 --- a/internal/inspectimage/info_display.go +++ b/internal/inspectimage/info_display.go @@ -3,7 +3,7 @@ package inspectimage import ( "github.com/buildpacks/lifecycle/buildpack" "github.com/buildpacks/lifecycle/launch" - "github.com/buildpacks/lifecycle/platform" + "github.com/buildpacks/lifecycle/platform/files" "github.com/buildpacks/pack/internal/config" "github.com/buildpacks/pack/pkg/client" @@ -31,6 +31,7 @@ type ProcessDisplay struct { Command string `json:"command" yaml:"command" toml:"command"` Default bool `json:"default" yaml:"default" toml:"default"` Args []string `json:"args" yaml:"args" toml:"args"` + WorkDir string `json:"working-dir" yaml:"working-dir" toml:"working-dir"` } type BaseDisplay struct { @@ -42,8 +43,10 @@ type InfoDisplay struct { StackID string `json:"stack" yaml:"stack" toml:"stack"` Base BaseDisplay `json:"base_image" yaml:"base_image" toml:"base_image"` RunImageMirrors []RunImageMirrorDisplay `json:"run_images" yaml:"run_images" toml:"run_images"` - Buildpacks []dist.BuildpackInfo `json:"buildpacks" yaml:"buildpacks" toml:"buildpacks"` + Buildpacks []dist.ModuleInfo `json:"buildpacks" yaml:"buildpacks" toml:"buildpacks"` + Extensions []dist.ModuleInfo `json:"extensions" yaml:"extensions" toml:"extensions"` Processes []ProcessDisplay `json:"processes" yaml:"processes" toml:"processes"` + Rebasable bool `json:"rebasable" yaml:"rebasable" toml:"rebasable"` } type InspectOutput struct { @@ -56,12 +59,24 @@ func NewInfoDisplay(info *client.ImageInfo, generalInfo GeneralInfo) *InfoDispla if info == nil { return nil } + if info != nil && info.Extensions != nil { + return &InfoDisplay{ + StackID: info.StackID, + Base: 
displayBase(info.Base), + RunImageMirrors: displayMirrors(info, generalInfo), + Buildpacks: displayBuildpacks(info.Buildpacks), + Extensions: displayExtensions(info.Extensions), + Processes: displayProcesses(info.Processes), + Rebasable: info.Rebasable, + } + } return &InfoDisplay{ StackID: info.StackID, Base: displayBase(info.Base), RunImageMirrors: displayMirrors(info, generalInfo), Buildpacks: displayBuildpacks(info.Buildpacks), Processes: displayProcesses(info.Processes), + Rebasable: info.Rebasable, } } @@ -83,7 +98,7 @@ func getConfigMirrors(info *client.ImageInfo, imageMirrors []config.RunImage) [] return nil } -func displayBase(base platform.RunImageMetadata) BaseDisplay { +func displayBase(base files.RunImageForRebase) BaseDisplay { return BaseDisplay{ TopLayer: base.TopLayer, Reference: base.Reference, @@ -127,10 +142,10 @@ func displayMirrors(info *client.ImageInfo, generalInfo GeneralInfo) []RunImageM return result } -func displayBuildpacks(buildpacks []buildpack.GroupBuildpack) []dist.BuildpackInfo { - var result []dist.BuildpackInfo +func displayBuildpacks(buildpacks []buildpack.GroupElement) []dist.ModuleInfo { + var result []dist.ModuleInfo for _, buildpack := range buildpacks { - result = append(result, dist.BuildpackInfo{ + result = append(result, dist.ModuleInfo{ ID: buildpack.ID, Version: buildpack.Version, Homepage: buildpack.Homepage, @@ -139,6 +154,18 @@ func displayBuildpacks(buildpacks []buildpack.GroupBuildpack) []dist.BuildpackIn return result } +func displayExtensions(extensions []buildpack.GroupElement) []dist.ModuleInfo { + var result []dist.ModuleInfo + for _, extension := range extensions { + result = append(result, dist.ModuleInfo{ + ID: extension.ID, + Version: extension.Version, + Homepage: extension.Homepage, + }) + } + return result +} + func displayProcesses(details client.ProcessDetails) []ProcessDisplay { var result []ProcessDisplay detailsArray := details.OtherProcesses @@ -160,12 +187,18 @@ func convertToDisplay(proc 
launch.Process, isDefault bool) ProcessDisplay { case false: shell = "bash" } + var argsToUse []string + if len(proc.Command.Entries) > 1 { + argsToUse = proc.Command.Entries[1:] + } + argsToUse = append(argsToUse, proc.Args...) result := ProcessDisplay{ Type: proc.Type, Shell: shell, - Command: proc.Command, + Command: proc.Command.Entries[0], Default: isDefault, - Args: proc.Args, + Args: argsToUse, // overridable args are supported for platform API >= 0.10 with buildpack API >= 0.9, but we can't determine the buildpack API from the metadata label (to be fixed in platform 0.11) + WorkDir: proc.WorkingDirectory, } return result diff --git a/internal/inspectimage/writer/bom_json_test.go b/internal/inspectimage/writer/bom_json_test.go index 4a786b779a..5e0b8f8fb0 100644 --- a/internal/inspectimage/writer/bom_json_test.go +++ b/internal/inspectimage/writer/bom_json_test.go @@ -105,7 +105,7 @@ func testJSONBOM(t *testing.T, when spec.G, it spec.S) { }, }, }, - Buildpack: buildpack.GroupBuildpack{ID: "test.bp.one.remote", Version: "1.0.0", Homepage: "https://some-homepage"}, + Buildpack: buildpack.GroupElement{ID: "test.bp.one.remote", Version: "1.0.0", Homepage: "https://some-homepage"}, }}} localInfo = &client.ImageInfo{ @@ -120,7 +120,7 @@ func testJSONBOM(t *testing.T, when spec.G, it spec.S) { }, }, }, - Buildpack: buildpack.GroupBuildpack{ID: "test.bp.one.remote", Version: "1.0.0", Homepage: "https://some-homepage"}, + Buildpack: buildpack.GroupElement{ID: "test.bp.one.remote", Version: "1.0.0", Homepage: "https://some-homepage"}, }}, } diff --git a/internal/inspectimage/writer/bom_yaml_test.go b/internal/inspectimage/writer/bom_yaml_test.go index 540c88b004..855d065166 100644 --- a/internal/inspectimage/writer/bom_yaml_test.go +++ b/internal/inspectimage/writer/bom_yaml_test.go @@ -91,7 +91,7 @@ remote: }, }, }, - Buildpack: buildpack.GroupBuildpack{ID: "test.bp.one.remote", Version: "1.0.0", Homepage: "https://some-homepage"}, + Buildpack: 
buildpack.GroupElement{ID: "test.bp.one.remote", Version: "1.0.0", Homepage: "https://some-homepage"}, }}} localInfo = &client.ImageInfo{ @@ -106,7 +106,7 @@ remote: }, }, }, - Buildpack: buildpack.GroupBuildpack{ID: "test.bp.one.remote", Version: "1.0.0", Homepage: "https://some-homepage"}, + Buildpack: buildpack.GroupElement{ID: "test.bp.one.remote", Version: "1.0.0", Homepage: "https://some-homepage"}, }}, } diff --git a/internal/inspectimage/writer/human_readable.go b/internal/inspectimage/writer/human_readable.go index 517330b10f..d9b58ee826 100644 --- a/internal/inspectimage/writer/human_readable.go +++ b/internal/inspectimage/writer/human_readable.go @@ -31,18 +31,38 @@ func (h *HumanReadable) Print( return fmt.Errorf("unable to find image '%s' locally or remotely", generalInfo.Name) } - localDisplay := inspectimage.NewInfoDisplay(local, generalInfo) - remoteDisplay := inspectimage.NewInfoDisplay(remote, generalInfo) - logger.Infof("Inspecting image: %s\n", style.Symbol(generalInfo.Name)) - logger.Info("\nREMOTE:\n") - err := writeImageInfo(logger, remoteDisplay, remoteErr) - if err != nil { - return fmt.Errorf("writing remote builder info: %w", err) + if err := writeRemoteImageInfo(logger, generalInfo, remote, remoteErr); err != nil { + return err + } + + if err := writeLocalImageInfo(logger, generalInfo, local, localErr); err != nil { + return err } + + return nil +} + +func writeLocalImageInfo( + logger logging.Logger, + generalInfo inspectimage.GeneralInfo, + local *client.ImageInfo, + localErr error) error { logger.Info("\nLOCAL:\n") - err = writeImageInfo(logger, localDisplay, localErr) + + if localErr != nil { + logger.Errorf("%s\n", localErr) + return nil + } + + localDisplay := inspectimage.NewInfoDisplay(local, generalInfo) + if localDisplay == nil { + logger.Info("(not present)\n") + return nil + } + + err := writeImageInfo(logger, localDisplay) if err != nil { return fmt.Errorf("writing local builder info: %w", err) } @@ -50,40 +70,70 @@ func (h 
*HumanReadable) Print( return nil } -func writeImageInfo( +func writeRemoteImageInfo( logger logging.Logger, - info *inspectimage.InfoDisplay, - err error, -) error { - imgTpl := template.Must(template.New("runImages"). - Funcs(template.FuncMap{"StringsJoin": strings.Join}). - Funcs(template.FuncMap{"StringsValueOrDefault": strs.ValueOrDefault}). - Parse(runImagesTemplate)) - imgTpl = template.Must(imgTpl.New("buildpacks"). - Parse(buildpacksTemplate)) - imgTpl = template.Must(imgTpl.New("processes"). - Parse(processesTemplate)) - imgTpl = template.Must(imgTpl.New("image"). - Parse(imageTemplate)) - if err != nil { - logger.Errorf("%s\n", err) + generalInfo inspectimage.GeneralInfo, + remote *client.ImageInfo, + remoteErr error) error { + logger.Info("\nREMOTE:\n") + + if remoteErr != nil { + logger.Errorf("%s\n", remoteErr) return nil } - if info == nil { + remoteDisplay := inspectimage.NewInfoDisplay(remote, generalInfo) + if remoteDisplay == nil { logger.Info("(not present)\n") return nil } - remoteOutput, err := inspectImageOutput(info, imgTpl) + + err := writeImageInfo(logger, remoteDisplay) + if err != nil { + return fmt.Errorf("writing remote builder info: %w", err) + } + + return nil +} + +func writeImageInfo( + logger logging.Logger, + info *inspectimage.InfoDisplay, +) error { + imgTpl := getImageTemplate(info) + remoteOutput, err := getInspectImageOutput(imgTpl, info) if err != nil { logger.Error(err.Error()) + return err } else { logger.Info(remoteOutput.String()) + return nil } - return nil } -func inspectImageOutput(info *inspectimage.InfoDisplay, tpl *template.Template) (*bytes.Buffer, error) { +func getImageTemplate(info *inspectimage.InfoDisplay) *template.Template { + imgTpl := template.Must(template.New("runImages"). + Funcs(template.FuncMap{"StringsJoin": strings.Join}). + Funcs(template.FuncMap{"StringsValueOrDefault": strs.ValueOrDefault}). 
+ Parse(runImagesTemplate)) + imgTpl = template.Must(imgTpl.New("buildpacks").Parse(buildpacksTemplate)) + + imgTpl = template.Must(imgTpl.New("processes").Parse(processesTemplate)) + + imgTpl = template.Must(imgTpl.New("rebasable").Parse(rebasableTemplate)) + + if info != nil && info.Extensions != nil { + imgTpl = template.Must(imgTpl.New("extensions").Parse(extensionsTemplate)) + imgTpl = template.Must(imgTpl.New("image").Parse(imageWithExtensionTemplate)) + } else { + imgTpl = template.Must(imgTpl.New("image").Parse(imageTemplate)) + } + return imgTpl +} + +func getInspectImageOutput( + tpl *template.Template, + info *inspectimage.InfoDisplay) (*bytes.Buffer, error) { if info == nil { return bytes.NewBuffer([]byte("(not present)")), nil } @@ -99,7 +149,6 @@ func inspectImageOutput(info *inspectimage.InfoDisplay, tpl *template.Template) }); err != nil { return bytes.NewBuffer(nil), err } - return buf, nil } @@ -127,20 +176,38 @@ Buildpacks: (buildpack metadata not present) {{- end }}` +var extensionsTemplate = ` +Extensions: +{{- if .Info.Extensions }} + ID VERSION HOMEPAGE +{{- range $_, $b := .Info.Extensions }} + {{ $b.ID }} {{ $b.Version }} {{ StringsValueOrDefault $b.Homepage "-" }} +{{- end }} +{{- else }} + (extension metadata not present) +{{- end }}` + var processesTemplate = ` {{- if .Info.Processes }} Processes: - TYPE SHELL COMMAND ARGS + TYPE SHELL COMMAND ARGS WORK DIR {{- range $_, $p := .Info.Processes }} {{- if $p.Default }} - {{ (printf "%s %s" $p.Type "(default)") }} {{ $p.Shell }} {{ $p.Command }} {{ StringsJoin $p.Args " " }} + {{ (printf "%s %s" $p.Type "(default)") }} {{ $p.Shell }} {{ $p.Command }} {{ StringsJoin $p.Args " " }} {{ $p.WorkDir }} {{- else }} - {{ $p.Type }} {{ $p.Shell }} {{ $p.Command }} {{ StringsJoin $p.Args " " }} + {{ $p.Type }} {{ $p.Shell }} {{ $p.Command }} {{ StringsJoin $p.Args " " }} {{ $p.WorkDir }} {{- end }} {{- end }} {{- end }}` +var rebasableTemplate = ` + +Rebasable: +{{- if or .Info.Rebasable (eq 
.Info.Rebasable true) }} true +{{- else }} false +{{- end }}` + var imageTemplate = ` Stack: {{ .Info.StackID }} @@ -150,4 +217,19 @@ Base Image: {{- end}} Top Layer: {{ .Info.Base.TopLayer }} {{ template "runImages" . }} +{{- template "rebasable" . }} {{ template "buildpacks" . }}{{ template "processes" . }}` + +var imageWithExtensionTemplate = ` +Stack: {{ .Info.StackID }} + +Base Image: +{{- if .Info.Base.Reference}} + Reference: {{ .Info.Base.Reference }} +{{- end}} + Top Layer: {{ .Info.Base.TopLayer }} +{{ template "runImages" . }} +{{- template "rebasable" . }} +{{ template "buildpacks" . }} +{{ template "extensions" . -}} +{{ template "processes" . }}` diff --git a/internal/inspectimage/writer/human_readable_test.go b/internal/inspectimage/writer/human_readable_test.go index 1ed456b9bc..e77cec639e 100644 --- a/internal/inspectimage/writer/human_readable_test.go +++ b/internal/inspectimage/writer/human_readable_test.go @@ -8,7 +8,7 @@ import ( "github.com/buildpacks/lifecycle/buildpack" "github.com/buildpacks/lifecycle/launch" - "github.com/buildpacks/lifecycle/platform" + "github.com/buildpacks/lifecycle/platform/files" "github.com/heroku/color" "github.com/sclevine/spec" "github.com/sclevine/spec/report" @@ -32,8 +32,13 @@ func testHumanReadable(t *testing.T, when spec.G, it spec.S) { assert = h.NewAssertionManager(t) outBuf bytes.Buffer - remoteInfo *client.ImageInfo - localInfo *client.ImageInfo + remoteInfo *client.ImageInfo + remoteWithExtensionInfo *client.ImageInfo + remoteInfoNoRebasable *client.ImageInfo + + localInfo *client.ImageInfo + localWithExtensionInfo *client.ImageInfo + localInfoNoRebasable *client.ImageInfo expectedRemoteOutput = `REMOTE: @@ -49,6 +54,8 @@ Run Images: some-remote-mirror other-remote-mirror +Rebasable: true + Buildpacks: ID VERSION HOMEPAGE test.bp.one.remote 1.0.0 https://some-homepage-one @@ -56,9 +63,68 @@ Buildpacks: test.bp.three.remote 3.0.0 - Processes: - TYPE SHELL COMMAND ARGS - some-remote-type (default) bash 
/some/remote command some remote args - other-remote-type /other/remote/command other remote args` + TYPE SHELL COMMAND ARGS WORK DIR + some-remote-type (default) bash /some/remote command some remote args /some-test-work-dir + other-remote-type /other/remote/command other remote args /other-test-work-dir` + expectedRemoteNoRebasableOutput = `REMOTE: + +Stack: test.stack.id.remote + +Base Image: + Reference: some-remote-run-image-reference + Top Layer: some-remote-top-layer + +Run Images: + user-configured-mirror-for-remote (user-configured) + some-remote-run-image + some-remote-mirror + other-remote-mirror + +Rebasable: false + +Buildpacks: + ID VERSION HOMEPAGE + test.bp.one.remote 1.0.0 https://some-homepage-one + test.bp.two.remote 2.0.0 https://some-homepage-two + test.bp.three.remote 3.0.0 - + +Processes: + TYPE SHELL COMMAND ARGS WORK DIR + some-remote-type (default) bash /some/remote command some remote args /some-test-work-dir + other-remote-type /other/remote/command other remote args /other-test-work-dir` + + expectedRemoteWithExtensionOutput = `REMOTE: + +Stack: test.stack.id.remote + +Base Image: + Reference: some-remote-run-image-reference + Top Layer: some-remote-top-layer + +Run Images: + user-configured-mirror-for-remote (user-configured) + some-remote-run-image + some-remote-mirror + other-remote-mirror + +Rebasable: true + +Buildpacks: + ID VERSION HOMEPAGE + test.bp.one.remote 1.0.0 https://some-homepage-one + test.bp.two.remote 2.0.0 https://some-homepage-two + test.bp.three.remote 3.0.0 - + +Extensions: + ID VERSION HOMEPAGE + test.bp.one.remote 1.0.0 https://some-homepage-one + test.bp.two.remote 2.0.0 https://some-homepage-two + test.bp.three.remote 3.0.0 - + +Processes: + TYPE SHELL COMMAND ARGS WORK DIR + some-remote-type (default) bash /some/remote command some remote args /some-test-work-dir + other-remote-type /other/remote/command other remote args /other-test-work-dir` expectedLocalOutput = `LOCAL: @@ -74,130 +140,88 @@ Run Images: 
some-local-mirror other-local-mirror +Rebasable: true + +Buildpacks: + ID VERSION HOMEPAGE + test.bp.one.local 1.0.0 https://some-homepage-one + test.bp.two.local 2.0.0 https://some-homepage-two + test.bp.three.local 3.0.0 - + +Processes: + TYPE SHELL COMMAND ARGS WORK DIR + some-local-type (default) bash /some/local command some local args /some-test-work-dir + other-local-type /other/local/command other local args /other-test-work-dir` + expectedLocalNoRebasableOutput = `LOCAL: + +Stack: test.stack.id.local + +Base Image: + Reference: some-local-run-image-reference + Top Layer: some-local-top-layer + +Run Images: + user-configured-mirror-for-local (user-configured) + some-local-run-image + some-local-mirror + other-local-mirror + +Rebasable: false + +Buildpacks: + ID VERSION HOMEPAGE + test.bp.one.local 1.0.0 https://some-homepage-one + test.bp.two.local 2.0.0 https://some-homepage-two + test.bp.three.local 3.0.0 - + +Processes: + TYPE SHELL COMMAND ARGS WORK DIR + some-local-type (default) bash /some/local command some local args /some-test-work-dir + other-local-type /other/local/command other local args /other-test-work-dir` + + expectedLocalWithExtensionOutput = `LOCAL: + +Stack: test.stack.id.local + +Base Image: + Reference: some-local-run-image-reference + Top Layer: some-local-top-layer + +Run Images: + user-configured-mirror-for-local (user-configured) + some-local-run-image + some-local-mirror + other-local-mirror + +Rebasable: true + Buildpacks: ID VERSION HOMEPAGE test.bp.one.local 1.0.0 https://some-homepage-one test.bp.two.local 2.0.0 https://some-homepage-two test.bp.three.local 3.0.0 - +Extensions: + ID VERSION HOMEPAGE + test.bp.one.local 1.0.0 https://some-homepage-one + test.bp.two.local 2.0.0 https://some-homepage-two + test.bp.three.local 3.0.0 - + Processes: - TYPE SHELL COMMAND ARGS - some-local-type (default) bash /some/local command some local args - other-local-type /other/local/command other local args` + TYPE SHELL COMMAND ARGS WORK 
DIR + some-local-type (default) bash /some/local command some local args /some-test-work-dir + other-local-type /other/local/command other local args /other-test-work-dir` ) when("Print", func() { it.Before(func() { - type someData struct { - String string - Bool bool - Int int - Nested struct { - String string - } - } - - remoteInfo = &client.ImageInfo{ - StackID: "test.stack.id.remote", - Buildpacks: []buildpack.GroupBuildpack{ - {ID: "test.bp.one.remote", Version: "1.0.0", Homepage: "https://some-homepage-one"}, - {ID: "test.bp.two.remote", Version: "2.0.0", Homepage: "https://some-homepage-two"}, - {ID: "test.bp.three.remote", Version: "3.0.0"}, - }, - Base: platform.RunImageMetadata{ - TopLayer: "some-remote-top-layer", - Reference: "some-remote-run-image-reference", - }, - Stack: platform.StackMetadata{ - RunImage: platform.StackRunImageMetadata{ - Image: "some-remote-run-image", - Mirrors: []string{"some-remote-mirror", "other-remote-mirror"}, - }, - }, - BOM: []buildpack.BOMEntry{{ - Require: buildpack.Require{ - Name: "name-1", - Version: "version-1", - Metadata: map[string]interface{}{ - "RemoteData": someData{ - String: "aString", - Bool: true, - Int: 123, - Nested: struct { - String string - }{ - String: "anotherString", - }, - }, - }, - }, - Buildpack: buildpack.GroupBuildpack{ID: "test.bp.one.remote", Version: "1.0.0"}, - }}, - Processes: client.ProcessDetails{ - DefaultProcess: &launch.Process{ - Type: "some-remote-type", - Command: "/some/remote command", - Args: []string{"some", "remote", "args"}, - Direct: false, - }, - OtherProcesses: []launch.Process{ - { - Type: "other-remote-type", - Command: "/other/remote/command", - Args: []string{"other", "remote", "args"}, - Direct: true, - }, - }, - }, - } - - localInfo = &client.ImageInfo{ - StackID: "test.stack.id.local", - Buildpacks: []buildpack.GroupBuildpack{ - {ID: "test.bp.one.local", Version: "1.0.0", Homepage: "https://some-homepage-one"}, - {ID: "test.bp.two.local", Version: "2.0.0", Homepage: 
"https://some-homepage-two"}, - {ID: "test.bp.three.local", Version: "3.0.0"}, - }, - Base: platform.RunImageMetadata{ - TopLayer: "some-local-top-layer", - Reference: "some-local-run-image-reference", - }, - Stack: platform.StackMetadata{ - RunImage: platform.StackRunImageMetadata{ - Image: "some-local-run-image", - Mirrors: []string{"some-local-mirror", "other-local-mirror"}, - }, - }, - BOM: []buildpack.BOMEntry{{ - Require: buildpack.Require{ - Name: "name-1", - Version: "version-1", - Metadata: map[string]interface{}{ - "LocalData": someData{ - Bool: false, - Int: 456, - }, - }, - }, - Buildpack: buildpack.GroupBuildpack{ID: "test.bp.one.remote", Version: "1.0.0"}, - }}, - Processes: client.ProcessDetails{ - DefaultProcess: &launch.Process{ - Type: "some-local-type", - Command: "/some/local command", - Args: []string{"some", "local", "args"}, - Direct: false, - }, - OtherProcesses: []launch.Process{ - { - Type: "other-local-type", - Command: "/other/local/command", - Args: []string{"other", "local", "args"}, - Direct: true, - }, - }, - }, - } + remoteInfo = getRemoteBasicImageInfo(t) + remoteWithExtensionInfo = getRemoteImageInfoWithExtension(t) + remoteInfoNoRebasable = getRemoteImageInfoNoRebasable(t) + + localInfo = getBasicLocalImageInfo(t) + localWithExtensionInfo = getLocalImageInfoWithExtension(t) + localInfoNoRebasable = getLocalImageInfoNoRebasable(t) outBuf = bytes.Buffer{} }) @@ -233,6 +257,37 @@ Processes: }) }) + when("localWithExtension and remoteWithExtension image exits", func() { + it("prints both localWithExtension and remoteWithExtension image info in a human readable format", func() { + runImageMirrors := []config.RunImage{ + { + Image: "un-used-run-image", + Mirrors: []string{"un-used"}, + }, + { + Image: "some-local-run-image", + Mirrors: []string{"user-configured-mirror-for-local"}, + }, + { + Image: "some-remote-run-image", + Mirrors: []string{"user-configured-mirror-for-remote"}, + }, + } + sharedImageInfo := inspectimage.GeneralInfo{ 
+ Name: "test-image", + RunImageMirrors: runImageMirrors, + } + humanReadableWriter := writer.NewHumanReadable() + + logger := logging.NewLogWithWriters(&outBuf, &outBuf) + err := humanReadableWriter.Print(logger, sharedImageInfo, localWithExtensionInfo, remoteWithExtensionInfo, nil, nil) + assert.Nil(err) + + assert.Contains(outBuf.String(), expectedLocalWithExtensionOutput) + assert.Contains(outBuf.String(), expectedRemoteWithExtensionOutput) + }) + }) + when("only local image exists", func() { it("prints local image info in a human readable format", func() { runImageMirrors := []config.RunImage{ @@ -262,6 +317,65 @@ Processes: assert.Contains(outBuf.String(), expectedLocalOutput) assert.NotContains(outBuf.String(), expectedRemoteOutput) }) + it("prints local no rebasable image info in a human readable format", func() { + runImageMirrors := []config.RunImage{ + { + Image: "un-used-run-image", + Mirrors: []string{"un-used"}, + }, + { + Image: "some-local-run-image", + Mirrors: []string{"user-configured-mirror-for-local"}, + }, + { + Image: "some-remote-run-image", + Mirrors: []string{"user-configured-mirror-for-remote"}, + }, + } + sharedImageInfo := inspectimage.GeneralInfo{ + Name: "test-image", + RunImageMirrors: runImageMirrors, + } + humanReadableWriter := writer.NewHumanReadable() + + logger := logging.NewLogWithWriters(&outBuf, &outBuf) + err := humanReadableWriter.Print(logger, sharedImageInfo, localInfoNoRebasable, nil, nil, nil) + assert.Nil(err) + + assert.Contains(outBuf.String(), expectedLocalNoRebasableOutput) + assert.NotContains(outBuf.String(), expectedRemoteOutput) + }) + }) + + when("only localWithExtension image exists", func() { + it("prints localWithExtension image info in a human readable format", func() { + runImageMirrors := []config.RunImage{ + { + Image: "un-used-run-image", + Mirrors: []string{"un-used"}, + }, + { + Image: "some-local-run-image", + Mirrors: []string{"user-configured-mirror-for-local"}, + }, + { + Image: 
"some-remote-run-image", + Mirrors: []string{"user-configured-mirror-for-remote"}, + }, + } + sharedImageInfo := inspectimage.GeneralInfo{ + Name: "test-image", + RunImageMirrors: runImageMirrors, + } + humanReadableWriter := writer.NewHumanReadable() + + logger := logging.NewLogWithWriters(&outBuf, &outBuf) + err := humanReadableWriter.Print(logger, sharedImageInfo, localWithExtensionInfo, nil, nil, nil) + assert.Nil(err) + + assert.Contains(outBuf.String(), expectedLocalWithExtensionOutput) + assert.NotContains(outBuf.String(), expectedRemoteWithExtensionOutput) + }) }) when("only remote image exists", func() { @@ -293,10 +407,38 @@ Processes: assert.NotContains(outBuf.String(), expectedLocalOutput) assert.Contains(outBuf.String(), expectedRemoteOutput) }) + it("prints remote no rebasable image info in a human readable format", func() { + runImageMirrors := []config.RunImage{ + { + Image: "un-used-run-image", + Mirrors: []string{"un-used"}, + }, + { + Image: "some-local-run-image", + Mirrors: []string{"user-configured-mirror-for-local"}, + }, + { + Image: "some-remote-run-image", + Mirrors: []string{"user-configured-mirror-for-remote"}, + }, + } + sharedImageInfo := inspectimage.GeneralInfo{ + Name: "test-image", + RunImageMirrors: runImageMirrors, + } + humanReadableWriter := writer.NewHumanReadable() + + logger := logging.NewLogWithWriters(&outBuf, &outBuf) + err := humanReadableWriter.Print(logger, sharedImageInfo, nil, remoteInfoNoRebasable, nil, nil) + assert.Nil(err) + + assert.NotContains(outBuf.String(), expectedLocalOutput) + assert.Contains(outBuf.String(), expectedRemoteNoRebasableOutput) + }) when("buildpack metadata is missing", func() { it.Before(func() { - remoteInfo.Buildpacks = []buildpack.GroupBuildpack{} + remoteInfo.Buildpacks = []buildpack.GroupElement{} }) it("displays a message indicating missing metadata", func() { sharedImageInfo := inspectimage.GeneralInfo{ @@ -316,7 +458,7 @@ Processes: when("there are no run images", func() { 
it.Before(func() { - remoteInfo.Stack = platform.StackMetadata{} + remoteInfo.Stack = files.Stack{} }) it("displays a message indicating missing run images", func() { sharedImageInfo := inspectimage.GeneralInfo{ @@ -335,6 +477,77 @@ Processes: }) }) + when("only remoteWithExtension image exists", func() { + it("prints remoteWithExtension image info in a human readable format", func() { + runImageMirrors := []config.RunImage{ + { + Image: "un-used-run-image", + Mirrors: []string{"un-used"}, + }, + { + Image: "some-local-run-image", + Mirrors: []string{"user-configured-mirror-for-local"}, + }, + { + Image: "some-remote-run-image", + Mirrors: []string{"user-configured-mirror-for-remote"}, + }, + } + sharedImageInfo := inspectimage.GeneralInfo{ + Name: "test-image", + RunImageMirrors: runImageMirrors, + } + humanReadableWriter := writer.NewHumanReadable() + + logger := logging.NewLogWithWriters(&outBuf, &outBuf) + err := humanReadableWriter.Print(logger, sharedImageInfo, nil, remoteWithExtensionInfo, nil, nil) + assert.Nil(err) + + assert.NotContains(outBuf.String(), expectedLocalWithExtensionOutput) + assert.Contains(outBuf.String(), expectedRemoteWithExtensionOutput) + }) + + when("buildpack metadata is missing", func() { + it.Before(func() { + remoteWithExtensionInfo.Buildpacks = []buildpack.GroupElement{} + }) + it("displays a message indicating missing metadata", func() { + sharedImageInfo := inspectimage.GeneralInfo{ + Name: "test-image", + RunImageMirrors: []config.RunImage{}, + } + + humanReadableWriter := writer.NewHumanReadable() + + logger := logging.NewLogWithWriters(&outBuf, &outBuf) + err := humanReadableWriter.Print(logger, sharedImageInfo, nil, remoteWithExtensionInfo, nil, nil) + assert.Nil(err) + + assert.Contains(outBuf.String(), "(buildpack metadata not present)") + }) + }) + + when("there are no run images", func() { + it.Before(func() { + remoteWithExtensionInfo.Stack = files.Stack{} + }) + it("displays a message indicating missing run images", 
func() { + sharedImageInfo := inspectimage.GeneralInfo{ + Name: "test-image", + RunImageMirrors: []config.RunImage{}, + } + + humanReadableWriter := writer.NewHumanReadable() + + logger := logging.NewLogWithWriters(&outBuf, &outBuf) + err := humanReadableWriter.Print(logger, sharedImageInfo, nil, remoteWithExtensionInfo, nil, nil) + assert.Nil(err) + + assert.Contains(outBuf.String(), "Run Images:\n (none)") + }) + }) + }) + when("error handled cases", func() { when("there is a remoteErr", func() { var remoteErr error @@ -406,6 +619,78 @@ Processes: }) }) + when("error handled cases", func() { + when("there is a remoteErr", func() { + var remoteErr error + it.Before(func() { + remoteErr = errors.New("some remote error") + }) + it("displays the remote error and local info", func() { + runImageMirrors := []config.RunImage{ + { + Image: "un-used-run-image", + Mirrors: []string{"un-used"}, + }, + { + Image: "some-local-run-image", + Mirrors: []string{"user-configured-mirror-for-local"}, + }, + { + Image: "some-remote-run-image", + Mirrors: []string{"user-configured-mirror-for-remote"}, + }, + } + sharedImageInfo := inspectimage.GeneralInfo{ + Name: "test-image", + RunImageMirrors: runImageMirrors, + } + humanReadableWriter := writer.NewHumanReadable() + + logger := logging.NewLogWithWriters(&outBuf, &outBuf) + err := humanReadableWriter.Print(logger, sharedImageInfo, localWithExtensionInfo, remoteWithExtensionInfo, nil, remoteErr) + assert.Nil(err) + + assert.Contains(outBuf.String(), expectedLocalWithExtensionOutput) + assert.Contains(outBuf.String(), "some remote error") + }) + }) + + when("there is a localErr", func() { + var localErr error + it.Before(func() { + localErr = errors.New("some local error") + }) + it("displays the remote info and local error", func() { + runImageMirrors := []config.RunImage{ + { + Image: "un-used-run-image", + Mirrors: []string{"un-used"}, + }, + { + Image: "some-local-run-image", + Mirrors: 
[]string{"user-configured-mirror-for-local"}, + }, + { + Image: "some-remote-run-image", + Mirrors: []string{"user-configured-mirror-for-remote"}, + }, + } + sharedImageInfo := inspectimage.GeneralInfo{ + Name: "test-image", + RunImageMirrors: runImageMirrors, + } + humanReadableWriter := writer.NewHumanReadable() + + logger := logging.NewLogWithWriters(&outBuf, &outBuf) + err := humanReadableWriter.Print(logger, sharedImageInfo, localWithExtensionInfo, remoteWithExtensionInfo, localErr, nil) + assert.Nil(err) + + assert.Contains(outBuf.String(), expectedRemoteWithExtensionOutput) + assert.Contains(outBuf.String(), "some local error") + }) + }) + }) + when("error cases", func() { when("both localInfo and remoteInfo are nil", func() { it("displays a 'missing image' error message", func() { @@ -420,3 +705,215 @@ Processes: }) }) } + +func getRemoteBasicImageInfo(t testing.TB) *client.ImageInfo { + t.Helper() + return getRemoteImageInfo(t, false, true) +} +func getRemoteImageInfoWithExtension(t testing.TB) *client.ImageInfo { + t.Helper() + return getRemoteImageInfo(t, true, true) +} + +func getRemoteImageInfoNoRebasable(t testing.TB) *client.ImageInfo { + t.Helper() + return getRemoteImageInfo(t, false, false) +} + +func getRemoteImageInfo(t testing.TB, extension bool, rebasable bool) *client.ImageInfo { + t.Helper() + + mockedStackID := "test.stack.id.remote" + + mockedBuildpacks := []buildpack.GroupElement{ + {ID: "test.bp.one.remote", Version: "1.0.0", Homepage: "https://some-homepage-one"}, + {ID: "test.bp.two.remote", Version: "2.0.0", Homepage: "https://some-homepage-two"}, + {ID: "test.bp.three.remote", Version: "3.0.0"}, + } + + mockedBase := files.RunImageForRebase{ + TopLayer: "some-remote-top-layer", + Reference: "some-remote-run-image-reference", + } + + mockedStack := files.Stack{ + RunImage: files.RunImageForExport{ + Image: "some-remote-run-image", + Mirrors: []string{"some-remote-mirror", "other-remote-mirror"}, + }, + } + + type someData struct { + 
String string + Bool bool + Int int + Nested struct { + String string + } + } + mockedMetadata := map[string]interface{}{ + "RemoteData": someData{ + String: "aString", + Bool: true, + Int: 123, + Nested: struct { + String string + }{ + String: "anotherString", + }, + }, + } + + mockedBOM := []buildpack.BOMEntry{{ + Require: buildpack.Require{ + Name: "name-1", + Metadata: mockedMetadata, + }, + Buildpack: buildpack.GroupElement{ID: "test.bp.one.remote", Version: "1.0.0"}, + }} + + mockedProcesses := client.ProcessDetails{ + DefaultProcess: &launch.Process{ + Type: "some-remote-type", + Command: launch.RawCommand{Entries: []string{"/some/remote command"}}, + Args: []string{"some", "remote", "args"}, + Direct: false, + WorkingDirectory: "/some-test-work-dir", + }, + OtherProcesses: []launch.Process{ + { + Type: "other-remote-type", + Command: launch.RawCommand{Entries: []string{"/other/remote/command"}}, + Args: []string{"other", "remote", "args"}, + Direct: true, + WorkingDirectory: "/other-test-work-dir", + }, + }, + } + + mockedExtension := []buildpack.GroupElement{ + {ID: "test.bp.one.remote", Version: "1.0.0", Homepage: "https://some-homepage-one"}, + {ID: "test.bp.two.remote", Version: "2.0.0", Homepage: "https://some-homepage-two"}, + {ID: "test.bp.three.remote", Version: "3.0.0"}, + } + + imageInfo := &client.ImageInfo{ + StackID: mockedStackID, + Buildpacks: mockedBuildpacks, + Base: mockedBase, + Stack: mockedStack, + BOM: mockedBOM, + Processes: mockedProcesses, + Rebasable: rebasable, + } + + if extension { + imageInfo.Extensions = mockedExtension + } + + return imageInfo +} + +func getBasicLocalImageInfo(t testing.TB) *client.ImageInfo { + t.Helper() + return getLocalImageInfo(t, false, true) +} + +func getLocalImageInfoWithExtension(t testing.TB) *client.ImageInfo { + t.Helper() + return getLocalImageInfo(t, true, true) +} + +func getLocalImageInfoNoRebasable(t testing.TB) *client.ImageInfo { + t.Helper() + return getLocalImageInfo(t, false, false) +} 
+ +func getLocalImageInfo(t testing.TB, extension bool, rebasable bool) *client.ImageInfo { + t.Helper() + + mockedStackID := "test.stack.id.local" + + mockedBuildpacks := []buildpack.GroupElement{ + {ID: "test.bp.one.local", Version: "1.0.0", Homepage: "https://some-homepage-one"}, + {ID: "test.bp.two.local", Version: "2.0.0", Homepage: "https://some-homepage-two"}, + {ID: "test.bp.three.local", Version: "3.0.0"}, + } + + mockedBase := files.RunImageForRebase{ + TopLayer: "some-local-top-layer", + Reference: "some-local-run-image-reference", + } + + mockedPlatform := files.Stack{ + RunImage: files.RunImageForExport{ + Image: "some-local-run-image", + Mirrors: []string{"some-local-mirror", "other-local-mirror"}, + }, + } + + type someData struct { + String string + Bool bool + Int int + Nested struct { + String string + } + } + mockedMetadata := map[string]interface{}{ + "LocalData": someData{ + Bool: false, + Int: 456, + }, + } + + mockedBOM := []buildpack.BOMEntry{{ + Require: buildpack.Require{ + Name: "name-1", + Version: "version-1", + Metadata: mockedMetadata, + }, + Buildpack: buildpack.GroupElement{ID: "test.bp.one.remote", Version: "1.0.0"}, + }} + + mockedProcesses := client.ProcessDetails{ + DefaultProcess: &launch.Process{ + Type: "some-local-type", + Command: launch.RawCommand{Entries: []string{"/some/local command"}}, + Args: []string{"some", "local", "args"}, + Direct: false, + WorkingDirectory: "/some-test-work-dir", + }, + OtherProcesses: []launch.Process{ + { + Type: "other-local-type", + Command: launch.RawCommand{Entries: []string{"/other/local/command"}}, + Args: []string{"other", "local", "args"}, + Direct: true, + WorkingDirectory: "/other-test-work-dir", + }, + }, + } + + mockedExtension := []buildpack.GroupElement{ + {ID: "test.bp.one.local", Version: "1.0.0", Homepage: "https://some-homepage-one"}, + {ID: "test.bp.two.local", Version: "2.0.0", Homepage: "https://some-homepage-two"}, + {ID: "test.bp.three.local", Version: "3.0.0"}, + } + + 
imageInfo := &client.ImageInfo{ + StackID: mockedStackID, + Buildpacks: mockedBuildpacks, + Base: mockedBase, + Stack: mockedPlatform, + BOM: mockedBOM, + Processes: mockedProcesses, + Rebasable: rebasable, + } + + if extension { + imageInfo.Extensions = mockedExtension + } + + return imageInfo +} diff --git a/internal/inspectimage/writer/json_test.go b/internal/inspectimage/writer/json_test.go index 1944d05f00..fa61c3f18e 100644 --- a/internal/inspectimage/writer/json_test.go +++ b/internal/inspectimage/writer/json_test.go @@ -6,7 +6,7 @@ import ( "github.com/buildpacks/lifecycle/buildpack" "github.com/buildpacks/lifecycle/launch" - "github.com/buildpacks/lifecycle/platform" + "github.com/buildpacks/lifecycle/platform/files" "github.com/heroku/color" "github.com/sclevine/spec" "github.com/sclevine/spec/report" @@ -30,12 +30,15 @@ func testJSON(t *testing.T, when spec.G, it spec.S) { assert = h.NewAssertionManager(t) outBuf bytes.Buffer - remoteInfo *client.ImageInfo - localInfo *client.ImageInfo + remoteInfo *client.ImageInfo + remoteInfoNoRebasable *client.ImageInfo + localInfo *client.ImageInfo + localInfoNoRebasable *client.ImageInfo expectedLocalOutput = `{ "local_info": { "stack": "test.stack.id.local", + "rebasable": true, "base_image": { "top_layer": "some-local-top-layer", "reference": "some-local-run-image-reference" @@ -67,6 +70,7 @@ func testJSON(t *testing.T, when spec.G, it spec.S) { "version": "2.0.0" } ], + "extensions": null, "processes": [ { "type": "some-local-type", @@ -77,7 +81,8 @@ func testJSON(t *testing.T, when spec.G, it spec.S) { "some", "local", "args" - ] + ], + "working-dir": "/some-test-work-dir" }, { "type": "other-local-type", @@ -88,7 +93,72 @@ func testJSON(t *testing.T, when spec.G, it spec.S) { "other", "local", "args" - ] + ], + "working-dir": "/other-test-work-dir" + } + ] + } +}` + expectedLocalNoRebasableOutput = `{ + "local_info": { + "stack": "test.stack.id.local", + "rebasable": false, + "base_image": { + "top_layer": 
"some-local-top-layer", + "reference": "some-local-run-image-reference" + }, + "run_images": [ + { + "name": "user-configured-mirror-for-local", + "user_configured": true + }, + { + "name": "some-local-run-image" + }, + { + "name": "some-local-mirror" + }, + { + "name": "other-local-mirror" + } + ], + "buildpacks": [ + { + "homepage": "https://some-homepage-one", + "id": "test.bp.one.local", + "version": "1.0.0" + }, + { + "homepage": "https://some-homepage-two", + "id": "test.bp.two.local", + "version": "2.0.0" + } + ], + "extensions": null, + "processes": [ + { + "type": "some-local-type", + "shell": "bash", + "command": "/some/local command", + "default": true, + "args": [ + "some", + "local", + "args" + ], + "working-dir": "/some-test-work-dir" + }, + { + "type": "other-local-type", + "shell": "", + "command": "/other/local/command", + "default": false, + "args": [ + "other", + "local", + "args" + ], + "working-dir": "/other-test-work-dir" } ] } @@ -96,6 +166,7 @@ func testJSON(t *testing.T, when spec.G, it spec.S) { expectedRemoteOutput = `{ "remote_info": { "stack": "test.stack.id.remote", + "rebasable": true, "base_image": { "top_layer": "some-remote-top-layer", "reference": "some-remote-run-image-reference" @@ -127,6 +198,7 @@ func testJSON(t *testing.T, when spec.G, it spec.S) { "homepage": "https://some-homepage-two" } ], + "extensions": null, "processes": [ { "type": "some-remote-type", @@ -137,7 +209,8 @@ func testJSON(t *testing.T, when spec.G, it spec.S) { "some", "remote", "args" - ] + ], + "working-dir": "/some-test-work-dir" }, { "type": "other-remote-type", @@ -148,7 +221,72 @@ func testJSON(t *testing.T, when spec.G, it spec.S) { "other", "remote", "args" - ] + ], + "working-dir": "/other-test-work-dir" + } + ] + } +}` + expectedRemoteNoRebasableOutput = `{ + "remote_info": { + "stack": "test.stack.id.remote", + "rebasable": false, + "base_image": { + "top_layer": "some-remote-top-layer", + "reference": "some-remote-run-image-reference" + }, + 
"run_images": [ + { + "name": "user-configured-mirror-for-remote", + "user_configured": true + }, + { + "name": "some-remote-run-image" + }, + { + "name": "some-remote-mirror" + }, + { + "name": "other-remote-mirror" + } + ], + "buildpacks": [ + { + "id": "test.bp.one.remote", + "version": "1.0.0", + "homepage": "https://some-homepage-one" + }, + { + "id": "test.bp.two.remote", + "version": "2.0.0", + "homepage": "https://some-homepage-two" + } + ], + "extensions": null, + "processes": [ + { + "type": "some-remote-type", + "shell": "bash", + "command": "/some/remote command", + "default": true, + "args": [ + "some", + "remote", + "args" + ], + "working-dir": "/some-test-work-dir" + }, + { + "type": "other-remote-type", + "shell": "", + "command": "/other/remote/command", + "default": false, + "args": [ + "other", + "remote", + "args" + ], + "working-dir": "/other-test-work-dir" } ] } @@ -168,16 +306,71 @@ func testJSON(t *testing.T, when spec.G, it spec.S) { remoteInfo = &client.ImageInfo{ StackID: "test.stack.id.remote", - Buildpacks: []buildpack.GroupBuildpack{ + Buildpacks: []buildpack.GroupElement{ + {ID: "test.bp.one.remote", Version: "1.0.0", Homepage: "https://some-homepage-one"}, + {ID: "test.bp.two.remote", Version: "2.0.0", Homepage: "https://some-homepage-two"}, + }, + Base: files.RunImageForRebase{ + TopLayer: "some-remote-top-layer", + Reference: "some-remote-run-image-reference", + }, + Stack: files.Stack{ + RunImage: files.RunImageForExport{ + Image: "some-remote-run-image", + Mirrors: []string{"some-remote-mirror", "other-remote-mirror"}, + }, + }, + BOM: []buildpack.BOMEntry{{ + Require: buildpack.Require{ + Name: "name-1", + Version: "version-1", + Metadata: map[string]interface{}{ + "RemoteData": someData{ + String: "aString", + Bool: true, + Int: 123, + Nested: struct { + String string + }{ + String: "anotherString", + }, + }, + }, + }, + Buildpack: buildpack.GroupElement{ID: "test.bp.one.remote", Version: "1.0.0", Homepage: 
"https://some-homepage-one"}, + }}, + Processes: client.ProcessDetails{ + DefaultProcess: &launch.Process{ + Type: "some-remote-type", + Command: launch.RawCommand{Entries: []string{"/some/remote command"}}, + Args: []string{"some", "remote", "args"}, + Direct: false, + WorkingDirectory: "/some-test-work-dir", + }, + OtherProcesses: []launch.Process{ + { + Type: "other-remote-type", + Command: launch.RawCommand{Entries: []string{"/other/remote/command"}}, + Args: []string{"other", "remote", "args"}, + Direct: true, + WorkingDirectory: "/other-test-work-dir", + }, + }, + }, + Rebasable: true, + } + remoteInfoNoRebasable = &client.ImageInfo{ + StackID: "test.stack.id.remote", + Buildpacks: []buildpack.GroupElement{ {ID: "test.bp.one.remote", Version: "1.0.0", Homepage: "https://some-homepage-one"}, {ID: "test.bp.two.remote", Version: "2.0.0", Homepage: "https://some-homepage-two"}, }, - Base: platform.RunImageMetadata{ + Base: files.RunImageForRebase{ TopLayer: "some-remote-top-layer", Reference: "some-remote-run-image-reference", }, - Stack: platform.StackMetadata{ - RunImage: platform.StackRunImageMetadata{ + Stack: files.Stack{ + RunImage: files.RunImageForExport{ Image: "some-remote-run-image", Mirrors: []string{"some-remote-mirror", "other-remote-mirror"}, }, @@ -199,38 +392,90 @@ func testJSON(t *testing.T, when spec.G, it spec.S) { }, }, }, - Buildpack: buildpack.GroupBuildpack{ID: "test.bp.one.remote", Version: "1.0.0", Homepage: "https://some-homepage-one"}, + Buildpack: buildpack.GroupElement{ID: "test.bp.one.remote", Version: "1.0.0", Homepage: "https://some-homepage-one"}, }}, Processes: client.ProcessDetails{ DefaultProcess: &launch.Process{ - Type: "some-remote-type", - Command: "/some/remote command", - Args: []string{"some", "remote", "args"}, - Direct: false, + Type: "some-remote-type", + Command: launch.RawCommand{Entries: []string{"/some/remote command"}}, + Args: []string{"some", "remote", "args"}, + Direct: false, + WorkingDirectory: 
"/some-test-work-dir", }, OtherProcesses: []launch.Process{ { - Type: "other-remote-type", - Command: "/other/remote/command", - Args: []string{"other", "remote", "args"}, - Direct: true, + Type: "other-remote-type", + Command: launch.RawCommand{Entries: []string{"/other/remote/command"}}, + Args: []string{"other", "remote", "args"}, + Direct: true, + WorkingDirectory: "/other-test-work-dir", }, }, }, + Rebasable: false, } localInfo = &client.ImageInfo{ StackID: "test.stack.id.local", - Buildpacks: []buildpack.GroupBuildpack{ + Buildpacks: []buildpack.GroupElement{ + {ID: "test.bp.one.local", Version: "1.0.0", Homepage: "https://some-homepage-one"}, + {ID: "test.bp.two.local", Version: "2.0.0", Homepage: "https://some-homepage-two"}, + }, + Base: files.RunImageForRebase{ + TopLayer: "some-local-top-layer", + Reference: "some-local-run-image-reference", + }, + Stack: files.Stack{ + RunImage: files.RunImageForExport{ + Image: "some-local-run-image", + Mirrors: []string{"some-local-mirror", "other-local-mirror"}, + }, + }, + BOM: []buildpack.BOMEntry{{ + Require: buildpack.Require{ + Name: "name-1", + Version: "version-1", + Metadata: map[string]interface{}{ + "LocalData": someData{ + Bool: false, + Int: 456, + }, + }, + }, + Buildpack: buildpack.GroupElement{ID: "test.bp.one.remote", Version: "1.0.0", Homepage: "https://some-homepage-one"}, + }}, + Processes: client.ProcessDetails{ + DefaultProcess: &launch.Process{ + Type: "some-local-type", + Command: launch.RawCommand{Entries: []string{"/some/local command"}}, + Args: []string{"some", "local", "args"}, + Direct: false, + WorkingDirectory: "/some-test-work-dir", + }, + OtherProcesses: []launch.Process{ + { + Type: "other-local-type", + Command: launch.RawCommand{Entries: []string{"/other/local/command"}}, + Args: []string{"other", "local", "args"}, + Direct: true, + WorkingDirectory: "/other-test-work-dir", + }, + }, + }, + Rebasable: true, + } + localInfoNoRebasable = &client.ImageInfo{ + StackID: 
"test.stack.id.local", + Buildpacks: []buildpack.GroupElement{ {ID: "test.bp.one.local", Version: "1.0.0", Homepage: "https://some-homepage-one"}, {ID: "test.bp.two.local", Version: "2.0.0", Homepage: "https://some-homepage-two"}, }, - Base: platform.RunImageMetadata{ + Base: files.RunImageForRebase{ TopLayer: "some-local-top-layer", Reference: "some-local-run-image-reference", }, - Stack: platform.StackMetadata{ - RunImage: platform.StackRunImageMetadata{ + Stack: files.Stack{ + RunImage: files.RunImageForExport{ Image: "some-local-run-image", Mirrors: []string{"some-local-mirror", "other-local-mirror"}, }, @@ -246,24 +491,27 @@ func testJSON(t *testing.T, when spec.G, it spec.S) { }, }, }, - Buildpack: buildpack.GroupBuildpack{ID: "test.bp.one.remote", Version: "1.0.0", Homepage: "https://some-homepage-one"}, + Buildpack: buildpack.GroupElement{ID: "test.bp.one.remote", Version: "1.0.0", Homepage: "https://some-homepage-one"}, }}, Processes: client.ProcessDetails{ DefaultProcess: &launch.Process{ - Type: "some-local-type", - Command: "/some/local command", - Args: []string{"some", "local", "args"}, - Direct: false, + Type: "some-local-type", + Command: launch.RawCommand{Entries: []string{"/some/local command"}}, + Args: []string{"some", "local", "args"}, + Direct: false, + WorkingDirectory: "/some-test-work-dir", }, OtherProcesses: []launch.Process{ { - Type: "other-local-type", - Command: "/other/local/command", - Args: []string{"other", "local", "args"}, - Direct: true, + Type: "other-local-type", + Command: launch.RawCommand{Entries: []string{"/other/local/command"}}, + Args: []string{"other", "local", "args"}, + Direct: true, + WorkingDirectory: "/other-test-work-dir", }, }, }, + Rebasable: false, } outBuf = bytes.Buffer{} @@ -299,6 +547,35 @@ func testJSON(t *testing.T, when spec.G, it spec.S) { assert.ContainsJSON(outBuf.String(), expectedLocalOutput) assert.ContainsJSON(outBuf.String(), expectedRemoteOutput) }) + it("prints both local and remote no 
rebasable images info in a JSON format", func() { + runImageMirrors := []config.RunImage{ + { + Image: "un-used-run-image", + Mirrors: []string{"un-used"}, + }, + { + Image: "some-local-run-image", + Mirrors: []string{"user-configured-mirror-for-local"}, + }, + { + Image: "some-remote-run-image", + Mirrors: []string{"user-configured-mirror-for-remote"}, + }, + } + sharedImageInfo := inspectimage.GeneralInfo{ + Name: "test-image", + RunImageMirrors: runImageMirrors, + } + jsonWriter := writer.NewJSON() + + logger := logging.NewLogWithWriters(&outBuf, &outBuf) + err := jsonWriter.Print(logger, sharedImageInfo, localInfoNoRebasable, remoteInfoNoRebasable, nil, nil) + assert.Nil(err) + + assert.ContainsJSON(outBuf.String(), `{ "image_name": "test-image" }`) + assert.ContainsJSON(outBuf.String(), expectedLocalNoRebasableOutput) + assert.ContainsJSON(outBuf.String(), expectedRemoteNoRebasableOutput) + }) }) when("only local image exists", func() { diff --git a/internal/inspectimage/writer/structured_bom_format.go b/internal/inspectimage/writer/structured_bom_format.go index a7069274dd..ebfdc05b1d 100644 --- a/internal/inspectimage/writer/structured_bom_format.go +++ b/internal/inspectimage/writer/structured_bom_format.go @@ -27,7 +27,6 @@ func (w *StructuredBOMFormat) Print( if localErr != nil && remoteErr != nil { return fmt.Errorf("preparing BOM output for %s: local :%s remote: %s", style.Symbol(generalInfo.Name), localErr, remoteErr) } - out, err := w.MarshalFunc(inspectimage.BOMDisplay{ Remote: inspectimage.NewBOMDisplay(remote), Local: inspectimage.NewBOMDisplay(local), diff --git a/internal/inspectimage/writer/structured_bom_format_test.go b/internal/inspectimage/writer/structured_bom_format_test.go index d7e61dcd43..cdf9adfccc 100644 --- a/internal/inspectimage/writer/structured_bom_format_test.go +++ b/internal/inspectimage/writer/structured_bom_format_test.go @@ -31,10 +31,12 @@ func testStructuredBOMFormat(t *testing.T, when spec.G, it spec.S) { assert = 
h.NewAssertionManager(t) outBuf *bytes.Buffer - remoteInfo *client.ImageInfo - localInfo *client.ImageInfo - generalInfo inspectimage.GeneralInfo - logger *logging.LogWithWriters + remoteInfo *client.ImageInfo + localInfo *client.ImageInfo + remoteWithExtensionInfo *client.ImageInfo + localWithExtensionInfo *client.ImageInfo + generalInfo inspectimage.GeneralInfo + logger *logging.LogWithWriters ) when("Print", func() { @@ -51,7 +53,7 @@ func testStructuredBOMFormat(t *testing.T, when spec.G, it spec.S) { "cool-remote": "beans", }, }, - Buildpack: buildpack.GroupBuildpack{ + Buildpack: buildpack.GroupElement{ ID: "remote-buildpack", Version: "remote-buildpack-version", }, @@ -68,13 +70,49 @@ func testStructuredBOMFormat(t *testing.T, when spec.G, it spec.S) { "cool-local": "beans", }, }, - Buildpack: buildpack.GroupBuildpack{ + Buildpack: buildpack.GroupElement{ ID: "local-buildpack", Version: "local-buildpack-version", }, }, }, } + + remoteWithExtensionInfo = &client.ImageInfo{ + BOM: []buildpack.BOMEntry{ + { + Require: buildpack.Require{ + Name: "remote-require", + Version: "1.2.3", + Metadata: map[string]interface{}{ + "cool-remote": "beans", + }, + }, + Buildpack: buildpack.GroupElement{ + ID: "remote-buildpack", + Version: "remote-buildpack-version", + }, + }, + }, + } + localWithExtensionInfo = &client.ImageInfo{ + BOM: []buildpack.BOMEntry{ + { + Require: buildpack.Require{ + Name: "local-require", + Version: "4.5.6", + Metadata: map[string]interface{}{ + "cool-local": "beans", + }, + }, + Buildpack: buildpack.GroupElement{ + ID: "local-buildpack", + Version: "local-buildpack-version", + }, + }, + }, + } + generalInfo = inspectimage.GeneralInfo{ Name: "some-image-name", RunImageMirrors: []config.RunImage{ @@ -88,8 +126,10 @@ func testStructuredBOMFormat(t *testing.T, when spec.G, it spec.S) { when("structured output", func() { var ( - localBomDisplay []inspectimage.BOMEntryDisplay - remoteBomDisplay []inspectimage.BOMEntryDisplay + localBomDisplay 
[]inspectimage.BOMEntryDisplay + remoteBomDisplay []inspectimage.BOMEntryDisplay + localBomWithExtensionDisplay []inspectimage.BOMEntryDisplay + remoteBomWithExtensionDisplay []inspectimage.BOMEntryDisplay ) it.Before(func() { localBomDisplay = []inspectimage.BOMEntryDisplay{{ @@ -98,8 +138,8 @@ func testStructuredBOMFormat(t *testing.T, when spec.G, it spec.S) { Metadata: map[string]interface{}{ "cool-local": "beans", }, - Buildpack: dist.BuildpackRef{ - BuildpackInfo: dist.BuildpackInfo{ + Buildpack: dist.ModuleRef{ + ModuleInfo: dist.ModuleInfo{ ID: "local-buildpack", Version: "local-buildpack-version", }, @@ -111,8 +151,35 @@ func testStructuredBOMFormat(t *testing.T, when spec.G, it spec.S) { Metadata: map[string]interface{}{ "cool-remote": "beans", }, - Buildpack: dist.BuildpackRef{ - BuildpackInfo: dist.BuildpackInfo{ + Buildpack: dist.ModuleRef{ + ModuleInfo: dist.ModuleInfo{ + ID: "remote-buildpack", + Version: "remote-buildpack-version", + }, + }, + }} + + localBomWithExtensionDisplay = []inspectimage.BOMEntryDisplay{{ + Name: "local-require", + Version: "4.5.6", + Metadata: map[string]interface{}{ + "cool-local": "beans", + }, + Buildpack: dist.ModuleRef{ + ModuleInfo: dist.ModuleInfo{ + ID: "local-buildpack", + Version: "local-buildpack-version", + }, + }, + }} + remoteBomWithExtensionDisplay = []inspectimage.BOMEntryDisplay{{ + Name: "remote-require", + Version: "1.2.3", + Metadata: map[string]interface{}{ + "cool-remote": "beans", + }, + Buildpack: dist.ModuleRef{ + ModuleInfo: dist.ModuleInfo{ ID: "remote-buildpack", Version: "remote-buildpack-version", }, @@ -137,6 +204,25 @@ func testStructuredBOMFormat(t *testing.T, when spec.G, it spec.S) { Local: localBomDisplay, }) }) + + it("passes correct info to structuredBOMWriter", func() { + var marshalInput interface{} + + structuredBOMWriter := writer.StructuredBOMFormat{ + MarshalFunc: func(i interface{}) ([]byte, error) { + marshalInput = i + return []byte("marshalled"), nil + }, + } + + err := 
structuredBOMWriter.Print(logger, generalInfo, localWithExtensionInfo, remoteWithExtensionInfo, nil, nil) + assert.Nil(err) + + assert.Equal(marshalInput, inspectimage.BOMDisplay{ + Remote: remoteBomWithExtensionDisplay, + Local: localBomWithExtensionDisplay, + }) + }) when("a localErr is passed to Print", func() { it("still marshals remote information", func() { var marshalInput interface{} @@ -160,6 +246,29 @@ func testStructuredBOMFormat(t *testing.T, when spec.G, it spec.S) { }) }) + when("a localErr is passed to Print", func() { + it("still marshals remote information", func() { + var marshalInput interface{} + + localErr := errors.New("a local error occurred") + structuredBOMWriter := writer.StructuredBOMFormat{ + MarshalFunc: func(i interface{}) ([]byte, error) { + marshalInput = i + return []byte("marshalled"), nil + }, + } + + err := structuredBOMWriter.Print(logger, generalInfo, nil, remoteWithExtensionInfo, localErr, nil) + assert.Nil(err) + + assert.Equal(marshalInput, inspectimage.BOMDisplay{ + Remote: remoteBomWithExtensionDisplay, + Local: nil, + LocalErr: localErr.Error(), + }) + }) + }) + when("a remoteErr is passed to Print", func() { it("still marshals local information", func() { var marshalInput interface{} @@ -182,6 +291,29 @@ func testStructuredBOMFormat(t *testing.T, when spec.G, it spec.S) { }) }) }) + + when("a remoteErr is passed to Print", func() { + it("still marshals local information", func() { + var marshalInput interface{} + + remoteErr := errors.New("a remote error occurred") + structuredBOMWriter := writer.StructuredBOMFormat{ + MarshalFunc: func(i interface{}) ([]byte, error) { + marshalInput = i + return []byte("marshalled"), nil + }, + } + + err := structuredBOMWriter.Print(logger, generalInfo, localWithExtensionInfo, nil, nil, remoteErr) + assert.Nil(err) + + assert.Equal(marshalInput, inspectimage.BOMDisplay{ + Remote: nil, + Local: localBomWithExtensionDisplay, + RemoteErr: remoteErr.Error(), + }) + }) + }) }) // Just test 
error cases, all error-free cases will be tested in JSON, TOML, and YAML subclasses. @@ -211,6 +343,22 @@ func testStructuredBOMFormat(t *testing.T, when spec.G, it spec.S) { assert.ErrorContains(err, localErr.Error()) }) }) + + when("fetching local and remote info errors", func() { + it("returns an error", func() { + structuredBOMWriter := writer.StructuredBOMFormat{ + MarshalFunc: func(i interface{}) ([]byte, error) { + return []byte("cool"), nil + }, + } + remoteErr := errors.New("a remote error occurred") + localErr := errors.New("a local error occurred") + + err := structuredBOMWriter.Print(logger, generalInfo, localWithExtensionInfo, remoteWithExtensionInfo, localErr, remoteErr) + assert.ErrorContains(err, remoteErr.Error()) + assert.ErrorContains(err, localErr.Error()) + }) + }) }) }) } diff --git a/internal/inspectimage/writer/structured_format_test.go b/internal/inspectimage/writer/structured_format_test.go index f6e64f7fb2..2a82ceb631 100644 --- a/internal/inspectimage/writer/structured_format_test.go +++ b/internal/inspectimage/writer/structured_format_test.go @@ -10,10 +10,13 @@ import ( "github.com/sclevine/spec" "github.com/sclevine/spec/report" + "github.com/buildpacks/lifecycle/buildpack" + "github.com/buildpacks/pack/internal/config" "github.com/buildpacks/pack/internal/inspectimage" "github.com/buildpacks/pack/internal/inspectimage/writer" "github.com/buildpacks/pack/pkg/client" + "github.com/buildpacks/pack/pkg/dist" "github.com/buildpacks/pack/pkg/logging" h "github.com/buildpacks/pack/testhelpers" ) @@ -29,14 +32,44 @@ func testStructuredFormat(t *testing.T, when spec.G, it spec.S) { assert = h.NewAssertionManager(t) outBuf bytes.Buffer - remoteInfo *client.ImageInfo - localInfo *client.ImageInfo + remoteInfo *client.ImageInfo + localInfo *client.ImageInfo + remoteWithExtensionInfo *client.ImageInfo + localWithExtensionInfo *client.ImageInfo + localInfoWithExtensionDisplay *inspectimage.InfoDisplay ) when("Print", func() { it.Before(func() { 
remoteInfo = &client.ImageInfo{} localInfo = &client.ImageInfo{} + remoteWithExtensionInfo = &client.ImageInfo{} + localWithExtensionInfo = &client.ImageInfo{ + StackID: "test.stack.id.local", + Buildpacks: []buildpack.GroupElement{ + {ID: "test.bp.one.local", Version: "1.0.0", Homepage: "https://some-homepage-one"}, + }, + Extensions: []buildpack.GroupElement{ + {ID: "test.bp.one.local", Version: "1.0.0", Homepage: "https://some-homepage-one"}, + }, + } + localInfoWithExtensionDisplay = &inspectimage.InfoDisplay{ + StackID: "test.stack.id.local", + Buildpacks: []dist.ModuleInfo{ + { + ID: "test.bp.one.local", + Version: "1.0.0", + Homepage: "https://some-homepage-one", + }, + }, + Extensions: []dist.ModuleInfo{ + { + ID: "test.bp.one.local", + Version: "1.0.0", + Homepage: "https://some-homepage-one", + }, + }, + } outBuf = bytes.Buffer{} }) @@ -76,6 +109,48 @@ func testStructuredFormat(t *testing.T, when spec.G, it spec.S) { }) }) + when("a localWithExtension is passed to Print", func() { + it("prints localWithExtension information", func() { + var marshalInput interface{} + sharedImageInfo := inspectimage.GeneralInfo{ + Name: "localExtension-image", + RunImageMirrors: []config.RunImage{}, + } + structuredWriter := writer.StructuredFormat{ + MarshalFunc: func(i interface{}) ([]byte, error) { + marshalInput = i + return []byte("marshalled"), nil + }, + } + + logger := logging.NewLogWithWriters(&outBuf, &outBuf) + err := structuredWriter.Print(logger, sharedImageInfo, localWithExtensionInfo, nil, nil, nil) + assert.Nil(err) + assert.Equal(marshalInput, inspectimage.InspectOutput{ + ImageName: "localExtension-image", + Local: localInfoWithExtensionDisplay, + }) + }) + }) + + when("a localErr is passed to Print", func() { + it("still prints remote information", func() { + sharedImageInfo := inspectimage.GeneralInfo{ + Name: "localErr-image", + RunImageMirrors: []config.RunImage{}, + } + structuredWriter := writer.StructuredFormat{ + MarshalFunc: testMarshalFunc, + } 
+ + localErr := errors.New("a local error occurred") + + logger := logging.NewLogWithWriters(&outBuf, &outBuf) + err := structuredWriter.Print(logger, sharedImageInfo, nil, remoteWithExtensionInfo, localErr, nil) + assert.ErrorWithMessage(err, "preparing output for 'localErr-image': a local error occurred") + }) + }) + when("a remoteErr is passed to print", func() { it("still prints local information", func() { sharedImageInfo := inspectimage.GeneralInfo{ @@ -93,6 +168,24 @@ func testStructuredFormat(t *testing.T, when spec.G, it spec.S) { assert.ErrorWithMessage(err, "preparing output for 'remoteErr-image': a remote error occurred") }) }) + + when("a remoteErr is passed to print", func() { + it("still prints local information", func() { + sharedImageInfo := inspectimage.GeneralInfo{ + Name: "remoteErr-image", + RunImageMirrors: []config.RunImage{}, + } + structuredWriter := writer.StructuredFormat{ + MarshalFunc: testMarshalFunc, + } + + remoteErr := errors.New("a remote error occurred") + + logger := logging.NewLogWithWriters(&outBuf, &outBuf) + err := structuredWriter.Print(logger, sharedImageInfo, localWithExtensionInfo, nil, nil, remoteErr) + assert.ErrorWithMessage(err, "preparing output for 'remoteErr-image': a remote error occurred") + }) + }) }) }) } diff --git a/internal/inspectimage/writer/toml_test.go b/internal/inspectimage/writer/toml_test.go index 39cf213b4c..27f92f4deb 100644 --- a/internal/inspectimage/writer/toml_test.go +++ b/internal/inspectimage/writer/toml_test.go @@ -6,7 +6,7 @@ import ( "github.com/buildpacks/lifecycle/buildpack" "github.com/buildpacks/lifecycle/launch" - "github.com/buildpacks/lifecycle/platform" + "github.com/buildpacks/lifecycle/platform/files" "github.com/heroku/color" "github.com/sclevine/spec" "github.com/sclevine/spec/report" @@ -30,11 +30,14 @@ func testTOML(t *testing.T, when spec.G, it spec.S) { assert = h.NewAssertionManager(t) outBuf bytes.Buffer - remoteInfo *client.ImageInfo - localInfo *client.ImageInfo + 
remoteInfo *client.ImageInfo + remoteInfoNoRebasable *client.ImageInfo + localInfo *client.ImageInfo + localInfoNoRebasable *client.ImageInfo expectedLocalOutput = `[local_info] stack = 'test.stack.id.local' +rebasable = true [local_info.base_image] top_layer = 'some-local-top-layer' @@ -73,6 +76,7 @@ args = [ 'local', 'args', ] +working-dir = "/some-test-work-dir" [[local_info.processes]] type = 'other-local-type' @@ -84,11 +88,124 @@ args = [ 'local', 'args', ] +working-dir = "/other-test-work-dir" +` + expectedLocalNoRebasableOutput = `[local_info] +stack = 'test.stack.id.local' +rebasable = false + +[local_info.base_image] +top_layer = 'some-local-top-layer' +reference = 'some-local-run-image-reference' + +[[local_info.run_images]] +name = 'user-configured-mirror-for-local' +user_configured = true + +[[local_info.run_images]] +name = 'some-local-run-image' + +[[local_info.run_images]] +name = 'some-local-mirror' + +[[local_info.run_images]] +name = 'other-local-mirror' + +[[local_info.buildpacks]] +id = 'test.bp.one.local' +version = '1.0.0' +homepage = 'https://some-homepage-one' + +[[local_info.buildpacks]] +id = 'test.bp.two.local' +version = '2.0.0' +homepage = 'https://some-homepage-two' + +[[local_info.processes]] +type = 'some-local-type' +shell = 'bash' +command = '/some/local command' +default = true +args = [ + 'some', + 'local', + 'args', +] +working-dir = "/some-test-work-dir" + +[[local_info.processes]] +type = 'other-local-type' +shell = '' +command = '/other/local/command' +default = false +args = [ + 'other', + 'local', + 'args', +] +working-dir = "/other-test-work-dir" ` expectedRemoteOutput = ` [remote_info] stack = 'test.stack.id.remote' +rebasable = true + +[remote_info.base_image] +top_layer = 'some-remote-top-layer' +reference = 'some-remote-run-image-reference' + +[[remote_info.run_images]] +name = 'user-configured-mirror-for-remote' +user_configured = true + +[[remote_info.run_images]] +name = 'some-remote-run-image' + 
+[[remote_info.run_images]] +name = 'some-remote-mirror' + +[[remote_info.run_images]] +name = 'other-remote-mirror' + +[[remote_info.buildpacks]] +id = 'test.bp.one.remote' +version = '1.0.0' +homepage = 'https://some-homepage-one' + +[[remote_info.buildpacks]] +id = 'test.bp.two.remote' +version = '2.0.0' +homepage = 'https://some-homepage-two' + +[[remote_info.processes]] +type = 'some-remote-type' +shell = 'bash' +command = '/some/remote command' +default = true +args = [ + 'some', + 'remote', + 'args', +] +working-dir = "/some-test-work-dir" + +[[remote_info.processes]] +type = 'other-remote-type' +shell = '' +command = '/other/remote/command' +default = false +args = [ + 'other', + 'remote', + 'args', +] +working-dir = "/other-test-work-dir" +` + expectedRemoteNoRebasableOutput = ` +[remote_info] +stack = 'test.stack.id.remote' +rebasable = false [remote_info.base_image] top_layer = 'some-remote-top-layer' @@ -127,6 +244,7 @@ args = [ 'remote', 'args', ] +working-dir = "/some-test-work-dir" [[remote_info.processes]] type = 'other-remote-type' @@ -138,6 +256,7 @@ args = [ 'remote', 'args', ] +working-dir = "/other-test-work-dir" ` ) @@ -154,16 +273,71 @@ args = [ remoteInfo = &client.ImageInfo{ StackID: "test.stack.id.remote", - Buildpacks: []buildpack.GroupBuildpack{ + Buildpacks: []buildpack.GroupElement{ + {ID: "test.bp.one.remote", Version: "1.0.0", Homepage: "https://some-homepage-one"}, + {ID: "test.bp.two.remote", Version: "2.0.0", Homepage: "https://some-homepage-two"}, + }, + Base: files.RunImageForRebase{ + TopLayer: "some-remote-top-layer", + Reference: "some-remote-run-image-reference", + }, + Stack: files.Stack{ + RunImage: files.RunImageForExport{ + Image: "some-remote-run-image", + Mirrors: []string{"some-remote-mirror", "other-remote-mirror"}, + }, + }, + BOM: []buildpack.BOMEntry{{ + Require: buildpack.Require{ + Name: "name-1", + Version: "version-1", + Metadata: map[string]interface{}{ + "RemoteData": someData{ + String: "aString", + Bool: 
true, + Int: 123, + Nested: struct { + String string + }{ + String: "anotherString", + }, + }, + }, + }, + Buildpack: buildpack.GroupElement{ID: "test.bp.one.remote", Version: "1.0.0", Homepage: "https://some-homepage-one"}, + }}, + Processes: client.ProcessDetails{ + DefaultProcess: &launch.Process{ + Type: "some-remote-type", + Command: launch.RawCommand{Entries: []string{"/some/remote command"}}, + Args: []string{"some", "remote", "args"}, + Direct: false, + WorkingDirectory: "/some-test-work-dir", + }, + OtherProcesses: []launch.Process{ + { + Type: "other-remote-type", + Command: launch.RawCommand{Entries: []string{"/other/remote/command"}}, + Args: []string{"other", "remote", "args"}, + Direct: true, + WorkingDirectory: "/other-test-work-dir", + }, + }, + }, + Rebasable: true, + } + remoteInfoNoRebasable = &client.ImageInfo{ + StackID: "test.stack.id.remote", + Buildpacks: []buildpack.GroupElement{ {ID: "test.bp.one.remote", Version: "1.0.0", Homepage: "https://some-homepage-one"}, {ID: "test.bp.two.remote", Version: "2.0.0", Homepage: "https://some-homepage-two"}, }, - Base: platform.RunImageMetadata{ + Base: files.RunImageForRebase{ TopLayer: "some-remote-top-layer", Reference: "some-remote-run-image-reference", }, - Stack: platform.StackMetadata{ - RunImage: platform.StackRunImageMetadata{ + Stack: files.Stack{ + RunImage: files.RunImageForExport{ Image: "some-remote-run-image", Mirrors: []string{"some-remote-mirror", "other-remote-mirror"}, }, @@ -185,38 +359,41 @@ args = [ }, }, }, - Buildpack: buildpack.GroupBuildpack{ID: "test.bp.one.remote", Version: "1.0.0", Homepage: "https://some-homepage-one"}, + Buildpack: buildpack.GroupElement{ID: "test.bp.one.remote", Version: "1.0.0", Homepage: "https://some-homepage-one"}, }}, Processes: client.ProcessDetails{ DefaultProcess: &launch.Process{ - Type: "some-remote-type", - Command: "/some/remote command", - Args: []string{"some", "remote", "args"}, - Direct: false, + Type: "some-remote-type", + Command: 
launch.RawCommand{Entries: []string{"/some/remote command"}}, + Args: []string{"some", "remote", "args"}, + Direct: false, + WorkingDirectory: "/some-test-work-dir", }, OtherProcesses: []launch.Process{ { - Type: "other-remote-type", - Command: "/other/remote/command", - Args: []string{"other", "remote", "args"}, - Direct: true, + Type: "other-remote-type", + Command: launch.RawCommand{Entries: []string{"/other/remote/command"}}, + Args: []string{"other", "remote", "args"}, + Direct: true, + WorkingDirectory: "/other-test-work-dir", }, }, }, + Rebasable: false, } localInfo = &client.ImageInfo{ StackID: "test.stack.id.local", - Buildpacks: []buildpack.GroupBuildpack{ + Buildpacks: []buildpack.GroupElement{ {ID: "test.bp.one.local", Version: "1.0.0", Homepage: "https://some-homepage-one"}, {ID: "test.bp.two.local", Version: "2.0.0", Homepage: "https://some-homepage-two"}, }, - Base: platform.RunImageMetadata{ + Base: files.RunImageForRebase{ TopLayer: "some-local-top-layer", Reference: "some-local-run-image-reference", }, - Stack: platform.StackMetadata{ - RunImage: platform.StackRunImageMetadata{ + Stack: files.Stack{ + RunImage: files.RunImageForExport{ Image: "some-local-run-image", Mirrors: []string{"some-local-mirror", "other-local-mirror"}, }, @@ -232,24 +409,76 @@ args = [ }, }, }, - Buildpack: buildpack.GroupBuildpack{ID: "test.bp.one.remote", Version: "1.0.0", Homepage: "https://some-homepage-one"}, + Buildpack: buildpack.GroupElement{ID: "test.bp.one.remote", Version: "1.0.0", Homepage: "https://some-homepage-one"}, }}, Processes: client.ProcessDetails{ DefaultProcess: &launch.Process{ - Type: "some-local-type", - Command: "/some/local command", - Args: []string{"some", "local", "args"}, - Direct: false, + Type: "some-local-type", + Command: launch.RawCommand{Entries: []string{"/some/local command"}}, + Args: []string{"some", "local", "args"}, + Direct: false, + WorkingDirectory: "/some-test-work-dir", }, OtherProcesses: []launch.Process{ { - Type: 
"other-local-type", - Command: "/other/local/command", - Args: []string{"other", "local", "args"}, - Direct: true, + Type: "other-local-type", + Command: launch.RawCommand{Entries: []string{"/other/local/command"}}, + Args: []string{"other", "local", "args"}, + Direct: true, + WorkingDirectory: "/other-test-work-dir", }, }, }, + Rebasable: true, + } + localInfoNoRebasable = &client.ImageInfo{ + StackID: "test.stack.id.local", + Buildpacks: []buildpack.GroupElement{ + {ID: "test.bp.one.local", Version: "1.0.0", Homepage: "https://some-homepage-one"}, + {ID: "test.bp.two.local", Version: "2.0.0", Homepage: "https://some-homepage-two"}, + }, + Base: files.RunImageForRebase{ + TopLayer: "some-local-top-layer", + Reference: "some-local-run-image-reference", + }, + Stack: files.Stack{ + RunImage: files.RunImageForExport{ + Image: "some-local-run-image", + Mirrors: []string{"some-local-mirror", "other-local-mirror"}, + }, + }, + BOM: []buildpack.BOMEntry{{ + Require: buildpack.Require{ + Name: "name-1", + Version: "version-1", + Metadata: map[string]interface{}{ + "LocalData": someData{ + Bool: false, + Int: 456, + }, + }, + }, + Buildpack: buildpack.GroupElement{ID: "test.bp.one.remote", Version: "1.0.0", Homepage: "https://some-homepage-one"}, + }}, + Processes: client.ProcessDetails{ + DefaultProcess: &launch.Process{ + Type: "some-local-type", + Command: launch.RawCommand{Entries: []string{"/some/local command"}}, + Args: []string{"some", "local", "args"}, + Direct: false, + WorkingDirectory: "/some-test-work-dir", + }, + OtherProcesses: []launch.Process{ + { + Type: "other-local-type", + Command: launch.RawCommand{Entries: []string{"/other/local/command"}}, + Args: []string{"other", "local", "args"}, + Direct: true, + WorkingDirectory: "/other-test-work-dir", + }, + }, + }, + Rebasable: false, } outBuf = bytes.Buffer{} @@ -285,6 +514,35 @@ args = [ assert.ContainsTOML(outBuf.String(), expectedLocalOutput) assert.ContainsTOML(outBuf.String(), expectedRemoteOutput) }) 
+ it("prints both local and remote no rebasable images info in a TOML format", func() { + runImageMirrors := []config.RunImage{ + { + Image: "un-used-run-image", + Mirrors: []string{"un-used"}, + }, + { + Image: "some-local-run-image", + Mirrors: []string{"user-configured-mirror-for-local"}, + }, + { + Image: "some-remote-run-image", + Mirrors: []string{"user-configured-mirror-for-remote"}, + }, + } + sharedImageInfo := inspectimage.GeneralInfo{ + Name: "test-image", + RunImageMirrors: runImageMirrors, + } + tomlWriter := writer.NewTOML() + + logger := logging.NewLogWithWriters(&outBuf, &outBuf) + err := tomlWriter.Print(logger, sharedImageInfo, localInfoNoRebasable, remoteInfoNoRebasable, nil, nil) + assert.Nil(err) + + assert.ContainsTOML(outBuf.String(), `image_name = "test-image"`) + assert.ContainsTOML(outBuf.String(), expectedLocalNoRebasableOutput) + assert.ContainsTOML(outBuf.String(), expectedRemoteNoRebasableOutput) + }) }) when("only local image exists", func() { diff --git a/internal/inspectimage/writer/yaml_test.go b/internal/inspectimage/writer/yaml_test.go index 0ce95a08c3..ee97ec7a09 100644 --- a/internal/inspectimage/writer/yaml_test.go +++ b/internal/inspectimage/writer/yaml_test.go @@ -6,7 +6,7 @@ import ( "github.com/buildpacks/lifecycle/buildpack" "github.com/buildpacks/lifecycle/launch" - "github.com/buildpacks/lifecycle/platform" + "github.com/buildpacks/lifecycle/platform/files" "github.com/heroku/color" "github.com/sclevine/spec" "github.com/sclevine/spec/report" @@ -30,12 +30,15 @@ func testYAML(t *testing.T, when spec.G, it spec.S) { assert = h.NewAssertionManager(t) outBuf bytes.Buffer - remoteInfo *client.ImageInfo - localInfo *client.ImageInfo + remoteInfo *client.ImageInfo + remoteInfoNoRebasable *client.ImageInfo + localInfo *client.ImageInfo + localInfoNoRebasable *client.ImageInfo expectedLocalOutput = `--- local_info: stack: test.stack.id.local + rebasable: true base_image: top_layer: some-local-top-layer reference: 
some-local-run-image-reference @@ -52,6 +55,7 @@ local_info: - homepage: https://some-homepage-two id: test.bp.two.local version: 2.0.0 + extensions: [] processes: - type: some-local-type shell: bash @@ -61,6 +65,7 @@ local_info: - some - local - args + working-dir: /some-test-work-dir - type: other-local-type shell: '' command: "/other/local/command" @@ -69,10 +74,53 @@ local_info: - other - local - args + working-dir: /other-test-work-dir +` + expectedLocalNoRebasableOutput = `--- +local_info: + stack: test.stack.id.local + rebasable: false + base_image: + top_layer: some-local-top-layer + reference: some-local-run-image-reference + run_images: + - name: user-configured-mirror-for-local + user_configured: true + - name: some-local-run-image + - name: some-local-mirror + - name: other-local-mirror + buildpacks: + - homepage: https://some-homepage-one + id: test.bp.one.local + version: 1.0.0 + - homepage: https://some-homepage-two + id: test.bp.two.local + version: 2.0.0 + extensions: [] + processes: + - type: some-local-type + shell: bash + command: "/some/local command" + default: true + args: + - some + - local + - args + working-dir: /some-test-work-dir + - type: other-local-type + shell: '' + command: "/other/local/command" + default: false + args: + - other + - local + - args + working-dir: /other-test-work-dir ` expectedRemoteOutput = `--- remote_info: stack: test.stack.id.remote + rebasable: true base_image: top_layer: some-remote-top-layer reference: some-remote-run-image-reference @@ -89,6 +137,7 @@ remote_info: - homepage: https://some-homepage-two id: test.bp.two.remote version: 2.0.0 + extensions: [] processes: - type: some-remote-type shell: bash @@ -98,6 +147,7 @@ remote_info: - some - remote - args + working-dir: /some-test-work-dir - type: other-remote-type shell: '' command: "/other/remote/command" @@ -106,6 +156,48 @@ remote_info: - other - remote - args + working-dir: /other-test-work-dir +` + expectedRemoteNoRebasableOutput = `--- +remote_info: 
+ stack: test.stack.id.remote + rebasable: false + base_image: + top_layer: some-remote-top-layer + reference: some-remote-run-image-reference + run_images: + - name: user-configured-mirror-for-remote + user_configured: true + - name: some-remote-run-image + - name: some-remote-mirror + - name: other-remote-mirror + buildpacks: + - homepage: https://some-homepage-one + id: test.bp.one.remote + version: 1.0.0 + - homepage: https://some-homepage-two + id: test.bp.two.remote + version: 2.0.0 + extensions: [] + processes: + - type: some-remote-type + shell: bash + command: "/some/remote command" + default: true + args: + - some + - remote + - args + working-dir: /some-test-work-dir + - type: other-remote-type + shell: '' + command: "/other/remote/command" + default: false + args: + - other + - remote + - args + working-dir: /other-test-work-dir ` ) @@ -122,16 +214,16 @@ remote_info: remoteInfo = &client.ImageInfo{ StackID: "test.stack.id.remote", - Buildpacks: []buildpack.GroupBuildpack{ + Buildpacks: []buildpack.GroupElement{ {ID: "test.bp.one.remote", Version: "1.0.0", Homepage: "https://some-homepage-one"}, {ID: "test.bp.two.remote", Version: "2.0.0", Homepage: "https://some-homepage-two"}, }, - Base: platform.RunImageMetadata{ + Base: files.RunImageForRebase{ TopLayer: "some-remote-top-layer", Reference: "some-remote-run-image-reference", }, - Stack: platform.StackMetadata{ - RunImage: platform.StackRunImageMetadata{ + Stack: files.Stack{ + RunImage: files.RunImageForExport{ Image: "some-remote-run-image", Mirrors: []string{"some-remote-mirror", "other-remote-mirror"}, }, @@ -153,38 +245,145 @@ remote_info: }, }, }, - Buildpack: buildpack.GroupBuildpack{ID: "test.bp.one.remote", Version: "1.0.0", Homepage: "https://some-homepage-one"}, + Buildpack: buildpack.GroupElement{ID: "test.bp.one.remote", Version: "1.0.0", Homepage: "https://some-homepage-one"}, }}, Processes: client.ProcessDetails{ DefaultProcess: &launch.Process{ - Type: "some-remote-type", - Command: 
"/some/remote command", - Args: []string{"some", "remote", "args"}, - Direct: false, + Type: "some-remote-type", + Command: launch.RawCommand{Entries: []string{"/some/remote command"}}, + Args: []string{"some", "remote", "args"}, + Direct: false, + WorkingDirectory: "/some-test-work-dir", }, OtherProcesses: []launch.Process{ { - Type: "other-remote-type", - Command: "/other/remote/command", - Args: []string{"other", "remote", "args"}, - Direct: true, + Type: "other-remote-type", + Command: launch.RawCommand{Entries: []string{"/other/remote/command"}}, + Args: []string{"other", "remote", "args"}, + Direct: true, + WorkingDirectory: "/other-test-work-dir", }, }, }, + Rebasable: true, + } + remoteInfoNoRebasable = &client.ImageInfo{ + StackID: "test.stack.id.remote", + Buildpacks: []buildpack.GroupElement{ + {ID: "test.bp.one.remote", Version: "1.0.0", Homepage: "https://some-homepage-one"}, + {ID: "test.bp.two.remote", Version: "2.0.0", Homepage: "https://some-homepage-two"}, + }, + Base: files.RunImageForRebase{ + TopLayer: "some-remote-top-layer", + Reference: "some-remote-run-image-reference", + }, + Stack: files.Stack{ + RunImage: files.RunImageForExport{ + Image: "some-remote-run-image", + Mirrors: []string{"some-remote-mirror", "other-remote-mirror"}, + }, + }, + BOM: []buildpack.BOMEntry{{ + Require: buildpack.Require{ + Name: "name-1", + Version: "version-1", + Metadata: map[string]interface{}{ + "RemoteData": someData{ + String: "aString", + Bool: true, + Int: 123, + Nested: struct { + String string + }{ + String: "anotherString", + }, + }, + }, + }, + Buildpack: buildpack.GroupElement{ID: "test.bp.one.remote", Version: "1.0.0", Homepage: "https://some-homepage-one"}, + }}, + Processes: client.ProcessDetails{ + DefaultProcess: &launch.Process{ + Type: "some-remote-type", + Command: launch.RawCommand{Entries: []string{"/some/remote command"}}, + Args: []string{"some", "remote", "args"}, + Direct: false, + WorkingDirectory: "/some-test-work-dir", + }, + 
OtherProcesses: []launch.Process{ + { + Type: "other-remote-type", + Command: launch.RawCommand{Entries: []string{"/other/remote/command"}}, + Args: []string{"other", "remote", "args"}, + Direct: true, + WorkingDirectory: "/other-test-work-dir", + }, + }, + }, + Rebasable: false, } localInfo = &client.ImageInfo{ StackID: "test.stack.id.local", - Buildpacks: []buildpack.GroupBuildpack{ + Buildpacks: []buildpack.GroupElement{ + {ID: "test.bp.one.local", Version: "1.0.0", Homepage: "https://some-homepage-one"}, + {ID: "test.bp.two.local", Version: "2.0.0", Homepage: "https://some-homepage-two"}, + }, + Base: files.RunImageForRebase{ + TopLayer: "some-local-top-layer", + Reference: "some-local-run-image-reference", + }, + Stack: files.Stack{ + RunImage: files.RunImageForExport{ + Image: "some-local-run-image", + Mirrors: []string{"some-local-mirror", "other-local-mirror"}, + }, + }, + BOM: []buildpack.BOMEntry{{ + Require: buildpack.Require{ + Name: "name-1", + Version: "version-1", + Metadata: map[string]interface{}{ + "LocalData": someData{ + Bool: false, + Int: 456, + }, + }, + }, + Buildpack: buildpack.GroupElement{ID: "test.bp.one.remote", Version: "1.0.0", Homepage: "https://some-homepage-one"}, + }}, + Processes: client.ProcessDetails{ + DefaultProcess: &launch.Process{ + Type: "some-local-type", + Command: launch.RawCommand{Entries: []string{"/some/local command"}}, + Args: []string{"some", "local", "args"}, + Direct: false, + WorkingDirectory: "/some-test-work-dir", + }, + OtherProcesses: []launch.Process{ + { + Type: "other-local-type", + Command: launch.RawCommand{Entries: []string{"/other/local/command"}}, + Args: []string{"other", "local", "args"}, + Direct: true, + WorkingDirectory: "/other-test-work-dir", + }, + }, + }, + Rebasable: true, + } + localInfoNoRebasable = &client.ImageInfo{ + StackID: "test.stack.id.local", + Buildpacks: []buildpack.GroupElement{ {ID: "test.bp.one.local", Version: "1.0.0", Homepage: "https://some-homepage-one"}, {ID: 
"test.bp.two.local", Version: "2.0.0", Homepage: "https://some-homepage-two"}, }, - Base: platform.RunImageMetadata{ + Base: files.RunImageForRebase{ TopLayer: "some-local-top-layer", Reference: "some-local-run-image-reference", }, - Stack: platform.StackMetadata{ - RunImage: platform.StackRunImageMetadata{ + Stack: files.Stack{ + RunImage: files.RunImageForExport{ Image: "some-local-run-image", Mirrors: []string{"some-local-mirror", "other-local-mirror"}, }, @@ -200,24 +399,27 @@ remote_info: }, }, }, - Buildpack: buildpack.GroupBuildpack{ID: "test.bp.one.remote", Version: "1.0.0", Homepage: "https://some-homepage-one"}, + Buildpack: buildpack.GroupElement{ID: "test.bp.one.remote", Version: "1.0.0", Homepage: "https://some-homepage-one"}, }}, Processes: client.ProcessDetails{ DefaultProcess: &launch.Process{ - Type: "some-local-type", - Command: "/some/local command", - Args: []string{"some", "local", "args"}, - Direct: false, + Type: "some-local-type", + Command: launch.RawCommand{Entries: []string{"/some/local command"}}, + Args: []string{"some", "local", "args"}, + Direct: false, + WorkingDirectory: "/some-test-work-dir", }, OtherProcesses: []launch.Process{ { - Type: "other-local-type", - Command: "/other/local/command", - Args: []string{"other", "local", "args"}, - Direct: true, + Type: "other-local-type", + Command: launch.RawCommand{Entries: []string{"/other/local/command"}}, + Args: []string{"other", "local", "args"}, + Direct: true, + WorkingDirectory: "/other-test-work-dir", }, }, }, + Rebasable: false, } outBuf = bytes.Buffer{} @@ -253,6 +455,35 @@ remote_info: assert.ContainsYAML(outBuf.String(), expectedLocalOutput) assert.ContainsYAML(outBuf.String(), expectedRemoteOutput) }) + it("prints both local and remote no rebasable images info in a YAML format", func() { + runImageMirrors := []config.RunImage{ + { + Image: "un-used-run-image", + Mirrors: []string{"un-used"}, + }, + { + Image: "some-local-run-image", + Mirrors: 
[]string{"user-configured-mirror-for-local"}, + }, + { + Image: "some-remote-run-image", + Mirrors: []string{"user-configured-mirror-for-remote"}, + }, + } + sharedImageInfo := inspectimage.GeneralInfo{ + Name: "test-image", + RunImageMirrors: runImageMirrors, + } + yamlWriter := writer.NewYAML() + + logger := logging.NewLogWithWriters(&outBuf, &outBuf) + err := yamlWriter.Print(logger, sharedImageInfo, localInfoNoRebasable, remoteInfoNoRebasable, nil, nil) + assert.Nil(err) + + assert.ContainsYAML(outBuf.String(), `"image_name": "test-image"`) + assert.ContainsYAML(outBuf.String(), expectedLocalNoRebasableOutput) + assert.ContainsYAML(outBuf.String(), expectedRemoteNoRebasableOutput) + }) }) when("only local image exists", func() { @@ -283,9 +514,7 @@ remote_info: assert.ContainsYAML(outBuf.String(), `"image_name": "test-image"`) assert.ContainsYAML(outBuf.String(), expectedLocalOutput) - assert.NotContains(outBuf.String(), "test.stack.id.remote") - assert.ContainsYAML(outBuf.String(), expectedLocalOutput) }) }) diff --git a/internal/layer/writer_factory.go b/internal/layer/writer_factory.go index 24f9502f82..66de0fe6cb 100644 --- a/internal/layer/writer_factory.go +++ b/internal/layer/writer_factory.go @@ -15,8 +15,8 @@ type WriterFactory struct { } func NewWriterFactory(imageOS string) (*WriterFactory, error) { - if imageOS != "linux" && imageOS != "windows" { - return nil, fmt.Errorf("provided image OS '%s' must be either 'linux' or 'windows'", imageOS) + if imageOS != "freebsd" && imageOS != "linux" && imageOS != "windows" { + return nil, fmt.Errorf("provided image OS '%s' must be either 'freebsd', 'linux' or 'windows'", imageOS) } return &WriterFactory{os: imageOS}, nil @@ -27,6 +27,6 @@ func (f *WriterFactory) NewWriter(fileWriter io.Writer) archive.TarWriter { return ilayer.NewWindowsWriter(fileWriter) } - // Linux images use tar.Writer + // Linux and FreeBSD images use tar.Writer return tar.NewWriter(fileWriter) } diff --git 
a/internal/layer/writer_factory_test.go b/internal/layer/writer_factory_test.go index af9c5cfc59..fdae4c8d75 100644 --- a/internal/layer/writer_factory_test.go +++ b/internal/layer/writer_factory_test.go @@ -1,7 +1,7 @@ package layer_test import ( - "archive/tar" + "archive/tar" //nolint "testing" ilayer "github.com/buildpacks/imgutil/layer" @@ -20,11 +20,21 @@ func testWriterFactory(t *testing.T, when spec.G, it spec.S) { when("#NewWriterFactory", func() { it("returns an error for invalid image OS", func() { _, err := layer.NewWriterFactory("not-an-os") - h.AssertError(t, err, "provided image OS 'not-an-os' must be either 'linux' or 'windows'") + h.AssertError(t, err, "provided image OS 'not-an-os' must be either 'freebsd', 'linux' or 'windows'") }) }) when("#NewWriter", func() { + it("returns a regular tar writer for FreeBSD", func() { + factory, err := layer.NewWriterFactory("freebsd") + h.AssertNil(t, err) + + _, ok := factory.NewWriter(nil).(*tar.Writer) + if !ok { + t.Fatal("returned writer was not a regular tar writer") + } + }) + it("returns a regular tar writer for Linux", func() { factory, err := layer.NewWriterFactory("linux") h.AssertNil(t, err) diff --git a/internal/name/name.go b/internal/name/name.go index c840da5fe6..0ea67c585e 100644 --- a/internal/name/name.go +++ b/internal/name/name.go @@ -2,12 +2,20 @@ package name import ( "fmt" + "strings" + + "github.com/buildpacks/pack/pkg/dist" gname "github.com/google/go-containerregistry/pkg/name" "github.com/buildpacks/pack/internal/style" ) +const ( + defaultRefFormat = "%s/%s:%s" + digestRefFormat = "%s/%s@%s" +) + type Logger interface { Infof(fmt string, v ...interface{}) } @@ -28,7 +36,12 @@ func TranslateRegistry(name string, registryMirrors map[string]string, logger Lo return name, nil } - refName := fmt.Sprintf("%s/%s:%s", registryMirror, srcContext.RepositoryStr(), srcRef.Identifier()) + refFormat := defaultRefFormat + if strings.Contains(srcRef.Identifier(), ":") { + refFormat = 
digestRefFormat + } + + refName := fmt.Sprintf(refFormat, registryMirror, srcContext.RepositoryStr(), srcRef.Identifier()) _, err = gname.ParseReference(refName, gname.WeakValidation) if err != nil { return "", err @@ -38,6 +51,24 @@ func TranslateRegistry(name string, registryMirrors map[string]string, logger Lo return refName, nil } +func AppendSuffix(name string, target dist.Target) (string, error) { + reference, err := gname.ParseReference(name, gname.WeakValidation) + if err != nil { + return "", err + } + + suffixPlatformTag := targetToTag(target) + if suffixPlatformTag != "" { + if reference.Identifier() == "latest" { + return fmt.Sprintf("%s:%s", reference.Context(), suffixPlatformTag), nil + } + if !strings.Contains(reference.Identifier(), ":") { + return fmt.Sprintf("%s:%s-%s", reference.Context(), reference.Identifier(), suffixPlatformTag), nil + } + } + return name, nil +} + func getMirror(repo gname.Repository, registryMirrors map[string]string) (string, bool) { mirror, ok := registryMirrors["*"] if ok { @@ -47,3 +78,7 @@ func getMirror(repo gname.Repository, registryMirrors map[string]string) (string mirror, ok = registryMirrors[repo.RegistryStr()] return mirror, ok } + +func targetToTag(target dist.Target) string { + return strings.Join(target.ValuesAsSlice(), "-") +} diff --git a/internal/name/name_test.go b/internal/name/name_test.go index bbadb4120f..98e28cf20b 100644 --- a/internal/name/name_test.go +++ b/internal/name/name_test.go @@ -1,9 +1,11 @@ package name_test import ( - "io/ioutil" + "io" "testing" + "github.com/buildpacks/pack/pkg/dist" + "github.com/sclevine/spec" "github.com/sclevine/spec/report" @@ -19,7 +21,7 @@ func TestTranslateRegistry(t *testing.T) { func testTranslateRegistry(t *testing.T, when spec.G, it spec.S) { var ( assert = h.NewAssertionManager(t) - logger = logging.NewSimpleLogger(ioutil.Discard) + logger = logging.NewSimpleLogger(io.Discard) ) when("#TranslateRegistry", func() { @@ -66,5 +68,124 @@ func 
testTranslateRegistry(t *testing.T, when spec.G, it spec.S) { assert.Nil(err) assert.Equal(output, expected) }) + + it("translate a buildpack referenced by a digest", func() { + input := "buildpack/bp@sha256:7f48a442c056cd19ea48462e05faa2837ac3a13732c47616d20f11f8c847a8c4" + expected := "myregistry.com/buildpack/bp@sha256:7f48a442c056cd19ea48462e05faa2837ac3a13732c47616d20f11f8c847a8c4" + registryMirrors := map[string]string{ + "index.docker.io": "myregistry.com", + } + + output, err := name.TranslateRegistry(input, registryMirrors, logger) + assert.Nil(err) + assert.Equal(output, expected) + }) + }) + + when("#AppendSuffix", func() { + when("[os] is provided", func() { + when("[arch]] is provided", func() { + when("[arch-variant] is provided", func() { + when("tag is provided", func() { + it("append [os]-[arch]-[arch-variant] to the given tag", func() { + input := "my.registry.com/my-repo/my-image:some-tag" + target := dist.Target{ + OS: "linux", + Arch: "amd64", + ArchVariant: "v6", + } + + result, err := name.AppendSuffix(input, target) + assert.Nil(err) + assert.Equal(result, "my.registry.com/my-repo/my-image:some-tag-linux-amd64-v6") + }) + }) + when("tag is not provided", func() { + it("add tag: [os]-[arch]-[arch-variant] to the given ", func() { + input := "my.registry.com/my-repo/my-image" + target := dist.Target{ + OS: "linux", + Arch: "amd64", + ArchVariant: "v6", + } + + result, err := name.AppendSuffix(input, target) + assert.Nil(err) + assert.Equal(result, "my.registry.com/my-repo/my-image:linux-amd64-v6") + }) + }) + }) + when("[arch-variant] is not provided", func() { + when("tag is provided", func() { + // my.registry.com/my-repo/my-image:some-tag + it("append [os]-[arch] to the given tag", func() { + input := "my.registry.com/my-repo/my-image:some-tag" + target := dist.Target{ + OS: "linux", + Arch: "amd64", + } + + result, err := name.AppendSuffix(input, target) + assert.Nil(err) + assert.Equal(result, 
"my.registry.com/my-repo/my-image:some-tag-linux-amd64") + }) + }) + when("tag is NOT provided", func() { + // my.registry.com/my-repo/my-image + it("add tag: [os]-[arch] to the given ", func() { + input := "my.registry.com/my-repo/my-image" + target := dist.Target{ + OS: "linux", + Arch: "amd64", + } + + result, err := name.AppendSuffix(input, target) + assert.Nil(err) + assert.Equal(result, "my.registry.com/my-repo/my-image:linux-amd64") + }) + }) + }) + }) + + when("[arch] is not provided", func() { + when("tag is provided", func() { + // my.registry.com/my-repo/my-image:some-tag + it("append [os] to the given tag", func() { + input := "my.registry.com/my-repo/my-image:some-tag" + target := dist.Target{ + OS: "linux", + } + + result, err := name.AppendSuffix(input, target) + assert.Nil(err) + assert.Equal(result, "my.registry.com/my-repo/my-image:some-tag-linux") + }) + }) + when("tag is not provided", func() { + // my.registry.com/my-repo/my-image + it("add tag: [os] to the given ", func() { + input := "my.registry.com/my-repo/my-image" + target := dist.Target{ + OS: "linux", + } + + result, err := name.AppendSuffix(input, target) + assert.Nil(err) + assert.Equal(result, "my.registry.com/my-repo/my-image:linux") + }) + }) + }) + }) + + when("[os] is not provided", func() { + it("doesn't append anything and return the same name", func() { + input := "my.registry.com/my-repo/my-image" + target := dist.Target{} + + result, err := name.AppendSuffix(input, target) + assert.Nil(err) + assert.Equal(result, input) + }) + }) }) } diff --git a/internal/paths/defaults_unix.go b/internal/paths/defaults_unix.go new file mode 100644 index 0000000000..9c532674fa --- /dev/null +++ b/internal/paths/defaults_unix.go @@ -0,0 +1,7 @@ +//go:build unix + +package paths + +const ( + RootDir = `/` +) diff --git a/internal/paths/defaults_windows.go b/internal/paths/defaults_windows.go new file mode 100644 index 0000000000..a5f0846643 --- /dev/null +++ 
b/internal/paths/defaults_windows.go @@ -0,0 +1,5 @@ +package paths + +const ( + RootDir = `c:\` +) diff --git a/internal/paths/paths.go b/internal/paths/paths.go index f78695622a..a240a6d6f2 100644 --- a/internal/paths/paths.go +++ b/internal/paths/paths.go @@ -3,6 +3,7 @@ package paths import ( "net/url" "os" + "path" "path/filepath" "regexp" "runtime" @@ -56,7 +57,6 @@ func FilePathToURI(path, relativeTo string) (string, error) { // - windows drive: file:///C:/Documents%20and%20Settings/file.tgz // // - windows share: file://laptop/My%20Documents/file.tgz -// func URIToFilePath(uri string) (string, error) { var ( osPath string @@ -126,10 +126,15 @@ func WindowsToSlash(p string) string { } // WindowsPathSID returns the appropriate SID for a given UID and GID -// This the basic logic for path permissions in Pack and Lifecycle +// This is the basic logic for path permissions in Pack and Lifecycle func WindowsPathSID(uid, gid int) string { if uid == 0 && gid == 0 { return "S-1-5-32-544" // BUILTIN\Administrators } return "S-1-5-32-545" // BUILTIN\Users } + +// CanonicalTarPath return a cleaned path (see path.Clean) with leading slashes removed +func CanonicalTarPath(p string) string { + return strings.TrimPrefix(path.Clean(p), "/") +} diff --git a/internal/paths/paths_test.go b/internal/paths/paths_test.go index 893a9ea80d..287997a4b2 100644 --- a/internal/paths/paths_test.go +++ b/internal/paths/paths_test.go @@ -259,4 +259,41 @@ func testPaths(t *testing.T, when spec.G, it spec.S) { }) }) }) + + when("#CanonicalTarPath", func() { + for _, params := range []struct { + desc string + path string + expected string + }{ + { + desc: "noop", + path: "my/clean/path", + expected: "my/clean/path", + }, + { + desc: "leading slash", + path: "/my/path", + expected: "my/path", + }, + { + desc: "dot", + path: "my/./path", + expected: "my/path", + }, + { + desc: "dotdot", + path: "my/../my/path", + expected: "my/path", + }, + } { + params := params + + 
when(params.desc+":"+params.path, func() { + it(fmt.Sprintf("returns %v", params.expected), func() { + h.AssertEq(t, paths.CanonicalTarPath(params.path), params.expected) + }) + }) + } + }) } diff --git a/internal/registry/git_test.go b/internal/registry/git_test.go index e360f16926..060df13ba4 100644 --- a/internal/registry/git_test.go +++ b/internal/registry/git_test.go @@ -2,14 +2,14 @@ package registry_test import ( "bytes" - "io/ioutil" "os" "path/filepath" + "runtime" "testing" + "github.com/go-git/go-git/v5" "github.com/sclevine/spec" "github.com/sclevine/spec/report" - "gopkg.in/src-d/go-git.v4" "github.com/buildpacks/pack/internal/registry" "github.com/buildpacks/pack/pkg/logging" @@ -34,7 +34,7 @@ func testGit(t *testing.T, when spec.G, it spec.S) { it.Before(func() { logger = logging.NewLogWithWriters(&outBuf, &outBuf) - tmpDir, err = ioutil.TempDir("", "registry") + tmpDir, err = os.MkdirTemp("", "registry") h.AssertNil(t, err) registryFixture = h.CreateRegistryFixture(t, tmpDir, filepath.Join("..", "..", "testdata", "registry")) @@ -43,7 +43,10 @@ func testGit(t *testing.T, when spec.G, it spec.S) { }) it.After(func() { - h.AssertNil(t, os.RemoveAll(tmpDir)) + if runtime.GOOS != "windows" { + h.AssertNil(t, os.RemoveAll(tmpDir)) + } + os.RemoveAll(tmpDir) }) when("#GitCommit", func() { diff --git a/internal/registry/registry_cache.go b/internal/registry/registry_cache.go index 95c9862c61..6ae7e08766 100644 --- a/internal/registry/registry_cache.go +++ b/internal/registry/registry_cache.go @@ -6,17 +6,17 @@ import ( "encoding/hex" "encoding/json" "fmt" - "io/ioutil" "net/url" "os" + "os/exec" "path/filepath" "runtime" "time" + "github.com/go-git/go-git/v5" + "github.com/go-git/go-git/v5/plumbing/object" "github.com/pkg/errors" "golang.org/x/mod/semver" - "gopkg.in/src-d/go-git.v4" - "gopkg.in/src-d/go-git.v4/plumbing/object" "github.com/buildpacks/pack/internal/style" "github.com/buildpacks/pack/pkg/buildpack" @@ -169,20 +169,33 @@ func (r *Cache) 
Initialize() error { // CreateCache creates the cache on the filesystem func (r *Cache) CreateCache() error { + var repository *git.Repository r.logger.Debugf("Creating registry cache for %s/%s", r.url.Host, r.url.Path) - registryDir, err := ioutil.TempDir(filepath.Dir(r.Root), "registry") + registryDir, err := os.MkdirTemp(filepath.Dir(r.Root), "registry") if err != nil { return err } r.RegistryDir = registryDir - repository, err := git.PlainClone(r.RegistryDir, false, &git.CloneOptions{ - URL: r.url.String(), - }) - if err != nil { - return errors.Wrap(err, "cloning remote registry") + if r.url.Host == "dev.azure.com" { + err = exec.Command("git", "clone", r.url.String(), r.RegistryDir).Run() + if err != nil { + return errors.Wrap(err, "cloning remote registry with native git") + } + + repository, err = git.PlainOpen(r.RegistryDir) + if err != nil { + return errors.Wrap(err, "opening remote registry clone") + } + } else { + repository, err = git.PlainClone(r.RegistryDir, false, &git.CloneOptions{ + URL: r.url.String(), + }) + if err != nil { + return errors.Wrap(err, "cloning remote registry") + } } w, err := repository.Worktree() diff --git a/internal/registry/registry_cache_test.go b/internal/registry/registry_cache_test.go index 3bb0e9cc4e..080987b755 100644 --- a/internal/registry/registry_cache_test.go +++ b/internal/registry/registry_cache_test.go @@ -2,7 +2,6 @@ package registry import ( "bytes" - "io/ioutil" "net/url" "os" "path/filepath" @@ -10,11 +9,11 @@ import ( "testing" "time" + "github.com/go-git/go-git/v5" + "github.com/go-git/go-git/v5/plumbing/object" "github.com/heroku/color" "github.com/sclevine/spec" "github.com/sclevine/spec/report" - "gopkg.in/src-d/go-git.v4" - "gopkg.in/src-d/go-git.v4/plumbing/object" "github.com/buildpacks/pack/pkg/logging" h "github.com/buildpacks/pack/testhelpers" @@ -37,15 +36,15 @@ func testRegistryCache(t *testing.T, when spec.G, it spec.S) { it.Before(func() { logger = logging.NewLogWithWriters(&outBuf, &outBuf) - 
tmpDir, err = ioutil.TempDir("", "registry") + tmpDir, err = os.MkdirTemp("", "registry") h.AssertNil(t, err) registryFixture = h.CreateRegistryFixture(t, tmpDir, filepath.Join("..", "..", "testdata", "registry")) }) it.After(func() { - err := os.RemoveAll(tmpDir) - h.AssertNil(t, err) + // Ignoring the error for now, it failed randomly on windows + _ = os.RemoveAll(tmpDir) }) when("#NewDefaultRegistryCache", func() { @@ -74,6 +73,13 @@ func testRegistryCache(t *testing.T, when spec.G, it spec.S) { }) }) + when("registryURL is Azure", func() { + it("fails to create a registry cache", func() { + _, err := NewRegistryCache(logger, tmpDir, "https://dev.azure.com/") + h.AssertNil(t, err) + }) + }) + it("creates a RegistryCache", func() { registryCache, err := NewRegistryCache(logger, tmpDir, registryFixture) h.AssertNil(t, err) @@ -171,6 +177,7 @@ func testRegistryCache(t *testing.T, when spec.G, it spec.S) { Email: "john@doe.org", When: time.Now(), }, + AllowEmptyCommits: true, }) h.AssertNil(t, err) diff --git a/internal/sshdialer/posix_test.go b/internal/sshdialer/posix_test.go index a296894f9d..e24d176f43 100644 --- a/internal/sshdialer/posix_test.go +++ b/internal/sshdialer/posix_test.go @@ -1,5 +1,4 @@ //go:build !windows -// +build !windows package sshdialer_test diff --git a/internal/sshdialer/server_test.go b/internal/sshdialer/server_test.go index 0a301c4ac1..bc35a730a5 100644 --- a/internal/sshdialer/server_test.go +++ b/internal/sshdialer/server_test.go @@ -103,7 +103,7 @@ func prepareSSHServer(t *testing.T) (sshServer *SSHServer, stopSSH func(), err e sshTCPListener, err := net.Listen("tcp4", "localhost:0") if err != nil { - return + return sshServer, stopSSH, err } hasIPv6 := true @@ -115,11 +115,11 @@ func prepareSSHServer(t *testing.T) (sshServer *SSHServer, stopSSH func(), err e host, p, err := net.SplitHostPort(sshTCPListener.Addr().String()) if err != nil { - return + return sshServer, stopSSH, err } port, err := strconv.ParseInt(p, 10, 32) if err != 
nil { - return + return sshServer, stopSSH, err } sshServer.hostIPv4 = host sshServer.portIPv4 = int(port) @@ -127,11 +127,11 @@ func prepareSSHServer(t *testing.T) (sshServer *SSHServer, stopSSH func(), err e if hasIPv6 { host, p, err = net.SplitHostPort(sshTCP6Listener.Addr().String()) if err != nil { - return + return sshServer, stopSSH, err } port, err = strconv.ParseInt(p, 10, 32) if err != nil { - return + return sshServer, stopSSH, err } sshServer.hostIPv6 = host sshServer.portIPv6 = int(port) @@ -231,12 +231,12 @@ func setupServerAuth(conf *ssh.ServerConfig) (err error) { var bs []byte bs, err = os.ReadFile(keyFileName) if err != nil { - return + return err } var pk ssh.PublicKey pk, _, _, _, err = ssh.ParseAuthorizedKey(bs) if err != nil { - return + return err } bs = pk.Marshal() @@ -266,12 +266,12 @@ func setupServerAuth(conf *ssh.ServerConfig) (err error) { var b []byte b, err = os.ReadFile(keyFileName) if err != nil { - return + return err } var signer ssh.Signer signer, err = ssh.ParsePrivateKey(b) if err != nil { - return + return err } conf.AddHostKey(signer) } diff --git a/internal/sshdialer/ssh_agent_unix.go b/internal/sshdialer/ssh_agent_unix.go index 7e11b725ff..aa538f6a7d 100644 --- a/internal/sshdialer/ssh_agent_unix.go +++ b/internal/sshdialer/ssh_agent_unix.go @@ -1,5 +1,4 @@ -//go:build aix || darwin || dragonfly || freebsd || linux || netbsd || openbsd || solaris -// +build aix darwin dragonfly freebsd linux netbsd openbsd solaris +//go:build unix package sshdialer diff --git a/internal/sshdialer/ssh_agent_windows.go b/internal/sshdialer/ssh_agent_windows.go index b6000ef8a1..c2780e4d24 100644 --- a/internal/sshdialer/ssh_agent_windows.go +++ b/internal/sshdialer/ssh_agent_windows.go @@ -4,12 +4,12 @@ import ( "net" "strings" - "gopkg.in/natefinch/npipe.v2" + "github.com/Microsoft/go-winio" ) func dialSSHAgent(addr string) (net.Conn, error) { if strings.Contains(addr, "\\pipe\\") { - return npipe.Dial(addr) + return winio.DialPipe(addr, 
nil) } return net.Dial("unix", addr) } diff --git a/internal/sshdialer/ssh_dialer.go b/internal/sshdialer/ssh_dialer.go index 5769a29a0b..8864915d7f 100644 --- a/internal/sshdialer/ssh_dialer.go +++ b/internal/sshdialer/ssh_dialer.go @@ -8,7 +8,6 @@ import ( "context" "errors" "fmt" - "io/ioutil" "net" urlPkg "net/url" "os" @@ -59,12 +58,15 @@ func NewDialContext(url *urlPkg.URL, config Config) (func(ctx context.Context, n } }() - dialContext, err := tryGetStdioDialContext(url, sshClient, config.Identity) - if err != nil { - return nil, err - } - if dialContext != nil { - return dialContext, nil + var dialContext func(ctx context.Context, network, addr string) (net.Conn, error) + if url.Path == "" { + dialContext, err = tryGetStdioDialContext(url, sshClient, config.Identity) + if err != nil { + return nil, err + } + if dialContext != nil { + return dialContext, nil + } } var addr string @@ -136,19 +138,19 @@ func isWindowsMachine(sshClient *ssh.Client) (bool, error) { func networkAndAddressFromRemoteDockerHost(sshClient *ssh.Client) (network string, addr string, err error) { session, err := sshClient.NewSession() if err != nil { - return + return network, addr, err } defer session.Close() out, err := session.CombinedOutput("set") if err != nil { - return + return network, addr, err } remoteDockerHost := "unix:///var/run/docker.sock" isWin, err := isWindowsMachine(sshClient) if err != nil { - return + return network, addr, err } if isWin { @@ -166,7 +168,7 @@ func networkAndAddressFromRemoteDockerHost(sshClient *ssh.Client) (network strin remoteDockerHostURL, err := urlPkg.Parse(remoteDockerHost) if err != nil { - return + return network, addr, err } switch remoteDockerHostURL.Scheme { case "unix": @@ -267,10 +269,9 @@ func NewSSHClientConfig(url *urlPkg.URL, config Config) (*ssh.ClientConfig, erro ssh.KeyAlgoECDSA384, ssh.KeyAlgoECDSA521, ssh.KeyAlgoED25519, - ssh.SigAlgoRSASHA2512, - ssh.SigAlgoRSASHA2256, + ssh.KeyAlgoRSASHA512, + ssh.KeyAlgoRSASHA256, 
ssh.KeyAlgoRSA, - ssh.KeyAlgoDSA, }, Timeout: sshTimeout * time.Second, } @@ -347,7 +348,7 @@ func signersToAuthMethods(signers []ssh.Signer) []ssh.AuthMethod { // reads key from given path // if necessary it will decrypt it func loadSignerFromFile(path string, passphrase []byte, passPhraseCallback SecretCallback) (ssh.Signer, error) { - key, err := ioutil.ReadFile(path) + key, err := os.ReadFile(path) if err != nil { return nil, fmt.Errorf("failed to read key file: %w", err) } diff --git a/internal/sshdialer/ssh_dialer_test.go b/internal/sshdialer/ssh_dialer_test.go index 654f122660..728a656648 100644 --- a/internal/sshdialer/ssh_dialer_test.go +++ b/internal/sshdialer/ssh_dialer_test.go @@ -3,7 +3,7 @@ package sshdialer_test import ( "context" "fmt" - "io/ioutil" + "io" "net" "net/http" "net/url" @@ -436,7 +436,7 @@ func testCreateDialer(connConfig *SSHServer, tt testParams) func(t *testing.T, w } defer resp.Body.Close() - b, err := ioutil.ReadAll(resp.Body) + b, err := io.ReadAll(resp.Body) th.AssertTrue(t, err == nil) if err != nil { return @@ -476,7 +476,7 @@ func cp(src, dest string) error { return fmt.Errorf("the cp() function failed to stat source file: %w", err) } - data, err := ioutil.ReadFile(src) + data, err := os.ReadFile(src) if err != nil { return fmt.Errorf("the cp() function failed to read source file: %w", err) } @@ -486,7 +486,7 @@ func cp(src, dest string) error { return fmt.Errorf("destination file already exists: %w", os.ErrExist) } - return ioutil.WriteFile(dest, data, srcFs.Mode()) + return os.WriteFile(dest, data, srcFs.Mode()) } // puts key from ./testdata/{keyName} to $HOME/.ssh/{keyName} @@ -545,7 +545,7 @@ func withCleanHome(t *testing.T) func() { if runtime.GOOS == "windows" { homeName = "USERPROFILE" } - tmpDir, err := ioutil.TempDir("", "tmpHome") + tmpDir, err := os.MkdirTemp("", "tmpHome") th.AssertNil(t, err) oldHome, hadHome := os.LookupEnv(homeName) @@ -584,7 +584,7 @@ func withKnowHosts(connConfig *SSHServer) setUpEnvFn { 
serverKeysDir := filepath.Join("testdata", "etc", "ssh") for _, k := range []string{"ecdsa"} { keyPath := filepath.Join(serverKeysDir, fmt.Sprintf("ssh_host_%s_key.pub", k)) - key, err := ioutil.ReadFile(keyPath) + key, err := os.ReadFile(keyPath) th.AssertNil(t, err) fmt.Fprintf(f, "%s %s", connConfig.hostIPv4, string(key)) @@ -753,7 +753,7 @@ func withBadSSHAgentSocket(t *testing.T) func() { func withGoodSSHAgent(t *testing.T) func() { t.Helper() - key, err := ioutil.ReadFile(filepath.Join("testdata", "id_ed25519")) + key, err := os.ReadFile(filepath.Join("testdata", "id_ed25519")) th.AssertNil(t, err) signer, err := ssh.ParsePrivateKey(key) @@ -778,7 +778,7 @@ func withSSHAgent(t *testing.T, ag agent.Agent) func() { if runtime.GOOS == "windows" { agentSocketPath = `\\.\pipe\openssh-ssh-agent-test` } else { - tmpDirForSocket, err = ioutil.TempDir("", "forAuthSock") + tmpDirForSocket, err = os.MkdirTemp("", "forAuthSock") th.AssertNil(t, err) agentSocketPath = filepath.Join(tmpDirForSocket, "agent.sock") @@ -920,17 +920,9 @@ func (b badAgent) Signers() ([]ssh.Signer, error) { func withFixedUpSSHCLI(t *testing.T) func() { t.Helper() - which := "which" - if runtime.GOOS == "windows" { - which = "where" - } - - out, err := exec.Command(which, "ssh").CombinedOutput() + sshAbsPath, err := exec.LookPath("ssh") th.AssertNil(t, err) - sshAbsPath := string(out) - sshAbsPath = strings.Trim(sshAbsPath, "\r\n") - sshScript := `#!/bin/sh SSH_BIN -o PasswordAuthentication=no -o ConnectTimeout=3 -o UserKnownHostsFile="$HOME/.ssh/known_hosts" $@ ` @@ -954,7 +946,7 @@ SSH_BIN -o PasswordAuthentication=no -o ConnectTimeout=3 -o UserKnownHostsFile=% } sshScriptFullPath := filepath.Join(homeBin, sshScriptName) - err = ioutil.WriteFile(sshScriptFullPath, []byte(sshScript), 0700) + err = os.WriteFile(sshScriptFullPath, []byte(sshScript), 0700) th.AssertNil(t, err) oldPath := os.Getenv("PATH") diff --git a/internal/sshdialer/windows_test.go b/internal/sshdialer/windows_test.go index 
304549d967..1f17de8943 100644 --- a/internal/sshdialer/windows_test.go +++ b/internal/sshdialer/windows_test.go @@ -1,5 +1,4 @@ //go:build windows -// +build windows package sshdialer_test @@ -9,8 +8,8 @@ import ( "os/user" "strings" + "github.com/Microsoft/go-winio" "github.com/hectane/go-acl" - "gopkg.in/natefinch/npipe.v2" ) func fixupPrivateKeyMod(path string) { @@ -22,7 +21,7 @@ func fixupPrivateKeyMod(path string) { err = acl.Apply(path, true, false, - acl.GrantName(((mode&0700)<<23)|((mode&0200)<<9), usr.Name)) + acl.GrantName(((mode&0700)<<23)|((mode&0200)<<9), usr.Username)) // See https://github.com/hectane/go-acl/issues/1 if err != nil && err.Error() != "The operation completed successfully." { @@ -32,11 +31,11 @@ func fixupPrivateKeyMod(path string) { func listen(addr string) (net.Listener, error) { if strings.Contains(addr, "\\pipe\\") { - return npipe.Listen(addr) + return winio.ListenPipe(addr, nil) } return net.Listen("unix", addr) } func isErrClosed(err error) bool { - return errors.Is(err, net.ErrClosed) || errors.Is(err, npipe.ErrClosed) + return errors.Is(err, net.ErrClosed) || errors.Is(err, winio.ErrPipeListenerClosed) || errors.Is(err, winio.ErrFileClosed) } diff --git a/internal/stack/merge.go b/internal/stack/merge.go index 7c9975126a..d35119ec81 100644 --- a/internal/stack/merge.go +++ b/internal/stack/merge.go @@ -12,44 +12,43 @@ const WildcardStack = "*" // MergeCompatible determines the allowable set of stacks that a combination of buildpacks may run on, given each // buildpack's set of stacks. Compatibility between the two sets of buildpack stacks is defined by the following rules: // -// 1. The stack must be supported by both buildpacks. That is, any resulting stack ID must appear in both input sets. -// 2. For each supported stack ID, all required mixins for all buildpacks must be provided by the result. That is, -// mixins for the stack ID in both input sets are unioned. -// 3. 
If there is a wildcard stack in either of the stack list, the stack list not having the wild card stack is returned. -// 4. If both the stack lists contain a wildcard stack, a list containing just the wildcard stack is returned. +// 1. The stack must be supported by both buildpacks. That is, any resulting stack ID must appear in both input sets. +// 2. For each supported stack ID, all required mixins for all buildpacks must be provided by the result. That is, +// mixins for the stack ID in both input sets are unioned. +// 3. If there is a wildcard stack in either of the stack list, the stack list not having the wild card stack is returned. +// 4. If both the stack lists contain a wildcard stack, a list containing just the wildcard stack is returned. // // --- // // Examples: // -// stacksA = [{ID: "stack1", mixins: ["build:mixinA", "mixinB", "run:mixinC"]}}] -// stacksB = [{ID: "stack1", mixins: ["build:mixinA", "run:mixinC"]}}] -// result = [{ID: "stack1", mixins: ["build:mixinA", "mixinB", "run:mixinC"]}}] +// stacksA = [{ID: "stack1", mixins: ["build:mixinA", "mixinB", "run:mixinC"]}}] +// stacksB = [{ID: "stack1", mixins: ["build:mixinA", "run:mixinC"]}}] +// result = [{ID: "stack1", mixins: ["build:mixinA", "mixinB", "run:mixinC"]}}] // -// stacksA = [{ID: "stack1", mixins: ["build:mixinA"]}}, {ID: "stack2", mixins: ["mixinA"]}}] -// stacksB = [{ID: "stack1", mixins: ["run:mixinC"]}}, {ID: "stack2", mixins: ["mixinA"]}}] -// result = [{ID: "stack1", mixins: ["build:mixinA", "run:mixinC"]}}, {ID: "stack2", mixins: ["mixinA"]}}] +// stacksA = [{ID: "stack1", mixins: ["build:mixinA"]}}, {ID: "stack2", mixins: ["mixinA"]}}] +// stacksB = [{ID: "stack1", mixins: ["run:mixinC"]}}, {ID: "stack2", mixins: ["mixinA"]}}] +// result = [{ID: "stack1", mixins: ["build:mixinA", "run:mixinC"]}}, {ID: "stack2", mixins: ["mixinA"]}}] // -// stacksA = [{ID: "stack1", mixins: ["build:mixinA"]}}, {ID: "stack2", mixins: ["mixinA"]}}] -// stacksB = [{ID: "stack2", mixins: 
["mixinA", "run:mixinB"]}}] -// result = [{ID: "stack2", mixins: ["mixinA", "run:mixinB"]}}] +// stacksA = [{ID: "stack1", mixins: ["build:mixinA"]}}, {ID: "stack2", mixins: ["mixinA"]}}] +// stacksB = [{ID: "stack2", mixins: ["mixinA", "run:mixinB"]}}] +// result = [{ID: "stack2", mixins: ["mixinA", "run:mixinB"]}}] // -// stacksA = [{ID: "stack1", mixins: ["build:mixinA"]}}] -// stacksB = [{ID: "stack2", mixins: ["mixinA", "run:mixinB"]}}] -// result = [] +// stacksA = [{ID: "stack1", mixins: ["build:mixinA"]}}] +// stacksB = [{ID: "stack2", mixins: ["mixinA", "run:mixinB"]}}] +// result = [] // -// stacksA = [{ID: "*"}, {ID: "stack1", mixins: ["build:mixinC"]}] -// stacksB = [{ID: "stack1", mixins: ["build:mixinA"]}, {ID: "stack2", mixins: ["mixinA", "run:mixinB"]}] -// result = [{ID: "stack1", mixins: ["build:mixinA"]}, {ID: "stack2", mixins: ["mixinA", "run:mixinB"]}] +// stacksA = [{ID: "*"}, {ID: "stack1", mixins: ["build:mixinC"]}] +// stacksB = [{ID: "stack1", mixins: ["build:mixinA"]}, {ID: "stack2", mixins: ["mixinA", "run:mixinB"]}] +// result = [{ID: "stack1", mixins: ["build:mixinA"]}, {ID: "stack2", mixins: ["mixinA", "run:mixinB"]}] // -// stacksA = [{ID: "stack1", mixins: ["build:mixinA"]}, {ID: "stack2", mixins: ["mixinA", "run:mixinB"]}] -// stacksB = [{ID: "*"}, {ID: "stack1", mixins: ["build:mixinC"]}] -// result = [{ID: "stack1", mixins: ["build:mixinA"]}, {ID: "stack2", mixins: ["mixinA", "run:mixinB"]}] -// -// stacksA = [{ID: "*"}, {ID: "stack1", mixins: ["build:mixinA"]}, {ID: "stack2", mixins: ["mixinA", "run:mixinB"]}] -// stacksB = [{ID: "*"}, {ID: "stack1", mixins: ["build:mixinC"]}] -// result = [{ID: "*"}] +// stacksA = [{ID: "stack1", mixins: ["build:mixinA"]}, {ID: "stack2", mixins: ["mixinA", "run:mixinB"]}] +// stacksB = [{ID: "*"}, {ID: "stack1", mixins: ["build:mixinC"]}] +// result = [{ID: "stack1", mixins: ["build:mixinA"]}, {ID: "stack2", mixins: ["mixinA", "run:mixinB"]}] // +// stacksA = [{ID: "*"}, {ID: "stack1", mixins: 
["build:mixinA"]}, {ID: "stack2", mixins: ["mixinA", "run:mixinB"]}] +// stacksB = [{ID: "*"}, {ID: "stack1", mixins: ["build:mixinC"]}] +// result = [{ID: "*"}] func MergeCompatible(stacksA []dist.Stack, stacksB []dist.Stack) []dist.Stack { set := map[string][]string{} AHasWildcardStack, BHasWildcardStack := false, false diff --git a/internal/strings/strings.go b/internal/strings/strings.go index 1029fdd100..4cfbed4cf3 100644 --- a/internal/strings/strings.go +++ b/internal/strings/strings.go @@ -1,5 +1,10 @@ package strings +import ( + "golang.org/x/text/cases" + "golang.org/x/text/language" +) + func ValueOrDefault(str, def string) string { if str == "" { return def @@ -7,3 +12,7 @@ func ValueOrDefault(str, def string) string { return str } + +func Title(lower string) string { + return cases.Title(language.English).String(lower) +} diff --git a/internal/strings/strings_test.go b/internal/strings/strings_test.go index ebf7f5eb91..9c6b29259a 100644 --- a/internal/strings/strings_test.go +++ b/internal/strings/strings_test.go @@ -27,5 +27,12 @@ func TestValueOrDefault(t *testing.T) { assert.Equal(output, "-") }) }) + + when("#Title", func() { + it("returns the provided string with title casing", func() { + output := strings.Title("to title case") + assert.Equal(output, "To Title Case") + }) + }) }) } diff --git a/internal/target/parse.go b/internal/target/parse.go new file mode 100644 index 0000000000..61a6f4da05 --- /dev/null +++ b/internal/target/parse.go @@ -0,0 +1,106 @@ +package target + +import ( + "fmt" + "strings" + + "github.com/pkg/errors" + + "github.com/buildpacks/pack/internal/style" + "github.com/buildpacks/pack/pkg/dist" + "github.com/buildpacks/pack/pkg/logging" +) + +func ParseTargets(t []string, logger logging.Logger) (targets []dist.Target, err error) { + for _, v := range t { + target, err := ParseTarget(v, logger) + if err != nil { + return nil, err + } + targets = append(targets, target) + } + return targets, nil +} + +func ParseTarget(t 
string, logger logging.Logger) (output dist.Target, err error) { + nonDistro, distros, err := getTarget(t, logger) + if v, _ := getSliceAt[string](nonDistro, 0); len(nonDistro) <= 1 && v == "" { + logger.Warn("os/arch must be defined") + } + if err != nil { + return output, err + } + os, arch, variant, err := getPlatform(nonDistro, logger) + if err != nil { + return output, err + } + v, err := ParseDistros(distros, logger) + if err != nil { + return output, err + } + output = dist.Target{ + OS: os, + Arch: arch, + ArchVariant: variant, + Distributions: v, + } + return output, err +} + +func ParseDistros(distroSlice string, logger logging.Logger) (distros []dist.Distribution, err error) { + distro := strings.Split(distroSlice, ";") + if l := len(distro); l == 1 && distro[0] == "" { + return nil, err + } + for _, d := range distro { + v, err := ParseDistro(d, logger) + if err != nil { + return nil, err + } + distros = append(distros, v) + } + return distros, nil +} + +func ParseDistro(distroString string, logger logging.Logger) (distro dist.Distribution, err error) { + d := strings.Split(distroString, "@") + if d[0] == "" || len(d) == 0 { + return distro, errors.Errorf("distro's versions %s cannot be specified without distro's name", style.Symbol("@"+strings.Join(d[1:], "@"))) + } + distro.Name = d[0] + if len(d) < 2 { + logger.Warnf("distro with name %s has no specific version!", style.Symbol(d[0])) + return distro, err + } + if len(d) > 2 { + return distro, fmt.Errorf("invalid distro: %s", distroString) + } + distro.Version = d[1] + return distro, err +} + +func getTarget(t string, logger logging.Logger) (nonDistro []string, distros string, err error) { + target := strings.Split(t, ":") + if (len(target) == 1 && target[0] == "") || len(target) == 0 { + return nonDistro, distros, errors.Errorf("invalid target %s, atleast one of [os][/arch][/archVariant] must be specified", t) + } + if len(target) == 2 && target[0] == "" { + v, _ := getSliceAt[string](target, 1) + 
logger.Warn(style.Warn("adding distros %s without [os][/arch][/variant]", v)) + } else { + i, _ := getSliceAt[string](target, 0) + nonDistro = strings.Split(i, "/") + } + if i, err := getSliceAt[string](target, 1); err == nil { + distros = i + } + return nonDistro, distros, err +} + +func getSliceAt[T interface{}](slice []T, index int) (value T, err error) { + if index < 0 || index >= len(slice) { + return value, errors.Errorf("index out of bound, cannot access item at index %d of slice with length %d", index, len(slice)) + } + + return slice[index], err +} diff --git a/internal/target/parse_test.go b/internal/target/parse_test.go new file mode 100644 index 0000000000..61b2c6e39e --- /dev/null +++ b/internal/target/parse_test.go @@ -0,0 +1,158 @@ +package target_test + +import ( + "bytes" + "testing" + + "github.com/heroku/color" + "github.com/sclevine/spec" + "github.com/sclevine/spec/report" + + "github.com/buildpacks/pack/internal/target" + "github.com/buildpacks/pack/pkg/dist" + "github.com/buildpacks/pack/pkg/logging" + h "github.com/buildpacks/pack/testhelpers" +) + +func TestParseTargets(t *testing.T) { + color.Disable(true) + defer color.Disable(false) + spec.Run(t, "ParseTargets", testParseTargets, spec.Parallel(), spec.Report(report.Terminal{})) +} + +func testParseTargets(t *testing.T, when spec.G, it spec.S) { + outBuf := bytes.Buffer{} + it.Before(func() { + outBuf = bytes.Buffer{} + h.AssertEq(t, outBuf.String(), "") + var err error + h.AssertNil(t, err) + }) + + when("target#ParseTarget", func() { + it("should show a warn when [os][/arch][/variant] is nil", func() { + target.ParseTarget(":distro@version", logging.NewLogWithWriters(&outBuf, &outBuf)) + h.AssertNotEq(t, outBuf.String(), "") + }) + it("should parse target as expected", func() { + output, err := target.ParseTarget("linux/arm/v6", logging.NewLogWithWriters(&outBuf, &outBuf)) + h.AssertEq(t, outBuf.String(), "") + h.AssertNil(t, err) + h.AssertEq(t, output, dist.Target{ + OS: "linux", + 
Arch: "arm", + ArchVariant: "v6", + }) + }) + it("should return an error", func() { + _, err := target.ParseTarget("", logging.NewLogWithWriters(&outBuf, &outBuf)) + h.AssertNotNil(t, err) + }) + it("should log a warning when only [os] has typo or is unknown", func() { + target.ParseTarget("os/arm/v6", logging.NewLogWithWriters(&outBuf, &outBuf)) + h.AssertNotEq(t, outBuf.String(), "") + }) + it("should log a warning when only [arch] has typo or is unknown", func() { + target.ParseTarget("darwin/arm/v6", logging.NewLogWithWriters(&outBuf, &outBuf)) + h.AssertNotEq(t, outBuf.String(), "") + }) + it("should log a warning when only [variant] has typo or is unknown", func() { + target.ParseTarget("linux/arm/unknown", logging.NewLogWithWriters(&outBuf, &outBuf)) + h.AssertNotEq(t, outBuf.String(), "") + }) + }) + + when("target#ParseTargets", func() { + it("should throw an error when atleast one target throws error", func() { + _, err := target.ParseTargets([]string{"linux/arm/v6", ":distro@version"}, logging.NewLogWithWriters(&outBuf, &outBuf)) + h.AssertNotNil(t, err) + }) + it("should parse targets as expected", func() { + output, err := target.ParseTargets([]string{"linux/arm/v6", "linux/amd64:ubuntu@22.04;debian@8.10;debian@10.06"}, logging.NewLogWithWriters(&outBuf, &outBuf)) + h.AssertNil(t, err) + h.AssertEq(t, output, []dist.Target{ + { + OS: "linux", + Arch: "arm", + ArchVariant: "v6", + }, + { + OS: "linux", + Arch: "amd64", + Distributions: []dist.Distribution{ + { + Name: "ubuntu", + Version: "22.04", + }, + { + Name: "debian", + Version: "8.10", + }, + { + Name: "debian", + Version: "10.06", + }, + }, + }, + }) + }) + }) + + when("target#ParseDistro", func() { + it("should parse distro as expected", func() { + output, err := target.ParseDistro("ubuntu@22.04", logging.NewLogWithWriters(&outBuf, &outBuf)) + h.AssertEq(t, output, dist.Distribution{ + Name: "ubuntu", + Version: "22.04", + }) + h.AssertNil(t, err) + }) + it("should return an error when name is 
missing", func() { + _, err := target.ParseDistro("@22.04@20.08", logging.NewLogWithWriters(&outBuf, &outBuf)) + h.AssertNotNil(t, err) + }) + it("should return an error when there are two versions", func() { + _, err := target.ParseDistro("some-distro@22.04@20.08", logging.NewLogWithWriters(&outBuf, &outBuf)) + h.AssertNotNil(t, err) + h.AssertError(t, err, "invalid distro") + }) + it("should warn when distro version is not specified", func() { + target.ParseDistro("ubuntu", logging.NewLogWithWriters(&outBuf, &outBuf)) + h.AssertNotEq(t, outBuf.String(), "") + }) + }) + + when("target#ParseDistros", func() { + it("should parse distros as expected", func() { + output, err := target.ParseDistros("ubuntu@22.04;ubuntu@20.08;debian@8.10;debian@10.06", logging.NewLogWithWriters(&outBuf, &outBuf)) + h.AssertEq(t, output, []dist.Distribution{ + { + Name: "ubuntu", + Version: "22.04", + }, + { + Name: "ubuntu", + Version: "20.08", + }, + { + Name: "debian", + Version: "8.10", + }, + { + Name: "debian", + Version: "10.06", + }, + }) + h.AssertNil(t, err) + }) + it("result should be nil", func() { + output, err := target.ParseDistros("", logging.NewLogWithWriters(&outBuf, &outBuf)) + h.AssertEq(t, output, []dist.Distribution(nil)) + h.AssertNil(t, err) + }) + it("should return an error", func() { + _, err := target.ParseDistros(";", logging.NewLogWithWriters(&outBuf, &outBuf)) + h.AssertNotNil(t, err) + }) + }) +} diff --git a/internal/target/platform.go b/internal/target/platform.go new file mode 100644 index 0000000000..4d48d6845f --- /dev/null +++ b/internal/target/platform.go @@ -0,0 +1,91 @@ +package target + +import ( + "strings" + + "github.com/pkg/errors" + + "github.com/buildpacks/pack/internal/style" + "github.com/buildpacks/pack/pkg/logging" +) + +func getPlatform(t []string, logger logging.Logger) (os, arch, variant string, err error) { + os, _ = getSliceAt[string](t, 0) + arch, _ = getSliceAt[string](t, 1) + variant, _ = getSliceAt[string](t, 2) + if 
!supportsOS(os) && supportsVariant(arch, variant) { + logger.Warn(style.Warn("unknown os %s, is this a typo", os)) + } + if supportsArch(os, arch) && !supportsVariant(arch, variant) { + logger.Warn(style.Warn("unknown variant %s", variant)) + } + if supportsOS(os) && !supportsArch(os, arch) && supportsVariant(arch, variant) { + logger.Warn(style.Warn("unknown arch %s", arch)) + } + if !SupportsPlatform(os, arch, variant) { + return os, arch, variant, errors.Errorf("unknown target: %s", style.Symbol(strings.Join(t, "/"))) + } + return os, arch, variant, err +} + +var supportedOSArchs = map[string][]string{ + "aix": {"ppc64"}, + "android": {"386", "amd64", "arm", "arm64"}, + "darwin": {"amd64", "arm64"}, + "dragonfly": {"amd64"}, + "freebsd": {"386", "amd64", "arm"}, + "illumos": {"amd64"}, + "ios": {"arm64"}, + "js": {"wasm"}, + "linux": {"386", "amd64", "arm", "arm64", "loong64", "mips", "mipsle", "mips64", "mips64le", "ppc64", "ppc64le", "riscv64", "s390x"}, + "netbsd": {"386", "amd64", "arm"}, + "openbsd": {"386", "amd64", "arm", "arm64"}, + "plan9": {"386", "amd64", "arm"}, + "solaris": {"amd64"}, + "wasip1": {"wasm"}, + "windows": {"386", "amd64", "arm", "arm64"}, +} + +var supportedArchVariants = map[string][]string{ + "386": {"softfloat", "sse2"}, + "arm": {"v5", "v6", "v7"}, + "amd64": {"v1", "v2", "v3", "v4"}, + "mips": {"hardfloat", "softfloat"}, + "mipsle": {"hardfloat", "softfloat"}, + "mips64": {"hardfloat", "softfloat"}, + "mips64le": {"hardfloat", "softfloat"}, + "ppc64": {"power8", "power9"}, + "ppc64le": {"power8", "power9"}, + "wasm": {"satconv", "signext"}, +} + +func supportsOS(os string) bool { + return supportedOSArchs[os] != nil +} + +func supportsArch(os, arch string) bool { + if supportsOS(os) { + for _, s := range supportedOSArchs[os] { + if s == arch { + return true + } + } + } + return false +} + +func supportsVariant(arch, variant string) (supported bool) { + if variant == "" || len(variant) == 0 { + return true + } + for _, s := range 
supportedArchVariants[arch] { + if s == variant { + return true + } + } + return supported +} + +func SupportsPlatform(os, arch, variant string) bool { + return supportsArch(os, arch) && supportsVariant(arch, variant) +} diff --git a/internal/target/platform_test.go b/internal/target/platform_test.go new file mode 100644 index 0000000000..469bd88962 --- /dev/null +++ b/internal/target/platform_test.go @@ -0,0 +1,35 @@ +package target_test + +import ( + "testing" + + "github.com/heroku/color" + "github.com/sclevine/spec" + "github.com/sclevine/spec/report" + + "github.com/buildpacks/pack/internal/target" + h "github.com/buildpacks/pack/testhelpers" +) + +func TestPlatforms(t *testing.T) { + color.Disable(true) + defer color.Disable(false) + spec.Run(t, "TestPlatforms", testPlatforms, spec.Parallel(), spec.Report(report.Terminal{})) +} + +func testPlatforms(t *testing.T, when spec.G, it spec.S) { + it.Before(func() { + var err error + h.AssertNil(t, err) + }) + when("target#SupportsPlatform", func() { + it("should return false when target not supported", func() { + b := target.SupportsPlatform("os", "arm", "v6") + h.AssertFalse(t, b) + }) + it("should parse targets as expected", func() { + b := target.SupportsPlatform("linux", "arm", "v6") + h.AssertTrue(t, b) + }) + }) +} diff --git a/internal/termui/dashboard.go b/internal/termui/dashboard.go index a0e10310fc..23553fce95 100644 --- a/internal/termui/dashboard.go +++ b/internal/termui/dashboard.go @@ -11,7 +11,7 @@ import ( type Dashboard struct { app app - buildpackInfo []dist.BuildpackInfo + buildpackInfo []dist.ModuleInfo appTree *tview.TreeView builderTree *tview.TreeView planList *tview.List @@ -23,7 +23,7 @@ type Dashboard struct { logs string } -func NewDashboard(app app, appName string, bldr buildr, runImageName string, buildpackInfo []dist.BuildpackInfo, logs []string) *Dashboard { +func NewDashboard(app app, appName string, bldr buildr, runImageName string, buildpackInfo []dist.ModuleInfo, logs []string) 
*Dashboard { d := &Dashboard{} appTree, builderTree := initTrees(appName, bldr, runImageName) @@ -132,7 +132,7 @@ func (d *Dashboard) setScreen() { d.app.SetRoot(d.screen, true) } -func (d *Dashboard) initDashboard(buildpackInfos []dist.BuildpackInfo) (*tview.List, *tview.TextView) { +func (d *Dashboard) initDashboard(buildpackInfos []dist.ModuleInfo) (*tview.List, *tview.TextView) { planList := tview.NewList() planList.SetMainTextColor(tcell.ColorDarkGrey). SetSelectedTextColor(tcell.ColorDarkGrey). @@ -199,7 +199,7 @@ func initTrees(appName string, bldr buildr, runImageName string) (*tview.TreeVie return appTree, builderTree } -func info(buildpackInfo dist.BuildpackInfo) string { +func info(buildpackInfo dist.ModuleInfo) string { if buildpackInfo.Description != "" { return buildpackInfo.Description } diff --git a/internal/termui/detect.go b/internal/termui/detect.go index 0b72d3cf9d..0fe239904f 100644 --- a/internal/termui/detect.go +++ b/internal/termui/detect.go @@ -15,11 +15,11 @@ type Detect struct { textView *tview.TextView buildpackRegex *regexp.Regexp - buildpackChan chan dist.BuildpackInfo + buildpackChan chan dist.ModuleInfo doneChan chan bool } -func NewDetect(app app, buildpackChan chan dist.BuildpackInfo, bldr buildr) *Detect { +func NewDetect(app app, buildpackChan chan dist.ModuleInfo, bldr buildr) *Detect { d := &Detect{ app: app, textView: detectStatusTV(), @@ -88,14 +88,14 @@ func (d *Detect) start() { } } -func (d *Detect) find(buildpackID, buildpackVersion string) dist.BuildpackInfo { +func (d *Detect) find(buildpackID, buildpackVersion string) dist.ModuleInfo { for _, buildpack := range d.bldr.Buildpacks() { if buildpack.ID == buildpackID && buildpack.Version == buildpackVersion { return buildpack } } - return dist.BuildpackInfo{ + return dist.ModuleInfo{ ID: buildpackID, Version: buildpackVersion, } diff --git a/internal/termui/dive.go b/internal/termui/dive.go index 16e2ee5142..4f60cef5ae 100644 --- a/internal/termui/dive.go +++ 
b/internal/termui/dive.go @@ -16,12 +16,12 @@ type Dive struct { app app menuTable *tview.Table fileExplorerTable *tview.Table - buildpackInfo []dist.BuildpackInfo + buildpackInfo []dist.ModuleInfo buildpacksTreeMap map[string]*tview.TreeNode escHandler func() } -func NewDive(app app, buildpackInfo []dist.BuildpackInfo, selectedBuildpack dist.BuildpackInfo, nodes map[string]*tview.TreeNode, escHandler func()) *Dive { +func NewDive(app app, buildpackInfo []dist.ModuleInfo, selectedBuildpack dist.ModuleInfo, nodes map[string]*tview.TreeNode, escHandler func()) *Dive { menu := initMenu(buildpackInfo, nodes) fileExplorerTable := initFileExplorer() @@ -220,7 +220,7 @@ func (d *Dive) loadFileExplorerData(nodeKey string) { }) } -func initMenu(buildpackInfos []dist.BuildpackInfo, nodes map[string]*tview.TreeNode) *tview.Table { +func initMenu(buildpackInfos []dist.ModuleInfo, nodes map[string]*tview.TreeNode) *tview.Table { style := tcell.StyleDefault. Foreground(tcell.ColorMediumTurquoise). Background(tcell.ColorDarkSlateGray). 
diff --git a/internal/termui/dive_test.go b/internal/termui/dive_test.go index 2d72b0578f..67a5ea2f24 100644 --- a/internal/termui/dive_test.go +++ b/internal/termui/dive_test.go @@ -20,14 +20,14 @@ func TestDiveScreen(t *testing.T) { func testDive(t *testing.T, when spec.G, it spec.S) { var ( fakeApp app - buildpacks []dist.BuildpackInfo - selectedBuildpack dist.BuildpackInfo + buildpacks []dist.ModuleInfo + selectedBuildpack dist.ModuleInfo nodes map[string]*tview.TreeNode ) it.Before(func() { fakeApp = fakes.NewApp() - buildpacks = []dist.BuildpackInfo{ + buildpacks = []dist.ModuleInfo{ {ID: "some/buildpack-1", Version: "0.0.1"}, {ID: "some/buildpack-2", Version: "0.0.2"}} selectedBuildpack = buildpacks[0] diff --git a/internal/termui/fakes/builder.go b/internal/termui/fakes/builder.go index a8a9edf02b..e708652fe6 100644 --- a/internal/termui/fakes/builder.go +++ b/internal/termui/fakes/builder.go @@ -7,12 +7,12 @@ import ( type Builder struct { baseImageName string - buildpacks []dist.BuildpackInfo + buildpacks []dist.ModuleInfo lifecycleDescriptor builder.LifecycleDescriptor stack builder.StackMetadata } -func NewBuilder(baseImageName string, buildpacks []dist.BuildpackInfo, lifecycleDescriptor builder.LifecycleDescriptor, stack builder.StackMetadata) *Builder { +func NewBuilder(baseImageName string, buildpacks []dist.ModuleInfo, lifecycleDescriptor builder.LifecycleDescriptor, stack builder.StackMetadata) *Builder { return &Builder{ baseImageName: baseImageName, buildpacks: buildpacks, @@ -25,7 +25,7 @@ func (b *Builder) BaseImageName() string { return b.baseImageName } -func (b *Builder) Buildpacks() []dist.BuildpackInfo { +func (b *Builder) Buildpacks() []dist.ModuleInfo { return b.buildpacks } diff --git a/internal/termui/termui.go b/internal/termui/termui.go index 938a585fcc..5a8c432db1 100644 --- a/internal/termui/termui.go +++ b/internal/termui/termui.go @@ -4,14 +4,13 @@ import ( "archive/tar" "bufio" "io" - "io/ioutil" "path" "path/filepath" "strings" 
- dcontainer "github.com/docker/docker/api/types/container" "github.com/docker/docker/pkg/stdcopy" "github.com/gdamore/tcell/v2" + dcontainer "github.com/moby/moby/api/types/container" "github.com/rivo/tview" "github.com/buildpacks/pack/internal/builder" @@ -32,7 +31,7 @@ type app interface { type buildr interface { BaseImageName() string - Buildpacks() []dist.BuildpackInfo + Buildpacks() []dist.ModuleInfo LifecycleDescriptor() builder.LifecycleDescriptor Stack() builder.StackMetadata } @@ -52,7 +51,7 @@ type Termui struct { runImageName string exitCode int64 textChan chan string - buildpackChan chan dist.BuildpackInfo + buildpackChan chan dist.ModuleInfo nodes map[string]*tview.TreeNode } @@ -62,7 +61,7 @@ func NewTermui(appName string, bldr *builder.Builder, runImageName string) *Term bldr: bldr, runImageName: runImageName, app: tview.NewApplication(), - buildpackChan: make(chan dist.BuildpackInfo, 50), + buildpackChan: make(chan dist.ModuleInfo, 50), textChan: make(chan string, 50), nodes: map[string]*tview.TreeNode{}, } @@ -108,7 +107,7 @@ func (s *Termui) handle() { } func (s *Termui) Handler() container.Handler { - return func(bodyChan <-chan dcontainer.ContainerWaitOKBody, errChan <-chan error, reader io.Reader) error { + return func(bodyChan <-chan dcontainer.WaitResponse, errChan <-chan error, reader io.Reader) error { var ( copyErr = make(chan error) r, w = io.Pipe() @@ -118,7 +117,7 @@ func (s *Termui) Handler() container.Handler { go func() { defer w.Close() - _, err := stdcopy.StdCopy(w, ioutil.Discard, reader) + _, err := stdcopy.StdCopy(w, io.Discard, reader) if err != nil { copyErr <- err } @@ -199,10 +198,10 @@ func (s *Termui) showBuildStatus() { s.textChan <- "[red::b]\n\nBUILD FAILED" } -func collect(buildpackChan chan dist.BuildpackInfo) []dist.BuildpackInfo { +func collect(buildpackChan chan dist.ModuleInfo) []dist.ModuleInfo { close(buildpackChan) - var result []dist.BuildpackInfo + var result []dist.ModuleInfo for txt := range buildpackChan 
{ result = append(result, txt) } diff --git a/internal/termui/termui_test.go b/internal/termui/termui_test.go index 79a55a367c..93632f3f64 100644 --- a/internal/termui/termui_test.go +++ b/internal/termui/termui_test.go @@ -12,7 +12,7 @@ import ( "testing" "time" - dcontainer "github.com/docker/docker/api/types/container" + dcontainer "github.com/moby/moby/api/types/container" "github.com/rivo/tview" "github.com/sclevine/spec" "github.com/sclevine/spec/report" @@ -37,13 +37,13 @@ func testTermui(t *testing.T, when spec.G, it spec.S) { it("performs the lifecycle", func() { var ( fakeBuild = make(chan bool, 1) - fakeBodyChan = make(chan dcontainer.ContainerWaitOKBody, 1) + fakeBodyChan = make(chan dcontainer.WaitResponse, 1) fakeApp = fakes.NewApp() r, w = io.Pipe() fakeDockerStdWriter = fakes.NewDockerStdWriter(w) fakeBuilder = fakes.NewBuilder("some/basename", - []dist.BuildpackInfo{ + []dist.ModuleInfo{ {ID: "some/buildpack-1", Version: "0.0.1", Homepage: "https://some/buildpack-1"}, {ID: "some/buildpack-2", Version: "0.0.2", Homepage: "https://some/buildpack-2"}, }, @@ -62,14 +62,14 @@ func testTermui(t *testing.T, when spec.G, it spec.S) { bldr: fakeBuilder, runImageName: "some/run-image-name", app: fakeApp, - buildpackChan: make(chan dist.BuildpackInfo, 10), + buildpackChan: make(chan dist.ModuleInfo, 10), textChan: make(chan string, 10), nodes: map[string]*tview.TreeNode{}, } ) defer func() { - fakeBodyChan <- dcontainer.ContainerWaitOKBody{StatusCode: 0} + fakeBodyChan <- dcontainer.WaitResponse{StatusCode: 0} fakeBuild <- true w.Close() fakeApp.StopRunning() @@ -142,7 +142,7 @@ func testTermui(t *testing.T, when spec.G, it spec.S) { h.AssertFalse(t, bpChildren2[0].GetChildren()[0].GetReference().(*tar.Header).FileInfo().IsDir()) // finish build - fakeBodyChan <- dcontainer.ContainerWaitOKBody{StatusCode: 0} + fakeBodyChan <- dcontainer.WaitResponse{StatusCode: 0} w.Close() time.Sleep(500 * time.Millisecond) fakeBuild <- true @@ -154,13 +154,13 @@ func 
testTermui(t *testing.T, when spec.G, it spec.S) { it("performs the lifecycle (when the builder is untrusted)", func() { var ( fakeBuild = make(chan bool, 1) - fakeBodyChan = make(chan dcontainer.ContainerWaitOKBody, 1) + fakeBodyChan = make(chan dcontainer.WaitResponse, 1) fakeApp = fakes.NewApp() r, w = io.Pipe() fakeDockerStdWriter = fakes.NewDockerStdWriter(w) fakeBuilder = fakes.NewBuilder("some/basename", - []dist.BuildpackInfo{ + []dist.ModuleInfo{ {ID: "some/buildpack-1", Version: "0.0.1", Homepage: "https://some/buildpack-1"}, {ID: "some/buildpack-2", Version: "0.0.2", Homepage: "https://some/buildpack-2"}, }, @@ -179,13 +179,13 @@ func testTermui(t *testing.T, when spec.G, it spec.S) { bldr: fakeBuilder, runImageName: "some/run-image-name", app: fakeApp, - buildpackChan: make(chan dist.BuildpackInfo, 10), + buildpackChan: make(chan dist.ModuleInfo, 10), textChan: make(chan string, 10), } ) defer func() { - fakeBodyChan <- dcontainer.ContainerWaitOKBody{StatusCode: 0} + fakeBodyChan <- dcontainer.WaitResponse{StatusCode: 0} fakeBuild <- true w.Close() fakeApp.StopRunning() @@ -226,7 +226,7 @@ func testTermui(t *testing.T, when spec.G, it spec.S) { }, eventuallyInterval, eventuallyDuration) // finish build - fakeBodyChan <- dcontainer.ContainerWaitOKBody{StatusCode: 1} + fakeBodyChan <- dcontainer.WaitResponse{StatusCode: 1} w.Close() time.Sleep(500 * time.Millisecond) fakeBuild <- true diff --git a/cmd/pack/main.go b/main.go similarity index 100% rename from cmd/pack/main.go rename to main.go diff --git a/pkg/archive/archive.go b/pkg/archive/archive.go index cf0fa09537..7221d468e1 100644 --- a/pkg/archive/archive.go +++ b/pkg/archive/archive.go @@ -5,17 +5,19 @@ import ( "archive/tar" "archive/zip" "io" - "io/ioutil" + "io/fs" "os" - "path" "path/filepath" "time" "github.com/docker/docker/pkg/ioutils" "github.com/pkg/errors" + + "github.com/buildpacks/pack/internal/paths" ) var NormalizedDateTime time.Time +var Umask fs.FileMode func init() { 
NormalizedDateTime = time.Date(1980, time.January, 1, 0, 0, 1, 0, time.UTC) @@ -57,7 +59,7 @@ func GenerateTar(genFn func(TarWriter) error) io.ReadCloser { return GenerateTarWithWriter(genFn, DefaultTarWriterFactory()) } -// GenerateTarWithTar returns a reader to a tar from a generator function using a writer from the provided factory. +// GenerateTarWithWriter returns a reader to a tar from a generator function using a writer from the provided factory. // Note that the generator will not fully execute until the reader is fully read from. Any errors returned by the // generator will be returned when reading the reader. func GenerateTarWithWriter(genFn func(TarWriter) error, twf TarWriterFactory) io.ReadCloser { @@ -81,12 +83,7 @@ func GenerateTarWithWriter(genFn func(TarWriter) error, twf TarWriterFactory) io errChan <- closeErr }() - closed := false return ioutils.NewReadCloserWrapper(pr, func() error { - if closed { - return errors.New("reader already closed") - } - var completeErr error // closing the reader ensures that if anything attempts @@ -100,7 +97,6 @@ func GenerateTarWithWriter(genFn func(TarWriter) error, twf TarWriterFactory) io completeErr = aggregateError(completeErr, err) } - closed = true return completeErr }) } @@ -139,6 +135,7 @@ func IsEntryNotExist(err error) bool { // ReadTarEntry reads and returns a tar file func ReadTarEntry(rc io.Reader, entryPath string) (*tar.Header, []byte, error) { + canonicalEntryPath := paths.CanonicalTarPath(entryPath) tr := tar.NewReader(rc) for { header, err := tr.Next() @@ -149,8 +146,8 @@ func ReadTarEntry(rc io.Reader, entryPath string) (*tar.Header, []byte, error) { return nil, nil, errors.Wrap(err, "failed to get next tar entry") } - if path.Clean(header.Name) == entryPath { - buf, err := ioutil.ReadAll(tr) + if paths.CanonicalTarPath(header.Name) == canonicalEntryPath { + buf, err := io.ReadAll(tr) if err != nil { return nil, nil, errors.Wrapf(err, "failed to read contents of '%s'", entryPath) } @@ -166,17 
+163,14 @@ func ReadTarEntry(rc io.Reader, entryPath string) (*tar.Header, []byte, error) { // contents will be placed. The includeRoot param sets the permissions and metadata on the root file. func WriteDirToTar(tw TarWriter, srcDir, basePath string, uid, gid int, mode int64, normalizeModTime, includeRoot bool, fileFilter func(string) bool) error { if includeRoot { - rootHeader := &tar.Header{ - Typeflag: tar.TypeDir, - Name: basePath, - Mode: mode, - } - finalizeHeader(rootHeader, uid, gid, mode, normalizeModTime) - if err := tw.WriteHeader(rootHeader); err != nil { + mode := modePermIfNegativeMode(mode) + err := writeRootHeader(tw, basePath, mode, uid, gid, normalizeModTime) + if err != nil { return err } } + hardLinkFiles := map[uint64]string{} return filepath.Walk(srcDir, func(file string, fi os.FileInfo, err error) error { var relPath string if fileFilter != nil { @@ -188,51 +182,47 @@ func WriteDirToTar(tw TarWriter, srcDir, basePath string, uid, gid int, mode int return nil } } + if err != nil { return err } - if fi.Mode()&os.ModeSocket != 0 { - return nil - } - - var header *tar.Header - if fi.Mode()&os.ModeSymlink != 0 { - target, err := os.Readlink(file) + if relPath == "" { + relPath, err = filepath.Rel(srcDir, file) if err != nil { return err } + } + if relPath == "." { + return nil + } - // Ensure that symlinks have Linux link names, independent of source OS - header, err = tar.FileInfoHeader(fi, filepath.ToSlash(target)) - if err != nil { + if hasModeSocket(fi) != 0 { + return nil + } + + var header *tar.Header + if hasModeSymLink(fi) { + if header, err = getHeaderFromSymLink(file, fi); err != nil { return err } } else { - header, err = tar.FileInfoHeader(fi, fi.Name()) - if err != nil { + if header, err = tar.FileInfoHeader(fi, fi.Name()); err != nil { return err } } - if relPath == "" { - relPath, err = filepath.Rel(srcDir, file) - if err != nil { - return err - } - } - if relPath == "." 
{ - return nil + header.Name = getHeaderNameFromBaseAndRelPath(basePath, relPath) + if err = processHardLinks(file, fi, hardLinkFiles, header); err != nil { + return err } - header.Name = filepath.ToSlash(filepath.Join(basePath, relPath)) - finalizeHeader(header, uid, gid, mode, normalizeModTime) - - if err := tw.WriteHeader(header); err != nil { + err = writeHeader(header, uid, gid, mode, normalizeModTime, tw) + if err != nil { return err } - if fi.Mode().IsRegular() { + if hasRegularMode(fi) && header.Size > 0 { f, err := os.Open(filepath.Clean(file)) if err != nil { return err @@ -248,6 +238,35 @@ func WriteDirToTar(tw TarWriter, srcDir, basePath string, uid, gid int, mode int }) } +// processHardLinks determine if the given file has hard-links associated with it, the given hardLinkFiles map keeps track +// of any previous hard-link previously processed. In case the hard-link was already found, the header will be updated with +// the previous information otherwise the new hard-link found will be tracked into the map +func processHardLinks(file string, fi os.FileInfo, hardLinkFiles map[uint64]string, header *tar.Header) error { + var ( + err error + hardlinks bool + inode uint64 + ) + if hardlinks, err = hasHardlinks(fi, file); err != nil { + return err + } + if hardlinks { + inode, err = getInodeFromStat(fi.Sys(), file) + if err != nil { + return err + } + + if processedPath, ok := hardLinkFiles[inode]; ok { + header.Typeflag = tar.TypeLink + header.Linkname = processedPath + header.Size = 0 + } else { + hardLinkFiles[inode] = header.Name + } + } + return nil +} + // WriteZipToTar writes the contents of a zip file to a tar writer. 
func WriteZipToTar(tw TarWriter, srcZip, basePath string, uid, gid int, mode int64, normalizeModTime bool, fileFilter func(string) bool) error { zipReader, err := zip.OpenReader(srcZip) @@ -277,7 +296,7 @@ func WriteZipToTar(tw TarWriter, srcZip, basePath string, uid, gid int, mode int defer r.Close() // contents is the target of the symlink - target, err := ioutil.ReadAll(r) + target, err := io.ReadAll(r) if err != nil { return "", err } @@ -328,9 +347,42 @@ func WriteZipToTar(tw TarWriter, srcZip, basePath string, uid, gid int, mode int return nil } +// NormalizeHeader normalizes a tar.Header +// +// Normalizes the following: +// - ModTime +// - GID +// - UID +// - User Name +// - Group Name +func NormalizeHeader(header *tar.Header, normalizeModTime bool) { + if normalizeModTime { + header.ModTime = NormalizedDateTime + } + header.Uid = 0 + header.Gid = 0 + header.Uname = "" + header.Gname = "" +} + +// IsZip detects whether or not a File is a zip directory +func IsZip(path string) (bool, error) { + r, err := zip.OpenReader(path) + + switch err { + case nil: + r.Close() + return true, nil + case zip.ErrFormat: + return false, nil + default: + return false, err + } +} + func isFatFile(header zip.FileHeader) bool { var ( - creatorFAT uint16 = 0 + creatorFAT uint16 = 0 // nolint:revive creatorVFAT uint16 = 14 ) @@ -348,35 +400,65 @@ func finalizeHeader(header *tar.Header, uid, gid int, mode int64, normalizeModTi header.Gid = gid } -// NormalizeHeader normalizes a tar.Header -// -// Normalizes the following: -// - ModTime -// - GID -// - UID -// - User Name -// - Group Name -func NormalizeHeader(header *tar.Header, normalizeModTime bool) { - if normalizeModTime { - header.ModTime = NormalizedDateTime +func hasRegularMode(fi os.FileInfo) bool { + return fi.Mode().IsRegular() +} + +func getHeaderNameFromBaseAndRelPath(basePath string, relPath string) string { + return filepath.ToSlash(filepath.Join(basePath, relPath)) +} + +func writeHeader(header *tar.Header, uid int, 
gid int, mode int64, normalizeModTime bool, tw TarWriter) error { + finalizeHeader(header, uid, gid, mode, normalizeModTime) + + if err := tw.WriteHeader(header); err != nil { + return err } - header.Uid = 0 - header.Gid = 0 - header.Uname = "" - header.Gname = "" + + return nil } -// IsZip detects whether or not a File is a zip directory -func IsZip(path string) (bool, error) { - r, err := zip.OpenReader(path) +func getHeaderFromSymLink(file string, fi os.FileInfo) (*tar.Header, error) { + target, err := os.Readlink(file) + if err != nil { + return nil, err + } - switch { - case err == nil: - r.Close() - return true, nil - case err == zip.ErrFormat: - return false, nil - default: - return false, err + // Ensure that symlinks have Linux link names, independent of source OS + header, err := tar.FileInfoHeader(fi, filepath.ToSlash(target)) + if err != nil { + return nil, err + } + return header, nil +} + +func hasModeSymLink(fi os.FileInfo) bool { + return fi.Mode()&os.ModeSymlink != 0 +} + +func hasModeSocket(fi os.FileInfo) fs.FileMode { + return fi.Mode() & os.ModeSocket +} + +func writeRootHeader(tw TarWriter, basePath string, mode int64, uid int, gid int, normalizeModTime bool) error { + rootHeader := &tar.Header{ + Typeflag: tar.TypeDir, + Name: basePath, + Mode: mode, + } + + finalizeHeader(rootHeader, uid, gid, mode, normalizeModTime) + + if err := tw.WriteHeader(rootHeader); err != nil { + return err + } + + return nil +} + +func modePermIfNegativeMode(mode int64) int64 { + if mode == -1 { + return int64(fs.ModePerm) } + return mode } diff --git a/pkg/archive/archive_test.go b/pkg/archive/archive_test.go index 6d93170888..05b864dd73 100644 --- a/pkg/archive/archive_test.go +++ b/pkg/archive/archive_test.go @@ -2,15 +2,12 @@ package archive_test import ( "archive/tar" - "io/ioutil" - "math/rand" "net" "os" "path/filepath" "runtime" "strings" "testing" - "time" "github.com/pkg/errors" @@ -25,7 +22,6 @@ import ( func TestArchive(t *testing.T) { 
color.Disable(true) defer color.Disable(false) - rand.Seed(time.Now().UTC().UnixNano()) spec.Run(t, "Archive", testArchive, spec.Sequential(), spec.Report(report.Terminal{})) } @@ -36,7 +32,7 @@ func testArchive(t *testing.T, when spec.G, it spec.S) { it.Before(func() { var err error - tmpDir, err = ioutil.TempDir("", "create-tar-test") + tmpDir, err = os.MkdirTemp("", "create-tar-test") if err != nil { t.Fatalf("failed to create tmp dir %s: %s", tmpDir, err) } @@ -44,7 +40,10 @@ func testArchive(t *testing.T, when spec.G, it spec.S) { it.After(func() { if err := os.RemoveAll(tmpDir); err != nil { - t.Fatalf("failed to clean up tmp dir %s: %s", tmpDir, err) + if runtime.GOOS != "windows" { + // skip "The process cannot access the file because it is being used by another process" on windows + t.Fatalf("failed to clean up tmp dir %s: %s", tmpDir, err) + } } }) @@ -75,15 +74,6 @@ func testArchive(t *testing.T, when spec.G, it spec.S) { verify.NextDirectory("/nested/dir/dir-in-archive", int64(os.ModePerm)) }) }) - - it("returns error if closed multiple times", func() { - rc := archive.ReadDirAsTar(src, "/nested/dir/dir-in-archive", 1234, 2345, 0777, true, false, func(s string) bool { return false }) - tr := tar.NewReader(rc) - verify := h.NewTarVerifier(t, tr, 1234, 2345) - verify.NoMoreFilesExist() - h.AssertNil(t, rc.Close()) - h.AssertError(t, rc.Close(), "reader already closed") - }) }) when("#ReadZipAsTar", func() { @@ -112,7 +102,7 @@ func testArchive(t *testing.T, when spec.G, it spec.S) { tarFile *os.File ) it.Before(func() { - tarFile, err = ioutil.TempFile(tmpDir, "file.tgz") + tarFile, err = os.CreateTemp(tmpDir, "file.tgz") h.AssertNil(t, err) }) @@ -217,14 +207,14 @@ func testArchive(t *testing.T, when spec.G, it spec.S) { }) }) - when("includeRoot is true", func() { - it("sets metadata on base dest file", func() { + when("mode is set to 0755", func() { + it("writes a tar to the dest dir with 0755", func() { fh, err := os.Create(filepath.Join(tmpDir, 
"some.tar")) h.AssertNil(t, err) tw := tar.NewWriter(fh) - err = archive.WriteDirToTar(tw, src, "/nested/dir/dir-in-archive", 1234, 2345, 0777, true, true, nil) + err = archive.WriteDirToTar(tw, src, "/nested/dir/dir-in-archive", 1234, 2345, 0755, true, false, nil) h.AssertNil(t, err) h.AssertNil(t, tw.Close()) h.AssertNil(t, fh.Close()) @@ -236,18 +226,22 @@ func testArchive(t *testing.T, when spec.G, it spec.S) { tr := tar.NewReader(file) verify := h.NewTarVerifier(t, tr, 1234, 2345) - verify.NextDirectory("/nested/dir/dir-in-archive", int64(os.ModePerm)) + verify.NextFile("/nested/dir/dir-in-archive/some-file.txt", "some-content", 0755) + verify.NextDirectory("/nested/dir/dir-in-archive/sub-dir", 0755) + if runtime.GOOS != "windows" { + verify.NextSymLink("/nested/dir/dir-in-archive/sub-dir/link-file", "../some-file.txt") + } }) }) - when("mode is set to -1", func() { - it("writes a tar to the dest dir with preexisting file mode", func() { + when("includeRoot is true", func() { + it("writes a tar to the root dir with the provided mode", func() { fh, err := os.Create(filepath.Join(tmpDir, "some.tar")) h.AssertNil(t, err) tw := tar.NewWriter(fh) - err = archive.WriteDirToTar(tw, src, "/nested/dir/dir-in-archive", 1234, 2345, -1, true, false, nil) + err = archive.WriteDirToTar(tw, src, "/nested/dir/dir-in-archive", 1234, 2345, 0777, true, true, nil) h.AssertNil(t, err) h.AssertNil(t, tw.Close()) h.AssertNil(t, fh.Close()) @@ -259,11 +253,34 @@ func testArchive(t *testing.T, when spec.G, it spec.S) { tr := tar.NewReader(file) verify := h.NewTarVerifier(t, tr, 1234, 2345) - verify.NextFile("/nested/dir/dir-in-archive/some-file.txt", "some-content", fileMode(t, filepath.Join(src, "some-file.txt"))) - verify.NextDirectory("/nested/dir/dir-in-archive/sub-dir", fileMode(t, filepath.Join(src, "sub-dir"))) - if runtime.GOOS != "windows" { - verify.NextSymLink("/nested/dir/dir-in-archive/sub-dir/link-file", "../some-file.txt") - } + 
verify.NextDirectory("/nested/dir/dir-in-archive", int64(os.ModePerm)) + }) + when("mode is set to -1", func() { + it("writes a tar to the root dir with default (0777) dir mode", func() { + fh, err := os.Create(filepath.Join(tmpDir, "some.tar")) + h.AssertNil(t, err) + + tw := tar.NewWriter(fh) + + err = archive.WriteDirToTar(tw, src, "/nested/dir/dir-in-archive", 1234, 2345, -1, true, true, nil) + h.AssertNil(t, err) + h.AssertNil(t, tw.Close()) + h.AssertNil(t, fh.Close()) + + file, err := os.Open(filepath.Join(tmpDir, "some.tar")) + h.AssertNil(t, err) + defer file.Close() + + tr := tar.NewReader(file) + + verify := h.NewTarVerifier(t, tr, 1234, 2345) + verify.NextDirectory("/nested/dir/dir-in-archive", 0777) + verify.NextFile("/nested/dir/dir-in-archive/some-file.txt", "some-content", fileMode(t, filepath.Join(src, "some-file.txt"))) + verify.NextDirectory("/nested/dir/dir-in-archive/sub-dir", fileMode(t, filepath.Join(src, "sub-dir"))) + if runtime.GOOS != "windows" { + verify.NextSymLink("/nested/dir/dir-in-archive/sub-dir/link-file", "../some-file.txt") + } + }) }) }) @@ -375,7 +392,7 @@ func testArchive(t *testing.T, when spec.G, it spec.S) { ) it.Before(func() { - tmpSrcDir, err = ioutil.TempDir("", "socket-test") + tmpSrcDir, err = os.MkdirTemp("", "socket-test") h.AssertNil(t, err) fakeSocket, err = net.Listen( @@ -383,7 +400,7 @@ func testArchive(t *testing.T, when spec.G, it spec.S) { filepath.Join(tmpSrcDir, "fake-socket"), ) - err = ioutil.WriteFile(filepath.Join(tmpSrcDir, "fake-file"), []byte("some-content"), 0777) + err = os.WriteFile(filepath.Join(tmpSrcDir, "fake-file"), []byte("some-content"), 0777) h.AssertNil(t, err) }) @@ -419,6 +436,37 @@ func testArchive(t *testing.T, when spec.G, it spec.S) { }) }) }) + + when("hard link files are present", func() { + it.Before(func() { + src = filepath.Join("testdata", "dir-to-tar-with-hardlink") + // create a hard link + err := os.Link(filepath.Join(src, "original-file"), filepath.Join(src, 
"original-file-2")) + h.AssertNil(t, err) + }) + + it.After(func() { + os.RemoveAll(filepath.Join(src, "original-file-2")) + }) + + it("tar file file doesn't include duplicated data", func() { + outputFilename := filepath.Join(tmpDir, "file-with-hard-links.tar") + fh, err := os.Create(outputFilename) + h.AssertNil(t, err) + + tw := tar.NewWriter(fh) + err = archive.WriteDirToTar(tw, src, "/nested/dir", 1234, 2345, 0777, true, false, nil) + + h.AssertNil(t, err) + h.AssertNil(t, tw.Close()) + h.AssertNil(t, fh.Close()) + h.AssertOnTarEntries(t, outputFilename, + "/nested/dir/original-file", + "/nested/dir/original-file-2", + h.AreEquivalentHardLinks(), + ) + }) + }) }) when("#WriteZipToTar", func() { @@ -592,11 +640,11 @@ func testArchive(t *testing.T, when spec.G, it spec.S) { when("file is not a zip file", func() { when("file has some content", func() { it("returns false", func() { - file, err := ioutil.TempFile(tmpDir, "file.txt") + file, err := os.CreateTemp(tmpDir, "file.txt") h.AssertNil(t, err) defer file.Close() - err = ioutil.WriteFile(file.Name(), []byte("content"), os.ModePerm) + err = os.WriteFile(file.Name(), []byte("content"), os.ModePerm) h.AssertNil(t, err) isZip, err := archive.IsZip(file.Name()) @@ -607,7 +655,7 @@ func testArchive(t *testing.T, when spec.G, it spec.S) { when("file doesn't have content", func() { it("returns false", func() { - file, err := ioutil.TempFile(tmpDir, "file.txt") + file, err := os.CreateTemp(tmpDir, "file.txt") h.AssertNil(t, err) defer file.Close() diff --git a/pkg/archive/archive_unix.go b/pkg/archive/archive_unix.go new file mode 100644 index 0000000000..d48dd2714f --- /dev/null +++ b/pkg/archive/archive_unix.go @@ -0,0 +1,22 @@ +//go:build unix + +package archive + +import ( + "os" + "syscall" +) + +// hasHardlinks check if the given files has a hard-link associated with it +func hasHardlinks(fi os.FileInfo, path string) (bool, error) { + return fi.Sys().(*syscall.Stat_t).Nlink > 1, nil +} + +// getInodeFromStat 
returns the inode (index node) value associated with the given file +func getInodeFromStat(stat interface{}, path string) (inode uint64, err error) { + s, ok := stat.(*syscall.Stat_t) + if ok { + inode = s.Ino + } + return +} diff --git a/pkg/archive/archive_windows.go b/pkg/archive/archive_windows.go new file mode 100644 index 0000000000..3189363938 --- /dev/null +++ b/pkg/archive/archive_windows.go @@ -0,0 +1,68 @@ +//go:build windows + +package archive + +import ( + "os" + "syscall" + + "golang.org/x/sys/windows" +) + +// hasHardlinks returns true if the given file has hard-links associated with it +func hasHardlinks(fi os.FileInfo, path string) (bool, error) { + var numberOfLinks uint32 + switch v := fi.Sys().(type) { + case *syscall.ByHandleFileInformation: + numberOfLinks = v.NumberOfLinks + default: + // We need an instance of a ByHandleFileInformation to read NumberOfLinks + info, err := open(path) + if err != nil { + return false, err + } + numberOfLinks = info.NumberOfLinks + } + return numberOfLinks > 1, nil +} + +// getInodeFromStat returns an equivalent representation of unix inode on windows based on FileIndexHigh and FileIndexLow values +func getInodeFromStat(stat interface{}, path string) (inode uint64, err error) { + s, ok := stat.(*syscall.ByHandleFileInformation) + if ok { + inode = (uint64(s.FileIndexHigh) << 32) | uint64(s.FileIndexLow) + } else { + s, err = open(path) + if err == nil { + inode = (uint64(s.FileIndexHigh) << 32) | uint64(s.FileIndexLow) + } + } + return +} + +// open returns a ByHandleFileInformation object representation of the given file +func open(path string) (*syscall.ByHandleFileInformation, error) { + fPath, err := syscall.UTF16PtrFromString(path) + if err != nil { + return nil, err + } + + handle, err := syscall.CreateFile( + fPath, + windows.FILE_READ_ATTRIBUTES, + syscall.FILE_SHARE_READ|syscall.FILE_SHARE_WRITE|syscall.FILE_SHARE_DELETE, + nil, + syscall.OPEN_EXISTING, + syscall.FILE_FLAG_BACKUP_SEMANTICS, + 0) + if 
err != nil { + return nil, err + } + defer syscall.CloseHandle(handle) + + var info syscall.ByHandleFileInformation + if err = syscall.GetFileInformationByHandle(handle, &info); err != nil { + return nil, err + } + return &info, nil +} diff --git a/pkg/archive/tar_builder_test.go b/pkg/archive/tar_builder_test.go index 2674e21141..b1b2a5bf36 100644 --- a/pkg/archive/tar_builder_test.go +++ b/pkg/archive/tar_builder_test.go @@ -2,12 +2,9 @@ package archive_test import ( "archive/tar" - "io/ioutil" - "math/rand" "os" "path/filepath" "testing" - "time" "github.com/buildpacks/pack/pkg/archive" @@ -21,7 +18,6 @@ import ( func TestTarBuilder(t *testing.T) { color.Disable(true) defer color.Disable(false) - rand.Seed(time.Now().UTC().UnixNano()) spec.Run(t, "TarBuilder", testTarBuilder, spec.Sequential(), spec.Report(report.Terminal{})) } @@ -33,7 +29,7 @@ func testTarBuilder(t *testing.T, when spec.G, it spec.S) { it.Before(func() { var err error - tmpDir, err = ioutil.TempDir("", "tar-builder-test") + tmpDir, err = os.MkdirTemp("", "tar-builder-test") h.AssertNil(t, err) tarBuilder = archive.TarBuilder{} }) diff --git a/pkg/archive/testdata/dir-to-tar-with-hardlink/original-file b/pkg/archive/testdata/dir-to-tar-with-hardlink/original-file new file mode 100644 index 0000000000..257cc5642c --- /dev/null +++ b/pkg/archive/testdata/dir-to-tar-with-hardlink/original-file @@ -0,0 +1 @@ +foo diff --git a/pkg/archive/umask_unix.go b/pkg/archive/umask_unix.go new file mode 100644 index 0000000000..bfbf75a582 --- /dev/null +++ b/pkg/archive/umask_unix.go @@ -0,0 +1,13 @@ +//go:build unix + +package archive + +import ( + "io/fs" + "syscall" +) + +func init() { + Umask = fs.FileMode(syscall.Umask(0)) + syscall.Umask(int(Umask)) +} diff --git a/pkg/blob/blob.go b/pkg/blob/blob.go index 1846435c4f..5919a303ef 100644 --- a/pkg/blob/blob.go +++ b/pkg/blob/blob.go @@ -58,7 +58,6 @@ func (b blob) Open() (r io.ReadCloser, err error) { defer fh.Close() return gzr.Close() }) - return rc, 
nil } diff --git a/pkg/blob/downloader.go b/pkg/blob/downloader.go index 1f5e9e5918..53c46e5a99 100644 --- a/pkg/blob/downloader.go +++ b/pkg/blob/downloader.go @@ -5,7 +5,6 @@ import ( "crypto/sha256" "fmt" "io" - "io/ioutil" "net/http" "net/url" "os" @@ -29,6 +28,14 @@ type Logger interface { Writer() io.Writer } +type DownloaderOption func(d *downloader) + +func WithClient(client *http.Client) DownloaderOption { + return func(d *downloader) { + d.client = client + } +} + type Downloader interface { Download(ctx context.Context, pathOrURI string) (Blob, error) } @@ -36,13 +43,21 @@ type Downloader interface { type downloader struct { logger Logger baseCacheDir string + client *http.Client } -func NewDownloader(logger Logger, baseCacheDir string) Downloader { - return &downloader{ +func NewDownloader(logger Logger, baseCacheDir string, opts ...DownloaderOption) Downloader { + d := &downloader{ logger: logger, baseCacheDir: baseCacheDir, + client: http.DefaultClient, } + + for _, opt := range opts { + opt(d) + } + + return d } func (d *downloader) Download(ctx context.Context, pathOrURI string) (Blob, error) { @@ -58,6 +73,10 @@ func (d *downloader) Download(ctx context.Context, pathOrURI string) (Blob, erro path, err = paths.URIToFilePath(pathOrURI) case "http", "https": path, err = d.handleHTTP(ctx, pathOrURI) + if err != nil { + // retry as we sometimes see `wsarecv: An existing connection was forcibly closed by the remote host.` on Windows + path, err = d.handleHTTP(ctx, pathOrURI) + } default: err = fmt.Errorf("unsupported protocol %s in URI %s", style.Symbol(parsedURL.Scheme), style.Symbol(pathOrURI)) } @@ -99,7 +118,7 @@ func (d *downloader) handleHTTP(ctx context.Context, uri string) (string, error) etag := "" if etagExists { - bytes, err := ioutil.ReadFile(filepath.Clean(etagFile)) + bytes, err := os.ReadFile(filepath.Clean(etagFile)) if err != nil { return "", err } @@ -125,7 +144,7 @@ func (d *downloader) handleHTTP(ctx context.Context, uri string) 
(string, error) return "", errors.Wrap(err, "writing cache") } - if err = ioutil.WriteFile(etagFile, []byte(etag), 0744); err != nil { + if err = os.WriteFile(etagFile, []byte(etag), 0744); err != nil { return "", errors.Wrap(err, "writing etag") } @@ -143,7 +162,7 @@ func (d *downloader) downloadAsStream(ctx context.Context, uri string, etag stri req.Header.Set("If-None-Match", etag) } - resp, err := (&http.Client{}).Do(req) //nolint:bodyclose + resp, err := d.client.Do(req) //nolint:bodyclose if err != nil { return nil, "", err } diff --git a/pkg/blob/downloader_test.go b/pkg/blob/downloader_test.go index 791473312a..e1bc7952cf 100644 --- a/pkg/blob/downloader_test.go +++ b/pkg/blob/downloader_test.go @@ -4,7 +4,6 @@ import ( "context" "fmt" "io" - "io/ioutil" "net/http" "os" "path/filepath" @@ -36,9 +35,9 @@ func testDownloader(t *testing.T, when spec.G, it spec.S) { ) it.Before(func() { - cacheDir, err = ioutil.TempDir("", "cache") + cacheDir, err = os.MkdirTemp("", "cache") h.AssertNil(t, err) - subject = blob.NewDownloader(&logger{ioutil.Discard}, cacheDir) + subject = blob.NewDownloader(&logger{io.Discard}, cacheDir) }) it.After(func() { @@ -142,6 +141,10 @@ func testDownloader(t *testing.T, when spec.G, it spec.S) { server.AppendHandlers(func(w http.ResponseWriter, r *http.Request) { w.WriteHeader(404) }) + + server.AppendHandlers(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(404) + }) }) it("should return error", func() { diff --git a/pkg/buildpack/build_module_info.go b/pkg/buildpack/build_module_info.go new file mode 100644 index 0000000000..1549579cd2 --- /dev/null +++ b/pkg/buildpack/build_module_info.go @@ -0,0 +1,68 @@ +package buildpack + +import ( + "strings" + + "github.com/pkg/errors" + + "github.com/buildpacks/pack/pkg/dist" +) + +type ModuleInfos interface { + BuildModule() []dist.ModuleInfo +} + +type FlattenModuleInfos interface { + FlattenModules() []ModuleInfos +} + +type flattenModules struct { + modules []ModuleInfos +} + 
+func (fl *flattenModules) FlattenModules() []ModuleInfos { + return fl.modules +} + +type buildModuleInfosImpl struct { + modules []dist.ModuleInfo +} + +func (b *buildModuleInfosImpl) BuildModule() []dist.ModuleInfo { + return b.modules +} + +func ParseFlattenBuildModules(buildpacksID []string) (FlattenModuleInfos, error) { + var buildModuleInfos []ModuleInfos + for _, ids := range buildpacksID { + modules, err := parseBuildpackName(ids) + if err != nil { + return nil, err + } + buildModuleInfos = append(buildModuleInfos, modules) + } + return &flattenModules{modules: buildModuleInfos}, nil +} + +func parseBuildpackName(names string) (ModuleInfos, error) { + var buildModuleInfos []dist.ModuleInfo + ids := strings.Split(names, ",") + for _, id := range ids { + if strings.Count(id, "@") != 1 { + return nil, errors.Errorf("invalid format %s; please use '@' to add buildpacks to be flattened", id) + } + bpFullName := strings.Split(id, "@") + idFromName := strings.TrimSpace(bpFullName[0]) + versionFromName := strings.TrimSpace(bpFullName[1]) + if idFromName == "" || versionFromName == "" { + return nil, errors.Errorf("invalid format %s; '' and '' must be specified", id) + } + + bpID := dist.ModuleInfo{ + ID: idFromName, + Version: versionFromName, + } + buildModuleInfos = append(buildModuleInfos, bpID) + } + return &buildModuleInfosImpl{modules: buildModuleInfos}, nil +} diff --git a/pkg/buildpack/build_module_info_test.go b/pkg/buildpack/build_module_info_test.go new file mode 100644 index 0000000000..2f1dd6760a --- /dev/null +++ b/pkg/buildpack/build_module_info_test.go @@ -0,0 +1,93 @@ +package buildpack_test + +import ( + "fmt" + "testing" + + "github.com/heroku/color" + "github.com/sclevine/spec" + "github.com/sclevine/spec/report" + + "github.com/buildpacks/pack/pkg/buildpack" + h "github.com/buildpacks/pack/testhelpers" +) + +func TestBuildModuleInfo(t *testing.T) { + color.Disable(true) + defer color.Disable(false) + spec.Run(t, "BuildModuleInfo", 
testBuildModuleInfo, spec.Parallel(), spec.Report(report.Terminal{})) +} + +func testBuildModuleInfo(t *testing.T, when spec.G, it spec.S) { + when("#ParseFlattenBuildModules", func() { + when("buildpacksID have format @", func() { + var buildModules []string + when("one buildpackID is provided", func() { + it.Before(func() { + buildModules = []string{"some-buildpack@version-1"} + }) + + it("parses successfully", func() { + flattenModuleInfos, err := buildpack.ParseFlattenBuildModules(buildModules) + h.AssertNil(t, err) + h.AssertNotNil(t, flattenModuleInfos) + h.AssertTrue(t, len(flattenModuleInfos.FlattenModules()) == 1) + h.AssertEq(t, flattenModuleInfos.FlattenModules()[0].BuildModule()[0].ID, "some-buildpack") + h.AssertEq(t, flattenModuleInfos.FlattenModules()[0].BuildModule()[0].Version, "version-1") + }) + }) + + when("more than one buildpackID is provided", func() { + it.Before(func() { + buildModules = []string{"some-buildpack@version-1, another-buildpack@version-2"} + }) + + it("parses multiple buildpackIDs", func() { + flattenModuleInfos, err := buildpack.ParseFlattenBuildModules(buildModules) + h.AssertNil(t, err) + h.AssertNotNil(t, flattenModuleInfos) + h.AssertTrue(t, len(flattenModuleInfos.FlattenModules()) == 1) + h.AssertTrue(t, len(flattenModuleInfos.FlattenModules()[0].BuildModule()) == 2) + h.AssertEq(t, flattenModuleInfos.FlattenModules()[0].BuildModule()[0].ID, "some-buildpack") + h.AssertEq(t, flattenModuleInfos.FlattenModules()[0].BuildModule()[0].Version, "version-1") + h.AssertEq(t, flattenModuleInfos.FlattenModules()[0].BuildModule()[1].ID, "another-buildpack") + h.AssertEq(t, flattenModuleInfos.FlattenModules()[0].BuildModule()[1].Version, "version-2") + }) + }) + }) + + when("buildpacksID don't have format @", func() { + when("@ is missing", func() { + it("errors with a descriptive message", func() { + _, err := buildpack.ParseFlattenBuildModules([]string{"some-buildpack"}) + h.AssertNotNil(t, err) + h.AssertError(t, err, 
fmt.Sprintf("invalid format %s; please use '@' to add buildpacks to be flattened", "some-buildpack")) + }) + }) + + when(" is missing", func() { + it("errors with a descriptive message", func() { + _, err := buildpack.ParseFlattenBuildModules([]string{"some-buildpack@"}) + h.AssertNotNil(t, err) + h.AssertError(t, err, fmt.Sprintf("invalid format %s; '' and '' must be specified", "some-buildpack@")) + }) + }) + + when(" is missing", func() { + it("errors with a descriptive message", func() { + _, err := buildpack.ParseFlattenBuildModules([]string{"@version-1"}) + h.AssertNotNil(t, err) + h.AssertError(t, err, fmt.Sprintf("invalid format %s; '' and '' must be specified", "@version-1")) + }) + }) + + when("multiple @ are used", func() { + it("errors with a descriptive message", func() { + _, err := buildpack.ParseFlattenBuildModules([]string{"some-buildpack@@version-1"}) + h.AssertNotNil(t, err) + h.AssertError(t, err, fmt.Sprintf("invalid format %s; please use '@' to add buildpacks to be flattened", "some-buildpack@@version-1")) + }) + }) + }) + }) +} diff --git a/pkg/buildpack/builder.go b/pkg/buildpack/builder.go index da0f8226c5..c03dbb3d1e 100644 --- a/pkg/buildpack/builder.go +++ b/pkg/buildpack/builder.go @@ -3,12 +3,14 @@ package buildpack import ( "archive/tar" "compress/gzip" - "io/ioutil" + "fmt" + "io" "os" - - "github.com/buildpacks/imgutil/layer" + "path/filepath" + "strconv" "github.com/buildpacks/imgutil" + "github.com/buildpacks/imgutil/layer" v1 "github.com/google/go-containerregistry/pkg/v1" "github.com/google/go-containerregistry/pkg/v1/empty" "github.com/google/go-containerregistry/pkg/v1/layout" @@ -16,6 +18,8 @@ import ( "github.com/google/go-containerregistry/pkg/v1/tarball" "github.com/pkg/errors" + "github.com/buildpacks/pack/pkg/logging" + "github.com/buildpacks/pack/internal/stack" "github.com/buildpacks/pack/internal/style" "github.com/buildpacks/pack/pkg/archive" @@ -23,7 +27,7 @@ import ( ) type ImageFactory interface { - 
NewImage(repoName string, local bool, imageOS string) (imgutil.Image, error) + NewImage(repoName string, local bool, target dist.Target) (imgutil.Image, error) } type WorkableImage interface { @@ -35,6 +39,12 @@ type layoutImage struct { v1.Image } +type toAdd struct { + tarPath string + diffID string + module BuildModule +} + func (i *layoutImage) SetLabel(key string, val string) error { configFile, err := i.ConfigFile() if err != nil { @@ -61,37 +71,161 @@ func (i *layoutImage) AddLayerWithDiffID(path, _ string) error { return nil } +type PackageBuilderOption func(*options) error + +type options struct { + flatten bool + exclude []string + logger logging.Logger + factory archive.TarWriterFactory +} + type PackageBuilder struct { - buildpack Buildpack - dependencies []Buildpack - imageFactory ImageFactory + buildpack BuildModule + extension BuildModule + logger logging.Logger + layerWriterFactory archive.TarWriterFactory + dependencies ManagedCollection + imageFactory ImageFactory + flattenAllBuildpacks bool + flattenExcludeBuildpacks []string } // TODO: Rename to PackageBuilder -func NewBuilder(imageFactory ImageFactory) *PackageBuilder { +func NewBuilder(imageFactory ImageFactory, ops ...PackageBuilderOption) *PackageBuilder { + opts := &options{} + for _, op := range ops { + if err := op(opts); err != nil { + return nil + } + } + moduleManager := NewManagedCollectionV1(opts.flatten) return &PackageBuilder{ - imageFactory: imageFactory, + imageFactory: imageFactory, + dependencies: moduleManager, + flattenAllBuildpacks: opts.flatten, + flattenExcludeBuildpacks: opts.exclude, + logger: opts.logger, + layerWriterFactory: opts.factory, + } +} + +func FlattenAll() PackageBuilderOption { + return func(o *options) error { + o.flatten = true + return nil } } -func (b *PackageBuilder) SetBuildpack(buildpack Buildpack) { +func DoNotFlatten(exclude []string) PackageBuilderOption { + return func(o *options) error { + o.flatten = true + o.exclude = exclude + return nil + } 
+} + +func WithLogger(logger logging.Logger) PackageBuilderOption { + return func(o *options) error { + o.logger = logger + return nil + } +} + +func WithLayerWriterFactory(factory archive.TarWriterFactory) PackageBuilderOption { + return func(o *options) error { + o.factory = factory + return nil + } +} + +func (b *PackageBuilder) SetBuildpack(buildpack BuildModule) { b.buildpack = buildpack } +func (b *PackageBuilder) SetExtension(extension BuildModule) { + b.extension = extension +} -func (b *PackageBuilder) AddDependency(buildpack Buildpack) { - b.dependencies = append(b.dependencies, buildpack) +func (b *PackageBuilder) AddDependency(buildpack BuildModule) { + b.dependencies.AddModules(buildpack) +} + +func (b *PackageBuilder) AddDependencies(main BuildModule, dependencies []BuildModule) { + b.dependencies.AddModules(main, dependencies...) +} + +func (b *PackageBuilder) ShouldFlatten(module BuildModule) bool { + return b.flattenAllBuildpacks || (b.dependencies.ShouldFlatten(module)) +} + +func (b *PackageBuilder) FlattenedModules() [][]BuildModule { + return b.dependencies.FlattenedModules() +} + +func (b *PackageBuilder) AllModules() []BuildModule { + all := b.dependencies.ExplodedModules() + for _, modules := range b.dependencies.FlattenedModules() { + all = append(all, modules...) 
+ } + return all } func (b *PackageBuilder) finalizeImage(image WorkableImage, tmpDir string) error { if err := dist.SetLabel(image, MetadataLabel, &Metadata{ - BuildpackInfo: b.buildpack.Descriptor().Info, - Stacks: b.resolvedStacks(), + ModuleInfo: b.buildpack.Descriptor().Info(), + Stacks: b.resolvedStacks(), }); err != nil { return err } - bpLayers := dist.BuildpackLayers{} - for _, bp := range append(b.dependencies, b.buildpack) { + collectionToAdd := map[string]toAdd{} + var individualBuildModules []BuildModule + + // Let's create the tarball for each flatten module + if len(b.FlattenedModules()) > 0 { + buildModuleWriter := NewBuildModuleWriter(b.logger, b.layerWriterFactory) + excludedModules := Set(b.flattenExcludeBuildpacks) + + var ( + finalTarPath string + err error + ) + for i, additionalModules := range b.FlattenedModules() { + modFlattenTmpDir := filepath.Join(tmpDir, fmt.Sprintf("buildpack-%s-flatten", strconv.Itoa(i))) + if err := os.MkdirAll(modFlattenTmpDir, os.ModePerm); err != nil { + return errors.Wrap(err, "creating flatten temp dir") + } + + if b.flattenAllBuildpacks { + // include the buildpack itself + additionalModules = append(additionalModules, b.buildpack) + } + finalTarPath, individualBuildModules, err = buildModuleWriter.NToLayerTar(modFlattenTmpDir, fmt.Sprintf("buildpack-flatten-%s", strconv.Itoa(i)), additionalModules, excludedModules) + if err != nil { + return errors.Wrapf(err, "adding layer %s", finalTarPath) + } + + diffID, err := dist.LayerDiffID(finalTarPath) + if err != nil { + return errors.Wrapf(err, "calculating diffID for layer %s", finalTarPath) + } + + for _, module := range additionalModules { + collectionToAdd[module.Descriptor().Info().FullName()] = toAdd{ + tarPath: finalTarPath, + diffID: diffID.String(), + module: module, + } + } + } + } + + if !b.flattenAllBuildpacks || len(b.FlattenedModules()) == 0 { + individualBuildModules = append(individualBuildModules, b.buildpack) + } + + // Let's create the tarball for 
each individual module + for _, bp := range append(b.dependencies.ExplodedModules(), individualBuildModules...) { bpLayerTar, err := ToLayerTar(tmpDir, bp) if err != nil { return err @@ -101,15 +235,37 @@ func (b *PackageBuilder) finalizeImage(image WorkableImage, tmpDir string) error if err != nil { return errors.Wrapf(err, "getting content hashes for buildpack %s", - style.Symbol(bp.Descriptor().Info.FullName()), + style.Symbol(bp.Descriptor().Info().FullName()), ) } + collectionToAdd[bp.Descriptor().Info().FullName()] = toAdd{ + tarPath: bpLayerTar, + diffID: diffID.String(), + module: bp, + } + } - if err := image.AddLayerWithDiffID(bpLayerTar, diffID.String()); err != nil { - return errors.Wrapf(err, "adding layer tar for buildpack %s", style.Symbol(bp.Descriptor().Info.FullName())) + bpLayers := dist.ModuleLayers{} + diffIDAdded := map[string]string{} + + for key := range collectionToAdd { + module := collectionToAdd[key] + bp := module.module + addLayer := true + if b.ShouldFlatten(bp) { + if _, ok := diffIDAdded[module.diffID]; !ok { + diffIDAdded[module.diffID] = module.tarPath + } else { + addLayer = false + } + } + if addLayer { + if err := image.AddLayerWithDiffID(module.tarPath, module.diffID); err != nil { + return errors.Wrapf(err, "adding layer tar for buildpack %s", style.Symbol(bp.Descriptor().Info().FullName())) + } } - dist.AddBuildpackToLayersMD(bpLayers, bp.Descriptor(), diffID.String()) + dist.AddToLayersMD(bpLayers, bp.Descriptor(), module.diffID) } if err := dist.SetLabel(image, dist.BuildpackLayersLabel, bpLayers); err != nil { @@ -119,58 +275,123 @@ func (b *PackageBuilder) finalizeImage(image WorkableImage, tmpDir string) error return nil } -func (b *PackageBuilder) validate() error { - if b.buildpack == nil { - return errors.New("buildpack must be set") +func (b *PackageBuilder) finalizeExtensionImage(image WorkableImage, tmpDir string) error { + if err := dist.SetLabel(image, MetadataLabel, &Metadata{ + ModuleInfo: 
b.extension.Descriptor().Info(), + }); err != nil { + return err } - if err := validateBuildpacks(b.buildpack, b.dependencies); err != nil { + exLayers := dist.ModuleLayers{} + exLayerTar, err := ToLayerTar(tmpDir, b.extension) + if err != nil { return err } - if len(b.resolvedStacks()) == 0 { - return errors.Errorf("no compatible stacks among provided buildpacks") + diffID, err := dist.LayerDiffID(exLayerTar) + if err != nil { + return errors.Wrapf(err, + "getting content hashes for extension %s", + style.Symbol(b.extension.Descriptor().Info().FullName()), + ) + } + + if err := image.AddLayerWithDiffID(exLayerTar, diffID.String()); err != nil { + return errors.Wrapf(err, "adding layer tar for extension %s", style.Symbol(b.extension.Descriptor().Info().FullName())) + } + + dist.AddToLayersMD(exLayers, b.extension.Descriptor(), diffID.String()) + + if err := dist.SetLabel(image, dist.ExtensionLayersLabel, exLayers); err != nil { + return err + } + + return nil +} + +func (b *PackageBuilder) validate() error { + if b.buildpack == nil && b.extension == nil { + return errors.New("buildpack or extension must be set") + } + + // we don't need to validate extensions because there are no order or stacks in extensions + if b.buildpack != nil && b.extension == nil { + if err := validateBuildpacks(b.buildpack, b.AllModules()); err != nil { + return err + } + + if len(b.resolvedStacks()) == 0 { + return errors.Errorf("no compatible stacks among provided buildpacks") + } } return nil } func (b *PackageBuilder) resolvedStacks() []dist.Stack { - stacks := b.buildpack.Descriptor().Stacks - for _, bp := range b.dependencies { + stacks := b.buildpack.Descriptor().Stacks() + if len(stacks) == 0 && len(b.buildpack.Descriptor().Order()) == 0 { + // For non-meta-buildpacks using targets, not stacks: assume any stack + stacks = append(stacks, dist.Stack{ID: "*"}) + } + for _, bp := range b.AllModules() { bpd := bp.Descriptor() + bpdStacks := bp.Descriptor().Stacks() + if len(bpdStacks) 
== 0 && len(bpd.Order()) == 0 { + // For non-meta-buildpacks using targets, not stacks: assume any stack + bpdStacks = append(bpdStacks, dist.Stack{ID: "*"}) + } if len(stacks) == 0 { - stacks = bpd.Stacks - } else if len(bpd.Stacks) > 0 { // skip over "meta-buildpacks" - stacks = stack.MergeCompatible(stacks, bpd.Stacks) + stacks = bpdStacks + } else if len(bpdStacks) > 0 { // skip over "meta-buildpacks" + stacks = stack.MergeCompatible(stacks, bpdStacks) } } return stacks } -func (b *PackageBuilder) SaveAsFile(path, imageOS string) error { +func (b *PackageBuilder) SaveAsFile(path string, target dist.Target, labels map[string]string) error { if err := b.validate(); err != nil { return err } - layoutImage, err := newLayoutImage(imageOS) + layoutImage, err := newLayoutImage(target) if err != nil { return errors.Wrap(err, "creating layout image") } - tmpDir, err := ioutil.TempDir("", "package-buildpack") + for labelKey, labelValue := range labels { + err = layoutImage.SetLabel(labelKey, labelValue) + if err != nil { + return errors.Wrapf(err, "adding label %s=%s", labelKey, labelValue) + } + } + + tempDirName := "" + if b.buildpack != nil { + tempDirName = "package-buildpack" + } else if b.extension != nil { + tempDirName = "extension-buildpack" + } + + tmpDir, err := os.MkdirTemp("", tempDirName) if err != nil { return err } defer os.RemoveAll(tmpDir) - if err := b.finalizeImage(layoutImage, tmpDir); err != nil { - return err + if b.buildpack != nil { + if err := b.finalizeImage(layoutImage, tmpDir); err != nil { + return err + } + } else if b.extension != nil { + if err := b.finalizeExtensionImage(layoutImage, tmpDir); err != nil { + return err + } } - - layoutDir, err := ioutil.TempDir(tmpDir, "oci-layout") + layoutDir, err := os.MkdirTemp(tmpDir, "oci-layout") if err != nil { return errors.Wrap(err, "creating oci-layout temp dir") } @@ -196,7 +417,7 @@ func (b *PackageBuilder) SaveAsFile(path, imageOS string) error { return archive.WriteDirToTar(tw, layoutDir, 
"/", 0, 0, 0755, true, false, nil) } -func newLayoutImage(imageOS string) (*layoutImage, error) { +func newLayoutImage(target dist.Target) (*layoutImage, error) { i := empty.Image configFile, err := i.ConfigFile() @@ -204,19 +425,20 @@ func newLayoutImage(imageOS string) (*layoutImage, error) { return nil, err } - configFile.OS = imageOS + configFile.OS = target.OS + configFile.Architecture = target.Arch i, err = mutate.ConfigFile(i, configFile) if err != nil { return nil, err } - if imageOS == "windows" { - baseLayerReader, err := layer.WindowsBaseLayer() - if err != nil { - return nil, err + if target.OS == "windows" { + opener := func() (io.ReadCloser, error) { + reader, err := layer.WindowsBaseLayer() + return io.NopCloser(reader), err } - baseLayer, err := tarball.LayerFromReader(baseLayerReader, tarball.WithCompressionLevel(gzip.DefaultCompression)) + baseLayer, err := tarball.LayerFromOpener(opener, tarball.WithCompressionLevel(gzip.DefaultCompression)) if err != nil { return nil, err } @@ -230,53 +452,81 @@ func newLayoutImage(imageOS string) (*layoutImage, error) { return &layoutImage{Image: i}, nil } -func (b *PackageBuilder) SaveAsImage(repoName string, publish bool, imageOS string) (imgutil.Image, error) { +func (b *PackageBuilder) SaveAsImage(repoName string, publish bool, target dist.Target, labels map[string]string, additionalTags ...string) (imgutil.Image, error) { if err := b.validate(); err != nil { return nil, err } - image, err := b.imageFactory.NewImage(repoName, !publish, imageOS) + image, err := b.imageFactory.NewImage(repoName, !publish, target) if err != nil { return nil, errors.Wrapf(err, "creating image") } - tmpDir, err := ioutil.TempDir("", "package-buildpack") + for labelKey, labelValue := range labels { + err = image.SetLabel(labelKey, labelValue) + if err != nil { + return nil, errors.Wrapf(err, "adding label %s=%s", labelKey, labelValue) + } + } + + tempDirName := "" + if b.buildpack != nil { + tempDirName = "package-buildpack" + } 
else if b.extension != nil { + tempDirName = "extension-buildpack" + } + + tmpDir, err := os.MkdirTemp("", tempDirName) if err != nil { return nil, err } defer os.RemoveAll(tmpDir) - - if err := b.finalizeImage(image, tmpDir); err != nil { - return nil, err + if b.buildpack != nil { + if err := b.finalizeImage(image, tmpDir); err != nil { + return nil, err + } + } else if b.extension != nil { + if err := b.finalizeExtensionImage(image, tmpDir); err != nil { + return nil, err + } } - if err := image.Save(); err != nil { + if err := image.Save(additionalTags...); err != nil { return nil, err } return image, nil } -func validateBuildpacks(mainBP Buildpack, depBPs []Buildpack) error { - depsWithRefs := map[string][]dist.BuildpackInfo{} +func validateBuildpacks(mainBP BuildModule, depBPs []BuildModule) error { + depsWithRefs := map[string][]dist.ModuleInfo{} for _, bp := range depBPs { - depsWithRefs[bp.Descriptor().Info.FullName()] = nil + depsWithRefs[bp.Descriptor().Info().FullName()] = nil } - for _, bp := range append([]Buildpack{mainBP}, depBPs...) { // List of everything + for _, bp := range append([]BuildModule{mainBP}, depBPs...) 
{ // List of everything bpd := bp.Descriptor() - for _, orderEntry := range bpd.Order { + for _, orderEntry := range bpd.Order() { for _, groupEntry := range orderEntry.Group { - if _, ok := depsWithRefs[groupEntry.BuildpackInfo.FullName()]; !ok { + bpFullName, err := groupEntry.FullNameWithVersion() + if err != nil { + return errors.Wrapf( + err, + "buildpack %s must specify a version when referencing buildpack %s", + style.Symbol(bpd.Info().FullName()), + style.Symbol(bpFullName), + ) + } + if _, ok := depsWithRefs[bpFullName]; !ok { return errors.Errorf( "buildpack %s references buildpack %s which is not present", - style.Symbol(bpd.Info.FullName()), - style.Symbol(groupEntry.FullName()), + style.Symbol(bpd.Info().FullName()), + style.Symbol(bpFullName), ) } - depsWithRefs[groupEntry.BuildpackInfo.FullName()] = append(depsWithRefs[groupEntry.BuildpackInfo.FullName()], bpd.Info) + depsWithRefs[bpFullName] = append(depsWithRefs[bpFullName], bpd.Info()) } } } @@ -286,7 +536,7 @@ func validateBuildpacks(mainBP Buildpack, depBPs []Buildpack) error { return errors.Errorf( "buildpack %s is not used by buildpack %s", style.Symbol(bp), - style.Symbol(mainBP.Descriptor().Info.FullName()), + style.Symbol(mainBP.Descriptor().Info().FullName()), ) } } diff --git a/pkg/buildpack/builder_test.go b/pkg/buildpack/builder_test.go index b81fe82af9..ece0126020 100644 --- a/pkg/buildpack/builder_test.go +++ b/pkg/buildpack/builder_test.go @@ -2,16 +2,19 @@ package buildpack_test import ( "archive/tar" + "bytes" "compress/gzip" "encoding/json" "fmt" "io" - "io/ioutil" "os" "path" "path/filepath" + "slices" "testing" + "github.com/pkg/errors" + "github.com/buildpacks/imgutil/fakes" "github.com/buildpacks/imgutil/layer" "github.com/buildpacks/lifecycle/api" @@ -22,6 +25,9 @@ import ( "github.com/sclevine/spec" "github.com/sclevine/spec/report" + "github.com/buildpacks/pack/pkg/archive" + "github.com/buildpacks/pack/pkg/logging" + ifakes "github.com/buildpacks/pack/internal/fakes" 
"github.com/buildpacks/pack/pkg/buildpack" "github.com/buildpacks/pack/pkg/dist" @@ -50,14 +56,14 @@ func testPackageBuilder(t *testing.T, when spec.G, it spec.S) { if expectedImageOS != "" { fakePackageImage := fakes.NewImage("some/package", "", nil) - imageFactory.EXPECT().NewImage("some/package", true, expectedImageOS).Return(fakePackageImage, nil).MaxTimes(1) + imageFactory.EXPECT().NewImage("some/package", true, dist.Target{OS: expectedImageOS}).Return(fakePackageImage, nil).MaxTimes(1) } return imageFactory } var err error - tmpDir, err = ioutil.TempDir("", "package_builder_tests") + tmpDir, err = os.MkdirTemp("", "package_builder_tests") h.AssertNil(t, err) }) @@ -67,24 +73,27 @@ func testPackageBuilder(t *testing.T, when spec.G, it spec.S) { }) when("validation", func() { + linux := dist.Target{OS: "linux"} + windows := dist.Target{OS: "windows"} + for _, _test := range []*struct { name string expectedImageOS string fn func(*buildpack.PackageBuilder) error }{ {name: "SaveAsImage", expectedImageOS: "linux", fn: func(builder *buildpack.PackageBuilder) error { - _, err := builder.SaveAsImage("some/package", false, "linux") + _, err := builder.SaveAsImage("some/package", false, linux, map[string]string{}) return err }}, {name: "SaveAsImage", expectedImageOS: "windows", fn: func(builder *buildpack.PackageBuilder) error { - _, err := builder.SaveAsImage("some/package", false, "windows") + _, err := builder.SaveAsImage("some/package", false, windows, map[string]string{}) return err }}, {name: "SaveAsFile", expectedImageOS: "linux", fn: func(builder *buildpack.PackageBuilder) error { - return builder.SaveAsFile(path.Join(tmpDir, "package.cnb"), "linux") + return builder.SaveAsFile(path.Join(tmpDir, "package.cnb"), linux, map[string]string{}) }}, {name: "SaveAsFile", expectedImageOS: "windows", fn: func(builder *buildpack.PackageBuilder) error { - return builder.SaveAsFile(path.Join(tmpDir, "package.cnb"), "windows") + return builder.SaveAsFile(path.Join(tmpDir, 
"package.cnb"), windows, map[string]string{}) }}, } { // always use copies to avoid stale refs @@ -100,19 +109,19 @@ func testPackageBuilder(t *testing.T, when spec.G, it spec.S) { it("returns error", func() { builder := buildpack.NewBuilder(mockImageFactory(expectedImageOS)) err := testFn(builder) - h.AssertError(t, err, "buildpack must be set") + h.AssertError(t, err, "buildpack or extension must be set") }) }) when("there is a buildpack not referenced", func() { it("should error", func() { bp1, err := ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: dist.BuildpackInfo{ + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ ID: "bp.1.id", Version: "bp.1.version", }, - Stacks: []dist.Stack{{ID: "some.stack"}}, + WithStacks: []dist.Stack{{ID: "some.stack"}}, }, 0644) h.AssertNil(t, err) @@ -120,10 +129,10 @@ func testPackageBuilder(t *testing.T, when spec.G, it spec.S) { builder.SetBuildpack(bp1) bp2, err := ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: dist.BuildpackInfo{ID: "bp.2.id", Version: "bp.2.version"}, - Stacks: []dist.Stack{{ID: "some.stack"}}, - Order: nil, + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ID: "bp.2.id", Version: "bp.2.version"}, + WithStacks: []dist.Stack{{ID: "some.stack"}}, + WithOrder: nil, }, 0644) h.AssertNil(t, err) builder.AddDependency(bp2) @@ -136,15 +145,15 @@ func testPackageBuilder(t *testing.T, when spec.G, it spec.S) { when("there is a referenced buildpack from main buildpack that is not present", func() { it("should error", func() { mainBP, err := ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: dist.BuildpackInfo{ + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ ID: "bp.1.id", Version: "bp.1.version", }, - Order: dist.Order{{ - Group: []dist.BuildpackRef{ - {BuildpackInfo: dist.BuildpackInfo{ID: "bp.present.id", Version: "bp.present.version"}}, - {BuildpackInfo: 
dist.BuildpackInfo{ID: "bp.missing.id", Version: "bp.missing.version"}}, + WithOrder: dist.Order{{ + Group: []dist.ModuleRef{ + {ModuleInfo: dist.ModuleInfo{ID: "bp.present.id", Version: "bp.present.version"}}, + {ModuleInfo: dist.ModuleInfo{ID: "bp.missing.id", Version: "bp.missing.version"}}, }, }}, }, 0644) @@ -154,10 +163,10 @@ func testPackageBuilder(t *testing.T, when spec.G, it spec.S) { builder.SetBuildpack(mainBP) presentBP, err := ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: dist.BuildpackInfo{ID: "bp.present.id", Version: "bp.present.version"}, - Stacks: []dist.Stack{{ID: "some.stack"}}, - Order: nil, + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ID: "bp.present.id", Version: "bp.present.version"}, + WithStacks: []dist.Stack{{ID: "some.stack"}}, + WithOrder: nil, }, 0644) h.AssertNil(t, err) builder.AddDependency(presentBP) @@ -170,14 +179,14 @@ func testPackageBuilder(t *testing.T, when spec.G, it spec.S) { when("there is a referenced buildpack from dependency buildpack that is not present", func() { it("should error", func() { mainBP, err := ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: dist.BuildpackInfo{ + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ ID: "bp.1.id", Version: "bp.1.version", }, - Order: dist.Order{{ - Group: []dist.BuildpackRef{ - {BuildpackInfo: dist.BuildpackInfo{ID: "bp.present.id", Version: "bp.present.version"}}, + WithOrder: dist.Order{{ + Group: []dist.ModuleRef{ + {ModuleInfo: dist.ModuleInfo{ID: "bp.present.id", Version: "bp.present.version"}}, }, }}, }, 0644) @@ -186,11 +195,11 @@ func testPackageBuilder(t *testing.T, when spec.G, it spec.S) { builder.SetBuildpack(mainBP) presentBP, err := ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: dist.BuildpackInfo{ID: "bp.present.id", Version: "bp.present.version"}, - Order: dist.Order{{ - Group: []dist.BuildpackRef{ - {BuildpackInfo: 
dist.BuildpackInfo{ID: "bp.missing.id", Version: "bp.missing.version"}}, + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ID: "bp.present.id", Version: "bp.present.version"}, + WithOrder: dist.Order{{ + Group: []dist.ModuleRef{ + {ModuleInfo: dist.ModuleInfo{ID: "bp.missing.id", Version: "bp.missing.version"}}, }, }}, }, 0644) @@ -201,21 +210,75 @@ func testPackageBuilder(t *testing.T, when spec.G, it spec.S) { h.AssertError(t, err, "buildpack 'bp.present.id@bp.present.version' references buildpack 'bp.missing.id@bp.missing.version' which is not present") }) }) + + when("there is a referenced buildpack from dependency buildpack that does not have proper version defined", func() { + it("should error", func() { + mainBP, err := ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ + ID: "bp.1.id", + Version: "bp.1.version", + }, + WithOrder: dist.Order{{ + Group: []dist.ModuleRef{ + {ModuleInfo: dist.ModuleInfo{ID: "bp.present.id", Version: "bp.present.version"}}, + }, + }}, + }, 0644) + h.AssertNil(t, err) + builder := buildpack.NewBuilder(mockImageFactory(expectedImageOS)) + builder.SetBuildpack(mainBP) + + presentBP, err := ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ID: "bp.present.id", Version: "bp.present.version"}, + WithOrder: dist.Order{{ + Group: []dist.ModuleRef{ + {ModuleInfo: dist.ModuleInfo{ID: "bp.missing.id"}}, + }, + }}, + }, 0644) + h.AssertNil(t, err) + builder.AddDependency(presentBP) + + err = testFn(builder) + h.AssertError(t, err, "buildpack 'bp.present.id@bp.present.version' must specify a version when referencing buildpack 'bp.missing.id'") + }) + }) }) when("validate stacks", func() { + when("buildpack does not define stacks", func() { + it("should succeed", func() { + bp, err := ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ + WithAPI: api.MustParse("0.10"), + WithInfo: dist.ModuleInfo{ + ID: "bp.1.id", 
+ Version: "bp.1.version", + }, + WithStacks: nil, + WithOrder: nil, + }, 0644) + h.AssertNil(t, err) + builder := buildpack.NewBuilder(mockImageFactory(expectedImageOS)) + builder.SetBuildpack(bp) + err = testFn(builder) + h.AssertNil(t, err) + }) + }) + when("buildpack is meta-buildpack", func() { it("should succeed", func() { bp, err := ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: dist.BuildpackInfo{ + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ ID: "bp.1.id", Version: "bp.1.version", }, - Stacks: nil, - Order: dist.Order{{ - Group: []dist.BuildpackRef{ - {BuildpackInfo: dist.BuildpackInfo{ID: "bp.nested.id", Version: "bp.nested.version"}}, + WithStacks: nil, + WithOrder: dist.Order{{ + Group: []dist.ModuleRef{ + {ModuleInfo: dist.ModuleInfo{ID: "bp.nested.id", Version: "bp.nested.version"}}, }, }}, }, 0644) @@ -225,15 +288,15 @@ func testPackageBuilder(t *testing.T, when spec.G, it spec.S) { builder.SetBuildpack(bp) dependency, err := ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: dist.BuildpackInfo{ + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ ID: "bp.nested.id", Version: "bp.nested.version", }, - Stacks: []dist.Stack{ + WithStacks: []dist.Stack{ {ID: "stack.id.1", Mixins: []string{"Mixin-A"}}, }, - Order: nil, + WithOrder: nil, }, 0644) h.AssertNil(t, err) @@ -247,18 +310,18 @@ func testPackageBuilder(t *testing.T, when spec.G, it spec.S) { when("dependencies don't have a common stack", func() { it("should error", func() { bp, err := ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: dist.BuildpackInfo{ + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ ID: "bp.1.id", Version: "bp.1.version", }, - Order: dist.Order{{ - Group: []dist.BuildpackRef{{ - BuildpackInfo: dist.BuildpackInfo{ID: "bp.2.id", Version: "bp.2.version"}, - Optional: false, + WithOrder: dist.Order{{ + Group: []dist.ModuleRef{{ + 
ModuleInfo: dist.ModuleInfo{ID: "bp.2.id", Version: "bp.2.version"}, + Optional: false, }, { - BuildpackInfo: dist.BuildpackInfo{ID: "bp.3.id", Version: "bp.3.version"}, - Optional: false, + ModuleInfo: dist.ModuleInfo{ID: "bp.3.id", Version: "bp.3.version"}, + Optional: false, }}, }}, }, 0644) @@ -268,30 +331,30 @@ func testPackageBuilder(t *testing.T, when spec.G, it spec.S) { builder.SetBuildpack(bp) dependency1, err := ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: dist.BuildpackInfo{ + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ ID: "bp.2.id", Version: "bp.2.version", }, - Stacks: []dist.Stack{ + WithStacks: []dist.Stack{ {ID: "stack.id.1", Mixins: []string{"Mixin-A"}}, {ID: "stack.id.2", Mixins: []string{"Mixin-A"}}, }, - Order: nil, + WithOrder: nil, }, 0644) h.AssertNil(t, err) builder.AddDependency(dependency1) dependency2, err := ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: dist.BuildpackInfo{ + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ ID: "bp.3.id", Version: "bp.3.version", }, - Stacks: []dist.Stack{ + WithStacks: []dist.Stack{ {ID: "stack.id.3", Mixins: []string{"Mixin-A"}}, }, - Order: nil, + WithOrder: nil, }, 0644) h.AssertNil(t, err) builder.AddDependency(dependency2) @@ -304,18 +367,18 @@ func testPackageBuilder(t *testing.T, when spec.G, it spec.S) { when("dependency has stacks that aren't supported by buildpack", func() { it("should only support common stacks", func() { bp, err := ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: dist.BuildpackInfo{ + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ ID: "bp.1.id", Version: "bp.1.version", }, - Order: dist.Order{{ - Group: []dist.BuildpackRef{{ - BuildpackInfo: dist.BuildpackInfo{ID: "bp.2.id", Version: "bp.2.version"}, - Optional: false, + WithOrder: dist.Order{{ + Group: []dist.ModuleRef{{ + ModuleInfo: dist.ModuleInfo{ID: "bp.2.id", 
Version: "bp.2.version"}, + Optional: false, }, { - BuildpackInfo: dist.BuildpackInfo{ID: "bp.3.id", Version: "bp.3.version"}, - Optional: false, + ModuleInfo: dist.ModuleInfo{ID: "bp.3.id", Version: "bp.3.version"}, + Optional: false, }}, }}, }, 0644) @@ -325,35 +388,35 @@ func testPackageBuilder(t *testing.T, when spec.G, it spec.S) { builder.SetBuildpack(bp) dependency1, err := ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: dist.BuildpackInfo{ + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ ID: "bp.2.id", Version: "bp.2.version", }, - Stacks: []dist.Stack{ + WithStacks: []dist.Stack{ {ID: "stack.id.1", Mixins: []string{"Mixin-A"}}, {ID: "stack.id.2", Mixins: []string{"Mixin-A"}}, }, - Order: nil, + WithOrder: nil, }, 0644) h.AssertNil(t, err) builder.AddDependency(dependency1) dependency2, err := ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: dist.BuildpackInfo{ + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ ID: "bp.3.id", Version: "bp.3.version", }, - Stacks: []dist.Stack{ + WithStacks: []dist.Stack{ {ID: "stack.id.1", Mixins: []string{"Mixin-A"}}, }, - Order: nil, + WithOrder: nil, }, 0644) h.AssertNil(t, err) builder.AddDependency(dependency2) - img, err := builder.SaveAsImage("some/package", false, expectedImageOS) + img, err := builder.SaveAsImage("some/package", false, dist.Target{OS: expectedImageOS}, map[string]string{}) h.AssertNil(t, err) metadata := buildpack.Metadata{} @@ -367,18 +430,18 @@ func testPackageBuilder(t *testing.T, when spec.G, it spec.S) { when("dependency has wildcard stacks", func() { it("should support all the possible stacks", func() { bp, err := ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: dist.BuildpackInfo{ + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ ID: "bp.1.id", Version: "bp.1.version", }, - Order: dist.Order{{ - Group: []dist.BuildpackRef{{ - BuildpackInfo: 
dist.BuildpackInfo{ID: "bp.2.id", Version: "bp.2.version"}, - Optional: false, + WithOrder: dist.Order{{ + Group: []dist.ModuleRef{{ + ModuleInfo: dist.ModuleInfo{ID: "bp.2.id", Version: "bp.2.version"}, + Optional: false, }, { - BuildpackInfo: dist.BuildpackInfo{ID: "bp.3.id", Version: "bp.3.version"}, - Optional: false, + ModuleInfo: dist.ModuleInfo{ID: "bp.3.id", Version: "bp.3.version"}, + Optional: false, }}, }}, }, 0644) @@ -388,34 +451,34 @@ func testPackageBuilder(t *testing.T, when spec.G, it spec.S) { builder.SetBuildpack(bp) dependency1, err := ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: dist.BuildpackInfo{ + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ ID: "bp.2.id", Version: "bp.2.version", }, - Stacks: []dist.Stack{ + WithStacks: []dist.Stack{ {ID: "*", Mixins: []string{"Mixin-A"}}, }, - Order: nil, + WithOrder: nil, }, 0644) h.AssertNil(t, err) builder.AddDependency(dependency1) dependency2, err := ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: dist.BuildpackInfo{ + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ ID: "bp.3.id", Version: "bp.3.version", }, - Stacks: []dist.Stack{ + WithStacks: []dist.Stack{ {ID: "stack.id.1", Mixins: []string{"Mixin-A"}}, }, - Order: nil, + WithOrder: nil, }, 0644) h.AssertNil(t, err) builder.AddDependency(dependency2) - img, err := builder.SaveAsImage("some/package", false, expectedImageOS) + img, err := builder.SaveAsImage("some/package", false, dist.Target{OS: expectedImageOS}, map[string]string{}) h.AssertNil(t, err) metadata := buildpack.Metadata{} @@ -429,15 +492,15 @@ func testPackageBuilder(t *testing.T, when spec.G, it spec.S) { when("dependency is meta-buildpack", func() { it("should succeed and compute common stacks", func() { bp, err := ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: dist.BuildpackInfo{ + WithAPI: api.MustParse("0.2"), + WithInfo: 
dist.ModuleInfo{ ID: "bp.1.id", Version: "bp.1.version", }, - Stacks: nil, - Order: dist.Order{{ - Group: []dist.BuildpackRef{ - {BuildpackInfo: dist.BuildpackInfo{ID: "bp.nested.id", Version: "bp.nested.version"}}, + WithStacks: nil, + WithOrder: dist.Order{{ + Group: []dist.ModuleRef{ + {ModuleInfo: dist.ModuleInfo{ID: "bp.nested.id", Version: "bp.nested.version"}}, }, }}, }, 0644) @@ -447,14 +510,14 @@ func testPackageBuilder(t *testing.T, when spec.G, it spec.S) { builder.SetBuildpack(bp) dependencyOrder, err := ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: dist.BuildpackInfo{ + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ ID: "bp.nested.id", Version: "bp.nested.version", }, - Order: dist.Order{{ - Group: []dist.BuildpackRef{ - {BuildpackInfo: dist.BuildpackInfo{ + WithOrder: dist.Order{{ + Group: []dist.ModuleRef{ + {ModuleInfo: dist.ModuleInfo{ ID: "bp.nested.nested.id", Version: "bp.nested.nested.version", }}, @@ -466,21 +529,21 @@ func testPackageBuilder(t *testing.T, when spec.G, it spec.S) { builder.AddDependency(dependencyOrder) dependencyNestedNested, err := ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: dist.BuildpackInfo{ + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ ID: "bp.nested.nested.id", Version: "bp.nested.nested.version", }, - Stacks: []dist.Stack{ + WithStacks: []dist.Stack{ {ID: "stack.id.1", Mixins: []string{"Mixin-A"}}, }, - Order: nil, + WithOrder: nil, }, 0644) h.AssertNil(t, err) builder.AddDependency(dependencyNestedNested) - img, err := builder.SaveAsImage("some/package", false, expectedImageOS) + img, err := builder.SaveAsImage("some/package", false, dist.Target{OS: expectedImageOS}, map[string]string{}) h.AssertNil(t, err) metadata := buildpack.Metadata{} @@ -499,8 +562,8 @@ func testPackageBuilder(t *testing.T, when spec.G, it spec.S) { when("#SaveAsImage", func() { it("sets metadata", func() { buildpack1, err := 
ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: dist.BuildpackInfo{ + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ ID: "bp.1.id", Version: "bp.1.version", Name: "One", @@ -514,18 +577,20 @@ func testPackageBuilder(t *testing.T, when spec.G, it spec.S) { }, }, }, - Stacks: []dist.Stack{ + WithStacks: []dist.Stack{ {ID: "stack.id.1"}, {ID: "stack.id.2"}, }, - Order: nil, + WithOrder: nil, }, 0644) h.AssertNil(t, err) builder := buildpack.NewBuilder(mockImageFactory("linux")) builder.SetBuildpack(buildpack1) - packageImage, err := builder.SaveAsImage("some/package", false, "linux") + var customLabels = map[string]string{"test.label.one": "1", "test.label.two": "2"} + + packageImage, err := builder.SaveAsImage("some/package", false, dist.Target{OS: "linux"}, customLabels) h.AssertNil(t, err) labelData, err := packageImage.Label("io.buildpacks.buildpackage.metadata") @@ -548,24 +613,71 @@ func testPackageBuilder(t *testing.T, when spec.G, it spec.S) { osVal, err := packageImage.OS() h.AssertNil(t, err) h.AssertEq(t, osVal, "linux") + + imageLabels, err := packageImage.Labels() + h.AssertNil(t, err) + h.AssertEq(t, imageLabels["test.label.one"], "1") + h.AssertEq(t, imageLabels["test.label.two"], "2") + }) + + it("sets extension metadata", func() { + extension1, err := ifakes.NewFakeExtension(dist.ExtensionDescriptor{ + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ + ID: "ex.1.id", + Version: "ex.1.version", + Name: "One", + Description: "some description", + Homepage: "https://example.com/homepage", + Keywords: []string{"some-keyword"}, + Licenses: []dist.License{ + { + Type: "MIT", + URI: "https://example.com/license", + }, + }, + }, + }, 0644) + h.AssertNil(t, err) + builder := buildpack.NewBuilder(mockImageFactory("linux")) + builder.SetExtension(extension1) + packageImage, err := builder.SaveAsImage("some/package", false, dist.Target{OS: "linux"}, map[string]string{}) + h.AssertNil(t, err) + 
labelData, err := packageImage.Label("io.buildpacks.buildpackage.metadata") + h.AssertNil(t, err) + var md buildpack.Metadata + h.AssertNil(t, json.Unmarshal([]byte(labelData), &md)) + + h.AssertEq(t, md.ID, "ex.1.id") + h.AssertEq(t, md.Version, "ex.1.version") + h.AssertEq(t, md.Keywords[0], "some-keyword") + h.AssertEq(t, md.Homepage, "https://example.com/homepage") + h.AssertEq(t, md.Name, "One") + h.AssertEq(t, md.Description, "some description") + h.AssertEq(t, md.Licenses[0].Type, "MIT") + h.AssertEq(t, md.Licenses[0].URI, "https://example.com/license") + + osVal, err := packageImage.OS() + h.AssertNil(t, err) + h.AssertEq(t, osVal, "linux") }) it("sets buildpack layers label", func() { buildpack1, err := ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: dist.BuildpackInfo{ID: "bp.1.id", Version: "bp.1.version"}, - Stacks: []dist.Stack{{ID: "stack.id.1"}, {ID: "stack.id.2"}}, - Order: nil, + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ID: "bp.1.id", Version: "bp.1.version"}, + WithStacks: []dist.Stack{{ID: "stack.id.1"}, {ID: "stack.id.2"}}, + WithOrder: nil, }, 0644) h.AssertNil(t, err) builder := buildpack.NewBuilder(mockImageFactory("linux")) builder.SetBuildpack(buildpack1) - packageImage, err := builder.SaveAsImage("some/package", false, "linux") + packageImage, err := builder.SaveAsImage("some/package", false, dist.Target{OS: "linux"}, map[string]string{}) h.AssertNil(t, err) - var bpLayers dist.BuildpackLayers + var bpLayers dist.ModuleLayers _, err = dist.GetLabel(packageImage, "io.buildpacks.buildpack.layers", &bpLayers) h.AssertNil(t, err) @@ -576,17 +688,17 @@ func testPackageBuilder(t *testing.T, when spec.G, it spec.S) { it("adds buildpack layers for linux", func() { buildpack1, err := ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: dist.BuildpackInfo{ID: "bp.1.id", Version: "bp.1.version"}, - Stacks: []dist.Stack{{ID: "stack.id.1"}, {ID: "stack.id.2"}}, - 
Order: nil, + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ID: "bp.1.id", Version: "bp.1.version"}, + WithStacks: []dist.Stack{{ID: "stack.id.1"}, {ID: "stack.id.2"}}, + WithOrder: nil, }, 0644) h.AssertNil(t, err) builder := buildpack.NewBuilder(mockImageFactory("linux")) builder.SetBuildpack(buildpack1) - packageImage, err := builder.SaveAsImage("some/package", false, "linux") + packageImage, err := builder.SaveAsImage("some/package", false, dist.Target{OS: "linux"}, map[string]string{}) h.AssertNil(t, err) buildpackExists := func(name, version string) { @@ -623,36 +735,256 @@ func testPackageBuilder(t *testing.T, when spec.G, it spec.S) { it("adds baselayer + buildpack layers for windows", func() { buildpack1, err := ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: dist.BuildpackInfo{ID: "bp.1.id", Version: "bp.1.version"}, - Stacks: []dist.Stack{{ID: "stack.id.1"}, {ID: "stack.id.2"}}, - Order: nil, + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ID: "bp.1.id", Version: "bp.1.version"}, + WithStacks: []dist.Stack{{ID: "stack.id.1"}, {ID: "stack.id.2"}}, + WithOrder: nil, }, 0644) h.AssertNil(t, err) builder := buildpack.NewBuilder(mockImageFactory("windows")) builder.SetBuildpack(buildpack1) - _, err = builder.SaveAsImage("some/package", false, "windows") + _, err = builder.SaveAsImage("some/package", false, dist.Target{OS: "windows"}, map[string]string{}) h.AssertNil(t, err) }) + + it("should report an error when custom label cannot be set", func() { + mockImageFactory = func(expectedImageOS string) *testmocks.MockImageFactory { + var imageWithLabelError = &imageWithLabelError{Image: fakes.NewImage("some/package", "", nil)} + imageFactory := testmocks.NewMockImageFactory(mockController) + imageFactory.EXPECT().NewImage("some/package", true, dist.Target{OS: expectedImageOS}).Return(imageWithLabelError, nil).MaxTimes(1) + return imageFactory + } + + buildpack1, err := 
ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ + ID: "bp.1.id", + Version: "bp.1.version", + Name: "One", + Description: "some description", + Homepage: "https://example.com/homepage", + Keywords: []string{"some-keyword"}, + Licenses: []dist.License{ + { + Type: "MIT", + URI: "https://example.com/license", + }, + }, + }, + WithStacks: []dist.Stack{ + {ID: "stack.id.1"}, + {ID: "stack.id.2"}, + }, + WithOrder: nil, + }, 0644) + h.AssertNil(t, err) + + builder := buildpack.NewBuilder(mockImageFactory("linux")) + builder.SetBuildpack(buildpack1) + + var customLabels = map[string]string{"test.label.fail": "true"} + + _, err = builder.SaveAsImage("some/package", false, dist.Target{OS: "linux"}, customLabels) + h.AssertError(t, err, "adding label test.label.fail=true") + }) + + it("sets additional tags", func() { + buildpack1, err := ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{}, 0644) + h.AssertNil(t, err) + + builder := buildpack.NewBuilder(mockImageFactory("linux")) + builder.SetBuildpack(buildpack1) + + packageImage, err := builder.SaveAsImage("some/package", false, dist.Target{OS: "linux"}, map[string]string{}, "additional-tag-one", "additional-tag-two") + h.AssertNil(t, err) + + i, ok := packageImage.(*fakes.Image) + h.AssertTrue(t, ok) + savedNames := i.SavedNames() + slices.Sort(savedNames) + h.AssertEq(t, 3, len(savedNames)) + h.AssertEq(t, "additional-tag-one", savedNames[0]) + h.AssertEq(t, "additional-tag-two", savedNames[1]) + h.AssertEq(t, "some/package", savedNames[2]) + }) + + when("flatten is set", func() { + var ( + buildpack1 buildpack.BuildModule + bp1 buildpack.BuildModule + compositeBP2 buildpack.BuildModule + bp21 buildpack.BuildModule + bp22 buildpack.BuildModule + compositeBP3 buildpack.BuildModule + bp31 buildpack.BuildModule + logger logging.Logger + outBuf bytes.Buffer + err error + ) + it.Before(func() { + bp1, err = ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ + 
WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ + ID: "buildpack-1-id", + Version: "buildpack-1-version", + }, + }, 0644) + h.AssertNil(t, err) + + bp21, err = ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ + ID: "buildpack-21-id", + Version: "buildpack-21-version", + }, + }, 0644) + h.AssertNil(t, err) + + bp22, err = ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ + ID: "buildpack-22-id", + Version: "buildpack-22-version", + }, + }, 0644) + h.AssertNil(t, err) + + bp31, err = ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ + ID: "buildpack-31-id", + Version: "buildpack-31-version", + }, + }, 0644) + h.AssertNil(t, err) + + compositeBP3, err = ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ + ID: "composite-buildpack-3-id", + Version: "composite-buildpack-3-version", + }, + WithOrder: []dist.OrderEntry{{ + Group: []dist.ModuleRef{ + { + ModuleInfo: bp31.Descriptor().Info(), + }, + }, + }}, + }, 0644) + h.AssertNil(t, err) + + compositeBP2, err = ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ + ID: "composite-buildpack-2-id", + Version: "composite-buildpack-2-version", + }, + WithOrder: []dist.OrderEntry{{ + Group: []dist.ModuleRef{ + { + ModuleInfo: bp21.Descriptor().Info(), + }, + { + ModuleInfo: bp22.Descriptor().Info(), + }, + { + ModuleInfo: compositeBP3.Descriptor().Info(), + }, + }, + }}, + }, 0644) + h.AssertNil(t, err) + + buildpack1, err = ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ID: "bp.1.id", Version: "bp.1.version"}, + WithStacks: []dist.Stack{{ID: "stack.id.1"}, {ID: "stack.id.2"}}, + WithOrder: []dist.OrderEntry{{ + Group: []dist.ModuleRef{ + { + 
ModuleInfo: bp1.Descriptor().Info(), + }, + { + ModuleInfo: compositeBP2.Descriptor().Info(), + }, + }, + }}, + }, 0644) + h.AssertNil(t, err) + + logger = logging.NewLogWithWriters(&outBuf, &outBuf, logging.WithVerbose()) + }) + + when("flatten all", func() { + var builder *buildpack.PackageBuilder + + when("no exclusions", func() { + it.Before(func() { + builder = buildpack.NewBuilder(mockImageFactory("linux"), + buildpack.FlattenAll(), + buildpack.WithLogger(logger), + buildpack.WithLayerWriterFactory(archive.DefaultTarWriterFactory())) + }) + + it("flatten all buildpacks", func() { + builder.SetBuildpack(buildpack1) + builder.AddDependencies(bp1, nil) + builder.AddDependencies(compositeBP2, []buildpack.BuildModule{bp21, bp22, compositeBP3, bp31}) + + packageImage, err := builder.SaveAsImage("some/package", false, dist.Target{OS: "linux"}, map[string]string{}) + h.AssertNil(t, err) + + fakePackageImage := packageImage.(*fakes.Image) + h.AssertEq(t, fakePackageImage.NumberOfAddedLayers(), 1) + }) + }) + + when("exclude buildpacks", func() { + it.Before(func() { + excluded := []string{bp31.Descriptor().Info().FullName()} + + builder = buildpack.NewBuilder(mockImageFactory("linux"), + buildpack.DoNotFlatten(excluded), + buildpack.WithLogger(logger), + buildpack.WithLayerWriterFactory(archive.DefaultTarWriterFactory())) + }) + + it("creates 2 layers", func() { + builder.SetBuildpack(buildpack1) + builder.AddDependencies(bp1, nil) + builder.AddDependencies(compositeBP2, []buildpack.BuildModule{bp21, bp22, compositeBP3, bp31}) + + packageImage, err := builder.SaveAsImage("some/package", false, dist.Target{OS: "linux"}, map[string]string{}) + h.AssertNil(t, err) + + fakePackageImage := packageImage.(*fakes.Image) + h.AssertEq(t, fakePackageImage.NumberOfAddedLayers(), 2) + }) + }) + }) + }) }) when("#SaveAsFile", func() { it("sets metadata", func() { buildpack1, err := ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: 
dist.BuildpackInfo{ID: "bp.1.id", Version: "bp.1.version"}, - Stacks: []dist.Stack{{ID: "stack.id.1"}, {ID: "stack.id.2"}}, - Order: nil, + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ID: "bp.1.id", Version: "bp.1.version"}, + WithStacks: []dist.Stack{{ID: "stack.id.1"}, {ID: "stack.id.2"}}, + WithOrder: nil, }, 0644) h.AssertNil(t, err) builder := buildpack.NewBuilder(mockImageFactory("")) builder.SetBuildpack(buildpack1) + var customLabels = map[string]string{"test.label.one": "1", "test.label.two": "2"} + outputFile := filepath.Join(tmpDir, fmt.Sprintf("package-%s.cnb", h.RandString(10))) - h.AssertNil(t, builder.SaveAsFile(outputFile, "linux")) + h.AssertNil(t, builder.SaveAsFile(outputFile, dist.Target{OS: "linux"}, customLabels)) withContents := func(fn func(data []byte)) h.TarEntryAssertion { return func(t *testing.T, header *tar.Header, data []byte) { @@ -688,9 +1020,12 @@ func testPackageBuilder(t *testing.T, when spec.G, it spec.S) { // buildpackage metadata h.ContentContains(`"io.buildpacks.buildpackage.metadata":"{\"id\":\"bp.1.id\",\"version\":\"bp.1.version\",\"stacks\":[{\"id\":\"stack.id.1\"},{\"id\":\"stack.id.2\"}]}"`), // buildpack layers metadata - h.ContentContains(`"io.buildpacks.buildpack.layers":"{\"bp.1.id\":{\"bp.1.version\":{\"api\":\"0.2\",\"stacks\":[{\"id\":\"stack.id.1\"},{\"id\":\"stack.id.2\"}],\"layerDiffID\":\"sha256:9fa0bb03eebdd0f8e4b6d6f50471b44be83dba750624dfce15dac45975c5707b\"}}`), + h.ContentContains(`"io.buildpacks.buildpack.layers":"{\"bp.1.id\":{\"bp.1.version\":{\"api\":\"0.2\",\"stacks\":[{\"id\":\"stack.id.1\"},{\"id\":\"stack.id.2\"}],\"layerDiffID\":\"sha256:44447e95b06b73496d1891de5afb01936e9999b97ea03dad6337d9f5610807a7\"}}`), // image os h.ContentContains(`"os":"linux"`), + // custom labels + h.ContentContains(`"test.label.one":"1"`), + h.ContentContains(`"test.label.two":"2"`), ) })) })) @@ -698,10 +1033,10 @@ func testPackageBuilder(t *testing.T, when spec.G, it spec.S) { it("adds buildpack 
layers", func() { buildpack1, err := ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: dist.BuildpackInfo{ID: "bp.1.id", Version: "bp.1.version"}, - Stacks: []dist.Stack{{ID: "stack.id.1"}, {ID: "stack.id.2"}}, - Order: nil, + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ID: "bp.1.id", Version: "bp.1.version"}, + WithStacks: []dist.Stack{{ID: "stack.id.1"}, {ID: "stack.id.2"}}, + WithOrder: nil, }, 0644) h.AssertNil(t, err) @@ -709,7 +1044,7 @@ func testPackageBuilder(t *testing.T, when spec.G, it spec.S) { builder.SetBuildpack(buildpack1) outputFile := filepath.Join(tmpDir, fmt.Sprintf("package-%s.cnb", h.RandString(10))) - h.AssertNil(t, builder.SaveAsFile(outputFile, "linux")) + h.AssertNil(t, builder.SaveAsFile(outputFile, dist.Target{OS: "linux"}, map[string]string{})) h.AssertOnTarEntry(t, outputFile, "/blobs", h.IsDirectory(), @@ -748,10 +1083,10 @@ func testPackageBuilder(t *testing.T, when spec.G, it spec.S) { it("adds baselayer + buildpack layers for windows", func() { buildpack1, err := ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: dist.BuildpackInfo{ID: "bp.1.id", Version: "bp.1.version"}, - Stacks: []dist.Stack{{ID: "stack.id.1"}, {ID: "stack.id.2"}}, - Order: nil, + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ID: "bp.1.id", Version: "bp.1.version"}, + WithStacks: []dist.Stack{{ID: "stack.id.1"}, {ID: "stack.id.2"}}, + WithOrder: nil, }, 0644) h.AssertNil(t, err) @@ -759,14 +1094,14 @@ func testPackageBuilder(t *testing.T, when spec.G, it spec.S) { builder.SetBuildpack(buildpack1) outputFile := filepath.Join(tmpDir, fmt.Sprintf("package-%s.cnb", h.RandString(10))) - h.AssertNil(t, builder.SaveAsFile(outputFile, "windows")) + h.AssertNil(t, builder.SaveAsFile(outputFile, dist.Target{OS: "windows"}, map[string]string{})) // Windows baselayer content is constant expectedBaseLayerReader, err := layer.WindowsBaseLayer() h.AssertNil(t, err) // layer: 
application/vnd.docker.image.rootfs.diff.tar.gzip - expectedBaseLayerSHA, err := computeLayerSHA(ioutil.NopCloser(expectedBaseLayerReader)) + expectedBaseLayerSHA, err := computeLayerSHA(io.NopCloser(expectedBaseLayerReader)) h.AssertNil(t, err) h.AssertOnTarEntry(t, outputFile, "/blobs/sha256/"+expectedBaseLayerSHA, @@ -799,7 +1134,7 @@ func computeLayerSHA(reader io.ReadCloser) (string, error) { } defer compressed.Close() - if _, err := io.Copy(ioutil.Discard, compressed); err != nil { + if _, err := io.Copy(io.Discard, compressed); err != nil { return "", err } @@ -810,3 +1145,11 @@ func computeLayerSHA(reader io.ReadCloser) (string, error) { return digest.Hex, nil } + +type imageWithLabelError struct { + *fakes.Image +} + +func (i *imageWithLabelError) SetLabel(string, string) error { + return errors.New("Label could not be set") +} diff --git a/pkg/buildpack/buildpack.go b/pkg/buildpack/buildpack.go index d3713b6cf8..d8edb71f29 100644 --- a/pkg/buildpack/buildpack.go +++ b/pkg/buildpack/buildpack.go @@ -7,6 +7,7 @@ import ( "os" "path" "path/filepath" + "strings" "github.com/BurntSushi/toml" "github.com/buildpacks/lifecycle/api" @@ -17,73 +18,168 @@ import ( "github.com/buildpacks/pack/pkg/dist" ) -type Blob interface { - // Open returns a io.ReadCloser for the contents of the Blob in tar format. +const ( + KindBuildpack = "buildpack" + KindExtension = "extension" +) + +//go:generate mockgen -package testmocks -destination ../testmocks/mock_build_module.go github.com/buildpacks/pack/pkg/buildpack BuildModule +type BuildModule interface { + // Open returns a reader to a tar with contents structured as per the distribution spec + // (currently '/cnb/buildpacks/{ID}/{version}/*', all entries with a zeroed-out + // timestamp and root UID/GID). 
Open() (io.ReadCloser, error) + Descriptor() Descriptor } -//go:generate mockgen -package testmocks -destination ../testmocks/mock_buildpack.go github.com/buildpacks/pack/pkg/buildpack Buildpack +type Descriptor interface { + API() *api.Version + EnsureStackSupport(stackID string, providedMixins []string, validateRunStageMixins bool) error + EnsureTargetSupport(os, arch, distroName, distroVersion string) error + EscapedID() string + Info() dist.ModuleInfo + Kind() string + Order() dist.Order + Stacks() []dist.Stack + Targets() []dist.Target +} -type Buildpack interface { - // Open returns a reader to a tar with contents structured as per the distribution spec - // (currently '/cnbs/buildpacks/{ID}/{version}/*', all entries with a zeroed-out - // timestamp and root UID/GID). +type Blob interface { + // Open returns a io.ReadCloser for the contents of the Blob in tar format. Open() (io.ReadCloser, error) - Descriptor() dist.BuildpackDescriptor } -type buildpack struct { - descriptor dist.BuildpackDescriptor +type buildModule struct { + descriptor Descriptor Blob `toml:"-"` } -func (b *buildpack) Descriptor() dist.BuildpackDescriptor { +func (b *buildModule) Descriptor() Descriptor { return b.descriptor } -// FromBlob constructs a buildpack from a blob. It is assumed that the buildpack -// contents are structured as per the distribution spec (currently '/cnbs/buildpacks/{ID}/{version}/*'). -func FromBlob(bpd dist.BuildpackDescriptor, blob Blob) Buildpack { - return &buildpack{ +// FromBlob constructs a buildpack or extension from a blob. It is assumed that the buildpack +// contents are structured as per the distribution spec (currently '/cnb/buildpacks/{ID}/{version}/*' or +// '/cnb/extensions/{ID}/{version}/*'). +func FromBlob(descriptor Descriptor, blob Blob) BuildModule { + return &buildModule{ Blob: blob, - descriptor: bpd, + descriptor: descriptor, } } -// FromRootBlob constructs a buildpack from a blob. 
It is assumed that the buildpack contents reside at the +// FromBuildpackRootBlob constructs a buildpack from a blob. It is assumed that the buildpack contents reside at the // root of the blob. The constructed buildpack contents will be structured as per the distribution spec (currently -// a tar with contents under '/cnbs/buildpacks/{ID}/{version}/*'). -func FromRootBlob(blob Blob, layerWriterFactory archive.TarWriterFactory) (Buildpack, error) { - bpd := dist.BuildpackDescriptor{} +// a tar with contents under '/cnb/buildpacks/{ID}/{version}/*'). +func FromBuildpackRootBlob(blob Blob, layerWriterFactory archive.TarWriterFactory, logger Logger) (BuildModule, error) { + descriptor := dist.BuildpackDescriptor{} + descriptor.WithAPI = api.MustParse(dist.AssumedBuildpackAPIVersion) + undecodedKeys, err := readDescriptor(KindBuildpack, &descriptor, blob) + if err != nil { + return nil, err + } + if len(undecodedKeys) > 0 { + logger.Warnf("Ignoring unexpected key(s) in descriptor for buildpack %s: %s", descriptor.EscapedID(), strings.Join(undecodedKeys, ", ")) + } + if err := detectPlatformSpecificValues(&descriptor, blob); err != nil { + return nil, err + } + if err := validateBuildpackDescriptor(descriptor); err != nil { + return nil, err + } + return buildpackFrom(&descriptor, blob, layerWriterFactory) +} + +// FromExtensionRootBlob constructs an extension from a blob. It is assumed that the extension contents reside at the +// root of the blob. The constructed extension contents will be structured as per the distribution spec (currently +// a tar with contents under '/cnb/extensions/{ID}/{version}/*'). 
+func FromExtensionRootBlob(blob Blob, layerWriterFactory archive.TarWriterFactory, logger Logger) (BuildModule, error) { + descriptor := dist.ExtensionDescriptor{} + descriptor.WithAPI = api.MustParse(dist.AssumedBuildpackAPIVersion) + undecodedKeys, err := readDescriptor(KindExtension, &descriptor, blob) + if err != nil { + return nil, err + } + if len(undecodedKeys) > 0 { + logger.Warnf("Ignoring unexpected key(s) in descriptor for extension %s: %s", descriptor.EscapedID(), strings.Join(undecodedKeys, ", ")) + } + if err := validateExtensionDescriptor(descriptor); err != nil { + return nil, err + } + return buildpackFrom(&descriptor, blob, layerWriterFactory) +} + +func readDescriptor(kind string, descriptor interface{}, blob Blob) (undecodedKeys []string, err error) { rc, err := blob.Open() if err != nil { - return nil, errors.Wrap(err, "open buildpack") + return undecodedKeys, errors.Wrapf(err, "open %s", kind) } defer rc.Close() - _, buf, err := archive.ReadTarEntry(rc, "buildpack.toml") + descriptorFile := kind + ".toml" + + _, buf, err := archive.ReadTarEntry(rc, descriptorFile) if err != nil { - return nil, errors.Wrap(err, "reading buildpack.toml") + return undecodedKeys, errors.Wrapf(err, "reading %s", descriptorFile) } - bpd.API = api.MustParse(dist.AssumedBuildpackAPIVersion) - _, err = toml.Decode(string(buf), &bpd) + md, err := toml.Decode(string(buf), descriptor) if err != nil { - return nil, errors.Wrap(err, "decoding buildpack.toml") + return undecodedKeys, errors.Wrapf(err, "decoding %s", descriptorFile) } - err = validateDescriptor(bpd) + undecoded := md.Undecoded() + for _, k := range undecoded { + // FIXME: we should ideally update dist.ModuleInfo to expect sbom-formats, but this breaks other tests; + // it isn't possible to make [metadata] a decoded key because its type is undefined in the buildpack spec. 
+ if k.String() == "metadata" || strings.HasPrefix(k.String(), "metadata.") || + k.String() == "buildpack.sbom-formats" { + // buildpack.toml & extension.toml can contain [metadata] which is arbitrary + continue + } + undecodedKeys = append(undecodedKeys, k.String()) + } + + return undecodedKeys, nil +} + +func detectPlatformSpecificValues(descriptor *dist.BuildpackDescriptor, blob Blob) error { + if val, err := hasFile(blob, path.Join("bin", "build")); val { + descriptor.WithLinuxBuild = true + } else if err != nil { + return err + } + if val, err := hasFile(blob, path.Join("bin", "build.bat")); val { + descriptor.WithWindowsBuild = true + } else if err != nil { + return err + } + if val, err := hasFile(blob, path.Join("bin", "build.exe")); val { + descriptor.WithWindowsBuild = true + } else if err != nil { + return err + } + return nil +} + +func hasFile(blob Blob, file string) (bool, error) { + rc, err := blob.Open() if err != nil { - return nil, errors.Wrap(err, "invalid buildpack.toml") + return false, errors.Wrapf(err, "open %s", "buildpack bin/") } + defer rc.Close() + _, _, err = archive.ReadTarEntry(rc, file) + return err == nil, nil +} - return &buildpack{ - descriptor: bpd, +func buildpackFrom(descriptor Descriptor, blob Blob, layerWriterFactory archive.TarWriterFactory) (BuildModule, error) { + return &buildModule{ + descriptor: descriptor, Blob: &distBlob{ openFn: func() io.ReadCloser { return archive.GenerateTarWithWriter( func(tw archive.TarWriter) error { - return toDistTar(tw, bpd, blob) + return toDistTar(tw, descriptor, blob) }, layerWriterFactory, ) @@ -100,31 +196,36 @@ func (b *distBlob) Open() (io.ReadCloser, error) { return b.openFn(), nil } -func toDistTar(tw archive.TarWriter, bpd dist.BuildpackDescriptor, blob Blob) error { +func toDistTar(tw archive.TarWriter, descriptor Descriptor, blob Blob) error { ts := archive.NormalizedDateTime + parentDir := dist.BuildpacksDir + if descriptor.Kind() == KindExtension { + parentDir = 
dist.ExtensionsDir + } + if err := tw.WriteHeader(&tar.Header{ Typeflag: tar.TypeDir, - Name: path.Join(dist.BuildpacksDir, bpd.EscapedID()), + Name: path.Join(parentDir, descriptor.EscapedID()), Mode: 0755, ModTime: ts, }); err != nil { - return errors.Wrapf(err, "writing buildpack id dir header") + return errors.Wrapf(err, "writing %s id dir header", descriptor.Kind()) } - baseTarDir := path.Join(dist.BuildpacksDir, bpd.EscapedID(), bpd.Info.Version) + baseTarDir := path.Join(parentDir, descriptor.EscapedID(), descriptor.Info().Version) if err := tw.WriteHeader(&tar.Header{ Typeflag: tar.TypeDir, Name: baseTarDir, Mode: 0755, ModTime: ts, }); err != nil { - return errors.Wrapf(err, "writing buildpack version dir header") + return errors.Wrapf(err, "writing %s version dir header", descriptor.Kind()) } rc, err := blob.Open() if err != nil { - return errors.Wrap(err, "reading buildpack blob") + return errors.Wrapf(err, "reading %s blob", descriptor.Kind()) } defer rc.Close() @@ -146,6 +247,10 @@ func toDistTar(tw archive.TarWriter, bpd dist.BuildpackDescriptor, blob Blob) er header.Mode = calcFileMode(header) header.Name = path.Join(baseTarDir, header.Name) + + if header.Typeflag == tar.TypeLink { + header.Linkname = path.Join(baseTarDir, path.Clean(header.Linkname)) + } err = tw.WriteHeader(header) if err != nil { return errors.Wrapf(err, "failed to write header for '%s'", header.Name) @@ -165,8 +270,9 @@ func calcFileMode(header *tar.Header) int64 { case header.Typeflag == tar.TypeDir: return 0755 case nameOneOf(header.Name, - path.Join("bin", "detect"), path.Join("bin", "build"), + path.Join("bin", "detect"), + path.Join("bin", "generate"), ): return 0755 case anyExecBit(header.Mode): @@ -189,54 +295,337 @@ func anyExecBit(mode int64) bool { return mode&0111 != 0 } -func validateDescriptor(bpd dist.BuildpackDescriptor) error { - if bpd.Info.ID == "" { +func validateBuildpackDescriptor(bpd dist.BuildpackDescriptor) error { + if bpd.Info().ID == "" { return 
errors.Errorf("%s is required", style.Symbol("buildpack.id")) } - if bpd.Info.Version == "" { + if bpd.Info().Version == "" { return errors.Errorf("%s is required", style.Symbol("buildpack.version")) } - if len(bpd.Order) == 0 && len(bpd.Stacks) == 0 { + if len(bpd.Order()) >= 1 && (len(bpd.Stacks()) >= 1 || len(bpd.Targets()) >= 1) { return errors.Errorf( - "buildpack %s: must have either %s or an %s defined", - style.Symbol(bpd.Info.FullName()), + "buildpack %s: cannot have both %s/%s and an %s defined", + style.Symbol(bpd.Info().FullName()), + style.Symbol("targets"), style.Symbol("stacks"), style.Symbol("order"), ) } - if len(bpd.Order) >= 1 && len(bpd.Stacks) >= 1 { - return errors.Errorf( - "buildpack %s: cannot have both %s and an %s defined", - style.Symbol(bpd.Info.FullName()), - style.Symbol("stacks"), - style.Symbol("order"), - ) + return nil +} + +func validateExtensionDescriptor(extd dist.ExtensionDescriptor) error { + if extd.Info().ID == "" { + return errors.Errorf("%s is required", style.Symbol("extension.id")) + } + + if extd.Info().Version == "" { + return errors.Errorf("%s is required", style.Symbol("extension.version")) } return nil } -func ToLayerTar(dest string, bp Buildpack) (string, error) { - bpd := bp.Descriptor() - bpReader, err := bp.Open() +func ToLayerTar(dest string, module BuildModule) (string, error) { + descriptor := module.Descriptor() + modReader, err := module.Open() if err != nil { - return "", errors.Wrap(err, "opening buildpack blob") + return "", errors.Wrap(err, "opening blob") } - defer bpReader.Close() + defer modReader.Close() - layerTar := filepath.Join(dest, fmt.Sprintf("%s.%s.tar", bpd.EscapedID(), bpd.Info.Version)) + layerTar := filepath.Join(dest, fmt.Sprintf("%s.%s.tar", descriptor.EscapedID(), descriptor.Info().Version)) fh, err := os.Create(layerTar) if err != nil { return "", errors.Wrap(err, "create file for tar") } defer fh.Close() - if _, err := io.Copy(fh, bpReader); err != nil { - return "", 
errors.Wrap(err, "writing buildpack blob to tar") + if _, err := io.Copy(fh, modReader); err != nil { + return "", errors.Wrap(err, "writing blob to tar") } return layerTar, nil } + +func ToNLayerTar(dest string, module BuildModule) ([]ModuleTar, error) { + modReader, err := module.Open() + if err != nil { + return nil, errors.Wrap(err, "opening blob") + } + defer modReader.Close() + + tarCollection := newModuleTarCollection(dest) + tr := tar.NewReader(modReader) + + var ( + header *tar.Header + forWindows bool + ) + + for { + header, err = tr.Next() + if err != nil { + if err == io.EOF { + return handleEmptyModule(dest, module) + } + return nil, err + } + if _, err := sanitizePath(header.Name); err != nil { + return nil, err + } + if header.Name == "Files" { + forWindows = true + } + if strings.Contains(header.Name, `/cnb/buildpacks/`) || strings.Contains(header.Name, `\cnb\buildpacks\`) { + // Only for Windows, the first four headers are: + // - Files + // - Hives + // - Files/cnb + // - Files/cnb/buildpacks + // Skip over these until we find "Files/cnb/buildpacks/": + break + } + } + // The header should look like "/cnb/buildpacks/" + // The version should be blank because the first header is missing . 
+ origID, origVersion := parseBpIDAndVersion(header) + if origVersion != "" { + return nil, fmt.Errorf("first header '%s' contained unexpected version", header.Name) + } + + if err := toNLayerTar(origID, origVersion, header, tr, tarCollection, forWindows); err != nil { + return nil, err + } + + errs := tarCollection.close() + if len(errs) > 0 { + return nil, errors.New("closing files") + } + + return tarCollection.moduleTars(), nil +} + +func toNLayerTar(origID, origVersion string, firstHeader *tar.Header, tr *tar.Reader, tc *moduleTarCollection, forWindows bool) error { + toWrite := []*tar.Header{firstHeader} + if origVersion == "" { + // the first header only contains the id - e.g., /cnb/buildpacks/, + // read the next header to get the version + secondHeader, err := tr.Next() + if err != nil { + return fmt.Errorf("getting second header: %w; first header was %s", err, firstHeader.Name) + } + if _, err := sanitizePath(secondHeader.Name); err != nil { + return err + } + nextID, nextVersion := parseBpIDAndVersion(secondHeader) + if nextID != origID || nextVersion == "" { + return fmt.Errorf("second header '%s' contained unexpected id or missing version", secondHeader.Name) + } + origVersion = nextVersion + toWrite = append(toWrite, secondHeader) + } else { + // the first header contains id and version - e.g., /cnb/buildpacks//, + // we need to write the parent header - e.g., /cnb/buildpacks/ + realFirstHeader := *firstHeader + realFirstHeader.Name = filepath.ToSlash(filepath.Dir(firstHeader.Name)) + toWrite = append([]*tar.Header{&realFirstHeader}, toWrite...) + } + if forWindows { + toWrite = append(windowsPreamble(), toWrite...) 
+ } + mt, err := tc.get(origID, origVersion) + if err != nil { + return fmt.Errorf("getting module from collection: %w", err) + } + for _, h := range toWrite { + if err := mt.writer.WriteHeader(h); err != nil { + return fmt.Errorf("failed to write header '%s': %w", h.Name, err) + } + } + // write the rest of the package + var header *tar.Header + for { + header, err = tr.Next() + if err != nil { + if err == io.EOF { + return nil + } + return fmt.Errorf("getting next header: %w", err) + } + if _, err := sanitizePath(header.Name); err != nil { + return err + } + nextID, nextVersion := parseBpIDAndVersion(header) + if nextID != origID || nextVersion != origVersion { + // we found a new module, recurse + return toNLayerTar(nextID, nextVersion, header, tr, tc, forWindows) + } + + err = mt.writer.WriteHeader(header) + if err != nil { + return fmt.Errorf("failed to write header for '%s': %w", header.Name, err) + } + + _, err = io.Copy(mt.writer, tr) + if err != nil { + return errors.Wrapf(err, "failed to write contents to '%s'", header.Name) + } + } +} + +func sanitizePath(path string) (string, error) { + if strings.Contains(path, "..") { + return "", fmt.Errorf("path %s contains unexpected special elements", path) + } + return path, nil +} + +func windowsPreamble() []*tar.Header { + return []*tar.Header{ + { + Name: "Files", + Typeflag: tar.TypeDir, + }, + { + Name: "Hives", + Typeflag: tar.TypeDir, + }, + { + Name: "Files/cnb", + Typeflag: tar.TypeDir, + }, + { + Name: "Files/cnb/buildpacks", + Typeflag: tar.TypeDir, + }, + } +} + +func parseBpIDAndVersion(hdr *tar.Header) (id, version string) { + // splitting "/cnb/buildpacks/{ID}/{version}/*" returns + // [0] = "" -> first element is empty or "Files" in windows + // [1] = "cnb" + // [2] = "buildpacks" + // [3] = "{ID}" + // [4] = "{version}" + // ... 
+ parts := strings.Split(strings.ReplaceAll(filepath.Clean(hdr.Name), `\`, `/`), `/`) + size := len(parts) + switch { + case size < 4: + // error + case size == 4: + id = parts[3] + case size >= 5: + id = parts[3] + version = parts[4] + } + return id, version +} + +func handleEmptyModule(dest string, module BuildModule) ([]ModuleTar, error) { + tarFile, err := ToLayerTar(dest, module) + if err != nil { + return nil, err + } + layerTar := &moduleTar{ + info: module.Descriptor().Info(), + path: tarFile, + } + return []ModuleTar{layerTar}, nil +} + +// Set returns a set of the given string slice. +func Set(exclude []string) map[string]struct{} { + type void struct{} + var member void + var excludedModules = make(map[string]struct{}) + for _, fullName := range exclude { + excludedModules[fullName] = member + } + return excludedModules +} + +type ModuleTar interface { + Info() dist.ModuleInfo + Path() string +} + +type moduleTar struct { + info dist.ModuleInfo + path string + writer archive.TarWriter +} + +func (t *moduleTar) Info() dist.ModuleInfo { + return t.info +} + +func (t *moduleTar) Path() string { + return t.path +} + +func newModuleTar(dest, id, version string) (moduleTar, error) { + layerTar := filepath.Join(dest, fmt.Sprintf("%s.%s.tar", id, version)) + fh, err := os.Create(layerTar) + if err != nil { + return moduleTar{}, errors.Wrapf(err, "creating file at path %s", layerTar) + } + return moduleTar{ + info: dist.ModuleInfo{ + ID: id, + Version: version, + }, + path: layerTar, + writer: tar.NewWriter(fh), + }, nil +} + +type moduleTarCollection struct { + rootPath string + modules map[string]moduleTar +} + +func newModuleTarCollection(rootPath string) *moduleTarCollection { + return &moduleTarCollection{ + rootPath: rootPath, + modules: map[string]moduleTar{}, + } +} + +func (m *moduleTarCollection) get(id, version string) (moduleTar, error) { + key := fmt.Sprintf("%s@%s", id, version) + if _, ok := m.modules[key]; !ok { + module, err := 
newModuleTar(m.rootPath, id, version) + if err != nil { + return moduleTar{}, err + } + m.modules[key] = module + } + return m.modules[key], nil +} + +func (m *moduleTarCollection) moduleTars() []ModuleTar { + var modulesTar []ModuleTar + for _, v := range m.modules { + v := v + vv := &v + modulesTar = append(modulesTar, vv) + } + return modulesTar +} + +func (m *moduleTarCollection) close() []error { + var errors []error + for _, v := range m.modules { + err := v.writer.Close() + if err != nil { + errors = append(errors, err) + } + } + return errors +} diff --git a/pkg/buildpack/buildpack_tar_writer.go b/pkg/buildpack/buildpack_tar_writer.go new file mode 100644 index 0000000000..c119b13ff6 --- /dev/null +++ b/pkg/buildpack/buildpack_tar_writer.go @@ -0,0 +1,132 @@ +package buildpack + +import ( + "archive/tar" + "fmt" + "io" + "os" + "path" + "path/filepath" + "strings" + + "github.com/buildpacks/lifecycle/buildpack" + "github.com/pkg/errors" + + "github.com/buildpacks/pack/internal/style" + "github.com/buildpacks/pack/pkg/archive" + "github.com/buildpacks/pack/pkg/logging" +) + +type BuildModuleWriter struct { + logger logging.Logger + factory archive.TarWriterFactory +} + +// NewBuildModuleWriter creates a BuildModule writer +func NewBuildModuleWriter(logger logging.Logger, factory archive.TarWriterFactory) *BuildModuleWriter { + return &BuildModuleWriter{ + logger: logger, + factory: factory, + } +} + +// NToLayerTar creates a tar file containing the all the Buildpacks given, but excluding the ones which FullName() is +// in the exclude list. 
It returns the path to the tar file, the list of Buildpacks that were excluded, and any error +func (b *BuildModuleWriter) NToLayerTar(tarPath, filename string, modules []BuildModule, exclude map[string]struct{}) (string, []BuildModule, error) { + layerTar := filepath.Join(tarPath, fmt.Sprintf("%s.tar", filename)) + tarFile, err := os.Create(layerTar) + b.logger.Debugf("creating file %s", style.Symbol(layerTar)) + if err != nil { + return "", nil, errors.Wrap(err, "create file for tar") + } + + defer tarFile.Close() + tw := b.factory.NewWriter(tarFile) + defer tw.Close() + + parentFolderAdded := map[string]bool{} + duplicated := map[string]bool{} + + var buildModuleExcluded []BuildModule + for _, module := range modules { + if _, ok := exclude[module.Descriptor().Info().FullName()]; !ok { + if !duplicated[module.Descriptor().Info().FullName()] { + duplicated[module.Descriptor().Info().FullName()] = true + b.logger.Debugf("adding %s", style.Symbol(module.Descriptor().Info().FullName())) + + if err := b.writeBuildModuleToTar(tw, module, &parentFolderAdded); err != nil { + return "", nil, errors.Wrapf(err, "adding %s", style.Symbol(module.Descriptor().Info().FullName())) + } + rootPath := processRootPath(module) + if !parentFolderAdded[rootPath] { + parentFolderAdded[rootPath] = true + } + } else { + b.logger.Debugf("skipping %s, it was already added", style.Symbol(module.Descriptor().Info().FullName())) + } + } else { + b.logger.Debugf("excluding %s from being flattened", style.Symbol(module.Descriptor().Info().FullName())) + buildModuleExcluded = append(buildModuleExcluded, module) + } + } + + b.logger.Debugf("%s was created successfully", style.Symbol(layerTar)) + return layerTar, buildModuleExcluded, nil +} + +// writeBuildModuleToTar writes the content of the given tar file into the writer, skipping the folders that were already added +func (b *BuildModuleWriter) writeBuildModuleToTar(tw archive.TarWriter, module BuildModule, parentFolderAdded *map[string]bool) 
error { + var ( + rc io.ReadCloser + err error + ) + + if rc, err = module.Open(); err != nil { + return err + } + defer rc.Close() + + tr := tar.NewReader(rc) + + for { + header, err := tr.Next() + if err == io.EOF { + break + } + if err != nil { + return errors.Wrap(err, "failed to get next tar entry") + } + + if (*parentFolderAdded)[header.Name] { + b.logger.Debugf("folder %s was already added, skipping it", style.Symbol(header.Name)) + continue + } + + err = tw.WriteHeader(header) + if err != nil { + return errors.Wrapf(err, "failed to write header for '%s'", header.Name) + } + + _, err = io.Copy(tw, tr) + if err != nil { + return errors.Wrapf(err, "failed to write contents to '%s'", header.Name) + } + } + + return nil +} + +func processRootPath(module BuildModule) string { + var bpFolder string + switch module.Descriptor().Kind() { + case buildpack.KindBuildpack: + bpFolder = "buildpacks" + case buildpack.KindExtension: + bpFolder = "extensions" + default: + bpFolder = "buildpacks" + } + bpInfo := module.Descriptor().Info() + rootPath := path.Join("/cnb", bpFolder, strings.ReplaceAll(bpInfo.ID, "/", "_")) + return rootPath +} diff --git a/pkg/buildpack/buildpack_tar_writer_test.go b/pkg/buildpack/buildpack_tar_writer_test.go new file mode 100644 index 0000000000..649253a835 --- /dev/null +++ b/pkg/buildpack/buildpack_tar_writer_test.go @@ -0,0 +1,175 @@ +package buildpack_test + +import ( + "bytes" + "fmt" + "os" + "testing" + + "github.com/buildpacks/lifecycle/api" + "github.com/heroku/color" + "github.com/sclevine/spec" + "github.com/sclevine/spec/report" + + ifakes "github.com/buildpacks/pack/internal/fakes" + "github.com/buildpacks/pack/internal/style" + "github.com/buildpacks/pack/pkg/archive" + "github.com/buildpacks/pack/pkg/buildpack" + "github.com/buildpacks/pack/pkg/dist" + "github.com/buildpacks/pack/pkg/logging" + h "github.com/buildpacks/pack/testhelpers" +) + +func TestBuildModuleWriter(t *testing.T) { + color.Disable(true) + defer 
color.Disable(false) + spec.Run(t, "testBuildModuleWriter", testBuildModuleWriter, spec.Parallel(), spec.Report(report.Terminal{})) +} + +type void struct{} + +func testBuildModuleWriter(t *testing.T, when spec.G, it spec.S) { + var ( + outBuf bytes.Buffer + logger logging.Logger + buildModuleWriter *buildpack.BuildModuleWriter + bp1v1 buildpack.BuildModule + bp1v2 buildpack.BuildModule + bp2v1 buildpack.BuildModule + bp3v1 buildpack.BuildModule + member void + tmpDir string + err error + ) + + it.Before(func() { + logger = logging.NewLogWithWriters(&outBuf, &outBuf, logging.WithVerbose()) + buildModuleWriter = buildpack.NewBuildModuleWriter(logger, archive.DefaultTarWriterFactory()) + tmpDir, err = os.MkdirTemp("", "test_build_module_writer") + h.AssertNil(t, err) + + bp1v1, err = ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ + ID: "buildpack-1-id", + Version: "buildpack-1-version-1", + }, + WithStacks: []dist.Stack{{ + ID: "*", + }}, + }, 0644) + h.AssertNil(t, err) + + bp1v2, err = ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ + ID: "buildpack-1-id", + Version: "buildpack-1-version-2", + }, + WithStacks: []dist.Stack{{ + ID: "*", + }}, + }, 0644) + h.AssertNil(t, err) + + bp2v1, err = ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ + ID: "buildpack-2-id", + Version: "buildpack-2-version-1", + }, + WithStacks: []dist.Stack{{ + ID: "*", + }}, + }, 0644) + h.AssertNil(t, err) + + bp3v1, err = ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ + ID: "buildpack-3-id", + Version: "buildpack-3-version-1", + }, + WithStacks: []dist.Stack{{ + ID: "*", + }}, + }, 0644) + h.AssertNil(t, err) + }) + + it.After(func() { + err := os.RemoveAll(tmpDir) + h.AssertNil(t, err) + }) + + when("#NToLayerTar", func() { + when("there 
are not exclude buildpacks", func() { + when("there are not duplicated buildpacks", func() { + it("creates a tar", func() { + bpModules := []buildpack.BuildModule{bp1v1, bp2v1, bp3v1} + tarFile, bpExcluded, err := buildModuleWriter.NToLayerTar(tmpDir, "test-file-1", bpModules, nil) + + h.AssertNil(t, err) + h.AssertTrue(t, len(bpExcluded) == 0) + h.AssertNotNil(t, tarFile) + assertBuildpackModuleWritten(t, tarFile, bpModules) + }) + }) + + when("there are duplicated buildpacks", func() { + it("creates a tar skipping root folder from duplicated buildpacks", func() { + bpModules := []buildpack.BuildModule{bp1v1, bp1v2, bp2v1, bp3v1} + tarFile, bpExcluded, err := buildModuleWriter.NToLayerTar(tmpDir, "test-file-2", bpModules, nil) + + h.AssertNil(t, err) + h.AssertTrue(t, len(bpExcluded) == 0) + h.AssertNotNil(t, tarFile) + assertBuildpackModuleWritten(t, tarFile, bpModules) + h.AssertContains(t, outBuf.String(), fmt.Sprintf("folder '%s' was already added, skipping it", "/cnb/buildpacks/buildpack-1-id")) + }) + }) + }) + + when("there are exclude buildpacks", func() { + exclude := make(map[string]struct{}) + it.Before(func() { + exclude[bp2v1.Descriptor().Info().FullName()] = member + }) + + when("there are not duplicated buildpacks", func() { + it("creates a tar skipping excluded buildpacks", func() { + bpModules := []buildpack.BuildModule{bp1v1, bp2v1, bp3v1} + tarFile, bpExcluded, err := buildModuleWriter.NToLayerTar(tmpDir, "test-file-3", bpModules, exclude) + h.AssertNil(t, err) + h.AssertTrue(t, len(bpExcluded) == 1) + h.AssertNotNil(t, tarFile) + assertBuildpackModuleWritten(t, tarFile, []buildpack.BuildModule{bp1v1, bp3v1}) + h.AssertContains(t, outBuf.String(), fmt.Sprintf("excluding %s from being flattened", style.Symbol(bp2v1.Descriptor().Info().FullName()))) + }) + }) + + when("there are duplicated buildpacks", func() { + it("creates a tar skipping excluded buildpacks and root folder from duplicated buildpacks", func() { + bpModules := 
[]buildpack.BuildModule{bp1v1, bp1v2, bp2v1, bp3v1} + tarFile, bpExcluded, err := buildModuleWriter.NToLayerTar(tmpDir, "test-file-4", bpModules, exclude) + h.AssertNil(t, err) + h.AssertTrue(t, len(bpExcluded) == 1) + h.AssertNotNil(t, tarFile) + assertBuildpackModuleWritten(t, tarFile, []buildpack.BuildModule{bp1v1, bp1v2, bp3v1}) + h.AssertContains(t, outBuf.String(), fmt.Sprintf("folder '%s' was already added, skipping it", "/cnb/buildpacks/buildpack-1-id")) + h.AssertContains(t, outBuf.String(), fmt.Sprintf("excluding %s from being flattened", style.Symbol(bp2v1.Descriptor().Info().FullName()))) + }) + }) + }) + }) +} + +func assertBuildpackModuleWritten(t *testing.T, path string, modules []buildpack.BuildModule) { + t.Helper() + for _, module := range modules { + dirPath := fmt.Sprintf("/cnb/buildpacks/%s/%s", module.Descriptor().Info().ID, module.Descriptor().Info().Version) + h.AssertOnTarEntry(t, path, dirPath, + h.IsDirectory(), + ) + } +} diff --git a/pkg/buildpack/buildpack_test.go b/pkg/buildpack/buildpack_test.go index b2ead856e6..5692f92de5 100644 --- a/pkg/buildpack/buildpack_test.go +++ b/pkg/buildpack/buildpack_test.go @@ -1,21 +1,27 @@ package buildpack_test import ( - "errors" + "bytes" + "fmt" "io" - "io/ioutil" "os" "path/filepath" + "runtime" + "strings" "testing" "time" + "github.com/buildpacks/lifecycle/api" "github.com/heroku/color" + "github.com/pkg/errors" "github.com/sclevine/spec" "github.com/sclevine/spec/report" "github.com/buildpacks/pack/pkg/archive" + "github.com/buildpacks/pack/pkg/blob" "github.com/buildpacks/pack/pkg/buildpack" "github.com/buildpacks/pack/pkg/dist" + "github.com/buildpacks/pack/pkg/logging" h "github.com/buildpacks/pack/testhelpers" ) @@ -26,13 +32,13 @@ func TestBuildpack(t *testing.T) { } func testBuildpack(t *testing.T, when spec.G, it spec.S) { - var writeBlobToFile = func(bp buildpack.Buildpack) string { + var writeBlobToFile = func(bp buildpack.BuildModule) string { t.Helper() bpReader, err := bp.Open() 
h.AssertNil(t, err) - tmpDir, err := ioutil.TempDir("", "") + tmpDir, err := os.MkdirTemp("", "") h.AssertNil(t, err) p := filepath.Join(tmpDir, "bp.tar") @@ -50,11 +56,10 @@ func testBuildpack(t *testing.T, when spec.G, it spec.S) { when("#BuildpackFromRootBlob", func() { it("parses the descriptor file", func() { - bp, err := buildpack.FromRootBlob( - &readerBlob{ - openFn: func() io.ReadCloser { - tarBuilder := archive.TarBuilder{} - tarBuilder.AddFile("buildpack.toml", 0700, time.Now(), []byte(` + bp, err := buildpack.FromBuildpackRootBlob(&readerBlob{ + openFn: func() io.ReadCloser { + tarBuilder := archive.TarBuilder{} + tarBuilder.AddFile("buildpack.toml", 0700, time.Now(), []byte(` api = "0.3" [buildpack] @@ -65,26 +70,23 @@ homepage = "http://geocities.com/cool-bp" [[stacks]] id = "some.stack.id" `)) - return tarBuilder.Reader(archive.DefaultTarWriterFactory()) - }, + return tarBuilder.Reader(archive.DefaultTarWriterFactory()) }, - archive.DefaultTarWriterFactory(), - ) + }, archive.DefaultTarWriterFactory(), nil) h.AssertNil(t, err) - h.AssertEq(t, bp.Descriptor().API.String(), "0.3") - h.AssertEq(t, bp.Descriptor().Info.ID, "bp.one") - h.AssertEq(t, bp.Descriptor().Info.Version, "1.2.3") - h.AssertEq(t, bp.Descriptor().Info.Homepage, "http://geocities.com/cool-bp") - h.AssertEq(t, bp.Descriptor().Stacks[0].ID, "some.stack.id") + h.AssertEq(t, bp.Descriptor().API().String(), "0.3") + h.AssertEq(t, bp.Descriptor().Info().ID, "bp.one") + h.AssertEq(t, bp.Descriptor().Info().Version, "1.2.3") + h.AssertEq(t, bp.Descriptor().Info().Homepage, "http://geocities.com/cool-bp") + h.AssertEq(t, bp.Descriptor().Stacks()[0].ID, "some.stack.id") }) it("translates blob to distribution format", func() { - bp, err := buildpack.FromRootBlob( - &readerBlob{ - openFn: func() io.ReadCloser { - tarBuilder := archive.TarBuilder{} - tarBuilder.AddFile("buildpack.toml", 0700, time.Now(), []byte(` + bp, err := buildpack.FromBuildpackRootBlob(&readerBlob{ + openFn: func() 
io.ReadCloser { + tarBuilder := archive.TarBuilder{} + tarBuilder.AddFile("buildpack.toml", 0700, time.Now(), []byte(` api = "0.3" [buildpack] @@ -95,16 +97,16 @@ version = "1.2.3" id = "some.stack.id" `)) - tarBuilder.AddDir("bin", 0700, time.Now()) - tarBuilder.AddFile("bin/detect", 0700, time.Now(), []byte("detect-contents")) - tarBuilder.AddFile("bin/build", 0700, time.Now(), []byte("build-contents")) - return tarBuilder.Reader(archive.DefaultTarWriterFactory()) - }, + tarBuilder.AddDir("bin", 0700, time.Now()) + tarBuilder.AddFile("bin/detect", 0700, time.Now(), []byte("detect-contents")) + tarBuilder.AddFile("bin/build", 0700, time.Now(), []byte("build-contents")) + return tarBuilder.Reader(archive.DefaultTarWriterFactory()) }, - archive.DefaultTarWriterFactory(), - ) + }, archive.DefaultTarWriterFactory(), nil) h.AssertNil(t, err) + h.AssertNil(t, bp.Descriptor().EnsureTargetSupport(dist.DefaultTargetOSLinux, dist.DefaultTargetArch, "", "")) + tarPath := writeBlobToFile(bp) defer os.Remove(tarPath) @@ -144,6 +146,76 @@ id = "some.stack.id" ) }) + it("translates blob to windows bat distribution format", func() { + bp, err := buildpack.FromBuildpackRootBlob(&readerBlob{ + openFn: func() io.ReadCloser { + tarBuilder := archive.TarBuilder{} + tarBuilder.AddFile("buildpack.toml", 0700, time.Now(), []byte(` +api = "0.9" + +[buildpack] +id = "bp.one" +version = "1.2.3" +`)) + + tarBuilder.AddDir("bin", 0700, time.Now()) + tarBuilder.AddFile("bin/detect", 0700, time.Now(), []byte("detect-contents")) + tarBuilder.AddFile("bin/build.bat", 0700, time.Now(), []byte("build-contents")) + return tarBuilder.Reader(archive.DefaultTarWriterFactory()) + }, + }, archive.DefaultTarWriterFactory(), nil) + h.AssertNil(t, err) + + bpDescriptor := bp.Descriptor().(*dist.BuildpackDescriptor) + h.AssertTrue(t, bpDescriptor.WithWindowsBuild) + h.AssertFalse(t, bpDescriptor.WithLinuxBuild) + + tarPath := writeBlobToFile(bp) + defer os.Remove(tarPath) + + h.AssertOnTarEntry(t, tarPath, + 
"/cnb/buildpacks/bp.one/1.2.3/bin/build.bat", + h.HasFileMode(0755), + h.HasModTime(archive.NormalizedDateTime), + h.ContentEquals("build-contents"), + ) + }) + + it("translates blob to windows exe distribution format", func() { + bp, err := buildpack.FromBuildpackRootBlob(&readerBlob{ + openFn: func() io.ReadCloser { + tarBuilder := archive.TarBuilder{} + tarBuilder.AddFile("buildpack.toml", 0700, time.Now(), []byte(` +api = "0.3" + +[buildpack] +id = "bp.one" +version = "1.2.3" +`)) + + tarBuilder.AddDir("bin", 0700, time.Now()) + tarBuilder.AddFile("bin/detect", 0700, time.Now(), []byte("detect-contents")) + tarBuilder.AddFile("bin/build.exe", 0700, time.Now(), []byte("build-contents")) + return tarBuilder.Reader(archive.DefaultTarWriterFactory()) + }, + }, archive.DefaultTarWriterFactory(), nil) + h.AssertNil(t, err) + + bpDescriptor := bp.Descriptor().(*dist.BuildpackDescriptor) + h.AssertTrue(t, bpDescriptor.WithWindowsBuild) + h.AssertFalse(t, bpDescriptor.WithLinuxBuild) + + tarPath := writeBlobToFile(bp) + defer os.Remove(tarPath) + + h.AssertOnTarEntry(t, tarPath, + "/cnb/buildpacks/bp.one/1.2.3/bin/build.exe", + h.HasFileMode(0755), + h.HasModTime(archive.NormalizedDateTime), + h.ContentEquals("build-contents"), + ) + }) + it("surfaces errors encountered while reading blob", func() { realBlob := &readerBlob{ openFn: func() io.ReadCloser { @@ -162,19 +234,17 @@ id = "some.stack.id" }, } - bp, err := buildpack.FromRootBlob( - &errorBlob{ - realBlob: realBlob, - }, - archive.DefaultTarWriterFactory(), - ) + bp, err := buildpack.FromBuildpackRootBlob(&errorBlob{ + realBlob: realBlob, + limit: 4, + }, archive.DefaultTarWriterFactory(), nil) h.AssertNil(t, err) bpReader, err := bp.Open() h.AssertNil(t, err) - _, err = io.Copy(ioutil.Discard, bpReader) - h.AssertError(t, err, "error from errBlob") + _, err = io.Copy(io.Discard, bpReader) + h.AssertError(t, err, "error from errBlob (reached limit of 4)") }) when("calculating permissions", func() { @@ -191,17 
+261,14 @@ id = "some.stack.id" when("no exec bits set", func() { it("sets to 0755 if directory", func() { - bp, err := buildpack.FromRootBlob( - &readerBlob{ - openFn: func() io.ReadCloser { - tarBuilder := archive.TarBuilder{} - tarBuilder.AddFile("buildpack.toml", 0700, time.Now(), []byte(bpTOMLData)) - tarBuilder.AddDir("some-dir", 0600, time.Now()) - return tarBuilder.Reader(archive.DefaultTarWriterFactory()) - }, + bp, err := buildpack.FromBuildpackRootBlob(&readerBlob{ + openFn: func() io.ReadCloser { + tarBuilder := archive.TarBuilder{} + tarBuilder.AddFile("buildpack.toml", 0700, time.Now(), []byte(bpTOMLData)) + tarBuilder.AddDir("some-dir", 0600, time.Now()) + return tarBuilder.Reader(archive.DefaultTarWriterFactory()) }, - archive.DefaultTarWriterFactory(), - ) + }, archive.DefaultTarWriterFactory(), nil) h.AssertNil(t, err) tarPath := writeBlobToFile(bp) @@ -216,20 +283,21 @@ id = "some.stack.id" when("no exec bits set", func() { it("sets to 0755 if 'bin/detect' or 'bin/build'", func() { - bp, err := buildpack.FromRootBlob( - &readerBlob{ - openFn: func() io.ReadCloser { - tarBuilder := archive.TarBuilder{} - tarBuilder.AddFile("buildpack.toml", 0700, time.Now(), []byte(bpTOMLData)) - tarBuilder.AddFile("bin/detect", 0600, time.Now(), []byte("detect-contents")) - tarBuilder.AddFile("bin/build", 0600, time.Now(), []byte("build-contents")) - return tarBuilder.Reader(archive.DefaultTarWriterFactory()) - }, + bp, err := buildpack.FromBuildpackRootBlob(&readerBlob{ + openFn: func() io.ReadCloser { + tarBuilder := archive.TarBuilder{} + tarBuilder.AddFile("buildpack.toml", 0700, time.Now(), []byte(bpTOMLData)) + tarBuilder.AddFile("bin/detect", 0600, time.Now(), []byte("detect-contents")) + tarBuilder.AddFile("bin/build", 0600, time.Now(), []byte("build-contents")) + return tarBuilder.Reader(archive.DefaultTarWriterFactory()) }, - archive.DefaultTarWriterFactory(), - ) + }, archive.DefaultTarWriterFactory(), nil) h.AssertNil(t, err) + bpDescriptor := 
bp.Descriptor().(*dist.BuildpackDescriptor) + h.AssertFalse(t, bpDescriptor.WithWindowsBuild) + h.AssertTrue(t, bpDescriptor.WithLinuxBuild) + tarPath := writeBlobToFile(bp) defer os.Remove(tarPath) @@ -247,17 +315,14 @@ id = "some.stack.id" when("not directory, 'bin/detect', or 'bin/build'", func() { it("sets to 0755 if ANY exec bit is set", func() { - bp, err := buildpack.FromRootBlob( - &readerBlob{ - openFn: func() io.ReadCloser { - tarBuilder := archive.TarBuilder{} - tarBuilder.AddFile("buildpack.toml", 0700, time.Now(), []byte(bpTOMLData)) - tarBuilder.AddFile("some-file", 0700, time.Now(), []byte("some-data")) - return tarBuilder.Reader(archive.DefaultTarWriterFactory()) - }, + bp, err := buildpack.FromBuildpackRootBlob(&readerBlob{ + openFn: func() io.ReadCloser { + tarBuilder := archive.TarBuilder{} + tarBuilder.AddFile("buildpack.toml", 0700, time.Now(), []byte(bpTOMLData)) + tarBuilder.AddFile("some-file", 0700, time.Now(), []byte("some-data")) + return tarBuilder.Reader(archive.DefaultTarWriterFactory()) }, - archive.DefaultTarWriterFactory(), - ) + }, archive.DefaultTarWriterFactory(), nil) h.AssertNil(t, err) tarPath := writeBlobToFile(bp) @@ -272,17 +337,14 @@ id = "some.stack.id" when("not directory, 'bin/detect', or 'bin/build'", func() { it("sets to 0644 if NO exec bits set", func() { - bp, err := buildpack.FromRootBlob( - &readerBlob{ - openFn: func() io.ReadCloser { - tarBuilder := archive.TarBuilder{} - tarBuilder.AddFile("buildpack.toml", 0700, time.Now(), []byte(bpTOMLData)) - tarBuilder.AddFile("some-file", 0600, time.Now(), []byte("some-data")) - return tarBuilder.Reader(archive.DefaultTarWriterFactory()) - }, + bp, err := buildpack.FromBuildpackRootBlob(&readerBlob{ + openFn: func() io.ReadCloser { + tarBuilder := archive.TarBuilder{} + tarBuilder.AddFile("buildpack.toml", 0700, time.Now(), []byte(bpTOMLData)) + tarBuilder.AddFile("some-file", 0600, time.Now(), []byte("some-data")) + return 
tarBuilder.Reader(archive.DefaultTarWriterFactory()) }, - archive.DefaultTarWriterFactory(), - ) + }, archive.DefaultTarWriterFactory(), nil) h.AssertNil(t, err) tarPath := writeBlobToFile(bp) @@ -298,93 +360,80 @@ id = "some.stack.id" when("there is no descriptor file", func() { it("returns error", func() { - _, err := buildpack.FromRootBlob( - &readerBlob{ - openFn: func() io.ReadCloser { - tarBuilder := archive.TarBuilder{} - return tarBuilder.Reader(archive.DefaultTarWriterFactory()) - }, + _, err := buildpack.FromBuildpackRootBlob(&readerBlob{ + openFn: func() io.ReadCloser { + tarBuilder := archive.TarBuilder{} + return tarBuilder.Reader(archive.DefaultTarWriterFactory()) }, - archive.DefaultTarWriterFactory(), - ) + }, archive.DefaultTarWriterFactory(), nil) h.AssertError(t, err, "could not find entry path 'buildpack.toml'") }) }) when("there is no api field", func() { it("assumes an api version", func() { - bp, err := buildpack.FromRootBlob( - &readerBlob{ - openFn: func() io.ReadCloser { - tarBuilder := archive.TarBuilder{} - tarBuilder.AddFile("buildpack.toml", 0700, time.Now(), []byte(` + bp, err := buildpack.FromBuildpackRootBlob(&readerBlob{ + openFn: func() io.ReadCloser { + tarBuilder := archive.TarBuilder{} + tarBuilder.AddFile("buildpack.toml", 0700, time.Now(), []byte(` [buildpack] id = "bp.one" version = "1.2.3" [[stacks]] id = "some.stack.id"`)) - return tarBuilder.Reader(archive.DefaultTarWriterFactory()) - }, + return tarBuilder.Reader(archive.DefaultTarWriterFactory()) }, - archive.DefaultTarWriterFactory(), - ) + }, archive.DefaultTarWriterFactory(), nil) h.AssertNil(t, err) - h.AssertEq(t, bp.Descriptor().API.String(), "0.1") + h.AssertEq(t, bp.Descriptor().API().String(), "0.1") }) }) when("there is no id", func() { it("returns error", func() { - _, err := buildpack.FromRootBlob( - &readerBlob{ - openFn: func() io.ReadCloser { - tarBuilder := archive.TarBuilder{} - tarBuilder.AddFile("buildpack.toml", 0700, time.Now(), []byte(` + _, err := 
buildpack.FromBuildpackRootBlob(&readerBlob{ + openFn: func() io.ReadCloser { + tarBuilder := archive.TarBuilder{} + tarBuilder.AddFile("buildpack.toml", 0700, time.Now(), []byte(` [buildpack] id = "" version = "1.2.3" [[stacks]] id = "some.stack.id"`)) - return tarBuilder.Reader(archive.DefaultTarWriterFactory()) - }, + return tarBuilder.Reader(archive.DefaultTarWriterFactory()) }, - archive.DefaultTarWriterFactory(), - ) + }, archive.DefaultTarWriterFactory(), nil) h.AssertError(t, err, "'buildpack.id' is required") }) }) when("there is no version", func() { it("returns error", func() { - _, err := buildpack.FromRootBlob( - &readerBlob{ - openFn: func() io.ReadCloser { - tarBuilder := archive.TarBuilder{} - tarBuilder.AddFile("buildpack.toml", 0700, time.Now(), []byte(` + _, err := buildpack.FromBuildpackRootBlob(&readerBlob{ + openFn: func() io.ReadCloser { + tarBuilder := archive.TarBuilder{} + tarBuilder.AddFile("buildpack.toml", 0700, time.Now(), []byte(` [buildpack] id = "bp.one" version = "" [[stacks]] id = "some.stack.id"`)) - return tarBuilder.Reader(archive.DefaultTarWriterFactory()) - }, + return tarBuilder.Reader(archive.DefaultTarWriterFactory()) }, - archive.DefaultTarWriterFactory(), - ) + }, archive.DefaultTarWriterFactory(), nil) h.AssertError(t, err, "'buildpack.version' is required") }) }) when("both stacks and order are present", func() { it("returns error", func() { - _, err := buildpack.FromRootBlob( - &readerBlob{ - openFn: func() io.ReadCloser { - tarBuilder := archive.TarBuilder{} - tarBuilder.AddFile("buildpack.toml", 0700, time.Now(), []byte(` + _, err := buildpack.FromBuildpackRootBlob(&readerBlob{ + openFn: func() io.ReadCloser { + tarBuilder := archive.TarBuilder{} + tarBuilder.AddFile("buildpack.toml", 0700, time.Now(), []byte(` [buildpack] id = "bp.one" version = "1.2.3" @@ -397,39 +446,102 @@ id = "some.stack.id" id = "bp.nested" version = "bp.nested.version" `)) - return tarBuilder.Reader(archive.DefaultTarWriterFactory()) - }, + 
return tarBuilder.Reader(archive.DefaultTarWriterFactory()) }, - archive.DefaultTarWriterFactory(), - ) - h.AssertError(t, err, "cannot have both 'stacks' and an 'order' defined") + }, archive.DefaultTarWriterFactory(), nil) + h.AssertError(t, err, "cannot have both 'targets'/'stacks' and an 'order' defined") }) }) when("missing stacks and order", func() { - it("returns error", func() { - _, err := buildpack.FromRootBlob( - &readerBlob{ - openFn: func() io.ReadCloser { - tarBuilder := archive.TarBuilder{} - tarBuilder.AddFile("buildpack.toml", 0700, time.Now(), []byte(` + it("does not return an error", func() { + _, err := buildpack.FromBuildpackRootBlob(&readerBlob{ + openFn: func() io.ReadCloser { + tarBuilder := archive.TarBuilder{} + tarBuilder.AddFile("buildpack.toml", 0700, time.Now(), []byte(` [buildpack] id = "bp.one" version = "1.2.3" `)) - return tarBuilder.Reader(archive.DefaultTarWriterFactory()) - }, + return tarBuilder.Reader(archive.DefaultTarWriterFactory()) }, - archive.DefaultTarWriterFactory(), + }, archive.DefaultTarWriterFactory(), nil) + h.AssertNil(t, err) + }) + }) + + when("hardlink is present", func() { + var bpRootFolder string + + it.Before(func() { + bpRootFolder = filepath.Join("testdata", "buildpack-with-hardlink") + // create a hard link + err := os.Link(filepath.Join(bpRootFolder, "original-file"), filepath.Join(bpRootFolder, "original-file-2")) + h.AssertNil(t, err) + }) + + it.After(func() { + os.RemoveAll(filepath.Join(bpRootFolder, "original-file-2")) + }) + + it("hardlink is preserved in the output tar file", func() { + bp, err := buildpack.FromBuildpackRootBlob(blob.NewBlob(bpRootFolder), archive.DefaultTarWriterFactory(), nil) + h.AssertNil(t, err) + + tarPath := writeBlobToFile(bp) + defer os.Remove(tarPath) + + h.AssertOnTarEntries(t, tarPath, + "/cnb/buildpacks/bp.one/1.2.3/original-file", + "/cnb/buildpacks/bp.one/1.2.3/original-file-2", + h.AreEquivalentHardLinks(), ) - h.AssertError(t, err, "must have either 'stacks' or 
an 'order' defined") + }) + }) + + when("there are wrong things in the file", func() { + it("warns", func() { + outBuf := bytes.Buffer{} + logger := logging.NewLogWithWriters(&outBuf, &outBuf) + _, err := buildpack.FromBuildpackRootBlob(&readerBlob{ + openFn: func() io.ReadCloser { + tarBuilder := archive.TarBuilder{} + tarBuilder.AddFile("buildpack.toml", 0700, time.Now(), []byte(` +api = "0.3" + +[buildpack] +id = "bp.one" +version = "1.2.3" +homepage = "http://geocities.com/cool-bp" +sbom-formats = ["this should not warn"] +clear-env = true + +[[targets]] +os = "some-os" +arch = "some-arch" +variant = "some-arch-variant" +[[targets.distributions]] +name = "some-distro-name" +version = "some-distro-version" +[[targets.distros]] +name = "some-distro-name" +versions = ["some-distro-version"] + +[metadata] +this-key = "is totally allowed and should not warn" +`)) + return tarBuilder.Reader(archive.DefaultTarWriterFactory()) + }, + }, archive.DefaultTarWriterFactory(), logger) + h.AssertNil(t, err) + h.AssertContains(t, outBuf.String(), "Warning: Ignoring unexpected key(s) in descriptor for buildpack bp.one: targets.distributions, targets.distributions.name, targets.distributions.version, targets.distros.versions") }) }) }) when("#Match", func() { it("compares, using only the id and version", func() { - other := dist.BuildpackInfo{ + other := dist.ModuleInfo{ ID: "same", Version: "1.2.3", Description: "something else", @@ -443,7 +555,7 @@ version = "1.2.3" }, } - self := dist.BuildpackInfo{ + self := dist.ModuleInfo{ ID: "same", Version: "1.2.3", } @@ -458,19 +570,375 @@ version = "1.2.3" h.AssertEq(t, match, false) }) }) + + when("#Set", func() { + it("creates a set", func() { + values := []string{"a", "b", "c", "a"} + set := buildpack.Set(values) + h.AssertEq(t, len(set), 3) + }) + }) + + when("#ToNLayerTar", func() { + var ( + tmpDir string + expectedBP []expectedBuildpack + err error + ) + + it.Before(func() { + tmpDir, err = os.MkdirTemp("", "") + h.AssertNil(t, 
err) + }) + + it.After(func() { + err := os.RemoveAll(tmpDir) + if runtime.GOOS != "windows" { + // avoid "The process cannot access the file because it is being used by another process" + // error on Windows + h.AssertNil(t, err) + } + }) + + when("BuildModule contains only an individual buildpack (default)", func() { + it.Before(func() { + expectedBP = []expectedBuildpack{ + { + id: "buildpack-1-id", + version: "buildpack-1-version-1", + }, + } + }) + + it("returns 1 tar files", func() { + bp := buildpack.FromBlob( + &dist.BuildpackDescriptor{ + WithAPI: api.MustParse("0.3"), + WithInfo: dist.ModuleInfo{ + ID: "buildpack-1-id", + Version: "buildpack-1-version-1", + Name: "buildpack-1", + }, + }, + &readerBlob{ + openFn: func() io.ReadCloser { + tarBuilder := archive.TarBuilder{} + + // Buildpack 1 + tarBuilder.AddDir("/cnb/buildpacks/buildpack-1-id", 0700, time.Now()) + tarBuilder.AddDir("/cnb/buildpacks/buildpack-1-id/buildpack-1-version-1", 0700, time.Now()) + tarBuilder.AddFile("/cnb/buildpacks/buildpack-1-id/buildpack-1-version-1/buildpack.toml", 0700, time.Now(), []byte(` +api = "0.3" + +[buildpack] +id = "buildpack-1-id" +version = "buildpack-1-version-1" + +`)) + tarBuilder.AddDir("/cnb/buildpacks/buildpack-1-id/buildpack-1-version-1/bin", 0700, time.Now()) + tarBuilder.AddFile("/cnb/buildpacks/buildpack-1-id/buildpack-1-version-1/bin/detect", 0700, time.Now(), []byte("detect-contents")) + tarBuilder.AddFile("/cnb/buildpacks/buildpack-1-id/buildpack-1-version-1/bin/build", 0700, time.Now(), []byte("build-contents")) + + return tarBuilder.Reader(archive.DefaultTarWriterFactory()) + }, + }, + ) + + tarPaths, err := buildpack.ToNLayerTar(tmpDir, bp) + h.AssertNil(t, err) + h.AssertEq(t, len(tarPaths), 1) + assertBuildpacksToTar(t, tarPaths, expectedBP) + }) + }) + + when("BuildModule contains N flattened buildpacks", func() { + it.Before(func() { + expectedBP = []expectedBuildpack{ + { + id: "buildpack-1-id", + version: "buildpack-1-version-1", + }, + { + id: 
"buildpack-2-id", + version: "buildpack-2-version-1", + }, + } + }) + when("not running on windows", func() { + it("returns N tar files", func() { + h.SkipIf(t, runtime.GOOS == "windows", "") + bp := buildpack.FromBlob( + &dist.BuildpackDescriptor{ + WithAPI: api.MustParse("0.3"), + WithInfo: dist.ModuleInfo{ + ID: "buildpack-1-id", + Version: "buildpack-1-version-1", + Name: "buildpack-1", + }, + }, + &readerBlob{ + openFn: func() io.ReadCloser { + tarBuilder := archive.TarBuilder{} + + // Buildpack 1 + tarBuilder.AddDir("/cnb/buildpacks/buildpack-1-id", 0700, time.Now()) + tarBuilder.AddDir("/cnb/buildpacks/buildpack-1-id/buildpack-1-version-1", 0700, time.Now()) + tarBuilder.AddFile("/cnb/buildpacks/buildpack-1-id/buildpack-1-version-1/buildpack.toml", 0700, time.Now(), []byte(` +api = "0.3" + +[buildpack] +id = "buildpack-1-id" +version = "buildpack-1-version-1" + +`)) + tarBuilder.AddDir("/cnb/buildpacks/buildpack-1-id/buildpack-1-version-1/bin", 0700, time.Now()) + tarBuilder.AddFile("/cnb/buildpacks/buildpack-1-id/buildpack-1-version-1/bin/detect", 0700, time.Now(), []byte("detect-contents")) + tarBuilder.AddFile("/cnb/buildpacks/buildpack-1-id/buildpack-1-version-1/bin/build", 0700, time.Now(), []byte("build-contents")) + + // Buildpack 2 + tarBuilder.AddDir("/cnb/buildpacks/buildpack-2-id", 0700, time.Now()) + tarBuilder.AddDir("/cnb/buildpacks/buildpack-2-id/buildpack-2-version-1", 0700, time.Now()) + tarBuilder.AddFile("/cnb/buildpacks/buildpack-2-id/buildpack-2-version-1/buildpack.toml", 0700, time.Now(), []byte(` +api = "0.3" + +[buildpack] +id = "buildpack-2-id" +version = "buildpack-2-version-1" + +`)) + tarBuilder.AddDir("/cnb/buildpacks/buildpack-2-id/buildpack-2-version-1/bin", 0700, time.Now()) + tarBuilder.AddFile("/cnb/buildpacks/buildpack-2-id/buildpack-2-version-1/bin/detect", 0700, time.Now(), []byte("detect-contents")) + tarBuilder.AddFile("/cnb/buildpacks/buildpack-2-id/buildpack-2-version-1/bin/build", 0700, time.Now(), 
[]byte("build-contents")) + + return tarBuilder.Reader(archive.DefaultTarWriterFactory()) + }, + }, + ) + + tarPaths, err := buildpack.ToNLayerTar(tmpDir, bp) + h.AssertNil(t, err) + h.AssertEq(t, len(tarPaths), 2) + assertBuildpacksToTar(t, tarPaths, expectedBP) + }) + }) + + when("running on windows", func() { + it("returns N tar files", func() { + h.SkipIf(t, runtime.GOOS != "windows", "") + bp := buildpack.FromBlob( + &dist.BuildpackDescriptor{ + WithAPI: api.MustParse("0.3"), + WithInfo: dist.ModuleInfo{ + ID: "buildpack-1-id", + Version: "buildpack-1-version-1", + Name: "buildpack-1", + }, + }, + &readerBlob{ + openFn: func() io.ReadCloser { + tarBuilder := archive.TarBuilder{} + // Windows tar format + tarBuilder.AddDir("Files", 0700, time.Now()) + tarBuilder.AddDir("Hives", 0700, time.Now()) + tarBuilder.AddDir("Files/cnb", 0700, time.Now()) + tarBuilder.AddDir("Files/cnb/builpacks", 0700, time.Now()) + + // Buildpack 1 + tarBuilder.AddDir("Files/cnb/buildpacks/buildpack-1-id", 0700, time.Now()) + tarBuilder.AddDir("Files/cnb/buildpacks/buildpack-1-id/buildpack-1-version-1", 0700, time.Now()) + tarBuilder.AddFile("Files/cnb/buildpacks/buildpack-1-id/buildpack-1-version-1/buildpack.toml", 0700, time.Now(), []byte(` +api = "0.3" + +[buildpack] +id = "buildpack-1-id" +version = "buildpack-1-version-1" + +`)) + tarBuilder.AddDir("Files/cnb/buildpacks/buildpack-1-id/buildpack-1-version-1/bin", 0700, time.Now()) + tarBuilder.AddFile("Files/cnb/buildpacks/buildpack-1-id/buildpack-1-version-1/bin/detect.bat", 0700, time.Now(), []byte("detect-contents")) + tarBuilder.AddFile("Files/cnb/buildpacks/buildpack-1-id/buildpack-1-version-1/bin/build.bat", 0700, time.Now(), []byte("build-contents")) + + // Buildpack 2 + tarBuilder.AddDir("Files/cnb/buildpacks/buildpack-2-id", 0700, time.Now()) + tarBuilder.AddDir("Files/cnb/buildpacks/buildpack-2-id/buildpack-2-version-1", 0700, time.Now()) + 
tarBuilder.AddFile("Files/cnb/buildpacks/buildpack-2-id/buildpack-2-version-1/buildpack.toml", 0700, time.Now(), []byte(` +api = "0.3" + +[buildpack] +id = "buildpack-2-id" +version = "buildpack-2-version-1" + +`)) + tarBuilder.AddDir("Files/cnb/buildpacks/buildpack-2-id/buildpack-2-version-1/bin", 0700, time.Now()) + tarBuilder.AddFile("Files/cnb/buildpacks/buildpack-2-id/buildpack-2-version-1/bin/detect.bat", 0700, time.Now(), []byte("detect-contents")) + tarBuilder.AddFile("Files/cnb/buildpacks/buildpack-2-id/buildpack-2-version-1/bin/build.bat", 0700, time.Now(), []byte("build-contents")) + + return tarBuilder.Reader(archive.DefaultTarWriterFactory()) + }, + }, + ) + + tarPaths, err := buildpack.ToNLayerTar(tmpDir, bp) + h.AssertNil(t, err) + h.AssertEq(t, len(tarPaths), 2) + assertWindowsBuildpacksToTar(t, tarPaths, expectedBP) + }) + }) + }) + + when("BuildModule contains buildpacks with same ID but different versions", func() { + it.Before(func() { + expectedBP = []expectedBuildpack{ + { + id: "buildpack-1-id", + version: "buildpack-1-version-1", + }, + { + id: "buildpack-1-id", + version: "buildpack-1-version-2", + }, + } + }) + + it("returns N tar files one per each version", func() { + bp := buildpack.FromBlob( + &dist.BuildpackDescriptor{ + WithAPI: api.MustParse("0.3"), + WithInfo: dist.ModuleInfo{ + ID: "buildpack-1-id", + Version: "buildpack-1-version-1", + Name: "buildpack-1", + }, + }, + &readerBlob{ + openFn: func() io.ReadCloser { + tarBuilder := archive.TarBuilder{} + + // Buildpack 1 + tarBuilder.AddDir("/cnb/buildpacks/buildpack-1-id", 0700, time.Now()) + tarBuilder.AddDir("/cnb/buildpacks/buildpack-1-id/buildpack-1-version-1", 0700, time.Now()) + tarBuilder.AddFile("/cnb/buildpacks/buildpack-1-id/buildpack-1-version-1/buildpack.toml", 0700, time.Now(), []byte(` +api = "0.3" + +[buildpack] +id = "buildpack-1-id" +version = "buildpack-1-version-1" + +`)) + tarBuilder.AddDir("/cnb/buildpacks/buildpack-1-id/buildpack-1-version-1/bin", 0700, 
time.Now()) + tarBuilder.AddFile("/cnb/buildpacks/buildpack-1-id/buildpack-1-version-1/bin/detect", 0700, time.Now(), []byte("detect-contents")) + tarBuilder.AddFile("/cnb/buildpacks/buildpack-1-id/buildpack-1-version-1/bin/build", 0700, time.Now(), []byte("build-contents")) + + // Buildpack 2 same as before but with different version + tarBuilder.AddDir("/cnb/buildpacks/buildpack-1-id/buildpack-1-version-2", 0700, time.Now()) + tarBuilder.AddFile("/cnb/buildpacks/buildpack-1-id/buildpack-1-version-2/buildpack.toml", 0700, time.Now(), []byte(` +api = "0.3" + +[buildpack] +id = "buildpack-2-id" +version = "buildpack-2-version-1" + +`)) + tarBuilder.AddDir("/cnb/buildpacks/buildpack-1-id/buildpack-1-version-2/bin", 0700, time.Now()) + tarBuilder.AddFile("/cnb/buildpacks/buildpack-1-id/buildpack-1-version-2/bin/detect", 0700, time.Now(), []byte("detect-contents")) + tarBuilder.AddFile("/cnb/buildpacks/buildpack-1-id/buildpack-1-version-2/bin/build", 0700, time.Now(), []byte("build-contents")) + + return tarBuilder.Reader(archive.DefaultTarWriterFactory()) + }, + }, + ) + + tarPaths, err := buildpack.ToNLayerTar(tmpDir, bp) + h.AssertNil(t, err) + h.AssertEq(t, len(tarPaths), 2) + assertBuildpacksToTar(t, tarPaths, expectedBP) + }) + }) + + when("BuildModule could not be read", func() { + it("surfaces errors encountered while reading blob", func() { + _, err = buildpack.ToNLayerTar(tmpDir, &errorBuildModule{}) + h.AssertError(t, err, "opening blob") + }) + }) + + when("BuildModule is empty", func() { + it("returns a path to an empty tarball", func() { + bp := buildpack.FromBlob( + &dist.BuildpackDescriptor{ + WithAPI: api.MustParse("0.3"), + WithInfo: dist.ModuleInfo{ + ID: "buildpack-1-id", + Version: "buildpack-1-version-1", + Name: "buildpack-1", + }, + }, + &readerBlob{ + openFn: func() io.ReadCloser { + return io.NopCloser(strings.NewReader("")) + }, + }, + ) + + tarPaths, err := buildpack.ToNLayerTar(tmpDir, bp) + h.AssertNil(t, err) + h.AssertEq(t, 
len(tarPaths), 1) + h.AssertNotNil(t, tarPaths[0].Path()) + }) + }) + + when("BuildModule contains unexpected elements in the tarball file", func() { + it.Before(func() { + expectedBP = []expectedBuildpack{ + { + id: "buildpack-1-id", + version: "buildpack-1-version-1", + }, + } + }) + + it("throws an error", func() { + bp := buildpack.FromBlob( + &dist.BuildpackDescriptor{ + WithAPI: api.MustParse("0.3"), + WithInfo: dist.ModuleInfo{ + ID: "buildpack-1-id", + Version: "buildpack-1-version-1", + Name: "buildpack-1", + }, + }, + &readerBlob{ + openFn: func() io.ReadCloser { + tarBuilder := archive.TarBuilder{} + + // Buildpack 1 + tarBuilder.AddDir("/cnb/buildpacks/buildpack-1-id", 0700, time.Now()) + tarBuilder.AddDir("/cnb/buildpacks/buildpack-1-id/buildpack-1-version-1", 0700, time.Now()) + tarBuilder.AddFile("/cnb/buildpacks/buildpack-1-id/buildpack-1-version-1/../hack", 0700, time.Now(), []byte("harmful content")) + return tarBuilder.Reader(archive.DefaultTarWriterFactory()) + }, + }, + ) + + _, err = buildpack.ToNLayerTar(tmpDir, bp) + h.AssertError(t, err, "contains unexpected special elements") + }) + }) + }) } type errorBlob struct { - notFirst bool + count int + limit int realBlob buildpack.Blob } func (e *errorBlob) Open() (io.ReadCloser, error) { - if !e.notFirst { - e.notFirst = true + if e.count < e.limit { + e.count += 1 return e.realBlob.Open() } - return nil, errors.New("error from errBlob") + return nil, fmt.Errorf("error from errBlob (reached limit of %d)", e.limit) } type readerBlob struct { @@ -480,3 +948,83 @@ type readerBlob struct { func (r *readerBlob) Open() (io.ReadCloser, error) { return r.openFn(), nil } + +type errorBuildModule struct { +} + +func (eb *errorBuildModule) Open() (io.ReadCloser, error) { + return nil, errors.New("something happened opening the build module") +} + +func (eb *errorBuildModule) Descriptor() buildpack.Descriptor { + return nil +} + +type expectedBuildpack struct { + id string + version string +} + +func 
assertBuildpacksToTar(t *testing.T, actual []buildpack.ModuleTar, expected []expectedBuildpack) { + t.Helper() + for _, expectedBP := range expected { + found := false + for _, moduleTar := range actual { + if expectedBP.id == moduleTar.Info().ID && expectedBP.version == moduleTar.Info().Version { + found = true + h.AssertOnTarEntry(t, moduleTar.Path(), fmt.Sprintf("/cnb/buildpacks/%s", expectedBP.id), + h.IsDirectory(), + ) + h.AssertOnTarEntry(t, moduleTar.Path(), fmt.Sprintf("/cnb/buildpacks/%s/%s", expectedBP.id, expectedBP.version), + h.IsDirectory(), + ) + h.AssertOnTarEntry(t, moduleTar.Path(), fmt.Sprintf("/cnb/buildpacks/%s/%s/bin", expectedBP.id, expectedBP.version), + h.IsDirectory(), + ) + h.AssertOnTarEntry(t, moduleTar.Path(), fmt.Sprintf("/cnb/buildpacks/%s/%s/bin/build", expectedBP.id, expectedBP.version), + h.HasFileMode(0700), + ) + h.AssertOnTarEntry(t, moduleTar.Path(), fmt.Sprintf("/cnb/buildpacks/%s/%s/bin/detect", expectedBP.id, expectedBP.version), + h.HasFileMode(0700), + ) + h.AssertOnTarEntry(t, moduleTar.Path(), fmt.Sprintf("/cnb/buildpacks/%s/%s/buildpack.toml", expectedBP.id, expectedBP.version), + h.HasFileMode(0700), + ) + break + } + } + h.AssertTrue(t, found) + } +} + +func assertWindowsBuildpacksToTar(t *testing.T, actual []buildpack.ModuleTar, expected []expectedBuildpack) { + t.Helper() + for _, expectedBP := range expected { + found := false + for _, moduleTar := range actual { + if expectedBP.id == moduleTar.Info().ID && expectedBP.version == moduleTar.Info().Version { + found = true + h.AssertOnTarEntry(t, moduleTar.Path(), fmt.Sprintf("Files/cnb/buildpacks/%s", expectedBP.id), + h.IsDirectory(), + ) + h.AssertOnTarEntry(t, moduleTar.Path(), fmt.Sprintf("Files/cnb/buildpacks/%s/%s", expectedBP.id, expectedBP.version), + h.IsDirectory(), + ) + h.AssertOnTarEntry(t, moduleTar.Path(), fmt.Sprintf("Files/cnb/buildpacks/%s/%s/bin", expectedBP.id, expectedBP.version), + h.IsDirectory(), + ) + h.AssertOnTarEntry(t, moduleTar.Path(), 
fmt.Sprintf("Files/cnb/buildpacks/%s/%s/bin/build.bat", expectedBP.id, expectedBP.version), + h.HasFileMode(0700), + ) + h.AssertOnTarEntry(t, moduleTar.Path(), fmt.Sprintf("Files/cnb/buildpacks/%s/%s/bin/detect.bat", expectedBP.id, expectedBP.version), + h.HasFileMode(0700), + ) + h.AssertOnTarEntry(t, moduleTar.Path(), fmt.Sprintf("Files/cnb/buildpacks/%s/%s/buildpack.toml", expectedBP.id, expectedBP.version), + h.HasFileMode(0700), + ) + break + } + } + h.AssertTrue(t, found) + } +} diff --git a/pkg/buildpack/buildpackage.go b/pkg/buildpack/buildpackage.go index df40107355..3c7547d0dc 100644 --- a/pkg/buildpack/buildpackage.go +++ b/pkg/buildpack/buildpackage.go @@ -8,6 +8,6 @@ import ( const MetadataLabel = "io.buildpacks.buildpackage.metadata" type Metadata struct { - dist.BuildpackInfo + dist.ModuleInfo Stacks []dist.Stack `toml:"stacks" json:"stacks"` } diff --git a/pkg/buildpack/downloader.go b/pkg/buildpack/downloader.go index 82f41a8eba..a127effb9e 100644 --- a/pkg/buildpack/downloader.go +++ b/pkg/buildpack/downloader.go @@ -29,6 +29,7 @@ type Logger interface { type ImageFetcher interface { Fetch(ctx context.Context, name string, options image.FetchOptions) (imgutil.Image, error) + CheckReadAccess(repo string, options image.FetchOptions) bool } type Downloader interface { @@ -48,7 +49,7 @@ type buildpackDownloader struct { registryResolver RegistryResolver } -func NewDownloader(logger Logger, imageFetcher ImageFetcher, downloader Downloader, registryResolver RegistryResolver) *buildpackDownloader { //nolint:golint,gosimple +func NewDownloader(logger Logger, imageFetcher ImageFetcher, downloader Downloader, registryResolver RegistryResolver) *buildpackDownloader { //nolint:revive,gosimple return &buildpackDownloader{ logger: logger, imageFetcher: imageFetcher, @@ -64,112 +65,160 @@ type DownloadOptions struct { // The base directory to use to resolve relative assets RelativeBaseDir string - // The OS of the builder image + // Deprecated: the older 
alternative to specify the OS to download; use Target instead ImageOS string // Deprecated: the older alternative to buildpack URI ImageName string + // The kind of module to download (valid values: "buildpack", "extension"). Defaults to "buildpack". + ModuleKind string + Daemon bool PullPolicy image.PullPolicy + + // The OS/Architecture/Variant to download. + Target *dist.Target } -func (c *buildpackDownloader) Download(ctx context.Context, buildpackURI string, opts DownloadOptions) (Buildpack, []Buildpack, error) { +func (c *buildpackDownloader) Download(ctx context.Context, moduleURI string, opts DownloadOptions) (BuildModule, []BuildModule, error) { + kind := KindBuildpack + if opts.ModuleKind == KindExtension { + kind = KindExtension + } + var err error var locatorType LocatorType - if buildpackURI == "" && opts.ImageName != "" { + if moduleURI == "" && opts.ImageName != "" { c.logger.Warn("The 'image' key is deprecated. Use 'uri=\"docker://...\"' instead.") - buildpackURI = opts.ImageName + moduleURI = opts.ImageName locatorType = PackageLocator } else { - locatorType, err = GetLocatorType(buildpackURI, opts.RelativeBaseDir, []dist.BuildpackInfo{}) + locatorType, err = GetLocatorType(moduleURI, opts.RelativeBaseDir, []dist.ModuleInfo{}) if err != nil { return nil, nil, err } } - - var mainBP Buildpack - var depBPs []Buildpack + var mainBP BuildModule + var depBPs []BuildModule switch locatorType { case PackageLocator: - imageName := ParsePackageLocator(buildpackURI) - c.logger.Debugf("Downloading buildpack from image: %s", style.Symbol(imageName)) - mainBP, depBPs, err = extractPackagedBuildpacks(ctx, imageName, c.imageFetcher, image.FetchOptions{Daemon: opts.Daemon, PullPolicy: opts.PullPolicy}) + imageName := ParsePackageLocator(moduleURI) + c.logger.Debugf("Downloading %s from image: %s", kind, style.Symbol(imageName)) + mainBP, depBPs, err = extractPackaged(ctx, kind, imageName, c.imageFetcher, image.FetchOptions{ + Daemon: opts.Daemon, + PullPolicy: 
opts.PullPolicy, + Target: opts.Target, + }) if err != nil { - return nil, nil, errors.Wrapf(err, "extracting from registry %s", style.Symbol(buildpackURI)) + return nil, nil, errors.Wrapf(err, "extracting from registry %s", style.Symbol(moduleURI)) } case RegistryLocator: - c.logger.Debugf("Downloading buildpack from registry: %s", style.Symbol(buildpackURI)) - address, err := c.registryResolver.Resolve(opts.RegistryName, buildpackURI) + c.logger.Debugf("Downloading %s from registry: %s", kind, style.Symbol(moduleURI)) + address, err := c.registryResolver.Resolve(opts.RegistryName, moduleURI) if err != nil { - return nil, nil, errors.Wrapf(err, "locating in registry: %s", style.Symbol(buildpackURI)) + return nil, nil, errors.Wrapf(err, "locating in registry: %s", style.Symbol(moduleURI)) } - mainBP, depBPs, err = extractPackagedBuildpacks(ctx, address, c.imageFetcher, image.FetchOptions{Daemon: opts.Daemon, PullPolicy: opts.PullPolicy}) + mainBP, depBPs, err = extractPackaged(ctx, kind, address, c.imageFetcher, image.FetchOptions{ + Daemon: opts.Daemon, + PullPolicy: opts.PullPolicy, + Target: opts.Target, + }) if err != nil { - return nil, nil, errors.Wrapf(err, "extracting from registry %s", style.Symbol(buildpackURI)) + return nil, nil, errors.Wrapf(err, "extracting from registry %s", style.Symbol(moduleURI)) } case URILocator: - buildpackURI, err = paths.FilePathToURI(buildpackURI, opts.RelativeBaseDir) + moduleURI, err = paths.FilePathToURI(moduleURI, opts.RelativeBaseDir) if err != nil { - return nil, nil, errors.Wrapf(err, "making absolute: %s", style.Symbol(buildpackURI)) + return nil, nil, errors.Wrapf(err, "making absolute: %s", style.Symbol(moduleURI)) } - c.logger.Debugf("Downloading buildpack from URI: %s", style.Symbol(buildpackURI)) + c.logger.Debugf("Downloading %s from URI: %s", kind, style.Symbol(moduleURI)) - blob, err := c.downloader.Download(ctx, buildpackURI) + blob, err := c.downloader.Download(ctx, moduleURI) if err != nil { - return nil, 
nil, errors.Wrapf(err, "downloading buildpack from %s", style.Symbol(buildpackURI)) + return nil, nil, errors.Wrapf(err, "downloading %s from %s", kind, style.Symbol(moduleURI)) } - mainBP, depBPs, err = decomposeBuildpack(blob, opts.ImageOS) + imageOS := opts.ImageOS + if opts.Target != nil { + imageOS = opts.Target.OS + } + mainBP, depBPs, err = decomposeBlob(blob, kind, imageOS, c.logger) if err != nil { - return nil, nil, errors.Wrapf(err, "extracting from %s", style.Symbol(buildpackURI)) + return nil, nil, errors.Wrapf(err, "extracting from %s", style.Symbol(moduleURI)) } default: - return nil, nil, fmt.Errorf("error reading %s: invalid locator: %s", buildpackURI, locatorType) + return nil, nil, fmt.Errorf("error reading %s: invalid locator: %s", moduleURI, locatorType) } return mainBP, depBPs, nil } -// decomposeBuildpack decomposes a buildpack blob into the main builder (order buildpack) and it's dependencies buildpacks. -func decomposeBuildpack(blob blob.Blob, imageOS string) (mainBP Buildpack, depBPs []Buildpack, err error) { +// decomposeBlob decomposes a buildpack or extension blob into the main module (order buildpack or extension) and +// (for buildpack blobs) its dependent buildpacks. 
+func decomposeBlob(blob blob.Blob, kind string, imageOS string, logger Logger) (mainModule BuildModule, depModules []BuildModule, err error) { isOCILayout, err := IsOCILayoutBlob(blob) if err != nil { - return mainBP, depBPs, errors.Wrap(err, "inspecting buildpack blob") + return mainModule, depModules, errors.Wrapf(err, "inspecting %s blob", kind) } if isOCILayout { - mainBP, depBPs, err = BuildpacksFromOCILayoutBlob(blob) + mainModule, depModules, err = fromOCILayoutBlob(blob, kind) if err != nil { - return mainBP, depBPs, errors.Wrap(err, "extracting buildpacks") + return mainModule, depModules, errors.Wrapf(err, "extracting %ss", kind) } } else { layerWriterFactory, err := layer.NewWriterFactory(imageOS) if err != nil { - return mainBP, depBPs, errors.Wrapf(err, "get tar writer factory for OS %s", style.Symbol(imageOS)) + return mainModule, depModules, errors.Wrapf(err, "get tar writer factory for OS %s", style.Symbol(imageOS)) } - mainBP, err = FromRootBlob(blob, layerWriterFactory) + if kind == KindExtension { + mainModule, err = FromExtensionRootBlob(blob, layerWriterFactory, logger) + } else { + mainModule, err = FromBuildpackRootBlob(blob, layerWriterFactory, logger) + } if err != nil { - return mainBP, depBPs, errors.Wrap(err, "reading buildpack") + return mainModule, depModules, errors.Wrapf(err, "reading %s", kind) } } - return mainBP, depBPs, nil + return mainModule, depModules, nil +} + +func fromOCILayoutBlob(blob blob.Blob, kind string) (mainModule BuildModule, depModules []BuildModule, err error) { + switch kind { + case KindBuildpack: + mainModule, depModules, err = BuildpacksFromOCILayoutBlob(blob) + case KindExtension: + mainModule, err = ExtensionsFromOCILayoutBlob(blob) + default: + return nil, nil, fmt.Errorf("unknown module kind: %s", kind) + } + if err != nil { + return nil, nil, err + } + return mainModule, depModules, nil } -func extractPackagedBuildpacks(ctx context.Context, pkgImageRef string, fetcher ImageFetcher, fetchOptions 
image.FetchOptions) (mainBP Buildpack, depBPs []Buildpack, err error) { +func extractPackaged(ctx context.Context, kind string, pkgImageRef string, fetcher ImageFetcher, fetchOptions image.FetchOptions) (mainModule BuildModule, depModules []BuildModule, err error) { pkgImage, err := fetcher.Fetch(ctx, pkgImageRef, fetchOptions) if err != nil { return nil, nil, errors.Wrapf(err, "fetching image") } - mainBP, depBPs, err = ExtractBuildpacks(pkgImage) + switch kind { + case KindBuildpack: + mainModule, depModules, err = extractBuildpacks(pkgImage) + case KindExtension: + mainModule, err = extractExtensions(pkgImage) + default: + return nil, nil, fmt.Errorf("unknown module kind: %s", kind) + } if err != nil { - return nil, nil, errors.Wrapf(err, "extracting buildpacks from %s", style.Symbol(pkgImageRef)) + return nil, nil, errors.Wrapf(err, "extracting %ss from %s", kind, style.Symbol(pkgImageRef)) } - - return mainBP, depBPs, nil + return mainModule, depModules, nil } diff --git a/pkg/buildpack/downloader_test.go b/pkg/buildpack/downloader_test.go index 8672d00449..fb60d3e647 100644 --- a/pkg/buildpack/downloader_test.go +++ b/pkg/buildpack/downloader_test.go @@ -4,16 +4,16 @@ import ( "bytes" "context" "fmt" - "io/ioutil" "os" "path/filepath" "testing" "github.com/buildpacks/imgutil/fakes" "github.com/buildpacks/lifecycle/api" - "github.com/docker/docker/api/types" "github.com/golang/mock/gomock" "github.com/heroku/color" + mobysystem "github.com/moby/moby/api/types/system" + dockerclient "github.com/moby/moby/client" "github.com/pkg/errors" "github.com/sclevine/spec" "github.com/sclevine/spec/report" @@ -44,7 +44,7 @@ func testBuildpackDownloader(t *testing.T, when spec.G, it spec.S) { mockImageFactory *testmocks.MockImageFactory mockImageFetcher *testmocks.MockImageFetcher mockRegistryResolver *testmocks.MockRegistryResolver - mockDockerClient *testmocks.MockCommonAPIClient + mockDockerClient *testmocks.MockAPIClient buildpackDownloader client.BuildpackDownloader 
logger logging.Logger out bytes.Buffer @@ -52,7 +52,7 @@ func testBuildpackDownloader(t *testing.T, when spec.G, it spec.S) { ) var createBuildpack = func(descriptor dist.BuildpackDescriptor) string { - bp, err := ifakes.NewFakeBuildpackBlob(descriptor, 0644) + bp, err := ifakes.NewFakeBuildpackBlob(&descriptor, 0644) h.AssertNil(t, err) url := fmt.Sprintf("https://example.com/bp.%s.tgz", h.RandString(12)) mockDownloader.EXPECT().Download(gomock.Any(), url).Return(bp, nil).AnyTimes() @@ -61,7 +61,7 @@ func testBuildpackDownloader(t *testing.T, when spec.G, it spec.S) { var createPackage = func(imageName string) *fakes.Image { packageImage := fakes.NewImage(imageName, "", nil) - mockImageFactory.EXPECT().NewImage(packageImage.Name(), false, "linux").Return(packageImage, nil) + mockImageFactory.EXPECT().NewImage(packageImage.Name(), false, dist.Target{OS: "linux"}).Return(packageImage, nil) pack, err := client.NewClient( client.WithLogger(logger), @@ -77,9 +77,9 @@ func testBuildpackDownloader(t *testing.T, when spec.G, it spec.S) { Config: pubbldpkg.Config{ Platform: dist.Platform{OS: "linux"}, Buildpack: dist.BuildpackURI{URI: createBuildpack(dist.BuildpackDescriptor{ - API: api.MustParse("0.3"), - Info: dist.BuildpackInfo{ID: "example/foo", Version: "1.1.0"}, - Stacks: []dist.Stack{{ID: "some.stack.id"}}, + WithAPI: api.MustParse("0.3"), + WithInfo: dist.ModuleInfo{ID: "example/foo", Version: "1.1.0"}, + WithStacks: []dist.Stack{{ID: "some.stack.id"}}, })}, }, Publish: true, @@ -95,13 +95,13 @@ func testBuildpackDownloader(t *testing.T, when spec.G, it spec.S) { mockRegistryResolver = testmocks.NewMockRegistryResolver(mockController) mockImageFetcher = testmocks.NewMockImageFetcher(mockController) mockImageFactory = testmocks.NewMockImageFactory(mockController) - mockDockerClient = testmocks.NewMockCommonAPIClient(mockController) + mockDockerClient = testmocks.NewMockAPIClient(mockController) mockDownloader.EXPECT().Download(gomock.Any(), 
"https://example.fake/bp-one.tgz").Return(blob.NewBlob(filepath.Join("testdata", "buildpack")), nil).AnyTimes() mockDownloader.EXPECT().Download(gomock.Any(), "some/buildpack/dir").Return(blob.NewBlob(filepath.Join("testdata", "buildpack")), nil).AnyTimes() buildpackDownloader = buildpack.NewDownloader(logger, mockImageFetcher, mockDownloader, mockRegistryResolver) - mockDockerClient.EXPECT().Info(context.TODO()).Return(types.Info{OSType: "linux"}, nil).AnyTimes() + mockDockerClient.EXPECT().Info(context.TODO(), gomock.Any()).Return(dockerclient.SystemInfoResult{Info: mobysystem.Info{OSType: "linux"}}, nil).AnyTimes() mockRegistryResolver.EXPECT(). Resolve("some-registry", "urn:cnb:registry:example/foo@1.1.0"). @@ -113,7 +113,7 @@ func testBuildpackDownloader(t *testing.T, when spec.G, it spec.S) { AnyTimes() var err error - tmpDir, err = ioutil.TempDir("", "buildpack-downloader-test") + tmpDir, err = os.MkdirTemp("", "buildpack-downloader-test") h.AssertNil(t, err) }) @@ -122,14 +122,20 @@ func testBuildpackDownloader(t *testing.T, when spec.G, it spec.S) { h.AssertNil(t, os.RemoveAll(tmpDir)) }) - when("#DownloadBuildpack", func() { + when("#Download", func() { var ( - packageImage *fakes.Image - buildpackDownloadOptions = buildpack.DownloadOptions{ImageOS: "linux"} + packageImage *fakes.Image + downloadOptions = buildpack.DownloadOptions{Target: &dist.Target{ + OS: "linux", + }} ) - shouldFetchPackageImageWith := func(demon bool, pull image.PullPolicy) { - mockImageFetcher.EXPECT().Fetch(gomock.Any(), packageImage.Name(), image.FetchOptions{Daemon: demon, PullPolicy: pull}).Return(packageImage, nil) + shouldFetchPackageImageWith := func(demon bool, pull image.PullPolicy, target *dist.Target) { + mockImageFetcher.EXPECT().Fetch(gomock.Any(), packageImage.Name(), image.FetchOptions{ + Daemon: demon, + PullPolicy: pull, + Target: target, + }).Return(packageImage, nil) } when("package image lives in cnb registry", func() { @@ -139,33 +145,33 @@ func 
testBuildpackDownloader(t *testing.T, when spec.G, it spec.S) { when("daemon=true and pull-policy=always", func() { it("should pull and use local package image", func() { - buildpackDownloadOptions = buildpack.DownloadOptions{ + downloadOptions = buildpack.DownloadOptions{ RegistryName: "some-registry", - ImageOS: "linux", + Target: &dist.Target{OS: "linux", Arch: "amd64"}, Daemon: true, PullPolicy: image.PullAlways, } - shouldFetchPackageImageWith(true, image.PullAlways) - mainBP, _, err := buildpackDownloader.Download(context.TODO(), "urn:cnb:registry:example/foo@1.1.0", buildpackDownloadOptions) + shouldFetchPackageImageWith(true, image.PullAlways, &dist.Target{OS: "linux", Arch: "amd64"}) + mainBP, _, err := buildpackDownloader.Download(context.TODO(), "urn:cnb:registry:example/foo@1.1.0", downloadOptions) h.AssertNil(t, err) - h.AssertEq(t, mainBP.Descriptor().Info.ID, "example/foo") + h.AssertEq(t, mainBP.Descriptor().Info().ID, "example/foo") }) }) when("ambigious URI provided", func() { it("should find package in registry", func() { - buildpackDownloadOptions = buildpack.DownloadOptions{ + downloadOptions = buildpack.DownloadOptions{ RegistryName: "some-registry", - ImageOS: "linux", + Target: &dist.Target{OS: "linux"}, Daemon: true, PullPolicy: image.PullAlways, } - shouldFetchPackageImageWith(true, image.PullAlways) - mainBP, _, err := buildpackDownloader.Download(context.TODO(), "example/foo@1.1.0", buildpackDownloadOptions) + shouldFetchPackageImageWith(true, image.PullAlways, &dist.Target{OS: "linux"}) + mainBP, _, err := buildpackDownloader.Download(context.TODO(), "example/foo@1.1.0", downloadOptions) h.AssertNil(t, err) - h.AssertEq(t, mainBP.Descriptor().Info.ID, "example/foo") + h.AssertEq(t, mainBP.Descriptor().Info().ID, "example/foo") }) }) }) @@ -182,141 +188,178 @@ func testBuildpackDownloader(t *testing.T, when spec.G, it spec.S) { when("image key is provided", func() { it("should succeed", func() { packageImage = 
createPackage("some/package:tag") - buildpackDownloadOptions = buildpack.DownloadOptions{ + downloadOptions = buildpack.DownloadOptions{ Daemon: true, PullPolicy: image.PullAlways, - ImageOS: "linux", + Target: &dist.Target{OS: "linux", Arch: "amd64"}, ImageName: "some/package:tag", } - shouldFetchPackageImageWith(true, image.PullAlways) - mainBP, _, err := buildpackDownloader.Download(context.TODO(), "", buildpackDownloadOptions) + shouldFetchPackageImageWith(true, image.PullAlways, &dist.Target{OS: "linux", Arch: "amd64"}) + mainBP, _, err := buildpackDownloader.Download(context.TODO(), "", downloadOptions) h.AssertNil(t, err) - h.AssertEq(t, mainBP.Descriptor().Info.ID, "example/foo") + h.AssertEq(t, mainBP.Descriptor().Info().ID, "example/foo") }) }) when("daemon=true and pull-policy=always", func() { it("should pull and use local package image", func() { - buildpackDownloadOptions = buildpack.DownloadOptions{ - ImageOS: "linux", + downloadOptions = buildpack.DownloadOptions{ + Target: &dist.Target{OS: "linux"}, ImageName: packageImage.Name(), Daemon: true, PullPolicy: image.PullAlways, } - shouldFetchPackageImageWith(true, image.PullAlways) - mainBP, _, err := buildpackDownloader.Download(context.TODO(), "", buildpackDownloadOptions) + shouldFetchPackageImageWith(true, image.PullAlways, &dist.Target{OS: "linux"}) + mainBP, _, err := buildpackDownloader.Download(context.TODO(), "", downloadOptions) h.AssertNil(t, err) - h.AssertEq(t, mainBP.Descriptor().Info.ID, "example/foo") + h.AssertEq(t, mainBP.Descriptor().Info().ID, "example/foo") }) }) when("daemon=false and pull-policy=always", func() { it("should use remote package image", func() { - buildpackDownloadOptions = buildpack.DownloadOptions{ - ImageOS: "linux", + downloadOptions = buildpack.DownloadOptions{ + Target: &dist.Target{OS: "linux"}, ImageName: packageImage.Name(), Daemon: false, PullPolicy: image.PullAlways, } - shouldFetchPackageImageWith(false, image.PullAlways) - mainBP, _, err := 
buildpackDownloader.Download(context.TODO(), "", buildpackDownloadOptions) + shouldFetchPackageImageWith(false, image.PullAlways, &dist.Target{OS: "linux"}) + mainBP, _, err := buildpackDownloader.Download(context.TODO(), "", downloadOptions) h.AssertNil(t, err) - h.AssertEq(t, mainBP.Descriptor().Info.ID, "example/foo") + h.AssertEq(t, mainBP.Descriptor().Info().ID, "example/foo") }) }) when("daemon=false and pull-policy=always", func() { it("should use remote package URI", func() { - buildpackDownloadOptions = buildpack.DownloadOptions{ - ImageOS: "linux", + downloadOptions = buildpack.DownloadOptions{ + Target: &dist.Target{OS: "linux"}, Daemon: false, PullPolicy: image.PullAlways, } - shouldFetchPackageImageWith(false, image.PullAlways) - mainBP, _, err := buildpackDownloader.Download(context.TODO(), packageImage.Name(), buildpackDownloadOptions) + shouldFetchPackageImageWith(false, image.PullAlways, &dist.Target{OS: "linux"}) + mainBP, _, err := buildpackDownloader.Download(context.TODO(), packageImage.Name(), downloadOptions) h.AssertNil(t, err) - h.AssertEq(t, mainBP.Descriptor().Info.ID, "example/foo") + h.AssertEq(t, mainBP.Descriptor().Info().ID, "example/foo") }) }) when("publish=true and pull-policy=never", func() { it("should push to registry and not pull package image", func() { - buildpackDownloadOptions = buildpack.DownloadOptions{ - ImageOS: "linux", + downloadOptions = buildpack.DownloadOptions{ + Target: &dist.Target{OS: "linux"}, ImageName: packageImage.Name(), Daemon: false, PullPolicy: image.PullNever, } - shouldFetchPackageImageWith(false, image.PullNever) - mainBP, _, err := buildpackDownloader.Download(context.TODO(), "", buildpackDownloadOptions) + shouldFetchPackageImageWith(false, image.PullNever, &dist.Target{OS: "linux"}) + mainBP, _, err := buildpackDownloader.Download(context.TODO(), "", downloadOptions) h.AssertNil(t, err) - h.AssertEq(t, mainBP.Descriptor().Info.ID, "example/foo") + h.AssertEq(t, mainBP.Descriptor().Info().ID, 
"example/foo") }) }) when("daemon=true pull-policy=never and there is no local package image", func() { it("should fail without trying to retrieve package image from registry", func() { - buildpackDownloadOptions = buildpack.DownloadOptions{ - ImageOS: "linux", + downloadOptions = buildpack.DownloadOptions{ + Target: &dist.Target{OS: "linux"}, ImageName: packageImage.Name(), Daemon: true, PullPolicy: image.PullNever, } prepareFetcherWithMissingPackageImage() - _, _, err := buildpackDownloader.Download(context.TODO(), "", buildpackDownloadOptions) + _, _, err := buildpackDownloader.Download(context.TODO(), "", downloadOptions) h.AssertError(t, err, "not found") }) }) }) + when("package lives on filesystem", func() { it("should successfully retrieve package from absolute path", func() { buildpackPath := filepath.Join("testdata", "buildpack") buildpackURI, _ := paths.FilePathToURI(buildpackPath, "") mockDownloader.EXPECT().Download(gomock.Any(), buildpackURI).Return(blob.NewBlob(buildpackPath), nil).AnyTimes() - mainBP, _, err := buildpackDownloader.Download(context.TODO(), buildpackURI, buildpackDownloadOptions) + mainBP, _, err := buildpackDownloader.Download(context.TODO(), buildpackURI, downloadOptions) h.AssertNil(t, err) - h.AssertEq(t, mainBP.Descriptor().Info.ID, "bp.one") + h.AssertEq(t, mainBP.Descriptor().Info().ID, "bp.one") }) + it("should successfully retrieve package from relative path", func() { buildpackPath := filepath.Join("testdata", "buildpack") buildpackURI, _ := paths.FilePathToURI(buildpackPath, "") mockDownloader.EXPECT().Download(gomock.Any(), buildpackURI).Return(blob.NewBlob(buildpackPath), nil).AnyTimes() - buildpackDownloadOptions = buildpack.DownloadOptions{ - ImageOS: "linux", + downloadOptions = buildpack.DownloadOptions{ + Target: &dist.Target{OS: "linux"}, RelativeBaseDir: "testdata", } - mainBP, _, err := buildpackDownloader.Download(context.TODO(), "buildpack", buildpackDownloadOptions) + mainBP, _, err := 
buildpackDownloader.Download(context.TODO(), "buildpack", downloadOptions) h.AssertNil(t, err) - h.AssertEq(t, mainBP.Descriptor().Info.ID, "bp.one") + h.AssertEq(t, mainBP.Descriptor().Info().ID, "bp.one") + }) + + when("kind == extension", func() { + it("succeeds", func() { + extensionPath := filepath.Join("testdata", "extension") + extensionURI, _ := paths.FilePathToURI(extensionPath, "") + mockDownloader.EXPECT().Download(gomock.Any(), extensionURI).Return(blob.NewBlob(extensionPath), nil).AnyTimes() + downloadOptions = buildpack.DownloadOptions{ + Target: &dist.Target{OS: "linux"}, + ModuleKind: "extension", + RelativeBaseDir: "testdata", + } + mainExt, _, err := buildpackDownloader.Download(context.TODO(), "extension", downloadOptions) + h.AssertNil(t, err) + h.AssertEq(t, mainExt.Descriptor().Info().ID, "ext.one") + }) + }) + + when("kind == packagedExtension", func() { + it("succeeds", func() { + packagedExtensionPath := filepath.Join("testdata", "tree-extension.cnb") + packagedExtensionURI, _ := paths.FilePathToURI(packagedExtensionPath, "") + mockDownloader.EXPECT().Download(gomock.Any(), packagedExtensionURI).Return(blob.NewBlob(packagedExtensionPath), nil).AnyTimes() + downloadOptions = buildpack.DownloadOptions{ + Target: &dist.Target{OS: "linux"}, + ModuleKind: "extension", + RelativeBaseDir: "testdata", + Daemon: true, + PullPolicy: image.PullAlways, + } + mainExt, _, _ := buildpackDownloader.Download(context.TODO(), "tree-extension.cnb", downloadOptions) + h.AssertEq(t, mainExt.Descriptor().Info().ID, "samples-tree") + }) }) }) + when("package image is not a valid package", func() { - it("should error", func() { + it("errors", func() { notPackageImage := fakes.NewImage("docker.io/not/package", "", nil) mockImageFetcher.EXPECT().Fetch(gomock.Any(), notPackageImage.Name(), gomock.Any()).Return(notPackageImage, nil) h.AssertNil(t, notPackageImage.SetLabel("io.buildpacks.buildpack.layers", "")) - buildpackDownloadOptions.ImageName = 
notPackageImage.Name() - _, _, err := buildpackDownloader.Download(context.TODO(), "", buildpackDownloadOptions) + downloadOptions.ImageName = notPackageImage.Name() + _, _, err := buildpackDownloader.Download(context.TODO(), "", downloadOptions) h.AssertError(t, err, "extracting buildpacks from 'docker.io/not/package': could not find label 'io.buildpacks.buildpackage.metadata'") }) }) + when("invalid buildpack URI", func() { when("buildpack URI is from=builder:fake", func() { it("errors", func() { - _, _, err := buildpackDownloader.Download(context.TODO(), "from=builder:fake", buildpackDownloadOptions) + _, _, err := buildpackDownloader.Download(context.TODO(), "from=builder:fake", downloadOptions) h.AssertError(t, err, "'from=builder:fake' is not a valid identifier") }) }) when("buildpack URI is from=builder", func() { it("errors", func() { - _, _, err := buildpackDownloader.Download(context.TODO(), "from=builder", buildpackDownloadOptions) + _, _, err := buildpackDownloader.Download(context.TODO(), "from=builder", downloadOptions) h.AssertError(t, err, "invalid locator: FromBuilderLocator") }) @@ -329,8 +372,8 @@ func testBuildpackDownloader(t *testing.T, when spec.G, it spec.S) { Return("", errors.New("bad mhkay")). 
AnyTimes() - buildpackDownloadOptions.RegistryName = "://bad-url" - _, _, err := buildpackDownloader.Download(context.TODO(), "urn:cnb:registry:fake", buildpackDownloadOptions) + downloadOptions.RegistryName = "://bad-url" + _, _, err := buildpackDownloader.Download(context.TODO(), "urn:cnb:registry:fake", downloadOptions) h.AssertError(t, err, "locating in registry") }) }) @@ -338,17 +381,18 @@ func testBuildpackDownloader(t *testing.T, when spec.G, it spec.S) { when("can't download image from registry", func() { it("errors", func() { packageImage := fakes.NewImage("example.com/some/package@sha256:74eb48882e835d8767f62940d453eb96ed2737de3a16573881dcea7dea769df7", "", nil) - mockImageFetcher.EXPECT().Fetch(gomock.Any(), packageImage.Name(), image.FetchOptions{Daemon: false, PullPolicy: image.PullAlways}).Return(nil, errors.New("failed to pull")) + mockImageFetcher.EXPECT().Fetch(gomock.Any(), packageImage.Name(), image.FetchOptions{Daemon: false, PullPolicy: image.PullAlways, Target: &dist.Target{OS: "linux"}}).Return(nil, errors.New("failed to pull")) - buildpackDownloadOptions.RegistryName = "some-registry" - _, _, err := buildpackDownloader.Download(context.TODO(), "urn:cnb:registry:example/foo@1.1.0", buildpackDownloadOptions) + downloadOptions.RegistryName = "some-registry" + _, _, err := buildpackDownloader.Download(context.TODO(), "urn:cnb:registry:example/foo@1.1.0", downloadOptions) h.AssertError(t, err, "extracting from registry") }) }) + when("buildpack URI is an invalid locator", func() { it("errors", func() { - _, _, err := buildpackDownloader.Download(context.TODO(), "nonsense string here", buildpackDownloadOptions) + _, _, err := buildpackDownloader.Download(context.TODO(), "nonsense string here", downloadOptions) h.AssertError(t, err, "invalid locator: InvalidLocator") }) diff --git a/pkg/buildpack/locator_type.go b/pkg/buildpack/locator_type.go index 2f1a74bb5b..0019cc9004 100644 --- a/pkg/buildpack/locator_type.go +++ 
b/pkg/buildpack/locator_type.go @@ -53,7 +53,7 @@ func (l LocatorType) String() string { // GetLocatorType determines which type of locator is designated by the given input. // If a type cannot be determined, `INVALID_LOCATOR` will be returned. If an error // is encountered, it will be returned. -func GetLocatorType(locator string, relativeBaseDir string, buildpacksFromBuilder []dist.BuildpackInfo) (LocatorType, error) { +func GetLocatorType(locator string, relativeBaseDir string, buildpacksFromBuilder []dist.ModuleInfo) (LocatorType, error) { if locator == deprecatedFromBuilderPrefix { return FromBuilderLocator, nil } @@ -85,14 +85,13 @@ func HasDockerLocator(locator string) bool { return strings.HasPrefix(locator, fromDockerPrefix) } -func parseNakedLocator(locator, relativeBaseDir string, buildpacksFromBuilder []dist.BuildpackInfo) LocatorType { +func parseNakedLocator(locator, relativeBaseDir string, buildpacksFromBuilder []dist.ModuleInfo) LocatorType { // from here on, we're dealing with a naked locator, and we try to figure out what it is. To do this we check // the following characteristics in order: // 1. Does it match a path on the file system // 2. Does it match a buildpack ID in the builder // 3. Does it look like a Buildpack Registry ID // 4. 
Does it look like a Docker ref - if isLocalFile(locator, relativeBaseDir) { return URILocator } @@ -124,7 +123,7 @@ func canBeRegistryRef(locator string) bool { return registryPattern.MatchString(locator) } -func isFoundInBuilder(locator string, candidates []dist.BuildpackInfo) bool { +func isFoundInBuilder(locator string, candidates []dist.ModuleInfo) bool { id, version := ParseIDLocator(locator) for _, c := range candidates { if id == c.ID && (version == "" || version == c.Version) { diff --git a/pkg/buildpack/locator_type_test.go b/pkg/buildpack/locator_type_test.go index c38c247f7d..bdc045373f 100644 --- a/pkg/buildpack/locator_type_test.go +++ b/pkg/buildpack/locator_type_test.go @@ -23,7 +23,7 @@ func TestGetLocatorType(t *testing.T) { func testGetLocatorType(t *testing.T, when spec.G, it spec.S) { type testCase struct { locator string - builderBPs []dist.BuildpackInfo + builderBPs []dist.ModuleInfo expectedType buildpack.LocatorType expectedErr string } @@ -39,7 +39,7 @@ func testGetLocatorType(t *testing.T, when spec.G, it spec.S) { }, { locator: "from=builder:some-bp", - builderBPs: []dist.BuildpackInfo{{ID: "some-bp", Version: "some-version"}}, + builderBPs: []dist.ModuleInfo{{ID: "some-bp", Version: "some-version"}}, expectedType: buildpack.IDLocator, }, { @@ -48,12 +48,12 @@ func testGetLocatorType(t *testing.T, when spec.G, it spec.S) { }, { locator: "from=builder:some-bp@some-other-version", - builderBPs: []dist.BuildpackInfo{{ID: "some-bp", Version: "some-version"}}, + builderBPs: []dist.ModuleInfo{{ID: "some-bp", Version: "some-version"}}, expectedErr: "'from=builder:some-bp@some-other-version' is not a valid identifier", }, { locator: "urn:cnb:builder:some-bp", - builderBPs: []dist.BuildpackInfo{{ID: "some-bp", Version: "some-version"}}, + builderBPs: []dist.ModuleInfo{{ID: "some-bp", Version: "some-version"}}, expectedType: buildpack.IDLocator, }, { @@ -62,17 +62,17 @@ func testGetLocatorType(t *testing.T, when spec.G, it spec.S) { }, { locator: 
"urn:cnb:builder:some-bp@some-other-version", - builderBPs: []dist.BuildpackInfo{{ID: "some-bp", Version: "some-version"}}, + builderBPs: []dist.ModuleInfo{{ID: "some-bp", Version: "some-version"}}, expectedErr: "'urn:cnb:builder:some-bp@some-other-version' is not a valid identifier", }, { locator: "some-bp", - builderBPs: []dist.BuildpackInfo{{ID: "some-bp", Version: "any-version"}}, + builderBPs: []dist.ModuleInfo{{ID: "some-bp", Version: "any-version"}}, expectedType: buildpack.IDLocator, }, { locator: localPath("buildpack"), - builderBPs: []dist.BuildpackInfo{{ID: "bp.one", Version: "1.2.3"}}, + builderBPs: []dist.ModuleInfo{{ID: "bp.one", Version: "1.2.3"}}, expectedType: buildpack.URILocator, }, { diff --git a/pkg/buildpack/managed_collection.go b/pkg/buildpack/managed_collection.go new file mode 100644 index 0000000000..5896caeb63 --- /dev/null +++ b/pkg/buildpack/managed_collection.go @@ -0,0 +1,150 @@ +package buildpack + +// ManagedCollection keeps track of build modules and the manner in which they should be added to an OCI image (as flattened or exploded). +type ManagedCollection interface { + // AllModules returns all build modules handled by the manager. + AllModules() []BuildModule + + // ExplodedModules returns all build modules that will be added to the output artifact as a single layer + // containing a single module. + ExplodedModules() []BuildModule + + // AddModules adds module information to the collection as flattened or not, depending on how the collection is configured. + AddModules(main BuildModule, deps ...BuildModule) + + // FlattenedModules returns all build modules that will be added to the output artifact as a single layer + // containing multiple modules. + FlattenedModules() [][]BuildModule + + // ShouldFlatten returns true if the given module should be flattened. 
+ ShouldFlatten(module BuildModule) bool +} + +type managedCollection struct { + explodedModules []BuildModule + flattenedModules [][]BuildModule +} + +func (f *managedCollection) ExplodedModules() []BuildModule { + return f.explodedModules +} + +func (f *managedCollection) FlattenedModules() [][]BuildModule { + return f.flattenedModules +} + +func (f *managedCollection) AllModules() []BuildModule { + all := f.explodedModules + for _, modules := range f.flattenedModules { + all = append(all, modules...) + } + return all +} + +func (f *managedCollection) ShouldFlatten(module BuildModule) bool { + for _, modules := range f.flattenedModules { + for _, v := range modules { + if v == module { + return true + } + } + } + return false +} + +// managedCollectionV1 can be used to flatten all the flattenModuleInfos or none of them. +type managedCollectionV1 struct { + managedCollection + flattenAll bool +} + +// NewManagedCollectionV1 will create a manager instance responsible for flattening Buildpack Packages. +func NewManagedCollectionV1(flattenAll bool) ManagedCollection { + return &managedCollectionV1{ + flattenAll: flattenAll, + managedCollection: managedCollection{ + explodedModules: []BuildModule{}, + flattenedModules: [][]BuildModule{}, + }, + } +} + +func (f *managedCollectionV1) AddModules(main BuildModule, deps ...BuildModule) { + if !f.flattenAll { + // default behavior + f.explodedModules = append(f.explodedModules, append([]BuildModule{main}, deps...)...) + } else { + // flatten all + if len(f.flattenedModules) == 1 { + // we already have data in the array, append to the first element + f.flattenedModules[0] = append(f.flattenedModules[0], append([]BuildModule{main}, deps...)...) + } else { + // the array is empty, create the first element + f.flattenedModules = append(f.flattenedModules, append([]BuildModule{main}, deps...)) + } + } +} + +// NewManagedCollectionV2 will create a manager instance responsible for flattening buildpacks inside a Builder. 
+// The flattened build modules provided are the groups of buildpacks that must be put together in a single layer; the manager +// will take care of keeping them in the correct group (flattened or exploded) once they are added. +func NewManagedCollectionV2(modules FlattenModuleInfos) ManagedCollection { + flattenGroups := 0 + if modules != nil { + flattenGroups = len(modules.FlattenModules()) + } + + return &managedCollectionV2{ + flattenModuleInfos: modules, + managedCollection: managedCollection{ + explodedModules: []BuildModule{}, + flattenedModules: make([][]BuildModule, flattenGroups), + }, + } +} + +// managedCollectionV2 can be used when the build modules to be flattened are known at the point of initialization. +// The flattened build modules are provided when the collection is initialized and the collection will take care of +// keeping them in the correct group (flattened or exploded) once they are added. +type managedCollectionV2 struct { + managedCollection + flattenModuleInfos FlattenModuleInfos +} + +func (ff *managedCollectionV2) flattenGroups() []ModuleInfos { + return ff.flattenModuleInfos.FlattenModules() +} + +func (ff *managedCollectionV2) AddModules(main BuildModule, deps ...BuildModule) { + var allModules []BuildModule + allModules = append(allModules, append([]BuildModule{main}, deps...)...) + for _, module := range allModules { + if ff.flattenModuleInfos != nil && len(ff.flattenGroups()) > 0 { + pos := ff.flattenedLayerFor(module) + if pos >= 0 { + ff.flattenedModules[pos] = append(ff.flattenedModules[pos], module) + } else { + // this module must not be flattened + ff.explodedModules = append(ff.explodedModules, module) + } + } else { + // we don't want to flatten anything + ff.explodedModules = append(ff.explodedModules, module) + } + } +} + +// flattenedLayerFor given a module will try to determine which row (layer) this module must be added to in order to be flattened. 
+// If the layer is not found, it means the module must not be flattened at all. +func (ff *managedCollectionV2) flattenedLayerFor(module BuildModule) int { + // flattenGroups is a two-dimensional array, where each row represents + // a group of module infos that must be flattened together in the same layer. + for i, flattenGroup := range ff.flattenGroups() { + for _, buildModuleInfo := range flattenGroup.BuildModule() { + if buildModuleInfo.FullName() == module.Descriptor().Info().FullName() { + return i + } + } + } + return -1 +} diff --git a/pkg/buildpack/managed_collection_test.go b/pkg/buildpack/managed_collection_test.go new file mode 100644 index 0000000000..96357f61c5 --- /dev/null +++ b/pkg/buildpack/managed_collection_test.go @@ -0,0 +1,356 @@ +package buildpack_test + +import ( + "testing" + + "github.com/buildpacks/lifecycle/api" + "github.com/heroku/color" + "github.com/sclevine/spec" + "github.com/sclevine/spec/report" + + ifakes "github.com/buildpacks/pack/internal/fakes" + "github.com/buildpacks/pack/pkg/buildpack" + "github.com/buildpacks/pack/pkg/dist" + h "github.com/buildpacks/pack/testhelpers" +) + +func TestModuleManager(t *testing.T) { + color.Disable(true) + defer color.Disable(false) + spec.Run(t, "ManagedCollection", testModuleManager, spec.Report(report.Terminal{})) +} + +func testModuleManager(t *testing.T, when spec.G, it spec.S) { + /* compositeBP1 + * / \ + * bp1 compositeBP2 + * / | \ + * bp21 bp22 compositeBP3 + * | + * bp31 + */ + var ( + moduleManager buildpack.ManagedCollection + compositeBP1 buildpack.BuildModule + bp1 buildpack.BuildModule + compositeBP2 buildpack.BuildModule + bp21 buildpack.BuildModule + bp22 buildpack.BuildModule + compositeBP3 buildpack.BuildModule + bp31 buildpack.BuildModule + flattenBuildModules buildpack.FlattenModuleInfos + err error + ) + + it.Before(func() { + bp1, err = ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ + ID: 
"buildpack-1-id", + Version: "buildpack-1-version", + }, + }, 0644) + h.AssertNil(t, err) + + bp21, err = ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ + ID: "buildpack-21-id", + Version: "buildpack-21-version", + }, + }, 0644) + h.AssertNil(t, err) + + bp22, err = ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ + ID: "buildpack-22-id", + Version: "buildpack-22-version", + }, + }, 0644) + h.AssertNil(t, err) + + bp31, err = ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ + ID: "buildpack-31-id", + Version: "buildpack-31-version", + }, + }, 0644) + h.AssertNil(t, err) + + compositeBP3, err = ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ + ID: "composite-buildpack-3-id", + Version: "composite-buildpack-3-version", + }, + WithOrder: []dist.OrderEntry{{ + Group: []dist.ModuleRef{ + { + ModuleInfo: bp31.Descriptor().Info(), + }, + }, + }}, + }, 0644) + h.AssertNil(t, err) + + compositeBP2, err = ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ + ID: "composite-buildpack-2-id", + Version: "composite-buildpack-2-version", + }, + WithOrder: []dist.OrderEntry{{ + Group: []dist.ModuleRef{ + { + ModuleInfo: bp21.Descriptor().Info(), + }, + { + ModuleInfo: bp22.Descriptor().Info(), + }, + { + ModuleInfo: compositeBP3.Descriptor().Info(), + }, + }, + }}, + }, 0644) + h.AssertNil(t, err) + + compositeBP1, err = ifakes.NewFakeBuildpack(dist.BuildpackDescriptor{ + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ + ID: "composite-buildpack-1-id", + Version: "composite-buildpack-1-version", + }, + WithOrder: []dist.OrderEntry{{ + Group: []dist.ModuleRef{ + { + ModuleInfo: bp1.Descriptor().Info(), + }, + { + ModuleInfo: compositeBP2.Descriptor().Info(), + 
}, + }, + }}, + }, 0644) + h.AssertNil(t, err) + }) + + when("manager is configured in flatten mode", func() { + when("V1 is used", func() { + when("flatten all", func() { + it.Before(func() { + moduleManager = buildpack.NewManagedCollectionV1(true) + moduleManager.AddModules(compositeBP1, []buildpack.BuildModule{bp1, compositeBP2, bp21, bp22, compositeBP3, bp31}...) + }) + + when("#FlattenedModules", func() { + it("returns one flatten module (1 layer)", func() { + modules := moduleManager.FlattenedModules() + h.AssertEq(t, len(modules), 1) + h.AssertEq(t, len(modules[0]), 7) + }) + }) + + when("#ExplodedModules", func() { + it("returns empty", func() { + modules := moduleManager.ExplodedModules() + h.AssertEq(t, len(modules), 0) + }) + }) + + when("#AllModules", func() { + it("returns all explodedModules", func() { + modules := moduleManager.AllModules() + h.AssertEq(t, len(modules), 7) + }) + }) + + when("#ShouldFlatten", func() { + it("returns true for flatten explodedModules", func() { + h.AssertTrue(t, moduleManager.ShouldFlatten(compositeBP1)) + h.AssertTrue(t, moduleManager.ShouldFlatten(bp1)) + h.AssertTrue(t, moduleManager.ShouldFlatten(compositeBP2)) + h.AssertTrue(t, moduleManager.ShouldFlatten(bp21)) + h.AssertTrue(t, moduleManager.ShouldFlatten(bp22)) + h.AssertTrue(t, moduleManager.ShouldFlatten(compositeBP3)) + h.AssertTrue(t, moduleManager.ShouldFlatten(bp31)) + }) + }) + }) + }) + + when("V2 is used", func() { + when("flattened build modules are provided", func() { + it.Before(func() { + flattenBuildModules, err = buildpack.ParseFlattenBuildModules([]string{"composite-buildpack-3-id@composite-buildpack-3-version,buildpack-31-id@buildpack-31-version", "composite-buildpack-2-id@composite-buildpack-2-version,buildpack-21-id@buildpack-21-version,buildpack-22-id@buildpack-22-version"}) + h.AssertNil(t, err) + + moduleManager = buildpack.NewManagedCollectionV2(flattenBuildModules) + moduleManager.AddModules(compositeBP1, []buildpack.BuildModule{bp1, 
compositeBP2, bp21, bp22, compositeBP3, bp31}...) + }) + + when("#FlattenedModules", func() { + it("returns two flattened modules (2 layers)", func() { + modules := moduleManager.FlattenedModules() + h.AssertEq(t, len(modules), 2) + h.AssertTrue(t, len(modules[0]) == 2 || len(modules[0]) == 3) + if len(modules[0]) == 2 { + h.AssertEq(t, len(modules[1]), 3) + } else if len(modules[0]) == 3 { + h.AssertEq(t, len(modules[1]), 2) + } + }) + }) + + when("#ExplodedModules", func() { + it("returns two exploded modules: compositeBP1 and bp1", func() { + modules := moduleManager.ExplodedModules() + h.AssertEq(t, len(modules), 2) + }) + }) + + when("#AllModules", func() { + it("returns all modules", func() { + modules := moduleManager.AllModules() + h.AssertEq(t, len(modules), 7) + }) + }) + + when("#ShouldFlatten", func() { + it("returns true for flattened modules", func() { + // exploded modules + h.AssertFalse(t, moduleManager.ShouldFlatten(compositeBP1)) + h.AssertFalse(t, moduleManager.ShouldFlatten(bp1)) + + // flattened modules + h.AssertTrue(t, moduleManager.ShouldFlatten(compositeBP2)) + h.AssertTrue(t, moduleManager.ShouldFlatten(bp21)) + h.AssertTrue(t, moduleManager.ShouldFlatten(bp22)) + h.AssertTrue(t, moduleManager.ShouldFlatten(compositeBP3)) + h.AssertTrue(t, moduleManager.ShouldFlatten(bp31)) + }) + }) + }) + }) + }) + + when("manager is not configured in flatten mode", func() { + when("V1 is used", func() { + it.Before(func() { + moduleManager = buildpack.NewManagedCollectionV1(false) + }) + + when("#ExplodedModules", func() { + it("returns nil when no explodedModules are added", func() { + modules := moduleManager.ExplodedModules() + h.AssertEq(t, len(modules), 0) + }) + + when("explodedModules are added", func() { + it.Before(func() { + moduleManager.AddModules(compositeBP1, []buildpack.BuildModule{bp1, compositeBP2, bp21, bp22, compositeBP3, bp31}...) 
+ }) + it("returns all explodedModules added", func() { + modules := moduleManager.ExplodedModules() + h.AssertEq(t, len(modules), 7) + }) + }) + }) + + when("#FlattenedModules", func() { + it("returns nil when no explodedModules are added", func() { + modules := moduleManager.FlattenedModules() + h.AssertEq(t, len(modules), 0) + }) + + when("explodedModules are added", func() { + it.Before(func() { + moduleManager.AddModules(compositeBP1, []buildpack.BuildModule{bp1, compositeBP2, bp21, bp22, compositeBP3, bp31}...) + }) + it("returns nil", func() { + modules := moduleManager.FlattenedModules() + h.AssertEq(t, len(modules), 0) + }) + }) + }) + + when("#ShouldFlatten", func() { + it("returns false when no explodedModules are added", func() { + h.AssertFalse(t, moduleManager.ShouldFlatten(bp1)) + }) + + when("explodedModules are added", func() { + it.Before(func() { + moduleManager.AddModules(compositeBP1, []buildpack.BuildModule{bp1, compositeBP2, bp21, bp22, compositeBP3, bp31}...) + }) + it("returns false", func() { + h.AssertFalse(t, moduleManager.ShouldFlatten(bp1)) + h.AssertFalse(t, moduleManager.ShouldFlatten(bp21)) + h.AssertFalse(t, moduleManager.ShouldFlatten(bp22)) + h.AssertFalse(t, moduleManager.ShouldFlatten(bp31)) + }) + }) + }) + }) + + when("V2 is used", func() { + it.Before(func() { + moduleManager = buildpack.NewManagedCollectionV2(nil) + }) + + when("#ExplodedModules", func() { + it("returns nil when no explodedModules are added", func() { + modules := moduleManager.ExplodedModules() + h.AssertEq(t, len(modules), 0) + }) + + when("explodedModules are added", func() { + it.Before(func() { + moduleManager.AddModules(compositeBP1, []buildpack.BuildModule{bp1, compositeBP2, bp21, bp22, compositeBP3, bp31}...) 
+ }) + it("returns all explodedModules added", func() { + modules := moduleManager.ExplodedModules() + h.AssertEq(t, len(modules), 7) + }) + }) + }) + + when("#FlattenedModules", func() { + it("returns nil when no explodedModules are added", func() { + modules := moduleManager.FlattenedModules() + h.AssertEq(t, len(modules), 0) + }) + + when("explodedModules are added", func() { + it.Before(func() { + moduleManager.AddModules(compositeBP1, []buildpack.BuildModule{bp1, compositeBP2, bp21, bp22, compositeBP3, bp31}...) + }) + it("returns nil", func() { + modules := moduleManager.FlattenedModules() + h.AssertEq(t, len(modules), 0) + }) + }) + }) + + when("#ShouldFlatten", func() { + it("returns false when no explodedModules are added", func() { + h.AssertFalse(t, moduleManager.ShouldFlatten(bp1)) + }) + + when("explodedModules are added", func() { + it.Before(func() { + moduleManager.AddModules(compositeBP1, []buildpack.BuildModule{bp1, compositeBP2, bp21, bp22, compositeBP3, bp31}...) + }) + it("returns false", func() { + h.AssertFalse(t, moduleManager.ShouldFlatten(bp1)) + h.AssertFalse(t, moduleManager.ShouldFlatten(bp21)) + h.AssertFalse(t, moduleManager.ShouldFlatten(bp22)) + h.AssertFalse(t, moduleManager.ShouldFlatten(bp31)) + }) + }) + }) + }) + }) +} diff --git a/pkg/buildpack/multi_architecture_helper.go b/pkg/buildpack/multi_architecture_helper.go new file mode 100644 index 0000000000..92c6d56bc7 --- /dev/null +++ b/pkg/buildpack/multi_architecture_helper.go @@ -0,0 +1,163 @@ +package buildpack + +import ( + "io" + "os" + "path/filepath" + + "github.com/buildpacks/pack/internal/paths" + "github.com/buildpacks/pack/pkg/dist" + "github.com/buildpacks/pack/pkg/logging" +) + +// MultiArchConfig targets can be defined in .toml files or can be overridden by end-users via the command line; this structure offers +// utility methods to determine the expected final targets configuration. 
+type MultiArchConfig struct { + // Targets defined in .toml files + buildpackTargets []dist.Target + + // Targets defined by end-users to override configuration files + expectedTargets []dist.Target + logger logging.Logger +} + +func NewMultiArchConfig(targets []dist.Target, expected []dist.Target, logger logging.Logger) (*MultiArchConfig, error) { + return &MultiArchConfig{ + buildpackTargets: targets, + expectedTargets: expected, + logger: logger, + }, nil +} + +func (m *MultiArchConfig) Targets() []dist.Target { + if len(m.expectedTargets) == 0 { + return m.buildpackTargets + } + return m.expectedTargets +} + +// CopyConfigFiles will, given a base directory (which is expected to be the root folder of a single buildpack or an extension), +// copy the buildpack.toml or the extension.toml file from the base directory into the corresponding platform root folder for each target. +// It will return an array with all the platform root folders where the buildpack.toml or the extension.toml file was copied. +// Whether to copy the buildpack or the extension TOML file is determined by the buildpackType parameter. +func (m *MultiArchConfig) CopyConfigFiles(baseDir string, buildpackType string) ([]string, error) { + var filesToClean []string + if buildpackType == "" { + buildpackType = KindBuildpack + } + targets := dist.ExpandTargetsDistributions(m.Targets()...) + for _, target := range targets { + path, err := CopyConfigFile(baseDir, target, buildpackType) + if err != nil { + return nil, err + } + if path != "" { + filesToClean = append(filesToClean, path) + } + } + return filesToClean, nil +} + +// CopyConfigFile will copy the buildpack.toml or the extension.toml file, based on the buildpackType parameter, +// from the base directory into the corresponding platform folder +// for the specified target and desired distribution version. 
+func CopyConfigFile(baseDir string, target dist.Target, buildpackType string) (string, error) { + var path string + var err error + + if ok, platformRootFolder := PlatformRootFolder(baseDir, target); ok { + if buildpackType == KindExtension { + path, err = copyExtensionTOML(baseDir, platformRootFolder) + } else { + path, err = copyBuildpackTOML(baseDir, platformRootFolder) + } + if err != nil { + return "", err + } + return path, nil + } + return "", nil +} + +// PlatformRootFolder finds the top-most directory that identifies a target in a given buildpack folder. +// Let's define a target with the following format: [os][/arch][/variant]:[name@version], and consider the following examples: +// - Given a target linux/amd64 the platform root folder will be /linux/amd64 if the folder exists +// - Given a target windows/amd64:windows@10.0.20348.1970 the platform root folder will be /windows/amd64/windows@10.0.20348.1970 if the folder exists +// - When no target folder exists, the root folder will be equal to folder +// +// Note: If the given target has more than 1 distribution, it is recommended to use `ExpandTargetsDistributions` before +// calling this method. 
+func PlatformRootFolder(bpPathURI string, target dist.Target) (bool, string) { + var ( + pRootFolder string + err error + ) + + if paths.IsURI(bpPathURI) { + if pRootFolder, err = paths.URIToFilePath(bpPathURI); err != nil { + return false, "" + } + } else { + pRootFolder = bpPathURI + } + + targets := target.ValuesAsSlice() + found := false + current := false + for _, t := range targets { + current, pRootFolder = targetExists(pRootFolder, t) + if current { + found = current + } else { + // No need to keep looking + break + } + } + // We will return the last matching folder + return found, pRootFolder +} + +func targetExists(root, expected string) (bool, string) { + if expected == "" { + return false, root + } + path := filepath.Join(root, expected) + if exists, _ := paths.IsDir(path); exists { + return true, path + } + return false, root +} + +func copyBuildpackTOML(src string, dest string) (string, error) { + return copyFile(src, dest, "buildpack.toml") +} + +func copyExtensionTOML(src string, dest string) (string, error) { + return copyFile(src, dest, "extension.toml") +} +func copyFile(src, dest, fileName string) (string, error) { + filePath := filepath.Join(dest, fileName) + fileToCopy, err := os.Create(filePath) + if err != nil { + return "", err + } + defer fileToCopy.Close() + + fileCopyFrom, err := os.Open(filepath.Join(src, fileName)) + if err != nil { + return "", err + } + defer fileCopyFrom.Close() + + _, err = io.Copy(fileToCopy, fileCopyFrom) + if err != nil { + return "", err + } + + fileToCopy.Sync() + if err != nil { + return "", err + } + + return filePath, nil +} diff --git a/pkg/buildpack/multi_architecture_helper_test.go b/pkg/buildpack/multi_architecture_helper_test.go new file mode 100644 index 0000000000..13670435c0 --- /dev/null +++ b/pkg/buildpack/multi_architecture_helper_test.go @@ -0,0 +1,250 @@ +package buildpack_test + +import ( + "bytes" + "os" + "path/filepath" + "testing" + + "github.com/heroku/color" + "github.com/sclevine/spec" 
+ "github.com/sclevine/spec/report" + + "github.com/buildpacks/pack/internal/paths" + "github.com/buildpacks/pack/pkg/buildpack" + "github.com/buildpacks/pack/pkg/dist" + "github.com/buildpacks/pack/pkg/logging" + h "github.com/buildpacks/pack/testhelpers" +) + +func TestMultiArchConfig(t *testing.T) { + color.Disable(true) + defer color.Disable(false) + spec.Run(t, "testMultiArchConfig", testMultiArchConfig, spec.Parallel(), spec.Report(report.Terminal{})) +} + +func testMultiArchConfig(t *testing.T, when spec.G, it spec.S) { + var ( + err error + outBuf bytes.Buffer + logger *logging.LogWithWriters + multiArchConfig *buildpack.MultiArchConfig + targetsFromBuildpack []dist.Target + targetsFromExtension []dist.Target + targetsFromFlags []dist.Target + tmpDir string + ) + + it.Before(func() { + targetsFromBuildpack = []dist.Target{{OS: "linux", Arch: "amd64"}} + targetsFromFlags = []dist.Target{{OS: "linux", Arch: "arm64", ArchVariant: "v6"}} + logger = logging.NewLogWithWriters(&outBuf, &outBuf) + + tmpDir, err = os.MkdirTemp("", "test-multi-arch") + h.AssertNil(t, err) + }) + + it.After(func() { + os.RemoveAll(tmpDir) + }) + + when("#Targets", func() { + when("buildpack targets are defined", func() { + it.Before(func() { + multiArchConfig, err = buildpack.NewMultiArchConfig(targetsFromBuildpack, []dist.Target{}, logger) + h.AssertNil(t, err) + }) + + it("returns buildpack targets", func() { + h.AssertEq(t, len(multiArchConfig.Targets()), 1) + h.AssertEq(t, multiArchConfig.Targets()[0].OS, "linux") + h.AssertEq(t, multiArchConfig.Targets()[0].Arch, "amd64") + }) + }) + + when("buildpack targets are not defined, but flags are provided", func() { + it.Before(func() { + multiArchConfig, err = buildpack.NewMultiArchConfig([]dist.Target{}, targetsFromFlags, logger) + h.AssertNil(t, err) + }) + + it("returns targets from flags", func() { + h.AssertEq(t, len(multiArchConfig.Targets()), 1) + h.AssertEq(t, multiArchConfig.Targets()[0].OS, "linux") + h.AssertEq(t, 
multiArchConfig.Targets()[0].Arch, "arm64") + h.AssertEq(t, multiArchConfig.Targets()[0].ArchVariant, "v6") + }) + }) + + when("buildpack targets are defined and flags are provided", func() { + it.Before(func() { + multiArchConfig, err = buildpack.NewMultiArchConfig(targetsFromBuildpack, targetsFromFlags, logger) + h.AssertNil(t, err) + }) + + it("returns targets from flags", func() { + // flags overrides the targets in the configuration files + h.AssertEq(t, len(multiArchConfig.Targets()), 1) + h.AssertEq(t, multiArchConfig.Targets()[0].OS, "linux") + h.AssertEq(t, multiArchConfig.Targets()[0].Arch, "arm64") + h.AssertEq(t, multiArchConfig.Targets()[0].ArchVariant, "v6") + }) + }) + }) + + when("#CopyConfigFiles", func() { + when("buildpack root folder exists", func() { + var rootFolder string + + it.Before(func() { + rootFolder = filepath.Join(tmpDir, "some-buildpack") + targetsFromBuildpack = []dist.Target{{OS: "linux", Arch: "amd64"}, {OS: "linux", Arch: "arm64", ArchVariant: "v8"}} + multiArchConfig, err = buildpack.NewMultiArchConfig(targetsFromBuildpack, []dist.Target{}, logger) + h.AssertNil(t, err) + + // dummy multi-platform buildpack structure + os.MkdirAll(filepath.Join(rootFolder, "linux", "amd64"), 0755) + os.MkdirAll(filepath.Join(rootFolder, "linux", "arm64", "v8"), 0755) + _, err = os.Create(filepath.Join(rootFolder, "buildpack.toml")) + h.AssertNil(t, err) + }) + + it("copies the buildpack.toml to each target platform folder", func() { + paths, err := multiArchConfig.CopyConfigFiles(rootFolder, "buildpack") + h.AssertNil(t, err) + h.AssertEq(t, len(paths), 2) + h.AssertPathExists(t, filepath.Join(rootFolder, "linux", "amd64", "buildpack.toml")) + h.AssertPathExists(t, filepath.Join(rootFolder, "linux", "arm64", "v8", "buildpack.toml")) + }) + }) + + when("extension root folder exists", func() { + var rootFolder string + + it.Before(func() { + rootFolder = filepath.Join(tmpDir, "some-extension") + targetsFromExtension = []dist.Target{{OS: "linux", 
Arch: "amd64"}, {OS: "linux", Arch: "arm64", ArchVariant: "v8"}} + multiArchConfig, err = buildpack.NewMultiArchConfig(targetsFromExtension, []dist.Target{}, logger) + h.AssertNil(t, err) + + // dummy multi-platform extension structure + os.MkdirAll(filepath.Join(rootFolder, "linux", "amd64"), 0755) + os.MkdirAll(filepath.Join(rootFolder, "linux", "arm64", "v8"), 0755) + _, err = os.Create(filepath.Join(rootFolder, "extension.toml")) + h.AssertNil(t, err) + }) + + it("copies the extension.toml to each target platform folder", func() { + paths, err := multiArchConfig.CopyConfigFiles(rootFolder, "extension") + h.AssertNil(t, err) + h.AssertEq(t, len(paths), 2) + h.AssertPathExists(t, filepath.Join(rootFolder, "linux", "amd64", "extension.toml")) + h.AssertPathExists(t, filepath.Join(rootFolder, "linux", "arm64", "v8", "extension.toml")) + }) + }) + }) + + when("#PlatformRootFolder", func() { + var target dist.Target + + when("root folder exists", func() { + var bpURI string + + it.Before(func() { + os.MkdirAll(filepath.Join(tmpDir, "linux", "arm64", "v8"), 0755) + os.MkdirAll(filepath.Join(tmpDir, "windows", "amd64", "v2", "windows@10.0.20348.1970"), 0755) + bpURI, err = paths.FilePathToURI(tmpDir, "") + h.AssertNil(t, err) + }) + + when("target has 'os'", func() { + when("'os' directory exists", func() { + it.Before(func() { + target = dist.Target{OS: "linux"} + }) + + it("returns /", func() { + found, path := buildpack.PlatformRootFolder(bpURI, target) + h.AssertTrue(t, found) + h.AssertEq(t, path, filepath.Join(tmpDir, "linux")) + }) + }) + + when("'os' directory doesn't exist", func() { + it.Before(func() { + target = dist.Target{OS: "darwin"} + }) + + it("returns not found", func() { + found, _ := buildpack.PlatformRootFolder(bpURI, target) + h.AssertFalse(t, found) + }) + }) + }) + + when("target has 'os' and 'arch'", func() { + when("'arch' directory exists", func() { + it.Before(func() { + target = dist.Target{OS: "linux", Arch: "arm64"} + }) + + it("returns 
//", func() { + found, path := buildpack.PlatformRootFolder(bpURI, target) + h.AssertTrue(t, found) + h.AssertEq(t, path, filepath.Join(tmpDir, "linux", "arm64")) + }) + }) + + when("'arch' directory doesn't exist", func() { + it.Before(func() { + target = dist.Target{OS: "linux", Arch: "amd64"} + }) + + it("returns /", func() { + found, path := buildpack.PlatformRootFolder(bpURI, target) + h.AssertTrue(t, found) + h.AssertEq(t, path, filepath.Join(tmpDir, "linux")) + }) + }) + }) + + when("target has 'os', 'arch' and 'variant'", func() { + it.Before(func() { + target = dist.Target{OS: "linux", Arch: "arm64", ArchVariant: "v8"} + }) + + it("returns ///", func() { + found, path := buildpack.PlatformRootFolder(bpURI, target) + h.AssertTrue(t, found) + h.AssertEq(t, path, filepath.Join(tmpDir, "linux", "arm64", "v8")) + }) + }) + + when("target has 'os', 'arch', 'variant' and name@version", func() { + when("all directories exist", func() { + it.Before(func() { + target = dist.Target{OS: "windows", Arch: "amd64", ArchVariant: "v2", Distributions: []dist.Distribution{{Name: "windows", Version: "10.0.20348.1970"}}} + }) + + it("returns ////@", func() { + found, path := buildpack.PlatformRootFolder(bpURI, target) + h.AssertTrue(t, found) + h.AssertEq(t, path, filepath.Join(tmpDir, "windows", "amd64", "v2", "windows@10.0.20348.1970")) + }) + }) + + when("version doesn't exist", func() { + it.Before(func() { + target = dist.Target{OS: "windows", Arch: "amd64", ArchVariant: "v2", Distributions: []dist.Distribution{{Name: "windows", Version: "foo"}}} + }) + + it("returns the most specific matching directory (///)", func() { + found, path := buildpack.PlatformRootFolder(bpURI, target) + h.AssertTrue(t, found) + h.AssertEq(t, path, filepath.Join(tmpDir, "windows", "amd64", "v2")) + }) + }) + }) + }) + }) +} diff --git a/pkg/buildpack/oci_layout_package.go b/pkg/buildpack/oci_layout_package.go index 4be4ac6882..7ce5039991 100644 --- a/pkg/buildpack/oci_layout_package.go +++ 
b/pkg/buildpack/oci_layout_package.go @@ -4,6 +4,7 @@ import ( "archive/tar" "compress/gzip" "encoding/json" + "fmt" "io" "path" "strings" @@ -12,6 +13,7 @@ import ( v1 "github.com/opencontainers/image-spec/specs-go/v1" "github.com/pkg/errors" + "github.com/buildpacks/pack/internal/paths" "github.com/buildpacks/pack/internal/style" "github.com/buildpacks/pack/pkg/archive" blob2 "github.com/buildpacks/pack/pkg/blob" @@ -26,7 +28,7 @@ func IsOCILayoutBlob(blob blob2.Blob) (bool, error) { } defer readCloser.Close() - _, _, err = archive.ReadTarEntry(readCloser, "/oci-layout") + _, _, err = archive.ReadTarEntry(readCloser, v1.ImageLayoutFile) if err != nil { if archive.IsEntryNotExist(err) { return false, nil @@ -38,18 +40,28 @@ func IsOCILayoutBlob(blob blob2.Blob) (bool, error) { return true, nil } -// BuildpackFromOCILayoutBlob constructs buildpacks from a blob in OCI layout format. -func BuildpacksFromOCILayoutBlob(blob Blob) (mainBP Buildpack, dependencies []Buildpack, err error) { - layoutPackage, err := newOCILayoutPackage(blob) +// BuildpacksFromOCILayoutBlob constructs buildpacks from a blob in OCI layout format. +func BuildpacksFromOCILayoutBlob(blob Blob) (mainBP BuildModule, dependencies []BuildModule, err error) { + layoutPackage, err := newOCILayoutPackage(blob, KindBuildpack) if err != nil { return nil, nil, err } - return ExtractBuildpacks(layoutPackage) + return extractBuildpacks(layoutPackage) +} + +// ExtensionsFromOCILayoutBlob constructs extensions from a blob in OCI layout format. 
+func ExtensionsFromOCILayoutBlob(blob Blob) (mainExt BuildModule, err error) { + layoutPackage, err := newOCILayoutPackage(blob, KindExtension) + if err != nil { + return nil, err + } + + return extractExtensions(layoutPackage) } func ConfigFromOCILayoutBlob(blob Blob) (config v1.ImageConfig, err error) { - layoutPackage, err := newOCILayoutPackage(blob) + layoutPackage, err := newOCILayoutPackage(blob, KindBuildpack) if err != nil { return v1.ImageConfig{}, err } @@ -62,17 +74,17 @@ type ociLayoutPackage struct { blob Blob } -func newOCILayoutPackage(blob Blob) (*ociLayoutPackage, error) { +func newOCILayoutPackage(blob Blob, kind string) (*ociLayoutPackage, error) { index := &v1.Index{} - if err := unmarshalJSONFromBlob(blob, "/index.json", index); err != nil { + if err := unmarshalJSONFromBlob(blob, v1.ImageIndexFile, index); err != nil { return nil, err } var manifestDescriptor *v1.Descriptor for _, m := range index.Manifests { - if m.MediaType == "application/vnd.docker.distribution.manifest.v2+json" { - manifestDescriptor = &m // nolint:scopelint + if m.MediaType == "application/vnd.docker.distribution.manifest.v2+json" || m.MediaType == v1.MediaTypeImageManifest { + manifestDescriptor = &m // nolint:exportloopref break } } @@ -90,13 +102,23 @@ func newOCILayoutPackage(blob Blob) (*ociLayoutPackage, error) { if err := unmarshalJSONFromBlob(blob, pathFromDescriptor(manifest.Config), imageInfo); err != nil { return nil, err } - - layersLabel := imageInfo.Config.Labels[dist.BuildpackLayersLabel] - if layersLabel == "" { - return nil, errors.Errorf("label %s not found", style.Symbol(dist.BuildpackLayersLabel)) + var layersLabel string + switch kind { + case KindBuildpack: + layersLabel = imageInfo.Config.Labels[dist.BuildpackLayersLabel] + if layersLabel == "" { + return nil, errors.Errorf("label %s not found", style.Symbol(dist.BuildpackLayersLabel)) + } + case KindExtension: + layersLabel = imageInfo.Config.Labels[dist.ExtensionLayersLabel] + if layersLabel == 
"" { + return nil, errors.Errorf("label %s not found", style.Symbol(dist.ExtensionLayersLabel)) + } + default: + return nil, fmt.Errorf("unknown module kind: %s", kind) } - bpLayers := dist.BuildpackLayers{} + bpLayers := dist.ModuleLayers{} if err := json.Unmarshal([]byte(layersLabel), &bpLayers); err != nil { return nil, errors.Wrap(err, "unmarshaling layers label") } @@ -125,7 +147,7 @@ func (o *ociLayoutPackage) GetLayer(diffID string) (io.ReadCloser, error) { } layerDescriptor := o.manifest.Layers[index] - layerPath := pathFromDescriptor(layerDescriptor) + layerPath := paths.CanonicalTarPath(pathFromDescriptor(layerDescriptor)) blobReader, err := o.blob.Open() if err != nil { @@ -142,10 +164,10 @@ func (o *ociLayoutPackage) GetLayer(diffID string) (io.ReadCloser, error) { return nil, errors.Wrap(err, "failed to get next tar entry") } - if path.Clean(header.Name) == path.Clean(layerPath) { + if paths.CanonicalTarPath(header.Name) == layerPath { finalReader := blobReader - if strings.HasSuffix(layerDescriptor.MediaType, ".gzip") { + if strings.HasSuffix(layerDescriptor.MediaType, "gzip") { finalReader, err = gzip.NewReader(tr) if err != nil { return nil, err diff --git a/pkg/buildpack/oci_layout_package_test.go b/pkg/buildpack/oci_layout_package_test.go index 528d470eb7..38b8321acd 100644 --- a/pkg/buildpack/oci_layout_package_test.go +++ b/pkg/buildpack/oci_layout_package_test.go @@ -7,6 +7,7 @@ import ( "github.com/buildpacks/lifecycle/api" "github.com/heroku/color" + v1 "github.com/opencontainers/image-spec/specs-go/v1" "github.com/sclevine/spec" "github.com/sclevine/spec/report" @@ -24,36 +25,80 @@ func TestOCILayoutPackage(t *testing.T) { spec.Run(t, "Extract", testOCILayoutPackage, spec.Parallel(), spec.Report(report.Terminal{})) } +type testCase struct { + mediatype string + file string +} + func testOCILayoutPackage(t *testing.T, when spec.G, it spec.S) { when("#BuildpacksFromOCILayoutBlob", func() { + for _, test := range []testCase{ + { + mediatype: 
"application/vnd.docker.distribution.manifest.v2+json", + file: "hello-universe.cnb", + }, + { + mediatype: v1.MediaTypeImageManifest, + file: "hello-universe-oci.cnb", + }, + } { + it(fmt.Sprintf("extracts buildpacks, media type: %s", test.mediatype), func() { + mainBP, depBPs, err := buildpack.BuildpacksFromOCILayoutBlob(blob.NewBlob(filepath.Join("testdata", test.file))) + h.AssertNil(t, err) + + h.AssertEq(t, mainBP.Descriptor().Info().ID, "io.buildpacks.samples.hello-universe") + h.AssertEq(t, mainBP.Descriptor().Info().Version, "0.0.1") + h.AssertEq(t, len(depBPs), 2) + }) + + it(fmt.Sprintf("provides readable blobs, media type: %s", test.mediatype), func() { + mainBP, depBPs, err := buildpack.BuildpacksFromOCILayoutBlob(blob.NewBlob(filepath.Join("testdata", test.file))) + h.AssertNil(t, err) + + for _, bp := range append([]buildpack.BuildModule{mainBP}, depBPs...) { + reader, err := bp.Open() + h.AssertNil(t, err) + + _, contents, err := archive.ReadTarEntry( + reader, + fmt.Sprintf("/cnb/buildpacks/%s/%s/buildpack.toml", + bp.Descriptor().Info().ID, + bp.Descriptor().Info().Version, + ), + ) + h.AssertNil(t, err) + h.AssertContains(t, string(contents), bp.Descriptor().Info().ID) + h.AssertContains(t, string(contents), bp.Descriptor().Info().Version) + } + }) + } + }) + + when("#ExtensionsFromOCILayoutBlob", func() { it("extracts buildpacks", func() { - mainBP, depBPs, err := buildpack.BuildpacksFromOCILayoutBlob(blob.NewBlob(filepath.Join("testdata", "hello-universe.cnb"))) + ext, err := buildpack.ExtensionsFromOCILayoutBlob(blob.NewBlob(filepath.Join("testdata", "tree-extension.cnb"))) h.AssertNil(t, err) - h.AssertEq(t, mainBP.Descriptor().Info.ID, "io.buildpacks.samples.hello-universe") - h.AssertEq(t, mainBP.Descriptor().Info.Version, "0.0.1") - h.AssertEq(t, len(depBPs), 2) + h.AssertEq(t, ext.Descriptor().Info().ID, "samples-tree") + h.AssertEq(t, ext.Descriptor().Info().Version, "0.0.1") }) it("provides readable blobs", func() { - mainBP, depBPs, 
err := buildpack.BuildpacksFromOCILayoutBlob(blob.NewBlob(filepath.Join("testdata", "hello-universe.cnb"))) + ext, err := buildpack.ExtensionsFromOCILayoutBlob(blob.NewBlob(filepath.Join("testdata", "tree-extension.cnb"))) + h.AssertNil(t, err) + reader, err := ext.Open() h.AssertNil(t, err) - for _, bp := range append([]buildpack.Buildpack{mainBP}, depBPs...) { - reader, err := bp.Open() - h.AssertNil(t, err) - - _, contents, err := archive.ReadTarEntry( - reader, - fmt.Sprintf("/cnb/buildpacks/%s/%s/buildpack.toml", - bp.Descriptor().Info.ID, - bp.Descriptor().Info.Version, - ), - ) - h.AssertNil(t, err) - h.AssertContains(t, string(contents), bp.Descriptor().Info.ID) - h.AssertContains(t, string(contents), bp.Descriptor().Info.Version) - } + _, contents, err := archive.ReadTarEntry( + reader, + fmt.Sprintf("/cnb/extensions/%s/%s/extension.toml", + ext.Descriptor().Info().ID, + ext.Descriptor().Info().Version, + ), + ) + h.AssertNil(t, err) + h.AssertContains(t, string(contents), ext.Descriptor().Info().ID) + h.AssertContains(t, string(contents), ext.Descriptor().Info().Version) }) }) @@ -68,14 +113,14 @@ func testOCILayoutPackage(t *testing.T, when spec.G, it spec.S) { when("is NOT an OCI layout blob", func() { it("returns false", func() { - buildpackBlob, err := fakes.NewFakeBuildpackBlob(dist.BuildpackDescriptor{ - API: api.MustParse("0.3"), - Info: dist.BuildpackInfo{ + buildpackBlob, err := fakes.NewFakeBuildpackBlob(&dist.BuildpackDescriptor{ + WithAPI: api.MustParse("0.3"), + WithInfo: dist.ModuleInfo{ ID: "bp.id", Version: "bp.version", }, - Stacks: []dist.Stack{{}}, - Order: nil, + WithStacks: []dist.Stack{{}}, + WithOrder: nil, }, 0755) h.AssertNil(t, err) diff --git a/pkg/buildpack/package.go b/pkg/buildpack/package.go index c8ae73fb8c..012864c927 100644 --- a/pkg/buildpack/package.go +++ b/pkg/buildpack/package.go @@ -2,6 +2,8 @@ package buildpack import ( "io" + "strings" + "sync" "github.com/pkg/errors" @@ -14,7 +16,27 @@ type Package interface { 
GetLayer(diffID string) (io.ReadCloser, error) } -func ExtractBuildpacks(pkg Package) (mainBP Buildpack, depBPs []Buildpack, err error) { +type syncPkg struct { + mu sync.Mutex + pkg Package +} + +func (s *syncPkg) Label(name string) (value string, err error) { + s.mu.Lock() + defer s.mu.Unlock() + return s.pkg.Label(name) +} + +func (s *syncPkg) GetLayer(diffID string) (io.ReadCloser, error) { + s.mu.Lock() + defer s.mu.Unlock() + return s.pkg.GetLayer(diffID) +} + +// extractBuildpacks when provided a flattened buildpack package containing N buildpacks, +// will return N modules: 1 module with a single tar containing ALL N buildpacks, and N-1 modules with empty tar files. +func extractBuildpacks(pkg Package) (mainBP BuildModule, depBPs []BuildModule, err error) { + pkg = &syncPkg{pkg: pkg} md := &Metadata{} if found, err := dist.GetLabel(pkg, MetadataLabel, md); err != nil { return nil, nil, err @@ -25,8 +47,8 @@ func ExtractBuildpacks(pkg Package) (mainBP Buildpack, depBPs []Buildpack, err e ) } - bpLayers := dist.BuildpackLayers{} - ok, err := dist.GetLabel(pkg, dist.BuildpackLayersLabel, &bpLayers) + pkgLayers := dist.ModuleLayers{} + ok, err := dist.GetLabel(pkg, dist.BuildpackLayersLabel, &pkgLayers) if err != nil { return nil, nil, err } @@ -38,39 +60,74 @@ func ExtractBuildpacks(pkg Package) (mainBP Buildpack, depBPs []Buildpack, err e ) } - for bpID, v := range bpLayers { + // Example `dist.ModuleLayers{}`: + // + //{ + // "samples/hello-moon": { + // "0.0.1": { + // "api": "0.2", + // "stacks": [ + // { + // "id": "*" + // } + // ], + // "layerDiffID": "sha256:37ab46923c181aa5fb27c9a23479a38aec2679237f35a0ea4115e5ae81a17bba", + // "homepage": "https://github.com/buildpacks/samples/tree/main/buildpacks/hello-moon", + // "name": "Hello Moon Buildpack" + // } + // } + //} + + // If the package is a flattened buildpack, the first buildpack in the package returns all the tar content, + // and subsequent buildpacks return an empty tar. 
+ var processedDiffIDs = make(map[string]bool) + for bpID, v := range pkgLayers { for bpVersion, bpInfo := range v { desc := dist.BuildpackDescriptor{ - API: bpInfo.API, - Info: dist.BuildpackInfo{ + WithAPI: bpInfo.API, + WithInfo: dist.ModuleInfo{ ID: bpID, Version: bpVersion, Homepage: bpInfo.Homepage, Name: bpInfo.Name, }, - Stacks: bpInfo.Stacks, - Order: bpInfo.Order, + WithStacks: bpInfo.Stacks, + WithTargets: bpInfo.Targets, + WithOrder: bpInfo.Order, } diffID := bpInfo.LayerDiffID // Allow use in closure - b := &openerBlob{ - opener: func() (io.ReadCloser, error) { + + var openerFunc func() (io.ReadCloser, error) + if _, ok := processedDiffIDs[diffID]; ok { + // We already processed a layer with this diffID, so the module must be flattened; + // return an empty reader to avoid multiple tars with the same content. + openerFunc = func() (io.ReadCloser, error) { + return io.NopCloser(strings.NewReader("")), nil + } + } else { + openerFunc = func() (io.ReadCloser, error) { rc, err := pkg.GetLayer(diffID) if err != nil { return nil, errors.Wrapf(err, "extracting buildpack %s layer (diffID %s)", - style.Symbol(desc.Info.FullName()), + style.Symbol(desc.Info().FullName()), style.Symbol(diffID), ) } return rc, nil - }, + } + processedDiffIDs[diffID] = true } - if desc.Info.Match(md.BuildpackInfo) { // This is the order buildpack of the package - mainBP = FromBlob(desc, b) + b := &openerBlob{ + opener: openerFunc, + } + + if desc.Info().Match(md.ModuleInfo) { // Current module is the order buildpack of the package + mainBP = FromBlob(&desc, b) } else { - depBPs = append(depBPs, FromBlob(desc, b)) + depBPs = append(depBPs, FromBlob(&desc, b)) } } } @@ -78,6 +135,63 @@ func ExtractBuildpacks(pkg Package) (mainBP Buildpack, depBPs []Buildpack, err e return mainBP, depBPs, nil } +func extractExtensions(pkg Package) (mainExt BuildModule, err error) { + pkg = &syncPkg{pkg: pkg} + md := &Metadata{} + if found, err := dist.GetLabel(pkg, MetadataLabel, md); err != nil { + 
return nil, err + } else if !found { + return nil, errors.Errorf( + "could not find label %s", + style.Symbol(MetadataLabel), + ) + } + + pkgLayers := dist.ModuleLayers{} + ok, err := dist.GetLabel(pkg, dist.ExtensionLayersLabel, &pkgLayers) + if err != nil { + return nil, err + } + + if !ok { + return nil, errors.Errorf( + "could not find label %s", + style.Symbol(dist.ExtensionLayersLabel), + ) + } + for extID, v := range pkgLayers { + for extVersion, extInfo := range v { + desc := dist.ExtensionDescriptor{ + WithAPI: extInfo.API, + WithInfo: dist.ModuleInfo{ + ID: extID, + Version: extVersion, + Homepage: extInfo.Homepage, + Name: extInfo.Name, + }, + } + + diffID := extInfo.LayerDiffID // Allow use in closure + b := &openerBlob{ + opener: func() (io.ReadCloser, error) { + rc, err := pkg.GetLayer(diffID) + if err != nil { + return nil, errors.Wrapf(err, + "extracting extension %s layer (diffID %s)", + style.Symbol(desc.Info().FullName()), + style.Symbol(diffID), + ) + } + return rc, nil + }, + } + + mainExt = FromBlob(&desc, b) + } + } + return mainExt, nil +} + type openerBlob struct { opener func() (io.ReadCloser, error) } diff --git a/pkg/buildpack/parse_name.go b/pkg/buildpack/parse_name.go index db00972241..bc170c2788 100644 --- a/pkg/buildpack/parse_name.go +++ b/pkg/buildpack/parse_name.go @@ -37,7 +37,6 @@ func ParsePackageLocator(locator string) (imageName string) { // Supported formats: // - /[@] // - urn:cnb:registry:/[@] -// func ParseRegistryID(registryID string) (namespace string, name string, version string, err error) { id, version := ParseIDLocator(registryID) diff --git a/pkg/buildpack/testdata/buildpack-with-hardlink/bin/build b/pkg/buildpack/testdata/buildpack-with-hardlink/bin/build new file mode 100644 index 0000000000..c76df1a291 --- /dev/null +++ b/pkg/buildpack/testdata/buildpack-with-hardlink/bin/build @@ -0,0 +1 @@ +build-contents \ No newline at end of file diff --git a/pkg/buildpack/testdata/buildpack-with-hardlink/bin/detect 
b/pkg/buildpack/testdata/buildpack-with-hardlink/bin/detect new file mode 100644 index 0000000000..e69de29bb2 diff --git a/pkg/buildpack/testdata/buildpack-with-hardlink/buildpack.toml b/pkg/buildpack/testdata/buildpack-with-hardlink/buildpack.toml new file mode 100644 index 0000000000..131cb045f6 --- /dev/null +++ b/pkg/buildpack/testdata/buildpack-with-hardlink/buildpack.toml @@ -0,0 +1,10 @@ +api = "0.3" + +[buildpack] +id = "bp.one" +version = "1.2.3" +homepage = "http://one.buildpack" + +[[stacks]] +id = "some.stack.id" +mixins = ["mixinX", "build:mixinY", "run:mixinZ"] diff --git a/pkg/buildpack/testdata/buildpack-with-hardlink/original-file b/pkg/buildpack/testdata/buildpack-with-hardlink/original-file new file mode 100644 index 0000000000..257cc5642c --- /dev/null +++ b/pkg/buildpack/testdata/buildpack-with-hardlink/original-file @@ -0,0 +1 @@ +foo diff --git a/pkg/buildpack/testdata/extension/bin/detect b/pkg/buildpack/testdata/extension/bin/detect new file mode 100644 index 0000000000..e69de29bb2 diff --git a/pkg/buildpack/testdata/extension/bin/generate b/pkg/buildpack/testdata/extension/bin/generate new file mode 100644 index 0000000000..28f453ed08 --- /dev/null +++ b/pkg/buildpack/testdata/extension/bin/generate @@ -0,0 +1 @@ +generate-contents \ No newline at end of file diff --git a/pkg/buildpack/testdata/extension/extension.toml b/pkg/buildpack/testdata/extension/extension.toml new file mode 100644 index 0000000000..3ca26a4206 --- /dev/null +++ b/pkg/buildpack/testdata/extension/extension.toml @@ -0,0 +1,6 @@ +api = "0.9" + +[extension] +id = "ext.one" +version = "1.2.3" +homepage = "http://one.extension" diff --git a/pkg/buildpack/testdata/hello-universe-oci.cnb b/pkg/buildpack/testdata/hello-universe-oci.cnb new file mode 100644 index 0000000000..59b4b657fb Binary files /dev/null and b/pkg/buildpack/testdata/hello-universe-oci.cnb differ diff --git a/pkg/buildpack/testdata/tree-extension.cnb b/pkg/buildpack/testdata/tree-extension.cnb new file 
mode 100644 index 0000000000..8f7df920b2 Binary files /dev/null and b/pkg/buildpack/testdata/tree-extension.cnb differ diff --git a/pkg/cache/bind_cache.go b/pkg/cache/bind_cache.go new file mode 100644 index 0000000000..df697db0c6 --- /dev/null +++ b/pkg/cache/bind_cache.go @@ -0,0 +1,34 @@ +package cache + +import ( + "context" + "os" +) + +type BindCache struct { + docker DockerClient + bind string +} + +func NewBindCache(cacheType CacheInfo, dockerClient DockerClient) *BindCache { + return &BindCache{ + bind: cacheType.Source, + docker: dockerClient, + } +} + +func (c *BindCache) Name() string { + return c.bind +} + +func (c *BindCache) Clear(ctx context.Context) error { + err := os.RemoveAll(c.bind) + if err != nil { + return err + } + return nil +} + +func (c *BindCache) Type() Type { + return Bind +} diff --git a/pkg/cache/cache_opts.go b/pkg/cache/cache_opts.go new file mode 100644 index 0000000000..19767446f0 --- /dev/null +++ b/pkg/cache/cache_opts.go @@ -0,0 +1,160 @@ +package cache + +import ( + "encoding/csv" + "fmt" + "path/filepath" + "strings" + + "github.com/pkg/errors" +) + +type Format int +type CacheInfo struct { + Format Format + Source string +} + +type CacheOpts struct { + Build CacheInfo + Launch CacheInfo + Kaniko CacheInfo +} + +const ( + CacheVolume Format = iota + CacheImage + CacheBind +) + +func (f Format) String() string { + switch f { + case CacheImage: + return "image" + case CacheVolume: + return "volume" + case CacheBind: + return "bind" + } + return "" +} + +func (c *CacheInfo) SourceName() string { + switch c.Format { + case CacheImage: + fallthrough + case CacheVolume: + return "name" + case CacheBind: + return "source" + } + return "" +} + +func (c *CacheOpts) Set(value string) error { + csvReader := csv.NewReader(strings.NewReader(value)) + csvReader.Comma = ';' + fields, err := csvReader.Read() + if err != nil { + return err + } + + cache := &c.Build + for _, field := range fields { + parts := strings.SplitN(field, "=", 2) + 
if len(parts) != 2 { + return errors.Errorf("invalid field '%s' must be a key=value pair", field) + } + key := strings.ToLower(parts[0]) + value := parts[1] + if key == "type" { + switch value { + case "build": + cache = &c.Build + case "launch": + cache = &c.Launch + default: + return errors.Errorf("invalid cache type '%s'", value) + } + break + } + } + + for _, field := range fields { + parts := strings.SplitN(field, "=", 2) + if len(parts) != 2 { + return errors.Errorf("invalid field '%s' must be a key=value pair", field) + } + key := strings.ToLower(parts[0]) + value := parts[1] + switch key { + case "format": + switch value { + case "image": + cache.Format = CacheImage + case "volume": + cache.Format = CacheVolume + case "bind": + cache.Format = CacheBind + default: + return errors.Errorf("invalid cache format '%s'", value) + } + case "name": + cache.Source = value + case "source": + cache.Source = value + } + } + + err = sanitize(c) + if err != nil { + return err + } + return nil +} + +func (c *CacheOpts) String() string { + var cacheFlag string + cacheFlag = fmt.Sprintf("type=build;format=%s;", c.Build.Format.String()) + if c.Build.Source != "" { + cacheFlag += fmt.Sprintf("%s=%s;", c.Build.SourceName(), c.Build.Source) + } + + cacheFlag += fmt.Sprintf("type=launch;format=%s;", c.Launch.Format.String()) + if c.Launch.Source != "" { + cacheFlag += fmt.Sprintf("%s=%s;", c.Launch.SourceName(), c.Launch.Source) + } + + return cacheFlag +} + +func (c *CacheOpts) Type() string { + return "cache" +} + +func sanitize(c *CacheOpts) error { + for _, v := range []CacheInfo{c.Build, c.Launch} { + // volume cache name can be auto-generated + if v.Format != CacheVolume && v.Source == "" { + return errors.Errorf("cache '%s' is required", v.SourceName()) + } + } + + var ( + resolvedPath string + err error + ) + if c.Build.Format == CacheBind { + if resolvedPath, err = filepath.Abs(c.Build.Source); err != nil { + return errors.Wrap(err, "resolve absolute path") + } + 
c.Build.Source = filepath.Join(resolvedPath, "build-cache") + } + if c.Launch.Format == CacheBind { + if resolvedPath, err = filepath.Abs(c.Launch.Source); err != nil { + return errors.Wrap(err, "resolve absolute path") + } + c.Launch.Source = filepath.Join(resolvedPath, "launch-cache") + } + return nil +} diff --git a/pkg/cache/cache_opts_test.go b/pkg/cache/cache_opts_test.go new file mode 100644 index 0000000000..4694e06dab --- /dev/null +++ b/pkg/cache/cache_opts_test.go @@ -0,0 +1,320 @@ +package cache + +import ( + "fmt" + "os" + "runtime" + "strings" + "testing" + + "github.com/heroku/color" + "github.com/sclevine/spec" + "github.com/sclevine/spec/report" + + h "github.com/buildpacks/pack/testhelpers" +) + +type CacheOptTestCase struct { + name string + input string + output string + shouldFail bool +} + +func TestMetadata(t *testing.T) { + color.Disable(true) + defer color.Disable(false) + spec.Run(t, "Metadata", testCacheOpts, spec.Sequential(), spec.Report(report.Terminal{})) +} + +func testCacheOpts(t *testing.T, when spec.G, it spec.S) { + when("image cache format options are passed", func() { + it("with complete options", func() { + testcases := []CacheOptTestCase{ + { + name: "Build cache as Image", + input: "type=build;format=image;name=io.test.io/myorg/my-cache:build", + output: "type=build;format=image;name=io.test.io/myorg/my-cache:build;type=launch;format=volume;", + }, + { + name: "Launch cache as Image", + input: "type=launch;format=image;name=io.test.io/myorg/my-cache:build", + output: "type=build;format=volume;type=launch;format=image;name=io.test.io/myorg/my-cache:build;", + }, + } + + for _, testcase := range testcases { + var cacheFlags CacheOpts + t.Logf("Testing cache type: %s", testcase.name) + err := cacheFlags.Set(testcase.input) + h.AssertNil(t, err) + h.AssertEq(t, testcase.output, cacheFlags.String()) + } + }) + + it("with missing options", func() { + successTestCases := []CacheOptTestCase{ + { + name: "Build cache as Image 
missing: type", + input: "format=image;name=io.test.io/myorg/my-cache:build", + output: "type=build;format=image;name=io.test.io/myorg/my-cache:build;type=launch;format=volume;", + }, + { + name: "Build cache as Image missing: format", + input: "type=build;name=io.test.io/myorg/my-cache:build", + output: "type=build;format=volume;name=io.test.io/myorg/my-cache:build;type=launch;format=volume;", + }, + { + name: "Build cache as Image missing: name", + input: "type=build;format=image", + output: "cache 'name' is required", + shouldFail: true, + }, + { + name: "Build cache as Image missing: type, format", + input: "name=io.test.io/myorg/my-cache:build", + output: "type=build;format=volume;name=io.test.io/myorg/my-cache:build;type=launch;format=volume;", + }, + { + name: "Build cache as Image missing: format, name", + input: "type=build", + output: "type=build;format=volume;type=launch;format=volume;", + }, + { + name: "Build cache as Image missing: type, name", + input: "format=image", + output: "cache 'name' is required", + shouldFail: true, + }, + { + name: "Launch cache as Image missing: name", + input: "type=launch;format=image", + output: "cache 'name' is required", + shouldFail: true, + }, + } + + for _, testcase := range successTestCases { + var cacheFlags CacheOpts + t.Logf("Testing cache type: %s", testcase.name) + err := cacheFlags.Set(testcase.input) + + if testcase.shouldFail { + h.AssertError(t, err, testcase.output) + } else { + h.AssertNil(t, err) + output := cacheFlags.String() + h.AssertEq(t, testcase.output, output) + } + } + }) + + it("with invalid options", func() { + testcases := []CacheOptTestCase{ + { + name: "Invalid cache type", + input: "type=invalid_cache;format=image;name=io.test.io/myorg/my-cache:build", + output: "invalid cache type 'invalid_cache'", + shouldFail: true, + }, + { + name: "Invalid cache format", + input: "type=launch;format=invalid_format;name=io.test.io/myorg/my-cache:build", + output: "invalid cache format 
'invalid_format'", + shouldFail: true, + }, + { + name: "Not a key=value pair", + input: "launch;format=image;name=io.test.io/myorg/my-cache:build", + output: "invalid field 'launch' must be a key=value pair", + shouldFail: true, + }, + { + name: "Extra semicolon", + input: "type=launch;format=image;name=io.test.io/myorg/my-cache:build;", + output: "invalid field '' must be a key=value pair", + shouldFail: true, + }, + } + + for _, testcase := range testcases { + var cacheFlags CacheOpts + t.Logf("Testing cache type: %s", testcase.name) + err := cacheFlags.Set(testcase.input) + h.AssertError(t, err, testcase.output) + } + }) + }) + + when("volume cache format options are passed", func() { + it("with complete options", func() { + testcases := []CacheOptTestCase{ + { + name: "Build cache as Volume", + input: "type=build;format=volume;name=test-build-volume-cache", + output: "type=build;format=volume;name=test-build-volume-cache;type=launch;format=volume;", + }, + { + name: "Launch cache as Volume", + input: "type=launch;format=volume;name=test-launch-volume-cache", + output: "type=build;format=volume;type=launch;format=volume;name=test-launch-volume-cache;", + }, + } + + for _, testcase := range testcases { + var cacheFlags CacheOpts + t.Logf("Testing cache type: %s", testcase.name) + err := cacheFlags.Set(testcase.input) + h.AssertNil(t, err) + h.AssertEq(t, testcase.output, cacheFlags.String()) + } + }) + + it("with missing options", func() { + successTestCases := []CacheOptTestCase{ + { + name: "Launch cache as Volume missing: format", + input: "type=launch;name=test-launch-volume", + output: "type=build;format=volume;type=launch;format=volume;name=test-launch-volume;", + }, + { + name: "Launch cache as Volume missing: name", + input: "type=launch;format=volume", + output: "type=build;format=volume;type=launch;format=volume;", + }, + { + name: "Launch cache as Volume missing: format, name", + input: "type=launch", + output: 
"type=build;format=volume;type=launch;format=volume;", + }, + { + name: "Launch cache as Volume missing: type, name", + input: "format=volume", + output: "type=build;format=volume;type=launch;format=volume;", + }, + } + + for _, testcase := range successTestCases { + var cacheFlags CacheOpts + t.Logf("Testing cache type: %s", testcase.name) + err := cacheFlags.Set(testcase.input) + + if testcase.shouldFail { + h.AssertError(t, err, testcase.output) + } else { + h.AssertNil(t, err) + output := cacheFlags.String() + h.AssertEq(t, testcase.output, output) + } + } + }) + }) + + when("bind cache format options are passed", func() { + it("with complete options", func() { + var testcases []CacheOptTestCase + homeDir, err := os.UserHomeDir() + h.AssertNil(t, err) + cwd, err := os.Getwd() + h.AssertNil(t, err) + + if runtime.GOOS != "windows" { + testcases = []CacheOptTestCase{ + { + name: "Build cache as bind", + input: fmt.Sprintf("type=build;format=bind;source=%s/test-bind-build-cache", homeDir), + output: fmt.Sprintf("type=build;format=bind;source=%s/test-bind-build-cache/build-cache;type=launch;format=volume;", homeDir), + }, + { + name: "Build cache as bind with relative path", + input: "type=build;format=bind;source=./test-bind-build-cache-relative", + output: fmt.Sprintf("type=build;format=bind;source=%s/test-bind-build-cache-relative/build-cache;type=launch;format=volume;", cwd), + }, + { + name: "Launch cache as bind", + input: fmt.Sprintf("type=launch;format=bind;source=%s/test-bind-volume-cache", homeDir), + output: fmt.Sprintf("type=build;format=volume;type=launch;format=bind;source=%s/test-bind-volume-cache/launch-cache;", homeDir), + }, + { + name: "Case sensitivity test with uppercase path", + input: fmt.Sprintf("type=build;format=bind;source=%s/TestBindBuildCache", homeDir), + output: fmt.Sprintf("type=build;format=bind;source=%s/TestBindBuildCache/build-cache;type=launch;format=volume;", homeDir), + }, + { + name: "Case sensitivity test with mixed case 
path", + input: fmt.Sprintf("type=build;format=bind;source=%s/TeStBiNdBuildCaChe", homeDir), + output: fmt.Sprintf("type=build;format=bind;source=%s/TeStBiNdBuildCaChe/build-cache;type=launch;format=volume;", homeDir), + }, + } + } else { + testcases = []CacheOptTestCase{ + { + name: "Build cache as bind", + input: fmt.Sprintf("type=build;format=bind;source=%s\\test-bind-build-cache", homeDir), + output: fmt.Sprintf("type=build;format=bind;source=%s\\test-bind-build-cache\\build-cache;type=launch;format=volume;", homeDir), + }, + { + name: "Build cache as bind with relative path", + input: "type=build;format=bind;source=.\\test-bind-build-cache-relative", + output: fmt.Sprintf("type=build;format=bind;source=%s\\test-bind-build-cache-relative\\build-cache;type=launch;format=volume;", cwd), + }, + { + name: "Launch cache as bind", + input: fmt.Sprintf("type=launch;format=bind;source=%s\\test-bind-volume-cache", homeDir), + output: fmt.Sprintf("type=build;format=volume;type=launch;format=bind;source=%s\\test-bind-volume-cache\\launch-cache;", homeDir), + }, + // Case sensitivity test cases for Windows + { + name: "Case sensitivity test with uppercase path", + input: fmt.Sprintf("type=build;format=bind;source=%s\\TestBindBuildCache", homeDir), + output: fmt.Sprintf("type=build;format=bind;source=%s\\TestBindBuildCache\\build-cache;type=launch;format=volume;", homeDir), + }, + { + name: "Case sensitivity test with mixed case path", + input: fmt.Sprintf("type=build;format=bind;source=%s\\TeStBiNdBuildCaChe", homeDir), + output: fmt.Sprintf("type=build;format=bind;source=%s\\TeStBiNdBuildCaChe\\build-cache;type=launch;format=volume;", homeDir), + }, + } + } + + for _, testcase := range testcases { + var cacheFlags CacheOpts + t.Logf("Testing cache type: %s", testcase.name) + err := cacheFlags.Set(testcase.input) + h.AssertNil(t, err) + h.AssertEq(t, strings.ToLower(testcase.output), strings.ToLower(cacheFlags.String())) + } + }) + + it("with missing options", func() { + 
successTestCases := []CacheOptTestCase{ + { + name: "Launch cache as bind missing: source", + input: "type=launch;format=bind", + output: "cache 'source' is required", + shouldFail: true, + }, + { + name: "Launch cache as Volume missing: type, source", + input: "format=bind", + output: "cache 'source' is required", + shouldFail: true, + }, + } + + for _, testcase := range successTestCases { + var cacheFlags CacheOpts + t.Logf("Testing cache type: %s", testcase.name) + err := cacheFlags.Set(testcase.input) + + if testcase.shouldFail { + h.AssertError(t, err, testcase.output) + } else { + h.AssertNil(t, err) + output := cacheFlags.String() + h.AssertEq(t, testcase.output, output) + } + } + }) + }) +} diff --git a/internal/cache/consts.go b/pkg/cache/consts.go similarity index 91% rename from internal/cache/consts.go rename to pkg/cache/consts.go index 80ae0ef1ce..8a1098519b 100644 --- a/internal/cache/consts.go +++ b/pkg/cache/consts.go @@ -3,6 +3,7 @@ package cache const ( Image Type = iota Volume + Bind ) type Type int diff --git a/pkg/cache/image_cache.go b/pkg/cache/image_cache.go new file mode 100644 index 0000000000..3d370c8546 --- /dev/null +++ b/pkg/cache/image_cache.go @@ -0,0 +1,44 @@ +package cache + +import ( + "context" + + cerrdefs "github.com/containerd/errdefs" + "github.com/google/go-containerregistry/pkg/name" + dockerClient "github.com/moby/moby/client" +) + +type ImageCache struct { + docker DockerClient + image string +} + +type DockerClient interface { + ImageRemove(ctx context.Context, image string, options dockerClient.ImageRemoveOptions) (dockerClient.ImageRemoveResult, error) + VolumeRemove(ctx context.Context, volumeID string, options dockerClient.VolumeRemoveOptions) (dockerClient.VolumeRemoveResult, error) +} + +func NewImageCache(imageRef name.Reference, dockerClient DockerClient) *ImageCache { + return &ImageCache{ + image: imageRef.Name(), + docker: dockerClient, + } +} + +func (c *ImageCache) Name() string { + return c.image +} + 
+func (c *ImageCache) Clear(ctx context.Context) error { + _, err := c.docker.ImageRemove(ctx, c.Name(), dockerClient.ImageRemoveOptions{ + Force: true, + }) + if err != nil && !cerrdefs.IsNotFound(err) { + return err + } + return nil +} + +func (c *ImageCache) Type() Type { + return Image +} diff --git a/internal/cache/image_cache_test.go b/pkg/cache/image_cache_test.go similarity index 78% rename from internal/cache/image_cache_test.go rename to pkg/cache/image_cache_test.go index 904f6936fa..637ba6736b 100644 --- a/internal/cache/image_cache_test.go +++ b/pkg/cache/image_cache_test.go @@ -2,20 +2,17 @@ package cache_test import ( "context" - "math/rand" "testing" - "time" + + "github.com/buildpacks/pack/pkg/cache" "github.com/buildpacks/imgutil/local" - "github.com/docker/docker/api/types" - "github.com/docker/docker/api/types/filters" - "github.com/docker/docker/client" "github.com/google/go-containerregistry/pkg/name" "github.com/heroku/color" + "github.com/moby/moby/client" "github.com/sclevine/spec" "github.com/sclevine/spec/report" - "github.com/buildpacks/pack/internal/cache" h "github.com/buildpacks/pack/testhelpers" ) @@ -23,18 +20,17 @@ func TestImageCache(t *testing.T) { h.RequireDocker(t) color.Disable(true) defer color.Disable(false) - rand.Seed(time.Now().UTC().UnixNano()) spec.Run(t, "ImageCache", testImageCache, spec.Parallel(), spec.Report(report.Terminal{})) } func testImageCache(t *testing.T, when spec.G, it spec.S) { when("#NewImageCache", func() { - var dockerClient client.CommonAPIClient + var dockerClient *client.Client it.Before(func() { var err error - dockerClient, err = client.NewClientWithOpts(client.FromEnv, client.WithVersion("1.38")) + dockerClient, err = client.New(client.FromEnv) h.AssertNil(t, err) }) @@ -78,12 +74,12 @@ func testImageCache(t *testing.T, when spec.G, it spec.S) { when("#Type", func() { var ( - dockerClient client.CommonAPIClient + dockerClient client.APIClient ) it.Before(func() { var err error - dockerClient, 
err = client.NewClientWithOpts(client.FromEnv, client.WithVersion("1.38")) + dockerClient, err = client.New(client.FromEnv) h.AssertNil(t, err) }) @@ -99,14 +95,14 @@ func testImageCache(t *testing.T, when spec.G, it spec.S) { when("#Clear", func() { var ( imageName string - dockerClient client.CommonAPIClient + dockerClient client.APIClient subject *cache.ImageCache ctx context.Context ) it.Before(func() { var err error - dockerClient, err = client.NewClientWithOpts(client.FromEnv, client.WithVersion("1.38")) + dockerClient, err = client.New(client.FromEnv) h.AssertNil(t, err) ctx = context.TODO() @@ -128,14 +124,13 @@ func testImageCache(t *testing.T, when spec.G, it spec.S) { it("removes the image", func() { err := subject.Clear(ctx) h.AssertNil(t, err) - images, err := dockerClient.ImageList(context.TODO(), types.ImageListOptions{ - Filters: filters.NewArgs(filters.KeyValuePair{ - Key: "reference", - Value: imageName, - }), + result, err := dockerClient.ImageList(context.TODO(), client.ImageListOptions{ + Filters: client.Filters{ + "reference": {imageName: true}, + }, }) h.AssertNil(t, err) - h.AssertEq(t, len(images), 0) + h.AssertEq(t, len(result.Items), 0) }) }) diff --git a/pkg/cache/volume_cache.go b/pkg/cache/volume_cache.go new file mode 100644 index 0000000000..5109c30a99 --- /dev/null +++ b/pkg/cache/volume_cache.go @@ -0,0 +1,134 @@ +package cache + +import ( + "context" + "crypto/rand" + "crypto/sha256" + "fmt" + "os" + "strings" + + "github.com/chainguard-dev/kaniko/pkg/util/proc" + "github.com/google/go-containerregistry/pkg/name" + dockerClient "github.com/moby/moby/client" + + cerrdefs "github.com/containerd/errdefs" + + "github.com/buildpacks/pack/internal/config" + "github.com/buildpacks/pack/internal/paths" + "github.com/buildpacks/pack/pkg/logging" +) + +const EnvVolumeKey = "PACK_VOLUME_KEY" + +type VolumeCache struct { + docker DockerClient + volume string +} + +func NewVolumeCache(imageRef name.Reference, cacheType CacheInfo, suffix 
string, dockerClient DockerClient, logger logging.Logger) (*VolumeCache, error) { + var volumeName string + if cacheType.Source == "" { + volumeKey, err := getVolumeKey(imageRef, logger) + if err != nil { + return nil, err + } + sum := sha256.Sum256([]byte(imageRef.Name() + volumeKey)) + vol := paths.FilterReservedNames(fmt.Sprintf("%s-%x", sanitizedRef(imageRef), sum[:6])) + volumeName = fmt.Sprintf("pack-cache-%s.%s", vol, suffix) + } else { + volumeName = paths.FilterReservedNames(cacheType.Source) + } + + return &VolumeCache{ + volume: volumeName, + docker: dockerClient, + }, nil +} + +func getVolumeKey(imageRef name.Reference, logger logging.Logger) (string, error) { + var foundKey string + + // first, look for key in env + + foundKey = os.Getenv(EnvVolumeKey) + if foundKey != "" { + return foundKey, nil + } + + // then, look for key in existing config + + volumeKeysPath, err := config.DefaultVolumeKeysPath() + if err != nil { + return "", err + } + cfg, err := config.ReadVolumeKeys(volumeKeysPath) + if err != nil { + return "", err + } + + foundKey = cfg.VolumeKeys[imageRef.Name()] + if foundKey != "" { + return foundKey, nil + } + + // finally, create new key and store it in config + + // if we're running in a container, we should log a warning + // so that we don't always re-create the cache + if RunningInContainer() { + logger.Warnf("%s is unset; set this environment variable to a secret value to avoid creating a new volume cache on every build", EnvVolumeKey) + } + + newKey := randString(20) + if cfg.VolumeKeys == nil { + cfg.VolumeKeys = make(map[string]string) + } + cfg.VolumeKeys[imageRef.Name()] = newKey + if err = config.Write(cfg, volumeKeysPath); err != nil { + return "", err + } + + return newKey, nil +} + +// Returns a string iwith lowercase a-z, of length n +func randString(n int) string { + b := make([]byte, n) + _, err := rand.Read(b) + if err != nil { + panic(err) + } + for i := range b { + b[i] = 'a' + (b[i] % 26) + } + return string(b) +} + 
+func (c *VolumeCache) Name() string { + return c.volume +} + +func (c *VolumeCache) Clear(ctx context.Context) error { + _, err := c.docker.VolumeRemove(ctx, c.Name(), dockerClient.VolumeRemoveOptions{Force: true}) + if err != nil && !cerrdefs.IsNotFound(err) { + return err + } + return nil +} + +func (c *VolumeCache) Type() Type { + return Volume +} + +// note image names and volume names are validated using the same restrictions: +// see https://github.com/moby/moby/blob/f266f13965d5bfb1825afa181fe6c32f3a597fa3/daemon/names/names.go#L5 +func sanitizedRef(ref name.Reference) string { + result := strings.TrimPrefix(ref.Context().String(), ref.Context().RegistryStr()+"/") + result = strings.ReplaceAll(result, "/", "_") + return fmt.Sprintf("%s_%s", result, ref.Identifier()) +} + +var RunningInContainer = func() bool { + return proc.GetContainerRuntime(0, 0) != proc.RuntimeNotFound +} diff --git a/pkg/cache/volume_cache_test.go b/pkg/cache/volume_cache_test.go new file mode 100644 index 0000000000..6866d4a885 --- /dev/null +++ b/pkg/cache/volume_cache_test.go @@ -0,0 +1,355 @@ +package cache_test + +import ( + "bytes" + "context" + "os" + "path/filepath" + "strings" + "testing" + + "github.com/buildpacks/pack/internal/config" + "github.com/buildpacks/pack/pkg/cache" + "github.com/buildpacks/pack/pkg/logging" + + "github.com/docker/docker/daemon/names" + "github.com/google/go-containerregistry/pkg/name" + "github.com/heroku/color" + "github.com/moby/moby/client" + "github.com/sclevine/spec" + "github.com/sclevine/spec/report" + + h "github.com/buildpacks/pack/testhelpers" +) + +func TestVolumeCache(t *testing.T) { + h.RequireDocker(t) + color.Disable(true) + defer color.Disable(false) + + spec.Run(t, "VolumeCache", testCache, spec.Sequential(), spec.Report(report.Terminal{})) +} + +func testCache(t *testing.T, when spec.G, it spec.S) { + var ( + dockerClient client.APIClient + outBuf bytes.Buffer + logger logging.Logger + ) + + it.Before(func() { + var err error + 
dockerClient, err = client.New(client.FromEnv) + h.AssertNil(t, err) + logger = logging.NewSimpleLogger(&outBuf) + }) + + when("#NewVolumeCache", func() { + when("volume cache name is empty", func() { + it("adds suffix to calculated name", func() { + ref, err := name.ParseReference("my/repo", name.WeakValidation) + h.AssertNil(t, err) + subject, _ := cache.NewVolumeCache(ref, cache.CacheInfo{}, "some-suffix", dockerClient, logger) + if !strings.HasSuffix(subject.Name(), ".some-suffix") { + t.Fatalf("Calculated volume name '%s' should end with '.some-suffix'", subject.Name()) + } + }) + + it("reusing the same cache for the same repo name", func() { + ref, err := name.ParseReference("my/repo", name.WeakValidation) + h.AssertNil(t, err) + + subject, _ := cache.NewVolumeCache(ref, cache.CacheInfo{}, "some-suffix", dockerClient, logger) + expected, _ := cache.NewVolumeCache(ref, cache.CacheInfo{}, "some-suffix", dockerClient, logger) + if subject.Name() != expected.Name() { + t.Fatalf("The same repo name should result in the same volume") + } + }) + + it("supplies different volumes for different tags", func() { + ref, err := name.ParseReference("my/repo:other-tag", name.WeakValidation) + h.AssertNil(t, err) + + subject, _ := cache.NewVolumeCache(ref, cache.CacheInfo{}, "some-suffix", dockerClient, logger) + + ref, err = name.ParseReference("my/repo", name.WeakValidation) + h.AssertNil(t, err) + notExpected, _ := cache.NewVolumeCache(ref, cache.CacheInfo{}, "some-suffix", dockerClient, logger) + if subject.Name() == notExpected.Name() { + t.Fatalf("Different image tags should result in different volumes") + } + }) + + it("supplies different volumes for different registries", func() { + ref, err := name.ParseReference("registry.com/my/repo:other-tag", name.WeakValidation) + h.AssertNil(t, err) + + subject, _ := cache.NewVolumeCache(ref, cache.CacheInfo{}, "some-suffix", dockerClient, logger) + + ref, err = name.ParseReference("my/repo", name.WeakValidation) + 
h.AssertNil(t, err) + notExpected, _ := cache.NewVolumeCache(ref, cache.CacheInfo{}, "some-suffix", dockerClient, logger) + if subject.Name() == notExpected.Name() { + t.Fatalf("Different image registries should result in different volumes") + } + }) + + it("resolves implied tag", func() { + ref, err := name.ParseReference("my/repo:latest", name.WeakValidation) + h.AssertNil(t, err) + + subject, _ := cache.NewVolumeCache(ref, cache.CacheInfo{}, "some-suffix", dockerClient, logger) + + ref, err = name.ParseReference("my/repo", name.WeakValidation) + h.AssertNil(t, err) + expected, _ := cache.NewVolumeCache(ref, cache.CacheInfo{}, "some-suffix", dockerClient, logger) + h.AssertEq(t, subject.Name(), expected.Name()) + }) + + it("resolves implied registry", func() { + ref, err := name.ParseReference("index.docker.io/my/repo", name.WeakValidation) + h.AssertNil(t, err) + + subject, _ := cache.NewVolumeCache(ref, cache.CacheInfo{}, "some-suffix", dockerClient, logger) + + ref, err = name.ParseReference("my/repo", name.WeakValidation) + h.AssertNil(t, err) + expected, _ := cache.NewVolumeCache(ref, cache.CacheInfo{}, "some-suffix", dockerClient, logger) + h.AssertEq(t, subject.Name(), expected.Name()) + }) + + it("includes human readable information", func() { + ref, err := name.ParseReference("myregistryhost:5000/fedora/httpd:version1.0", name.WeakValidation) + h.AssertNil(t, err) + + subject, _ := cache.NewVolumeCache(ref, cache.CacheInfo{}, "some-suffix", dockerClient, logger) + + h.AssertContains(t, subject.Name(), "fedora_httpd_version1.0") + h.AssertTrue(t, names.RestrictedNamePattern.MatchString(subject.Name())) + }) + + when("PACK_VOLUME_KEY", func() { + when("is set", func() { + it.After(func() { + h.AssertNil(t, os.Unsetenv("PACK_VOLUME_KEY")) + }) + + it("uses it to construct the volume name", func() { + ref, err := name.ParseReference("my/repo:some-tag", name.WeakValidation) + h.AssertNil(t, err) + + nameFromNewKey, _ := cache.NewVolumeCache(ref, 
cache.CacheInfo{}, "some-suffix", dockerClient, logger) // sources a new key + h.AssertNil(t, os.Setenv("PACK_VOLUME_KEY", "some-volume-key")) + nameFromEnvKey, _ := cache.NewVolumeCache(ref, cache.CacheInfo{}, "some-suffix", dockerClient, logger) // sources key from env + h.AssertNotEq(t, nameFromNewKey.Name(), nameFromEnvKey.Name()) + }) + }) + + when("is unset", func() { + var tmpPackHome string + + it.Before(func() { + var err error + tmpPackHome, err = os.MkdirTemp("", "") + h.AssertNil(t, err) + h.AssertNil(t, os.Setenv("PACK_HOME", tmpPackHome)) + }) + + it.After(func() { + h.AssertNil(t, os.RemoveAll(tmpPackHome)) + }) + + when("~/.pack/volume-keys.toml contains key for repo name", func() { + it("sources the key from ~/.pack/volume-keys.toml", func() { + ref, err := name.ParseReference("my/repo:some-tag", name.WeakValidation) + h.AssertNil(t, err) + + nameFromNewKey, _ := cache.NewVolumeCache(ref, cache.CacheInfo{}, "some-suffix", dockerClient, logger) // sources a new key + + cfgContents := ` +[volume-keys] +"index.docker.io/my/repo:some-tag" = "SOME_VOLUME_KEY" +` + h.AssertNil(t, os.WriteFile(filepath.Join(tmpPackHome, "volume-keys.toml"), []byte(cfgContents), 0755)) // overrides the key that was set + + nameFromConfigKey, _ := cache.NewVolumeCache(ref, cache.CacheInfo{}, "some-suffix", dockerClient, logger) // sources key from config + h.AssertNotEq(t, nameFromNewKey.Name(), nameFromConfigKey.Name()) + }) + }) + + when("~/.pack/volume-keys.toml missing key for repo name", func() { + it("generates a new key and saves it to ~/.pack/volume-keys.toml", func() { + ref, err := name.ParseReference("my/repo:some-tag", name.WeakValidation) + h.AssertNil(t, err) + + nameFromNewKey, _ := cache.NewVolumeCache(ref, cache.CacheInfo{}, "some-suffix", dockerClient, logger) // sources a new key + nameFromConfigKey, _ := cache.NewVolumeCache(ref, cache.CacheInfo{}, "some-suffix", dockerClient, logger) // sources same key from config + h.AssertEq(t, nameFromNewKey.Name(), 
nameFromConfigKey.Name()) + + cfg, err := config.ReadVolumeKeys(filepath.Join(tmpPackHome, "volume-keys.toml")) + h.AssertNil(t, err) + h.AssertNotNil(t, cfg.VolumeKeys["index.docker.io/my/repo:some-tag"]) + }) + + when("containerized pack", func() { + it.Before(func() { + cache.RunningInContainer = func() bool { + return true + } + }) + + it("logs a warning", func() { + ref, err := name.ParseReference("my/repo:some-tag", name.WeakValidation) + h.AssertNil(t, err) + + _, _ = cache.NewVolumeCache(ref, cache.CacheInfo{}, "some-suffix", dockerClient, logger) // sources a new key + _, _ = cache.NewVolumeCache(ref, cache.CacheInfo{}, "some-suffix", dockerClient, logger) // sources same key from config + h.AssertContains(t, outBuf.String(), "PACK_VOLUME_KEY is unset; set this environment variable to a secret value to avoid creating a new volume cache on every build") + h.AssertEq(t, strings.Count(outBuf.String(), "PACK_VOLUME_KEY is unset"), 1) // the second call to NewVolumeCache reads from the config + }) + }) + }) + }) + }) + }) + + when("volume cache name is not empty", func() { + volumeName := "test-volume-name" + cacheInfo := cache.CacheInfo{ + Format: cache.CacheVolume, + Source: volumeName, + } + + it("named volume created without suffix", func() { + ref, err := name.ParseReference("my/repo", name.WeakValidation) + h.AssertNil(t, err) + + subject, _ := cache.NewVolumeCache(ref, cacheInfo, "some-suffix", dockerClient, logger) + + if volumeName != subject.Name() { + t.Fatalf("Volume name '%s' should be same as the name specified '%s'", subject.Name(), volumeName) + } + }) + + it("reusing the same cache for the same repo name", func() { + ref, err := name.ParseReference("my/repo", name.WeakValidation) + h.AssertNil(t, err) + + subject, _ := cache.NewVolumeCache(ref, cacheInfo, "some-suffix", dockerClient, logger) + + expected, _ := cache.NewVolumeCache(ref, cacheInfo, "some-suffix", dockerClient, logger) + if subject.Name() != expected.Name() { + t.Fatalf("The same 
repo name should result in the same volume") + } + }) + + it("supplies different volumes for different registries", func() { + ref, err := name.ParseReference("registry.com/my/repo:other-tag", name.WeakValidation) + h.AssertNil(t, err) + + subject, _ := cache.NewVolumeCache(ref, cache.CacheInfo{}, "some-suffix", dockerClient, logger) + + ref, err = name.ParseReference("my/repo", name.WeakValidation) + h.AssertNil(t, err) + notExpected, _ := cache.NewVolumeCache(ref, cache.CacheInfo{}, "some-suffix", dockerClient, logger) + if subject.Name() == notExpected.Name() { + t.Fatalf("Different image registries should result in different volumes") + } + }) + + it("resolves implied tag", func() { + ref, err := name.ParseReference("my/repo:latest", name.WeakValidation) + h.AssertNil(t, err) + + subject, _ := cache.NewVolumeCache(ref, cache.CacheInfo{}, "some-suffix", dockerClient, logger) + + ref, err = name.ParseReference("my/repo", name.WeakValidation) + h.AssertNil(t, err) + expected, _ := cache.NewVolumeCache(ref, cache.CacheInfo{}, "some-suffix", dockerClient, logger) + h.AssertEq(t, subject.Name(), expected.Name()) + }) + + it("resolves implied registry", func() { + ref, err := name.ParseReference("index.docker.io/my/repo", name.WeakValidation) + h.AssertNil(t, err) + + subject, _ := cache.NewVolumeCache(ref, cache.CacheInfo{}, "some-suffix", dockerClient, logger) + + ref, err = name.ParseReference("my/repo", name.WeakValidation) + h.AssertNil(t, err) + expected, _ := cache.NewVolumeCache(ref, cache.CacheInfo{}, "some-suffix", dockerClient, logger) + h.AssertEq(t, subject.Name(), expected.Name()) + }) + + it("includes human readable information", func() { + ref, err := name.ParseReference("myregistryhost:5000/fedora/httpd:version1.0", name.WeakValidation) + h.AssertNil(t, err) + + subject, _ := cache.NewVolumeCache(ref, cache.CacheInfo{}, "some-suffix", dockerClient, logger) + + h.AssertContains(t, subject.Name(), "fedora_httpd_version1.0") + h.AssertTrue(t, 
names.RestrictedNamePattern.MatchString(subject.Name())) + }) + }) + }) + + when("#Clear", func() { + var ( + volumeName string + dockerClient client.APIClient + subject *cache.VolumeCache + ctx context.Context + ) + + it.Before(func() { + var err error + dockerClient, err = client.New(client.FromEnv) + h.AssertNil(t, err) + ctx = context.TODO() + + ref, err := name.ParseReference(h.RandString(10), name.WeakValidation) + h.AssertNil(t, err) + + subject, _ = cache.NewVolumeCache(ref, cache.CacheInfo{}, "some-suffix", dockerClient, logger) + volumeName = subject.Name() + }) + + when("there is a cache volume", func() { + it.Before(func() { + dockerClient.VolumeCreate(context.TODO(), client.VolumeCreateOptions{ + Name: volumeName, + }) + }) + + it("removes the volume", func() { + err := subject.Clear(ctx) + h.AssertNil(t, err) + + volumesResult, err := dockerClient.VolumeList(context.TODO(), client.VolumeListOptions{ + Filters: client.Filters{ + "name": {volumeName: true}, + }, + }) + h.AssertNil(t, err) + h.AssertEq(t, len(volumesResult.Items), 0) + }) + }) + + when("there is no cache volume", func() { + it("does not fail", func() { + err := subject.Clear(ctx) + h.AssertNil(t, err) + }) + }) + }) + + when("#Type", func() { + it("returns the cache type", func() { + ref, err := name.ParseReference("my/repo", name.WeakValidation) + h.AssertNil(t, err) + subject, _ := cache.NewVolumeCache(ref, cache.CacheInfo{}, "some-suffix", dockerClient, logger) + expected := cache.Volume + h.AssertEq(t, subject.Type(), expected) + }) + }) +} diff --git a/pkg/client/build.go b/pkg/client/build.go index 2f1be611bd..b4fc5c126e 100644 --- a/pkg/client/build.go +++ b/pkg/client/build.go @@ -1,62 +1,78 @@ package client import ( + "archive/tar" "context" "crypto/rand" + "crypto/sha256" + "encoding/hex" + "encoding/json" "fmt" - "io/ioutil" + "io" "os" "path/filepath" "sort" + "strconv" "strings" + "time" "github.com/Masterminds/semver" "github.com/buildpacks/imgutil" + 
"github.com/buildpacks/imgutil/layout" "github.com/buildpacks/imgutil/local" "github.com/buildpacks/imgutil/remote" - "github.com/buildpacks/lifecycle/platform" - "github.com/docker/docker/api/types" - "github.com/docker/docker/volume/mounts" + "github.com/buildpacks/lifecycle/platform/files" + "github.com/chainguard-dev/kaniko/pkg/util/proc" "github.com/google/go-containerregistry/pkg/name" + "github.com/moby/moby/client" "github.com/pkg/errors" ignore "github.com/sabhiram/go-gitignore" + "github.com/buildpacks/pack/buildpackage" "github.com/buildpacks/pack/internal/build" "github.com/buildpacks/pack/internal/builder" internalConfig "github.com/buildpacks/pack/internal/config" + "github.com/buildpacks/pack/internal/layer" pname "github.com/buildpacks/pack/internal/name" + "github.com/buildpacks/pack/internal/paths" "github.com/buildpacks/pack/internal/stack" "github.com/buildpacks/pack/internal/stringset" "github.com/buildpacks/pack/internal/style" "github.com/buildpacks/pack/internal/termui" "github.com/buildpacks/pack/pkg/archive" "github.com/buildpacks/pack/pkg/buildpack" + "github.com/buildpacks/pack/pkg/cache" "github.com/buildpacks/pack/pkg/dist" "github.com/buildpacks/pack/pkg/image" "github.com/buildpacks/pack/pkg/logging" projectTypes "github.com/buildpacks/pack/pkg/project/types" + v02 "github.com/buildpacks/pack/pkg/project/v02" ) const ( - minLifecycleVersionSupportingCreator = "0.7.4" - prevLifecycleVersionSupportingImage = "0.6.1" - minLifecycleVersionSupportingImage = "0.7.5" + minLifecycleVersionSupportingCreator = "0.7.4" + prevLifecycleVersionSupportingImage = "0.6.1" + minLifecycleVersionSupportingImage = "0.7.5" + minLifecycleVersionSupportingCreatorWithExtensions = "0.19.0" ) +var RunningInContainer = func() bool { + return proc.GetContainerRuntime(0, 0) != proc.RuntimeNotFound +} + // LifecycleExecutor executes the lifecycle which satisfies the Cloud Native Buildpacks Lifecycle specification. 
// Implementations of the Lifecycle must execute the following phases by calling the // phase-specific lifecycle binary in order: // -// Detection: /cnb/lifecycle/detector -// Analysis: /cnb/lifecycle/analyzer -// Cache Restoration: /cnb/lifecycle/restorer -// Build: /cnb/lifecycle/builder -// Export: /cnb/lifecycle/exporter +// Detection: /cnb/lifecycle/detector +// Analysis: /cnb/lifecycle/analyzer +// Cache Restoration: /cnb/lifecycle/restorer +// Build: /cnb/lifecycle/builder +// Export: /cnb/lifecycle/exporter // // or invoke the single creator binary: // -// Creator: /cnb/lifecycle/creator -// +// Creator: /cnb/lifecycle/creator type LifecycleExecutor interface { // Execute is responsible for invoking each of these binaries // with the desired configuration. @@ -92,8 +108,11 @@ type BuildOptions struct { // e.g. tcp://example.com:1234, unix:///run/user/1000/podman/podman.sock DockerHost string + // the target environment the OCI image is expected to be run in, i.e. production, test, development. + CNBExecutionEnv string + // Used to determine a run-image mirror if Run Image is empty. - // Used in combination with Builder metadata to determine to the the 'best' mirror. + // Used in combination with Builder metadata to determine to the 'best' mirror. // 'best' is defined as: // - if Publish is true, the best mirror matches registry we are publishing to. // - if Publish is false, the best mirror matches a registry specified in Image. @@ -105,6 +124,9 @@ type BuildOptions struct { // Buildpacks may both read and overwrite these values. Env map[string]string + // Used to configure various cache available options + Cache cache.CacheOpts + // Option only valid if Publish is true // Create an additional image that contains cache=true layers and push it to the registry. 
CacheImage string @@ -121,11 +143,19 @@ type BuildOptions struct { // Launch a terminal UI to depict the build process Interactive bool + // Disable System Buildpacks present in the builder + DisableSystemBuildpacks bool + // List of buildpack images or archives to add to a builder. // These buildpacks may overwrite those on the builder if they // share both an ID and Version with a buildpack on the builder. Buildpacks []string + // List of extension images or archives to add to a builder. + // These extensions may overwrite those on the builder if they + // share both an ID and Version with an extension on the builder. + Extensions []string + // Additional image tags to push to, each will contain contents identical to Image AdditionalTags []string @@ -140,6 +170,9 @@ type BuildOptions struct { // Process type that will be used when setting container start command. DefaultProcessType string + // Platform is the desired platform to build on (e.g., linux/amd64) + Platform string + // Strategy for updating local images before a build. PullPolicy image.PullPolicy @@ -149,6 +182,14 @@ type BuildOptions struct { // ProjectDescriptor describes the project and any configuration specific to the project ProjectDescriptor projectTypes.Descriptor + // List of buildpack images or archives to add to a builder. + // these buildpacks will be prepended to the builder's order + PreBuildpacks []string + + // List of buildpack images or archives to add to a builder. + // these buildpacks will be appended to the builder's order + PostBuildpacks []string + // The lifecycle image that will be used for the analysis, restore and export phases // when using an untrusted builder. 
LifecycleImage string @@ -159,17 +200,49 @@ type BuildOptions struct { // User's group id used to build the image GroupID int + // User's user id used to build the image + UserID int + // A previous image to set to a particular tag reference, digest reference, or (when performing a daemon build) image ID; PreviousImage string // TrustBuilder when true optimizes builds by running // all lifecycle phases in a single container. // This places registry credentials on the builder's build image. - // Only trust builders from reputable sources. + // Only trust builders from reputable sources. The optimized + // build happens only when both builder and buildpacks are + // trusted TrustBuilder IsTrustedBuilder + // TrustExtraBuildpacks when true optimizes builds by running + // all lifecycle phases in a single container. The optimized + // build happens only when both builder and buildpacks are + // trusted + TrustExtraBuildpacks bool + // Directory to output any SBOM artifacts SBOMDestinationDir string + + // Directory to output the report.toml metadata artifact + ReportDestinationDir string + + // Desired create time in the output image config + CreationTime *time.Time + + // Configuration to export to OCI layout format + LayoutConfig *LayoutConfig + + // Enable user namespace isolation for the build containers + EnableUsernsHost bool + + InsecureRegistries []string +} + +func (b *BuildOptions) Layout() bool { + if b.LayoutConfig != nil { + return b.LayoutConfig.Enable() + } + return false } // ProxyConfig specifies proxy setting to be set as environment variables in a container. 
@@ -201,13 +274,31 @@ type ContainerConfig struct { Volumes []string } -var IsSuggestedBuilderFunc = func(b string) bool { - for _, suggestedBuilder := range builder.SuggestedBuilders { - if b == suggestedBuilder.Image { - return true - } - } - return false +type LayoutConfig struct { + // Application image reference provided by the user + InputImage InputImageReference + + // Previous image reference provided by the user + PreviousInputImage InputImageReference + + // Local root path to save the run-image in OCI layout format + LayoutRepoDir string + + // Configure the OCI layout fetch mode to avoid saving layers on disk + Sparse bool +} + +func (l *LayoutConfig) Enable() bool { + return l.InputImage.Layout() +} + +type layoutPathConfig struct { + hostImagePath string + hostPreviousImagePath string + hostRunImagePath string + targetImagePath string + targetPreviousImagePath string + targetRunImagePath string } // Build configures settings for the build container(s) and lifecycle. @@ -215,10 +306,37 @@ var IsSuggestedBuilderFunc = func(b string) bool { // If any configuration is deemed invalid, or if any lifecycle phases fail, // an error will be returned and no image produced. func (c *Client) Build(ctx context.Context, opts BuildOptions) error { - imageRef, err := c.parseTagReference(opts.Image) + var pathsConfig layoutPathConfig + + if RunningInContainer() && (opts.PullPolicy != image.PullAlways) { + c.logger.Warnf("Detected pack is running in a container; if using a shared docker host, failing to pull build inputs from a remote registry is insecure - " + + "other tenants may have compromised build inputs stored in the daemon." + + "This configuration is insecure and may become unsupported in the future." 
+ + "Re-run with '--pull-policy=always' to silence this warning.") + } + + if !opts.Publish && usesContainerdStorage(c.docker) { + c.logger.Warnf("Exporting to docker daemon (building without --publish) and daemon uses containerd storage; performance may be significantly degraded.\n" + + "For more information, see https://github.com/buildpacks/pack/issues/2272.") + } + + imageRef, err := c.parseReference(opts) if err != nil { return errors.Wrapf(err, "invalid image name '%s'", opts.Image) } + imgRegistry := imageRef.Context().RegistryStr() + imageName := imageRef.Name() + + if opts.Layout() { + pathsConfig, err = c.processLayoutPath(opts.LayoutConfig.InputImage, opts.LayoutConfig.PreviousInputImage) + if err != nil { + if opts.LayoutConfig.PreviousInputImage != nil { + return errors.Wrapf(err, "invalid layout paths image name '%s' or previous-image name '%s'", opts.LayoutConfig.InputImage.Name(), + opts.LayoutConfig.PreviousInputImage.Name()) + } + return errors.Wrapf(err, "invalid layout paths image name '%s'", opts.LayoutConfig.InputImage.Name()) + } + } appPath, err := c.processAppPath(opts.AppPath) if err != nil { @@ -232,34 +350,209 @@ func (c *Client) Build(ctx context.Context, opts BuildOptions) error { return errors.Wrapf(err, "invalid builder '%s'", opts.Builder) } - rawBuilderImage, err := c.imageFetcher.Fetch(ctx, builderRef.Name(), image.FetchOptions{Daemon: true, PullPolicy: opts.PullPolicy}) + requestedTarget := func() *dist.Target { + if opts.Platform == "" { + return nil + } + parts := strings.Split(opts.Platform, "/") + switch len(parts) { + case 0: + return nil + case 1: + return &dist.Target{OS: parts[0]} + case 2: + return &dist.Target{OS: parts[0], Arch: parts[1]} + default: + return &dist.Target{OS: parts[0], Arch: parts[1], ArchVariant: parts[2]} + } + }() + + rawBuilderImage, err := c.imageFetcher.Fetch( + ctx, + builderRef.Name(), + image.FetchOptions{ + Daemon: true, + Target: requestedTarget, + PullPolicy: opts.PullPolicy, + 
InsecureRegistries: opts.InsecureRegistries, + }, + ) if err != nil { return errors.Wrapf(err, "failed to fetch builder image '%s'", builderRef.Name()) } + var targetToUse *dist.Target + if requestedTarget != nil { + targetToUse = requestedTarget + } else { + targetToUse, err = getTargetFromBuilder(rawBuilderImage) + if err != nil { + return err + } + } + bldr, err := c.getBuilder(rawBuilderImage) if err != nil { return errors.Wrapf(err, "invalid builder %s", style.Symbol(opts.Builder)) } - runImageName := c.resolveRunImage(opts.RunImage, imageRef.Context().RegistryStr(), builderRef.Context().RegistryStr(), bldr.Stack(), opts.AdditionalMirrors, opts.Publish) - runImage, err := c.validateRunImage(ctx, runImageName, opts.PullPolicy, opts.Publish, bldr.StackID) + fetchOptions := image.FetchOptions{ + Daemon: !opts.Publish, + PullPolicy: opts.PullPolicy, + Target: targetToUse, + InsecureRegistries: opts.InsecureRegistries, + } + runImageName := c.resolveRunImage(opts.RunImage, imgRegistry, builderRef.Context().RegistryStr(), bldr.DefaultRunImage(), opts.AdditionalMirrors, opts.Publish, fetchOptions) + + if opts.Layout() { + targetRunImagePath, err := layout.ParseRefToPath(runImageName) + if err != nil { + return err + } + hostRunImagePath := filepath.Join(opts.LayoutConfig.LayoutRepoDir, targetRunImagePath) + targetRunImagePath = filepath.Join(paths.RootDir, "layout-repo", targetRunImagePath) + fetchOptions.LayoutOption = image.LayoutOption{ + Path: hostRunImagePath, + Sparse: opts.LayoutConfig.Sparse, + } + fetchOptions.Daemon = false + pathsConfig.targetRunImagePath = targetRunImagePath + pathsConfig.hostRunImagePath = hostRunImagePath + } + + runImage, warnings, err := c.validateRunImage(ctx, runImageName, fetchOptions, bldr.StackID) if err != nil { return errors.Wrapf(err, "invalid run-image '%s'", runImageName) } + for _, warning := range warnings { + c.logger.Warn(warning) + } var runMixins []string if _, err := dist.GetLabel(runImage, stack.MixinsLabel, 
&runMixins); err != nil { return err } - fetchedBPs, order, err := c.processBuildpacks(ctx, bldr.Image(), bldr.Buildpacks(), bldr.Order(), bldr.StackID, opts) + fetchedBPs, nInlineBPs, order, err := c.processBuildpacks(ctx, bldr.Buildpacks(), bldr.Order(), bldr.StackID, opts, targetToUse) if err != nil { return err } - if err := c.validateMixins(fetchedBPs, bldr, runImageName, runMixins); err != nil { - return errors.Wrap(err, "validating stack mixins") + fetchedExs, orderExtensions, err := c.processExtensions(ctx, bldr.Extensions(), opts, targetToUse) + if err != nil { + return err + } + + system, err := c.processSystem(bldr.System(), fetchedBPs, opts.DisableSystemBuildpacks) + if err != nil { + return err + } + + // Default mode: if the TrustBuilder option is not set, trust the known trusted builders. + if opts.TrustBuilder == nil { + opts.TrustBuilder = builder.IsKnownTrustedBuilder + } + + // Ensure the builder's platform APIs are supported + var builderPlatformAPIs builder.APISet + builderPlatformAPIs = append(builderPlatformAPIs, bldr.LifecycleDescriptor().APIs.Platform.Deprecated...) + builderPlatformAPIs = append(builderPlatformAPIs, bldr.LifecycleDescriptor().APIs.Platform.Supported...) 
+ if !supportsPlatformAPI(builderPlatformAPIs) { + c.logger.Debugf("pack %s supports Platform API(s): %s", c.version, strings.Join(build.SupportedPlatformAPIVersions.AsStrings(), ", ")) + c.logger.Debugf("Builder %s supports Platform API(s): %s", style.Symbol(opts.Builder), strings.Join(builderPlatformAPIs.AsStrings(), ", ")) + return errors.Errorf("Builder %s is incompatible with this version of pack", style.Symbol(opts.Builder)) + } + + // Get the platform API version to use + lifecycleVersion := bldr.LifecycleDescriptor().Info.Version + useCreator := supportsCreator(lifecycleVersion) && opts.TrustBuilder(opts.Builder) + hasAdditionalBuildpacks := func() bool { + return len(fetchedBPs) != nInlineBPs + }() + hasExtensions := func() bool { + return len(fetchedExs) != 0 + }() + if hasExtensions { + c.logger.Warnf("Builder is trusted but additional modules were added; using the untrusted (5 phases) build flow") + useCreator = false + } + if hasAdditionalBuildpacks && !opts.TrustExtraBuildpacks { + c.logger.Warnf("Builder is trusted but additional modules were added; using the untrusted (5 phases) build flow") + useCreator = false + } + var ( + lifecycleOptsLifecycleImage string + lifecycleAPIs []string + ) + if !(useCreator) { + // fetch the lifecycle image + if supportsLifecycleImage(lifecycleVersion) { + lifecycleImageName := opts.LifecycleImage + if lifecycleImageName == "" { + lifecycleImageName = fmt.Sprintf("%s:%s", internalConfig.DefaultLifecycleImageRepo, lifecycleVersion.String()) + } + + lifecycleImage, err := c.imageFetcher.FetchForPlatform( + ctx, + lifecycleImageName, + image.FetchOptions{ + Daemon: true, + PullPolicy: opts.PullPolicy, + Target: targetToUse, + InsecureRegistries: opts.InsecureRegistries, + }, + ) + if err != nil { + return fmt.Errorf("fetching lifecycle image: %w", err) + } + + // if lifecyle container os isn't windows, use ephemeral lifecycle to add /workspace with correct ownership + imageOS, err := lifecycleImage.OS() + if err != nil 
{ + return errors.Wrap(err, "getting lifecycle image OS") + } + if imageOS != "windows" { + // obtain uid/gid from builder to use when extending lifecycle image + uid, gid, err := userAndGroupIDs(rawBuilderImage) + if err != nil { + return fmt.Errorf("obtaining build uid/gid from builder image: %w", err) + } + + c.logger.Debugf("Creating ephemeral lifecycle from %s with uid %d and gid %d. With workspace dir %s", lifecycleImage.Name(), uid, gid, opts.Workspace) + // extend lifecycle image with mountpoints, and use it instead of current lifecycle image + lifecycleImage, err = c.createEphemeralLifecycle(lifecycleImage, opts.Workspace, uid, gid) + if err != nil { + return err + } + c.logger.Debugf("Selecting ephemeral lifecycle image %s for build", lifecycleImage.Name()) + // cleanup the extended lifecycle image when done + defer c.docker.ImageRemove(context.Background(), lifecycleImage.Name(), client.ImageRemoveOptions{Force: true}) + } + + lifecycleOptsLifecycleImage = lifecycleImage.Name() + labels, err := lifecycleImage.Labels() + if err != nil { + return fmt.Errorf("reading labels of lifecycle image: %w", err) + } + + lifecycleAPIs, err = extractSupportedLifecycleApis(labels) + if err != nil { + return fmt.Errorf("reading api versions of lifecycle image: %w", err) + } + } + } + + usingPlatformAPI, err := build.FindLatestSupported(append( + bldr.LifecycleDescriptor().APIs.Platform.Deprecated, + bldr.LifecycleDescriptor().APIs.Platform.Supported...), + lifecycleAPIs) + if err != nil { + return fmt.Errorf("finding latest supported Platform API: %w", err) + } + if usingPlatformAPI.LessThan("0.12") { + if err = c.validateMixins(fetchedBPs, bldr, runImageName, runMixins); err != nil { + return fmt.Errorf("validating stack mixins: %w", err) + } } buildEnvs := map[string]string{} @@ -271,28 +564,43 @@ func (c *Client) Build(ctx context.Context, opts BuildOptions) error { buildEnvs[k] = v } - ephemeralBuilder, err := c.createEphemeralBuilder(rawBuilderImage, buildEnvs, 
order, fetchedBPs) + origBuilderName := rawBuilderImage.Name() + ephemeralBuilder, err := c.createEphemeralBuilder( + rawBuilderImage, + buildEnvs, + order, + fetchedBPs, + orderExtensions, + fetchedExs, + usingPlatformAPI.LessThan("0.12"), + opts.RunImage, + system, + opts.DisableSystemBuildpacks, + ) if err != nil { return err } - defer c.docker.ImageRemove(context.Background(), ephemeralBuilder.Name(), types.ImageRemoveOptions{Force: true}) - - var builderPlatformAPIs builder.APISet - builderPlatformAPIs = append(builderPlatformAPIs, ephemeralBuilder.LifecycleDescriptor().APIs.Platform.Deprecated...) - builderPlatformAPIs = append(builderPlatformAPIs, ephemeralBuilder.LifecycleDescriptor().APIs.Platform.Supported...) + defer func() { + if ephemeralBuilder.Name() == origBuilderName { + return + } + _, _ = c.docker.ImageRemove(context.Background(), ephemeralBuilder.Name(), client.ImageRemoveOptions{Force: true}) + }() - if !supportsPlatformAPI(builderPlatformAPIs) { - c.logger.Debugf("pack %s supports Platform API(s): %s", c.version, strings.Join(build.SupportedPlatformAPIVersions.AsStrings(), ", ")) - c.logger.Debugf("Builder %s supports Platform API(s): %s", style.Symbol(opts.Builder), strings.Join(builderPlatformAPIs.AsStrings(), ", ")) - return errors.Errorf("Builder %s is incompatible with this version of pack", style.Symbol(opts.Builder)) + if len(bldr.OrderExtensions()) > 0 || len(ephemeralBuilder.OrderExtensions()) > 0 { + if targetToUse.OS == "windows" { + return fmt.Errorf("builder contains image extensions which are not supported for Windows builds") + } + if opts.PullPolicy != image.PullAlways { + return fmt.Errorf("pull policy must be 'always' when builder contains image extensions") + } } - imgOS, err := rawBuilderImage.OS() - if err != nil { - return errors.Wrapf(err, "getting builder OS") + if opts.Layout() { + opts.ContainerConfig.Volumes = appendLayoutVolumes(opts.ContainerConfig.Volumes, pathsConfig) } - processedVolumes, warnings, err := 
processVolumes(imgOS, opts.ContainerConfig.Volumes) + processedVolumes, warnings, err := processVolumes(targetToUse.OS, opts.ContainerConfig.Volumes) if err != nil { return err } @@ -311,100 +619,279 @@ func (c *Client) Build(ctx context.Context, opts BuildOptions) error { return err } - projectMetadata := platform.ProjectMetadata{} + projectMetadata := files.ProjectMetadata{} if c.experimental { version := opts.ProjectDescriptor.Project.Version sourceURL := opts.ProjectDescriptor.Project.SourceURL if version != "" || sourceURL != "" { - projectMetadata.Source = &platform.ProjectSource{ + projectMetadata.Source = &files.ProjectSource{ Type: "project", Version: map[string]interface{}{"declared": version}, Metadata: map[string]interface{}{"url": sourceURL}, } + } else { + projectMetadata.Source = v02.GitMetadata(opts.AppPath) } } - // Default mode: if the TrustBuilder option is not set, trust the suggested builders. - if opts.TrustBuilder == nil { - opts.TrustBuilder = IsSuggestedBuilderFunc - } - lifecycleOpts := build.LifecycleOptions{ - AppPath: appPath, - Image: imageRef, - Builder: ephemeralBuilder, - LifecycleImage: ephemeralBuilder.Name(), - RunImage: runImageName, - ProjectMetadata: projectMetadata, - ClearCache: opts.ClearCache, - Publish: opts.Publish, - TrustBuilder: opts.TrustBuilder(opts.Builder), - UseCreator: false, - DockerHost: opts.DockerHost, - CacheImage: opts.CacheImage, - HTTPProxy: proxyConfig.HTTPProxy, - HTTPSProxy: proxyConfig.HTTPSProxy, - NoProxy: proxyConfig.NoProxy, - Network: opts.ContainerConfig.Network, - AdditionalTags: opts.AdditionalTags, - Volumes: processedVolumes, - DefaultProcessType: opts.DefaultProcessType, - FileFilter: fileFilter, - Workspace: opts.Workspace, - GID: opts.GroupID, - PreviousImage: opts.PreviousImage, - Interactive: opts.Interactive, - Termui: termui.NewTermui(imageRef.Name(), ephemeralBuilder, runImageName), - SBOMDestinationDir: opts.SBOMDestinationDir, - } - - lifecycleVersion := 
ephemeralBuilder.LifecycleDescriptor().Info.Version - // Technically the creator is supported as of platform API version 0.3 (lifecycle version 0.7.0+) but earlier versions - // have bugs that make using the creator problematic. - lifecycleSupportsCreator := !lifecycleVersion.LessThan(semver.MustParse(minLifecycleVersionSupportingCreator)) - - if lifecycleSupportsCreator && opts.TrustBuilder(opts.Builder) { + AppPath: appPath, + Image: imageRef, + Builder: ephemeralBuilder, + BuilderImage: builderRef.Name(), + LifecycleImage: ephemeralBuilder.Name(), + RunImage: runImageName, + ProjectMetadata: projectMetadata, + ClearCache: opts.ClearCache, + Publish: opts.Publish, + TrustBuilder: opts.TrustBuilder(opts.Builder), + UseCreator: useCreator, + UseCreatorWithExtensions: supportsCreatorWithExtensions(lifecycleVersion), + DockerHost: opts.DockerHost, + Cache: opts.Cache, + CacheImage: opts.CacheImage, + HTTPProxy: proxyConfig.HTTPProxy, + HTTPSProxy: proxyConfig.HTTPSProxy, + NoProxy: proxyConfig.NoProxy, + Network: opts.ContainerConfig.Network, + AdditionalTags: opts.AdditionalTags, + Volumes: processedVolumes, + DefaultProcessType: opts.DefaultProcessType, + FileFilter: fileFilter, + Workspace: opts.Workspace, + GID: opts.GroupID, + UID: opts.UserID, + PreviousImage: opts.PreviousImage, + Interactive: opts.Interactive, + Termui: termui.NewTermui(imageName, ephemeralBuilder, runImageName), + ReportDestinationDir: opts.ReportDestinationDir, + SBOMDestinationDir: opts.SBOMDestinationDir, + CreationTime: opts.CreationTime, + Layout: opts.Layout(), + Keychain: c.keychain, + EnableUsernsHost: opts.EnableUsernsHost, + ExecutionEnvironment: opts.CNBExecutionEnv, + InsecureRegistries: opts.InsecureRegistries, + } + + switch { + case useCreator: lifecycleOpts.UseCreator = true - // no need to fetch a lifecycle image, it won't be used - if err := c.lifecycleExecutor.Execute(ctx, lifecycleOpts); err != nil { - return errors.Wrap(err, "executing lifecycle") - } - - return 
c.logImageNameAndSha(ctx, opts.Publish, imageRef) + case supportsLifecycleImage(lifecycleVersion): + lifecycleOpts.LifecycleImage = lifecycleOptsLifecycleImage + lifecycleOpts.LifecycleApis = lifecycleAPIs + case !opts.TrustBuilder(opts.Builder): + return errors.Errorf("Lifecycle %s does not have an associated lifecycle image. Builder must be trusted.", lifecycleVersion.String()) } - if !opts.TrustBuilder(opts.Builder) { - if lifecycleImageSupported(imgOS, lifecycleVersion) { - lifecycleImageName := opts.LifecycleImage - if lifecycleImageName == "" { - lifecycleImageName = fmt.Sprintf("%s:%s", internalConfig.DefaultLifecycleImageRepo, lifecycleVersion.String()) + lifecycleOpts.FetchRunImageWithLifecycleLayer = func(runImageName string) (string, error) { + ephemeralRunImageName := fmt.Sprintf("pack.local/run-image/%x:latest", randString(10)) + runImage, err := c.imageFetcher.Fetch(ctx, runImageName, fetchOptions) + if err != nil { + return "", err + } + ephemeralRunImage, err := local.NewImage(ephemeralRunImageName, c.docker, local.FromBaseImage(runImage.Name())) + if err != nil { + return "", err + } + tmpDir, err := os.MkdirTemp("", "extend-run-image-scratch") // we need to write to disk because manifest.json is last in the tar + if err != nil { + return "", err + } + defer os.RemoveAll(tmpDir) + lifecycleImageTar, err := func() (string, error) { + lifecycleImageTar := filepath.Join(tmpDir, "lifecycle-image.tar") + lifecycleImageReader, err := c.docker.ImageSave(context.Background(), []string{lifecycleOpts.LifecycleImage}) // this is fast because the lifecycle image is based on distroless static + if err != nil { + return "", err } - - imgArch, err := rawBuilderImage.Architecture() + defer lifecycleImageReader.Close() + lifecycleImageWriter, err := os.Create(lifecycleImageTar) if err != nil { - return errors.Wrapf(err, "getting builder architecture") + return "", err } - - lifecycleImage, err := c.imageFetcher.Fetch( - ctx, - lifecycleImageName, - 
image.FetchOptions{Daemon: true, PullPolicy: opts.PullPolicy, Platform: fmt.Sprintf("%s/%s", imgOS, imgArch)}, + defer lifecycleImageWriter.Close() + if _, err = io.Copy(lifecycleImageWriter, lifecycleImageReader); err != nil { + return "", err + } + return lifecycleImageTar, nil + }() + if err != nil { + return "", err + } + advanceTarToEntryWithName := func(tarReader *tar.Reader, wantName string) (*tar.Header, error) { + var ( + header *tar.Header + err error ) + for { + header, err = tarReader.Next() + if err == io.EOF { + break + } + if err != nil { + return nil, err + } + if header.Name != wantName { + continue + } + return header, nil + } + return nil, fmt.Errorf("failed to find header with name: %s", wantName) + } + lifecycleLayerName, err := func() (string, error) { + lifecycleImageReader, err := os.Open(lifecycleImageTar) if err != nil { - return errors.Wrap(err, "fetching lifecycle image") + return "", err + } + defer lifecycleImageReader.Close() + tarReader := tar.NewReader(lifecycleImageReader) + if _, err = advanceTarToEntryWithName(tarReader, "manifest.json"); err != nil { + return "", err + } + type descriptor struct { + Layers []string + } + type manifestJSON []descriptor + var manifestContents manifestJSON + if err = json.NewDecoder(tarReader).Decode(&manifestContents); err != nil { + return "", err + } + if len(manifestContents) < 1 { + return "", errors.New("missing manifest entries") + } + // we can assume the lifecycle layer is the last in the tar, except if the lifecycle has been extended as an ephemeral lifecycle + layerOffset := 1 + if strings.Contains(lifecycleOpts.LifecycleImage, "pack.local/lifecycle") { + layerOffset = 2 } - lifecycleOpts.LifecycleImage = lifecycleImage.Name() - } else { - return errors.Errorf("Lifecycle %s does not have an associated lifecycle image. 
Builder must be trusted.", lifecycleVersion.String()) + if (len(manifestContents[0].Layers) - layerOffset) < 0 { + return "", errors.New("Lifecycle image did not contain expected layer count") + } + + return manifestContents[0].Layers[len(manifestContents[0].Layers)-layerOffset], nil + }() + if err != nil { + return "", err + } + if lifecycleLayerName == "" { + return "", errors.New("failed to find lifecycle layer") + } + lifecycleLayerTar, err := func() (string, error) { + lifecycleImageReader, err := os.Open(lifecycleImageTar) + if err != nil { + return "", err + } + defer lifecycleImageReader.Close() + tarReader := tar.NewReader(lifecycleImageReader) + var header *tar.Header + if header, err = advanceTarToEntryWithName(tarReader, lifecycleLayerName); err != nil { + return "", err + } + lifecycleLayerTar := filepath.Join(filepath.Dir(lifecycleImageTar), filepath.Dir(lifecycleLayerName)+".tar") // this will be either /layer.tar (docker < 25.x) OR blobs/sha256.tar (docker 25.x and later OR containerd storage enabled) + if err = os.MkdirAll(filepath.Dir(lifecycleLayerTar), 0755); err != nil { + return "", err + } + lifecycleLayerWriter, err := os.OpenFile(lifecycleLayerTar, os.O_CREATE|os.O_RDWR, os.FileMode(header.Mode)) + if err != nil { + return "", err + } + defer lifecycleLayerWriter.Close() + if _, err = io.Copy(lifecycleLayerWriter, tarReader); err != nil { + return "", err + } + return lifecycleLayerTar, nil + }() + if err != nil { + return "", err + } + diffID, err := func() (string, error) { + lifecycleLayerReader, err := os.Open(lifecycleLayerTar) + if err != nil { + return "", err + } + defer lifecycleLayerReader.Close() + hasher := sha256.New() + if _, err = io.Copy(hasher, lifecycleLayerReader); err != nil { + return "", err + } + // it's weird that this doesn't match lifecycleLayerTar + return hex.EncodeToString(hasher.Sum(nil)), nil + }() + if err != nil { + return "", err + } + if err = ephemeralRunImage.AddLayerWithDiffID(lifecycleLayerTar, 
"sha256:"+diffID); err != nil { + return "", err + } + if err = ephemeralRunImage.Save(); err != nil { + return "", err + } + return ephemeralRunImageName, nil + } + + if err = c.lifecycleExecutor.Execute(ctx, lifecycleOpts); err != nil { + return fmt.Errorf("executing lifecycle: %w", err) + } + return c.logImageNameAndSha(ctx, opts.Publish, imageRef, opts.InsecureRegistries) +} + +func usesContainerdStorage(docker DockerClient) bool { + result, err := docker.Info(context.Background(), client.InfoOptions{}) + if err != nil { + return false + } + + for _, driverStatus := range result.Info.DriverStatus { + if driverStatus[0] == "driver-type" && driverStatus[1] == "io.containerd.snapshotter.v1" { + return true } } - if err := c.lifecycleExecutor.Execute(ctx, lifecycleOpts); err != nil { - return errors.Wrap(err, "executing lifecycle. This may be the result of using an untrusted builder") + return false +} + +func getTargetFromBuilder(builderImage imgutil.Image) (*dist.Target, error) { + builderOS, err := builderImage.OS() + if err != nil { + return nil, fmt.Errorf("failed to get builder OS: %w", err) } + builderArch, err := builderImage.Architecture() + if err != nil { + return nil, fmt.Errorf("failed to get builder architecture: %w", err) + } + builderArchVariant, err := builderImage.Variant() + if err != nil { + return nil, fmt.Errorf("failed to get builder architecture variant: %w", err) + } + return &dist.Target{ + OS: builderOS, + Arch: builderArch, + ArchVariant: builderArchVariant, + }, nil +} + +func extractSupportedLifecycleApis(labels map[string]string) ([]string, error) { + // sample contents of labels: + // {io.buildpacks.builder.metadata:\"{\"lifecycle\":{\"version\":\"0.15.3\"},\"api\":{\"buildpack\":\"0.2\",\"platform\":\"0.3\"}}", + // 
io.buildpacks.lifecycle.apis":"{\"buildpack\":{\"deprecated\":[],\"supported\":[\"0.2\",\"0.3\",\"0.4\",\"0.5\",\"0.6\",\"0.7\",\"0.8\",\"0.9\"]},\"platform\":{\"deprecated\":[],\"supported\":[\"0.3\",\"0.4\",\"0.5\",\"0.6\",\"0.7\",\"0.8\",\"0.9\",\"0.10\"]}}\",\"io.buildpacks.lifecycle.version\":\"0.15.3\"}") - return c.logImageNameAndSha(ctx, opts.Publish, imageRef) + // This struct is defined in lifecycle-repository/tools/image/main.go#Descriptor -- we could consider moving it from the main package to an importable location. + var bpPlatformAPI struct { + Platform struct { + Deprecated []string + Supported []string + } + } + if len(labels["io.buildpacks.lifecycle.apis"]) > 0 { + err := json.Unmarshal([]byte(labels["io.buildpacks.lifecycle.apis"]), &bpPlatformAPI) + if err != nil { + return nil, err + } + return append(bpPlatformAPI.Platform.Deprecated, bpPlatformAPI.Platform.Supported...), nil + } + return []string{}, nil } func getFileFilter(descriptor projectTypes.Descriptor) (func(string) bool, error) { @@ -422,7 +909,17 @@ func getFileFilter(descriptor projectTypes.Descriptor) (func(string) bool, error return nil, nil } -func lifecycleImageSupported(builderOS string, lifecycleVersion *builder.Version) bool { +func supportsCreator(lifecycleVersion *builder.Version) bool { + // Technically the creator is supported as of platform API version 0.3 (lifecycle version 0.7.0+) but earlier versions + // have bugs that make using the creator problematic. 
+ return !lifecycleVersion.LessThan(semver.MustParse(minLifecycleVersionSupportingCreator)) +} + +func supportsCreatorWithExtensions(lifecycleVersion *builder.Version) bool { + return !lifecycleVersion.LessThan(semver.MustParse(minLifecycleVersionSupportingCreatorWithExtensions)) +} + +func supportsLifecycleImage(lifecycleVersion *builder.Version) bool { return lifecycleVersion.Equal(builder.VersionMustParse(prevLifecycleVersionSupportingImage)) || !lifecycleVersion.LessThan(semver.MustParse(minLifecycleVersionSupportingImage)) } @@ -453,7 +950,7 @@ func (c *Client) getBuilder(img imgutil.Image) (*builder.Builder, error) { if err != nil { return nil, err } - if bldr.Stack().RunImage.Image == "" { + if bldr.Stack().RunImage.Image == "" && len(bldr.RunImages()) == 0 { return nil, errors.New("builder metadata is missing run-image") } @@ -471,25 +968,27 @@ func (c *Client) getBuilder(img imgutil.Image) (*builder.Builder, error) { return bldr, nil } -func (c *Client) validateRunImage(context context.Context, name string, pullPolicy image.PullPolicy, publish bool, expectedStack string) (imgutil.Image, error) { +func (c *Client) validateRunImage(context context.Context, name string, opts image.FetchOptions, expectedStack string) (runImage imgutil.Image, warnings []string, err error) { if name == "" { - return nil, errors.New("run image must be specified") + return nil, nil, errors.New("run image must be specified") } - img, err := c.imageFetcher.Fetch(context, name, image.FetchOptions{Daemon: !publish, PullPolicy: pullPolicy}) + img, err := c.imageFetcher.Fetch(context, name, opts) if err != nil { - return nil, err + return nil, nil, err } stackID, err := img.Label("io.buildpacks.stack.id") if err != nil { - return nil, err + return nil, nil, err } + if stackID != expectedStack { - return nil, fmt.Errorf("run-image stack id '%s' does not match builder stack '%s'", stackID, expectedStack) + warnings = append(warnings, "deprecated usage of stack") } - return img, nil + + 
return img, warnings, err } -func (c *Client) validateMixins(additionalBuildpacks []buildpack.Buildpack, bldr *builder.Builder, runImageName string, runMixins []string) error { +func (c *Client) validateMixins(additionalBuildpacks []buildpack.BuildModule, bldr *builder.Builder, runImageName string, runMixins []string) error { if err := stack.ValidateMixins(bldr.Image().Name(), bldr.Mixins(), runImageName, runMixins); err != nil { return err } @@ -539,23 +1038,24 @@ func assembleAvailableMixins(buildMixins, runMixins []string) []string { // allBuildpacks aggregates all buildpacks declared on the image with additional buildpacks passed in. They are sorted // by ID then Version. -func allBuildpacks(builderImage imgutil.Image, additionalBuildpacks []buildpack.Buildpack) ([]dist.BuildpackDescriptor, error) { - var all []dist.BuildpackDescriptor - var bpLayers dist.BuildpackLayers +func allBuildpacks(builderImage imgutil.Image, additionalBuildpacks []buildpack.BuildModule) ([]buildpack.Descriptor, error) { + var all []buildpack.Descriptor + var bpLayers dist.ModuleLayers if _, err := dist.GetLabel(builderImage, dist.BuildpackLayersLabel, &bpLayers); err != nil { return nil, err } for id, bps := range bpLayers { for ver, bp := range bps { desc := dist.BuildpackDescriptor{ - Info: dist.BuildpackInfo{ + WithInfo: dist.ModuleInfo{ ID: id, Version: ver, }, - Stacks: bp.Stacks, - Order: bp.Order, + WithStacks: bp.Stacks, + WithTargets: bp.Targets, + WithOrder: bp.Order, } - all = append(all, desc) + all = append(all, &desc) } } for _, bp := range additionalBuildpacks { @@ -563,10 +1063,10 @@ func allBuildpacks(builderImage imgutil.Image, additionalBuildpacks []buildpack. 
} sort.Slice(all, func(i, j int) bool { - if all[i].Info.ID != all[j].Info.ID { - return all[i].Info.ID < all[j].Info.ID + if all[i].Info().ID != all[j].Info().ID { + return all[i].Info().ID < all[j].Info().ID } - return all[i].Info.Version < all[j].Info.Version + return all[i].Info().Version < all[j].Info().Version }) return all, nil @@ -611,6 +1111,53 @@ func (c *Client) processAppPath(appPath string) (string, error) { return resolvedAppPath, nil } +// processLayoutPath given an image reference and a previous image reference this method calculates the +// local full path and the expected path in the lifecycle container for both images provides. Those values +// can be used to mount the correct volumes +func (c *Client) processLayoutPath(inputImageRef, previousImageRef InputImageReference) (layoutPathConfig, error) { + var ( + hostImagePath, hostPreviousImagePath, targetImagePath, targetPreviousImagePath string + err error + ) + hostImagePath, err = fullImagePath(inputImageRef, true) + if err != nil { + return layoutPathConfig{}, err + } + targetImagePath, err = layout.ParseRefToPath(inputImageRef.Name()) + if err != nil { + return layoutPathConfig{}, err + } + targetImagePath = filepath.Join(paths.RootDir, "layout-repo", targetImagePath) + c.logger.Debugf("local image path %s will be mounted into the container at path %s", hostImagePath, targetImagePath) + + if previousImageRef != nil && previousImageRef.Name() != "" { + hostPreviousImagePath, err = fullImagePath(previousImageRef, false) + if err != nil { + return layoutPathConfig{}, err + } + targetPreviousImagePath, err = layout.ParseRefToPath(previousImageRef.Name()) + if err != nil { + return layoutPathConfig{}, err + } + targetPreviousImagePath = filepath.Join(paths.RootDir, "layout-repo", targetPreviousImagePath) + c.logger.Debugf("local previous image path %s will be mounted into the container at path %s", hostPreviousImagePath, targetPreviousImagePath) + } + return layoutPathConfig{ + hostImagePath: 
hostImagePath, + targetImagePath: targetImagePath, + hostPreviousImagePath: hostPreviousImagePath, + targetPreviousImagePath: targetPreviousImagePath, + }, nil +} + +func (c *Client) parseReference(opts BuildOptions) (name.Reference, error) { + if !opts.Layout() { + return c.parseTagReference(opts.Image) + } + base := filepath.Base(opts.Image) + return c.parseTagReference(base) +} + func (c *Client) processProxyConfig(config *ProxyConfig) ProxyConfig { var ( httpProxy, httpsProxy, noProxy string @@ -640,83 +1187,70 @@ func (c *Client) processProxyConfig(config *ProxyConfig) ProxyConfig { // // Visual examples: // -// BUILDER ORDER -// ---------- -// - group: -// - A -// - B -// - group: -// - A +// BUILDER ORDER +// ---------- +// - group: +// - A +// - B +// - group: +// - A // -// WITH DECLARED: "from=builder", X -// ---------- -// - group: -// - A -// - B -// - X -// - group: -// - A -// - X +// WITH DECLARED: "from=builder", X +// ---------- +// - group: +// - A +// - B +// - X +// - group: +// - A +// - X // -// WITH DECLARED: X, "from=builder", Y -// ---------- -// - group: -// - X -// - A -// - B -// - Y -// - group: -// - X -// - A -// - Y +// WITH DECLARED: X, "from=builder", Y +// ---------- +// - group: +// - X +// - A +// - B +// - Y +// - group: +// - X +// - A +// - Y // -// WITH DECLARED: X -// ---------- -// - group: -// - X +// WITH DECLARED: X +// ---------- +// - group: +// - X // -// WITH DECLARED: A -// ---------- -// - group: -// - A -func (c *Client) processBuildpacks(ctx context.Context, builderImage imgutil.Image, builderBPs []dist.BuildpackInfo, builderOrder dist.Order, stackID string, opts BuildOptions) (fetchedBPs []buildpack.Buildpack, order dist.Order, err error) { - pullPolicy := opts.PullPolicy - publish := opts.Publish - registry := opts.Registry +// WITH DECLARED: A +// ---------- +// - group: +// - A +func (c *Client) processBuildpacks(ctx context.Context, builderBPs []dist.ModuleInfo, builderOrder dist.Order, stackID string, opts 
BuildOptions, targetToUse *dist.Target) (fetchedBPs []buildpack.BuildModule, nInlineBPs int, order dist.Order, err error) { relativeBaseDir := opts.RelativeBaseDir declaredBPs := opts.Buildpacks - // declare buildpacks provided by project descriptor when no buildpacks are declared + // Buildpacks from --buildpack override buildpacks from project descriptor if len(declaredBPs) == 0 && len(opts.ProjectDescriptor.Build.Buildpacks) != 0 { relativeBaseDir = opts.ProjectDescriptorBaseDir for _, bp := range opts.ProjectDescriptor.Build.Buildpacks { - switch { - case bp.ID != "" && bp.Script.Inline != "" && bp.URI == "": - if bp.Script.API == "" { - return nil, nil, errors.New("Missing API version for inline buildpack") - } - - pathToInlineBuildpack, err := createInlineBuildpack(bp, stackID) - if err != nil { - return nil, nil, errors.Wrap(err, "Could not create temporary inline buildpack") - } - declaredBPs = append(declaredBPs, pathToInlineBuildpack) - case bp.URI != "": - declaredBPs = append(declaredBPs, bp.URI) - case bp.ID != "" && bp.Version != "": - declaredBPs = append(declaredBPs, fmt.Sprintf("%s@%s", bp.ID, bp.Version)) - default: - return nil, nil, errors.New("Invalid buildpack defined in project descriptor") + buildpackLocator, isInline, err := getBuildpackLocator(bp, stackID) + if err != nil { + return nil, 0, nil, err + } + if isInline { + nInlineBPs++ } + declaredBPs = append(declaredBPs, buildpackLocator) } } - order = dist.Order{{Group: []dist.BuildpackRef{}}} + order = dist.Order{{Group: []dist.ModuleRef{}}} for _, bp := range declaredBPs { locatorType, err := buildpack.GetLocatorType(bp, relativeBaseDir, builderBPs) if err != nil { - return nil, nil, err + return nil, 0, nil, err } switch locatorType { @@ -726,7 +1260,7 @@ func (c *Client) processBuildpacks(ctx context.Context, builderImage imgutil.Ima order = builderOrder case len(order) > 1: // This should only ever be possible if they are using from=builder twice which we don't allow - return nil, 
nil, errors.New("buildpacks from builder can only be defined once") + return nil, 0, nil, errors.New("buildpacks from builder can only be defined once") default: newOrder := dist.Order{} groupToAdd := order[0].Group @@ -737,41 +1271,176 @@ func (c *Client) processBuildpacks(ctx context.Context, builderImage imgutil.Ima order = newOrder } - case buildpack.IDLocator: - id, version := buildpack.ParseIDLocator(bp) - order = appendBuildpackToOrder(order, dist.BuildpackInfo{ - ID: id, - Version: version, - }) default: - imageOS, err := builderImage.OS() + newFetchedBPs, moduleInfo, err := c.fetchBuildpack(ctx, bp, relativeBaseDir, builderBPs, opts, buildpack.KindBuildpack, targetToUse) + if err != nil { + return fetchedBPs, 0, order, err + } + fetchedBPs = append(fetchedBPs, newFetchedBPs...) + order = appendBuildpackToOrder(order, *moduleInfo) + } + } + + if (len(order) == 0 || len(order[0].Group) == 0) && len(builderOrder) > 0 { + preBuildpacks := opts.PreBuildpacks + postBuildpacks := opts.PostBuildpacks + // Pre-buildpacks from --pre-buildpack override pre-buildpacks from project descriptor + if len(preBuildpacks) == 0 && len(opts.ProjectDescriptor.Build.Pre.Buildpacks) > 0 { + for _, bp := range opts.ProjectDescriptor.Build.Pre.Buildpacks { + buildpackLocator, isInline, err := getBuildpackLocator(bp, stackID) + if err != nil { + return nil, 0, nil, errors.Wrap(err, "get pre-buildpack locator") + } + if isInline { + nInlineBPs++ + } + preBuildpacks = append(preBuildpacks, buildpackLocator) + } + } + // Post-buildpacks from --post-buildpack override post-buildpacks from project descriptor + if len(postBuildpacks) == 0 && len(opts.ProjectDescriptor.Build.Post.Buildpacks) > 0 { + for _, bp := range opts.ProjectDescriptor.Build.Post.Buildpacks { + buildpackLocator, isInline, err := getBuildpackLocator(bp, stackID) + if err != nil { + return nil, 0, nil, errors.Wrap(err, "get post-buildpack locator") + } + if isInline { + nInlineBPs++ + } + postBuildpacks = 
append(postBuildpacks, buildpackLocator) + } + } + + if len(preBuildpacks) > 0 || len(postBuildpacks) > 0 { + order = builderOrder + for _, bp := range preBuildpacks { + newFetchedBPs, moduleInfo, err := c.fetchBuildpack(ctx, bp, relativeBaseDir, builderBPs, opts, buildpack.KindBuildpack, targetToUse) + if err != nil { + return fetchedBPs, 0, order, err + } + fetchedBPs = append(fetchedBPs, newFetchedBPs...) + order = prependBuildpackToOrder(order, *moduleInfo) + } + + for _, bp := range postBuildpacks { + newFetchedBPs, moduleInfo, err := c.fetchBuildpack(ctx, bp, relativeBaseDir, builderBPs, opts, buildpack.KindBuildpack, targetToUse) + if err != nil { + return fetchedBPs, 0, order, err + } + fetchedBPs = append(fetchedBPs, newFetchedBPs...) + order = appendBuildpackToOrder(order, *moduleInfo) + } + } + } + + return fetchedBPs, nInlineBPs, order, nil +} + +func (c *Client) fetchBuildpack(ctx context.Context, bp string, relativeBaseDir string, builderBPs []dist.ModuleInfo, opts BuildOptions, kind string, targetToUse *dist.Target) ([]buildpack.BuildModule, *dist.ModuleInfo, error) { + pullPolicy := opts.PullPolicy + publish := opts.Publish + registry := opts.Registry + + locatorType, err := buildpack.GetLocatorType(bp, relativeBaseDir, builderBPs) + if err != nil { + return nil, nil, err + } + + fetchedBPs := []buildpack.BuildModule{} + var moduleInfo *dist.ModuleInfo + switch locatorType { + case buildpack.IDLocator: + id, version := buildpack.ParseIDLocator(bp) + moduleInfo = &dist.ModuleInfo{ + ID: id, + Version: version, + } + default: + downloadOptions := buildpack.DownloadOptions{ + RegistryName: registry, + Target: targetToUse, + RelativeBaseDir: relativeBaseDir, + Daemon: !publish, + PullPolicy: pullPolicy, + } + if kind == buildpack.KindExtension { + downloadOptions.ModuleKind = kind + } + mainBP, depBPs, err := c.buildpackDownloader.Download(ctx, bp, downloadOptions) + if err != nil { + return nil, nil, errors.Wrap(err, "downloading buildpack") + } + 
fetchedBPs = append(append(fetchedBPs, mainBP), depBPs...) + mainBPInfo := mainBP.Descriptor().Info() + moduleInfo = &mainBPInfo + + packageCfgPath := filepath.Join(bp, "package.toml") + _, err = os.Stat(packageCfgPath) + if err == nil { + fetchedDeps, err := c.fetchBuildpackDependencies(ctx, bp, packageCfgPath, downloadOptions) if err != nil { - return fetchedBPs, order, errors.Wrapf(err, "getting OS from %s", style.Symbol(builderImage.Name())) - } - mainBP, depBPs, err := c.buildpackDownloader.Download(ctx, bp, buildpack.DownloadOptions{ - RegistryName: registry, - ImageOS: imageOS, - RelativeBaseDir: relativeBaseDir, - Daemon: !publish, - PullPolicy: pullPolicy, + return nil, nil, errors.Wrapf(err, "fetching package.toml dependencies (path=%s)", style.Symbol(packageCfgPath)) + } + fetchedBPs = append(fetchedBPs, fetchedDeps...) + } + } + return fetchedBPs, moduleInfo, nil +} + +func (c *Client) fetchBuildpackDependencies(ctx context.Context, bp string, packageCfgPath string, downloadOptions buildpack.DownloadOptions) ([]buildpack.BuildModule, error) { + packageReader := buildpackage.NewConfigReader() + packageCfg, err := packageReader.Read(packageCfgPath) + if err == nil { + fetchedBPs := []buildpack.BuildModule{} + for _, dep := range packageCfg.Dependencies { + mainBP, deps, err := c.buildpackDownloader.Download(ctx, dep.URI, buildpack.DownloadOptions{ + RegistryName: downloadOptions.RegistryName, + Target: downloadOptions.Target, + Daemon: downloadOptions.Daemon, + PullPolicy: downloadOptions.PullPolicy, + RelativeBaseDir: filepath.Join(bp, packageCfg.Buildpack.URI), }) + if err != nil { - return fetchedBPs, order, errors.Wrap(err, "downloading buildpack") + return nil, errors.Wrapf(err, "fetching dependencies (uri=%s,image=%s)", style.Symbol(dep.URI), style.Symbol(dep.ImageName)) } - fetchedBPs = append(append(fetchedBPs, mainBP), depBPs...) 
- order = appendBuildpackToOrder(order, mainBP.Descriptor().Info) + + fetchedBPs = append(append(fetchedBPs, mainBP), deps...) } + return fetchedBPs, nil } + return nil, err +} + +func getBuildpackLocator(bp projectTypes.Buildpack, stackID string) (locator string, isInline bool, err error) { + switch { + case bp.ID != "" && bp.Script.Inline != "" && bp.URI == "": + if bp.Script.API == "" { + return "", false, errors.New("Missing API version for inline buildpack") + } - return fetchedBPs, order, nil + pathToInlineBuildpack, err := createInlineBuildpack(bp, stackID) + if err != nil { + return "", false, errors.Wrap(err, "Could not create temporary inline buildpack") + } + return pathToInlineBuildpack, true, nil + case bp.URI != "": + return bp.URI, false, nil + case bp.ID != "" && bp.Version != "": + return fmt.Sprintf("%s@%s", bp.ID, bp.Version), false, nil + case bp.ID != "" && bp.Version == "": + return bp.ID, false, nil + default: + return "", false, errors.New("Invalid buildpack definition") + } } -func appendBuildpackToOrder(order dist.Order, bpInfo dist.BuildpackInfo) (newOrder dist.Order) { +func appendBuildpackToOrder(order dist.Order, bpInfo dist.ModuleInfo) (newOrder dist.Order) { for _, orderEntry := range order { newEntry := orderEntry - newEntry.Group = append(newEntry.Group, dist.BuildpackRef{ - BuildpackInfo: bpInfo, - Optional: false, + newEntry.Group = append(newEntry.Group, dist.ModuleRef{ + ModuleInfo: bpInfo, + Optional: false, }) newOrder = append(newOrder, newEntry) } @@ -779,16 +1448,177 @@ func appendBuildpackToOrder(order dist.Order, bpInfo dist.BuildpackInfo) (newOrd return newOrder } -func (c *Client) createEphemeralBuilder(rawBuilderImage imgutil.Image, env map[string]string, order dist.Order, buildpacks []buildpack.Buildpack) (*builder.Builder, error) { +func prependBuildpackToOrder(order dist.Order, bpInfo dist.ModuleInfo) (newOrder dist.Order) { + for _, orderEntry := range order { + newEntry := orderEntry + newGroup := 
[]dist.ModuleRef{{ + ModuleInfo: bpInfo, + Optional: false, + }} + newEntry.Group = append(newGroup, newEntry.Group...) + newOrder = append(newOrder, newEntry) + } + + return newOrder +} + +func (c *Client) processExtensions(ctx context.Context, builderExs []dist.ModuleInfo, opts BuildOptions, targetToUse *dist.Target) (fetchedExs []buildpack.BuildModule, orderExtensions dist.Order, err error) { + relativeBaseDir := opts.RelativeBaseDir + declaredExs := opts.Extensions + + orderExtensions = dist.Order{{Group: []dist.ModuleRef{}}} + for _, ex := range declaredExs { + locatorType, err := buildpack.GetLocatorType(ex, relativeBaseDir, builderExs) + if err != nil { + return nil, nil, err + } + + switch locatorType { + case buildpack.RegistryLocator: + return nil, nil, errors.New("RegistryLocator type is not valid for extensions") + case buildpack.FromBuilderLocator: + return nil, nil, errors.New("from builder is not supported for extensions") + default: + newFetchedExs, moduleInfo, err := c.fetchBuildpack(ctx, ex, relativeBaseDir, builderExs, opts, buildpack.KindExtension, targetToUse) + if err != nil { + return fetchedExs, orderExtensions, err + } + fetchedExs = append(fetchedExs, newFetchedExs...) 
+ orderExtensions = prependBuildpackToOrder(orderExtensions, *moduleInfo) + } + } + + return fetchedExs, orderExtensions, nil +} + +func userAndGroupIDs(img imgutil.Image) (int, int, error) { + sUID, err := img.Env(builder.EnvUID) + if err != nil { + return 0, 0, errors.Wrap(err, "reading builder env variables") + } else if sUID == "" { + return 0, 0, fmt.Errorf("image %s missing required env var %s", style.Symbol(img.Name()), style.Symbol(builder.EnvUID)) + } + + sGID, err := img.Env(builder.EnvGID) + if err != nil { + return 0, 0, errors.Wrap(err, "reading builder env variables") + } else if sGID == "" { + return 0, 0, fmt.Errorf("image %s missing required env var %s", style.Symbol(img.Name()), style.Symbol(builder.EnvGID)) + } + + var uid, gid int + uid, err = strconv.Atoi(sUID) + if err != nil { + return 0, 0, fmt.Errorf("failed to parse %s, value %s should be an integer", style.Symbol(builder.EnvUID), style.Symbol(sUID)) + } + + gid, err = strconv.Atoi(sGID) + if err != nil { + return 0, 0, fmt.Errorf("failed to parse %s, value %s should be an integer", style.Symbol(builder.EnvGID), style.Symbol(sGID)) + } + + return uid, gid, nil +} + +func workspacePathForOS(os, workspace string) string { + if workspace == "" { + workspace = "workspace" + } + if os == "windows" { + // note we don't use ephemeral lifecycle when os is windows.. + return "c:\\" + workspace + } + return "/" + workspace +} + +func (c *Client) addUserMountpoints(lifecycleImage imgutil.Image, dest string, workspace string, uid int, gid int) (string, error) { + // today only workspace needs to be added, easy to add future dirs if required. 
+ + imageOS, err := lifecycleImage.OS() + if err != nil { + return "", errors.Wrap(err, "getting image OS") + } + layerWriterFactory, err := layer.NewWriterFactory(imageOS) + if err != nil { + return "", err + } + + workspace = workspacePathForOS(imageOS, workspace) + + fh, err := os.Create(filepath.Join(dest, "dirs.tar")) + if err != nil { + return "", err + } + defer fh.Close() + + lw := layerWriterFactory.NewWriter(fh) + defer lw.Close() + + for _, path := range []string{workspace} { + if err := lw.WriteHeader(&tar.Header{ + Typeflag: tar.TypeDir, + Name: path, + Mode: 0755, + ModTime: archive.NormalizedDateTime, + Uid: uid, + Gid: gid, + }); err != nil { + return "", errors.Wrapf(err, "creating %s mountpoint dir in layer", style.Symbol(path)) + } + } + + return fh.Name(), nil +} + +func (c *Client) createEphemeralLifecycle(lifecycleImage imgutil.Image, workspace string, uid int, gid int) (imgutil.Image, error) { + lifecycleImage.Rename(fmt.Sprintf("pack.local/lifecycle/%x:latest", randString(10))) + + tmpDir, err := os.MkdirTemp("", "create-lifecycle-scratch") + if err != nil { + return nil, err + } + defer os.RemoveAll(tmpDir) + dirsTar, err := c.addUserMountpoints(lifecycleImage, tmpDir, workspace, uid, gid) + if err != nil { + return nil, err + } + if err := lifecycleImage.AddLayer(dirsTar); err != nil { + return nil, errors.Wrap(err, "adding mountpoint dirs layer") + } + + err = lifecycleImage.Save() + if err != nil { + return nil, err + } + + return lifecycleImage, nil +} + +func (c *Client) createEphemeralBuilder( + rawBuilderImage imgutil.Image, + env map[string]string, + order dist.Order, + buildpacks []buildpack.BuildModule, + orderExtensions dist.Order, + extensions []buildpack.BuildModule, + validateMixins bool, + runImage string, + system dist.System, + disableSystem bool, +) (*builder.Builder, error) { + if !ephemeralBuilderNeeded(env, order, buildpacks, orderExtensions, extensions, runImage) && !disableSystem { + return 
builder.New(rawBuilderImage, rawBuilderImage.Name(), builder.WithoutSave()) + } + origBuilderName := rawBuilderImage.Name() - bldr, err := builder.New(rawBuilderImage, fmt.Sprintf("pack.local/builder/%x:latest", randString(10))) + bldr, err := builder.New(rawBuilderImage, fmt.Sprintf("pack.local/builder/%x:latest", randString(10)), builder.WithRunImage(runImage)) if err != nil { return nil, errors.Wrapf(err, "invalid builder %s", style.Symbol(origBuilderName)) } bldr.SetEnv(env) for _, bp := range buildpacks { - bpInfo := bp.Descriptor().Info + bpInfo := bp.Descriptor().Info() c.logger.Debugf("Adding buildpack %s version %s to builder", style.Symbol(bpInfo.ID), style.Symbol(bpInfo.Version)) bldr.AddBuildpack(bp) } @@ -797,12 +1627,54 @@ func (c *Client) createEphemeralBuilder(rawBuilderImage imgutil.Image, env map[s bldr.SetOrder(order) } + for _, ex := range extensions { + exInfo := ex.Descriptor().Info() + c.logger.Debugf("Adding extension %s version %s to builder", style.Symbol(exInfo.ID), style.Symbol(exInfo.Version)) + bldr.AddExtension(ex) + } + if len(orderExtensions) > 0 && len(orderExtensions[0].Group) > 0 { + c.logger.Debug("Setting custom order for extensions") + bldr.SetOrderExtensions(orderExtensions) + } + + bldr.SetValidateMixins(validateMixins) + bldr.SetSystem(system) + if err := bldr.Save(c.logger, builder.CreatorMetadata{Version: c.version}); err != nil { return nil, err } return bldr, nil } +func ephemeralBuilderNeeded( + env map[string]string, + order dist.Order, + buildpacks []buildpack.BuildModule, + orderExtensions dist.Order, + extensions []buildpack.BuildModule, + runImage string, +) bool { + if len(env) > 0 { + return true + } + if len(order) > 0 && len(order[0].Group) > 0 { + return true + } + if len(buildpacks) > 0 { + return true + } + if len(orderExtensions) > 0 && len(orderExtensions[0].Group) > 0 { + return true + } + if len(extensions) > 0 { + return true + } + if runImage != "" { + return true + } + return false +} + // Returns a 
string iwith lowercase a-z, of length n func randString(n int) string { b := make([]byte, n) @@ -816,55 +1688,20 @@ func randString(n int) string { return string(b) } -func processVolumes(imgOS string, volumes []string) (processed []string, warnings []string, err error) { - parserOS := mounts.OSLinux - if imgOS == "windows" { - parserOS = mounts.OSWindows - } - parser := mounts.NewParser(parserOS) - for _, v := range volumes { - volume, err := parser.ParseMountRaw(v, "") - if err != nil { - return nil, nil, errors.Wrapf(err, "platform volume %q has invalid format", v) - } - - sensitiveDirs := []string{"/cnb", "/layers"} - if imgOS == "windows" { - sensitiveDirs = []string{`c:/cnb`, `c:\cnb`, `c:/layers`, `c:\layers`} - } - for _, p := range sensitiveDirs { - if strings.HasPrefix(strings.ToLower(volume.Spec.Target), p) { - warnings = append(warnings, fmt.Sprintf("Mounting to a sensitive directory %s", style.Symbol(volume.Spec.Target))) - } - } - - processed = append(processed, fmt.Sprintf("%s:%s:%s", volume.Spec.Source, volume.Spec.Target, processMode(volume.Mode))) - } - return processed, warnings, nil -} - -func processMode(mode string) string { - if mode == "" { - return "ro" - } - - return mode -} - -func (c *Client) logImageNameAndSha(ctx context.Context, publish bool, imageRef name.Reference) error { +func (c *Client) logImageNameAndSha(ctx context.Context, publish bool, imageRef name.Reference, insecureRegistries []string) error { // The image name and sha are printed in the lifecycle logs, and there is no need to print it again, unless output is suppressed. 
if !logging.IsQuiet(c.logger) { return nil } - img, err := c.imageFetcher.Fetch(ctx, imageRef.Name(), image.FetchOptions{Daemon: !publish, PullPolicy: image.PullNever}) + img, err := c.imageFetcher.Fetch(ctx, imageRef.Name(), image.FetchOptions{Daemon: !publish, PullPolicy: image.PullNever, InsecureRegistries: insecureRegistries}) if err != nil { - return errors.Wrap(err, "fetching built image") + return fmt.Errorf("fetching built image: %w", err) } id, err := img.Identifier() if err != nil { - return errors.Wrap(err, "reading image sha") + return fmt.Errorf("reading image sha: %w", err) } // Remove tag, if it exists, from the image name @@ -890,7 +1727,7 @@ func parseDigestFromImageID(id imgutil.Identifier) string { } func createInlineBuildpack(bp projectTypes.Buildpack, stackID string) (string, error) { - pathToInlineBuilpack, err := ioutil.TempDir("", "inline-cnb") + pathToInlineBuilpack, err := os.MkdirTemp("", "inline-cnb") if err != nil { return pathToInlineBuilpack, err } @@ -899,7 +1736,7 @@ func createInlineBuildpack(bp projectTypes.Buildpack, stackID string) (string, e bp.Version = "0.0.0" } - if err = createBuildpackTOML(pathToInlineBuilpack, bp.ID, bp.Version, bp.Script.API, []dist.Stack{{ID: stackID}}, nil); err != nil { + if err = createBuildpackTOML(pathToInlineBuilpack, bp.ID, bp.Version, bp.Script.API, []dist.Stack{{ID: stackID}}, []dist.Target{}, nil); err != nil { return pathToInlineBuilpack, err } @@ -936,3 +1773,53 @@ exit 0 return pathToInlineBuilpack, nil } + +// fullImagePath parses the inputImageReference provided by the user and creates the directory +// structure if create value is true +func fullImagePath(inputImageRef InputImageReference, create bool) (string, error) { + imagePath, err := inputImageRef.FullName() + if err != nil { + return "", errors.Wrapf(err, "evaluating image %s destination path", inputImageRef.Name()) + } + + if create { + if err := os.MkdirAll(imagePath, os.ModePerm); err != nil { + return "", errors.Wrapf(err, 
"creating %s layout application destination", imagePath) + } + } + + return imagePath, nil +} + +// appendLayoutVolumes mount host volume into the build container, in the form ':[:]' +// the volumes mounted are: +// - The path where the user wants the image to be exported in OCI layout format +// - The previous image path if it exits +// - The run-image path +func appendLayoutVolumes(volumes []string, config layoutPathConfig) []string { + if config.hostPreviousImagePath != "" { + volumes = append(volumes, readOnlyVolume(config.hostPreviousImagePath, config.targetPreviousImagePath), + readOnlyVolume(config.hostRunImagePath, config.targetRunImagePath), + writableVolume(config.hostImagePath, config.targetImagePath)) + } else { + volumes = append(volumes, readOnlyVolume(config.hostRunImagePath, config.targetRunImagePath), + writableVolume(config.hostImagePath, config.targetImagePath)) + } + return volumes +} + +func writableVolume(hostPath, targetPath string) string { + tp := targetPath + if !filepath.IsAbs(targetPath) { + tp = filepath.Join(string(filepath.Separator), targetPath) + } + return fmt.Sprintf("%s:%s:rw", hostPath, tp) +} + +func readOnlyVolume(hostPath, targetPath string) string { + tp := targetPath + if !filepath.IsAbs(targetPath) { + tp = filepath.Join(string(filepath.Separator), targetPath) + } + return fmt.Sprintf("%s:%s", hostPath, tp) +} diff --git a/pkg/client/build_test.go b/pkg/client/build_test.go index d0dd8e1dc2..002fb80588 100644 --- a/pkg/client/build_test.go +++ b/pkg/client/build_test.go @@ -5,17 +5,15 @@ import ( "context" "crypto/sha256" "encoding/hex" + "encoding/json" "fmt" "io" - "io/ioutil" - "math/rand" "net/http" "os" "path/filepath" "runtime" "strings" "testing" - "time" "github.com/Masterminds/semver" "github.com/buildpacks/imgutil" @@ -23,16 +21,14 @@ import ( "github.com/buildpacks/imgutil/local" "github.com/buildpacks/imgutil/remote" "github.com/buildpacks/lifecycle/api" - "github.com/buildpacks/lifecycle/platform" - 
dockerclient "github.com/docker/docker/client" + "github.com/buildpacks/lifecycle/platform/files" "github.com/google/go-containerregistry/pkg/name" "github.com/heroku/color" + dockerclient "github.com/moby/moby/client" "github.com/onsi/gomega/ghttp" - "github.com/pkg/errors" "github.com/sclevine/spec" "github.com/sclevine/spec/report" - "github.com/buildpacks/pack/internal/build" "github.com/buildpacks/pack/internal/builder" cfg "github.com/buildpacks/pack/internal/config" ifakes "github.com/buildpacks/pack/internal/fakes" @@ -50,7 +46,6 @@ import ( func TestBuild(t *testing.T) { color.Disable(true) defer color.Disable(false) - rand.Seed(time.Now().UTC().UnixNano()) spec.Run(t, "build", testBuild, spec.Report(report.Terminal{})) } @@ -61,8 +56,10 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { fakeLifecycle *ifakes.FakeLifecycle defaultBuilderStackID = "some.stack.id" defaultWindowsBuilderStackID = "some.windows.stack.id" + builderImageWithSystem *fakes.Image defaultBuilderImage *fakes.Image defaultWindowsBuilderImage *fakes.Image + builderImageWithSystemName = "example.com/default/builder-with-system:tag" defaultBuilderName = "example.com/default/builder:tag" defaultWindowsBuilderName = "example.com/windows-default/builder:tag" defaultRunImageName = "default/run" @@ -75,23 +72,36 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { outBuf bytes.Buffer logger *logging.LogWithWriters fakeLifecycleImage *fakes.Image + + withExtensionsLabel bool ) + it.Before(func() { var err error fakeImageFetcher = ifakes.NewFakeImageFetcher() fakeLifecycle = &ifakes.FakeLifecycle{} - tmpDir, err = ioutil.TempDir("", "build-test") + tmpDir, err = os.MkdirTemp("", "build-test") h.AssertNil(t, err) - defaultBuilderImage = newFakeBuilderImage(t, tmpDir, defaultBuilderName, defaultBuilderStackID, defaultRunImageName, builder.DefaultLifecycleVersion, newLinuxImage) + defaultBuilderImage = newFakeBuilderImage(t, tmpDir, defaultBuilderName, defaultBuilderStackID, 
defaultRunImageName, builder.DefaultLifecycleVersion, newLinuxImage, false) h.AssertNil(t, defaultBuilderImage.SetLabel("io.buildpacks.stack.mixins", `["mixinA", "build:mixinB", "mixinX", "build:mixinY"]`)) fakeImageFetcher.LocalImages[defaultBuilderImage.Name()] = defaultBuilderImage + if withExtensionsLabel { + h.AssertNil(t, defaultBuilderImage.SetLabel("io.buildpacks.buildpack.order-extensions", `[{"group":[{"id":"some-extension-id","version":"some-extension-version"}]}]`)) + } + + builderImageWithSystem = newFakeBuilderImage(t, tmpDir, builderImageWithSystemName, defaultBuilderStackID, defaultRunImageName, builder.DefaultLifecycleVersion, newLinuxImage, true) + h.AssertNil(t, builderImageWithSystem.SetLabel("io.buildpacks.stack.mixins", `["mixinA", "build:mixinB", "mixinX", "build:mixinY"]`)) + fakeImageFetcher.LocalImages[builderImageWithSystem.Name()] = builderImageWithSystem - defaultWindowsBuilderImage = newFakeBuilderImage(t, tmpDir, defaultWindowsBuilderName, defaultWindowsBuilderStackID, defaultWindowsRunImageName, builder.DefaultLifecycleVersion, newWindowsImage) + defaultWindowsBuilderImage = newFakeBuilderImage(t, tmpDir, defaultWindowsBuilderName, defaultWindowsBuilderStackID, defaultWindowsRunImageName, builder.DefaultLifecycleVersion, newWindowsImage, false) h.AssertNil(t, defaultWindowsBuilderImage.SetLabel("io.buildpacks.stack.mixins", `["mixinA", "build:mixinB", "mixinX", "build:mixinY"]`)) fakeImageFetcher.LocalImages[defaultWindowsBuilderImage.Name()] = defaultWindowsBuilderImage + if withExtensionsLabel { + h.AssertNil(t, defaultWindowsBuilderImage.SetLabel("io.buildpacks.buildpack.order-extensions", `[{"group":[{"id":"some-extension-id","version":"some-extension-version"}]}]`)) + } fakeDefaultWindowsRunImage = newWindowsImage("default/win-run", "", nil) h.AssertNil(t, fakeDefaultWindowsRunImage.SetLabel("io.buildpacks.stack.id", defaultWindowsBuilderStackID)) @@ -116,12 +126,12 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { 
fakeLifecycleImage = newLinuxImage(fmt.Sprintf("%s:%s", cfg.DefaultLifecycleImageRepo, builder.DefaultLifecycleVersion), "", nil) fakeImageFetcher.LocalImages[fakeLifecycleImage.Name()] = fakeLifecycleImage - docker, err := dockerclient.NewClientWithOpts(dockerclient.FromEnv, dockerclient.WithVersion("1.38")) + docker, err := dockerclient.New(dockerclient.FromEnv) h.AssertNil(t, err) logger = logging.NewLogWithWriters(&outBuf, &outBuf) - dlCacheDir, err := ioutil.TempDir(tmpDir, "dl-cache") + dlCacheDir, err := os.MkdirTemp(tmpDir, "dl-cache") h.AssertNil(t, err) blobDownloader := blob.NewDownloader(logger, dlCacheDir) @@ -138,6 +148,7 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { it.After(func() { h.AssertNilE(t, defaultBuilderImage.Cleanup()) + h.AssertNilE(t, builderImageWithSystem.Cleanup()) h.AssertNilE(t, fakeDefaultRunImage.Cleanup()) h.AssertNilE(t, fakeMirror1.Cleanup()) h.AssertNilE(t, fakeMirror2.Cleanup()) @@ -146,6 +157,18 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { }) when("#Build", func() { + when("ephemeral builder is not needed", func() { + it("does not create one", func() { + h.AssertNil(t, subject.Build(context.TODO(), BuildOptions{ + Builder: defaultBuilderName, + Image: "example.com/some/repo:tag", + })) + h.AssertEq(t, fakeLifecycle.Opts.Builder.Name(), defaultBuilderName) + bldr := fakeLifecycle.Opts.Builder.(*builder.Builder) + h.AssertNotNil(t, bldr.Save(logger, builder.CreatorMetadata{})) // it shouldn't be possible to save this builder, as that would overwrite the original builder + }) + }) + when("Workspace option", func() { it("uses the specified dir", func() { h.AssertNil(t, subject.Build(context.TODO(), BuildOptions{ @@ -220,6 +243,7 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { defaultRunImageName, "0.3.0", newLinuxImage, + false, ) h.AssertNil(t, builderWithoutLifecycleImageOrCreator.SetLabel("io.buildpacks.stack.mixins", `["mixinA", "build:mixinB", "mixinX", "build:mixinY"]`)) 
fakeImageFetcher.LocalImages[builderWithoutLifecycleImageOrCreator.Name()] = builderWithoutLifecycleImageOrCreator @@ -346,7 +370,7 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { ) it.Before(func() { - tmpDir, err = ioutil.TempDir("", "build-symlink-test") + tmpDir, err = os.MkdirTemp("", "build-symlink-test") h.AssertNil(t, err) appDirPath := filepath.Join(tmpDir, appDirName) @@ -456,6 +480,9 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { }, nil, nil, + nil, + nil, + dist.System{}, newLinuxImage, ) @@ -513,14 +540,14 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { h.AssertNil(t, fakeRunImage.SetLabel("io.buildpacks.stack.id", "other.stack")) }) - it("errors", func() { - h.AssertError(t, subject.Build(context.TODO(), BuildOptions{ + it("warning", func() { + err := subject.Build(context.TODO(), BuildOptions{ Image: "some/app", Builder: defaultBuilderName, RunImage: "custom/run", - }), - "invalid run-image 'custom/run': run-image stack id 'other.stack' does not match builder stack 'some.stack.id'", - ) + }) + h.AssertNil(t, err) + h.AssertContains(t, outBuf.String(), "Warning: deprecated usage of stack") }) }) @@ -684,13 +711,13 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { it("builder order is overwritten", func() { additionalBP := ifakes.CreateBuildpackTar(t, tmpDir, dist.BuildpackDescriptor{ - API: api.MustParse("0.3"), - Info: dist.BuildpackInfo{ + WithAPI: api.MustParse("0.3"), + WithInfo: dist.ModuleInfo{ ID: "buildpack.add.1.id", Version: "buildpack.add.1.version", }, - Stacks: []dist.Stack{{ID: defaultBuilderStackID}}, - Order: nil, + WithStacks: []dist.Stack{{ID: defaultBuilderStackID}}, + WithOrder: nil, }) h.AssertNil(t, subject.Build(context.TODO(), BuildOptions{ @@ -752,23 +779,23 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { when("from=builder is set first", func() { it("builder order is prepended", func() { additionalBP1 := ifakes.CreateBuildpackTar(t, tmpDir, dist.BuildpackDescriptor{ - API: 
api.MustParse("0.3"), - Info: dist.BuildpackInfo{ + WithAPI: api.MustParse("0.3"), + WithInfo: dist.ModuleInfo{ ID: "buildpack.add.1.id", Version: "buildpack.add.1.version", }, - Stacks: []dist.Stack{{ID: defaultBuilderStackID}}, - Order: nil, + WithStacks: []dist.Stack{{ID: defaultBuilderStackID}}, + WithOrder: nil, }) additionalBP2 := ifakes.CreateBuildpackTar(t, tmpDir, dist.BuildpackDescriptor{ - API: api.MustParse("0.3"), - Info: dist.BuildpackInfo{ + WithAPI: api.MustParse("0.3"), + WithInfo: dist.ModuleInfo{ ID: "buildpack.add.2.id", Version: "buildpack.add.2.version", }, - Stacks: []dist.Stack{{ID: defaultBuilderStackID}}, - Order: nil, + WithStacks: []dist.Stack{{ID: defaultBuilderStackID}}, + WithOrder: nil, }) h.AssertNil(t, subject.Build(context.TODO(), BuildOptions{ @@ -816,23 +843,23 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { when("from=builder is set in middle", func() { it("builder order is appended", func() { additionalBP1 := ifakes.CreateBuildpackTar(t, tmpDir, dist.BuildpackDescriptor{ - API: api.MustParse("0.3"), - Info: dist.BuildpackInfo{ + WithAPI: api.MustParse("0.3"), + WithInfo: dist.ModuleInfo{ ID: "buildpack.add.1.id", Version: "buildpack.add.1.version", }, - Stacks: []dist.Stack{{ID: defaultBuilderStackID}}, - Order: nil, + WithStacks: []dist.Stack{{ID: defaultBuilderStackID}}, + WithOrder: nil, }) additionalBP2 := ifakes.CreateBuildpackTar(t, tmpDir, dist.BuildpackDescriptor{ - API: api.MustParse("0.3"), - Info: dist.BuildpackInfo{ + WithAPI: api.MustParse("0.3"), + WithInfo: dist.ModuleInfo{ ID: "buildpack.add.2.id", Version: "buildpack.add.2.version", }, - Stacks: []dist.Stack{{ID: defaultBuilderStackID}}, - Order: nil, + WithStacks: []dist.Stack{{ID: defaultBuilderStackID}}, + WithOrder: nil, }) h.AssertNil(t, subject.Build(context.TODO(), BuildOptions{ @@ -881,23 +908,23 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { when("from=builder is set last", func() { it("builder order is appended", func() { 
additionalBP1 := ifakes.CreateBuildpackTar(t, tmpDir, dist.BuildpackDescriptor{ - API: api.MustParse("0.3"), - Info: dist.BuildpackInfo{ + WithAPI: api.MustParse("0.3"), + WithInfo: dist.ModuleInfo{ ID: "buildpack.add.1.id", Version: "buildpack.add.1.version", }, - Stacks: []dist.Stack{{ID: defaultBuilderStackID}}, - Order: nil, + WithStacks: []dist.Stack{{ID: defaultBuilderStackID}}, + WithOrder: nil, }) additionalBP2 := ifakes.CreateBuildpackTar(t, tmpDir, dist.BuildpackDescriptor{ - API: api.MustParse("0.3"), - Info: dist.BuildpackInfo{ + WithAPI: api.MustParse("0.3"), + WithInfo: dist.ModuleInfo{ ID: "buildpack.add.2.id", Version: "buildpack.add.2.version", }, - Stacks: []dist.Stack{{ID: defaultBuilderStackID}}, - Order: nil, + WithStacks: []dist.Stack{{ID: defaultBuilderStackID}}, + WithOrder: nil, }) h.AssertNil(t, subject.Build(context.TODO(), BuildOptions{ @@ -946,21 +973,21 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { when("meta-buildpack is used", func() { it("resolves buildpack from builder", func() { buildpackTar := ifakes.CreateBuildpackTar(t, tmpDir, dist.BuildpackDescriptor{ - API: api.MustParse("0.3"), - Info: dist.BuildpackInfo{ + WithAPI: api.MustParse("0.3"), + WithInfo: dist.ModuleInfo{ ID: "metabuildpack.id", Version: "metabuildpack.version", }, - Stacks: nil, - Order: dist.Order{{ - Group: []dist.BuildpackRef{{ - BuildpackInfo: dist.BuildpackInfo{ + WithStacks: nil, + WithOrder: dist.Order{{ + Group: []dist.ModuleRef{{ + ModuleInfo: dist.ModuleInfo{ ID: "buildpack.1.id", Version: "buildpack.1.version", }, Optional: false, }, { - BuildpackInfo: dist.BuildpackInfo{ + ModuleInfo: dist.ModuleInfo{ ID: "buildpack.2.id", Version: "buildpack.2.version", }, @@ -980,85 +1007,143 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { }) }) - when("buildpackage image is used", func() { - var fakePackage *fakes.Image + when("meta-buildpack folder is used", func() { + it("resolves buildpack", func() { + metaBuildpackFolder := 
filepath.Join(tmpDir, "meta-buildpack") + err := os.Mkdir(metaBuildpackFolder, os.ModePerm) + h.AssertNil(t, err) - it.Before(func() { - metaBuildpackTar := ifakes.CreateBuildpackTar(t, tmpDir, dist.BuildpackDescriptor{ - API: api.MustParse("0.3"), - Info: dist.BuildpackInfo{ - ID: "meta.buildpack.id", - Version: "meta.buildpack.version", - Homepage: "http://meta.buildpack", - }, - Stacks: nil, - Order: dist.Order{{ - Group: []dist.BuildpackRef{{ - BuildpackInfo: dist.BuildpackInfo{ - ID: "child.buildpack.id", - Version: "child.buildpack.version", - }, - Optional: false, - }}, - }}, + err = os.WriteFile(filepath.Join(metaBuildpackFolder, "buildpack.toml"), []byte(` +api = "0.2" + +[buildpack] + id = "local/meta-bp" + version = "local-meta-bp-version" + name = "Local Meta-Buildpack" + +[[order]] +[[order.group]] +id = "local/meta-bp-dep" +version = "local-meta-bp-version" + `), 0644) + h.AssertNil(t, err) + + err = os.WriteFile(filepath.Join(metaBuildpackFolder, "package.toml"), []byte(` +[buildpack] +uri = "." 
+ +[[dependencies]] +uri = "../meta-buildpack-dependency" + `), 0644) + h.AssertNil(t, err) + + metaBuildpackDependencyFolder := filepath.Join(tmpDir, "meta-buildpack-dependency") + err = os.Mkdir(metaBuildpackDependencyFolder, os.ModePerm) + h.AssertNil(t, err) + + err = os.WriteFile(filepath.Join(metaBuildpackDependencyFolder, "buildpack.toml"), []byte(` +api = "0.2" + +[buildpack] + id = "local/meta-bp-dep" + version = "local-meta-bp-version" + name = "Local Meta-Buildpack Dependency" + +[[stacks]] + id = "*" + `), 0644) + h.AssertNil(t, err) + + err = subject.Build(context.TODO(), BuildOptions{ + Image: "some/app", + Builder: defaultBuilderName, + ClearCache: true, + Buildpacks: []string{metaBuildpackFolder}, }) - childBuildpackTar := ifakes.CreateBuildpackTar(t, tmpDir, dist.BuildpackDescriptor{ - API: api.MustParse("0.3"), - Info: dist.BuildpackInfo{ - ID: "child.buildpack.id", - Version: "child.buildpack.version", - Homepage: "http://child.buildpack", - }, - Stacks: []dist.Stack{ - {ID: defaultBuilderStackID}, - }, + h.AssertNil(t, err) + h.AssertEq(t, fakeLifecycle.Opts.Builder.Name(), defaultBuilderImage.Name()) + + bldr, err := builder.FromImage(defaultBuilderImage) + h.AssertNil(t, err) + + buildpack1Info := dist.ModuleInfo{ID: "buildpack.1.id", Version: "buildpack.1.version"} + buildpack2Info := dist.ModuleInfo{ID: "buildpack.2.id", Version: "buildpack.2.version"} + metaBuildpackInfo := dist.ModuleInfo{ID: "local/meta-bp", Version: "local-meta-bp-version", Name: "Local Meta-Buildpack"} + metaBuildpackDependencyInfo := dist.ModuleInfo{ID: "local/meta-bp-dep", Version: "local-meta-bp-version", Name: "Local Meta-Buildpack Dependency"} + h.AssertEq(t, bldr.Buildpacks(), []dist.ModuleInfo{ + buildpack1Info, + buildpack2Info, + metaBuildpackInfo, + metaBuildpackDependencyInfo, }) + }) - bpLayers := dist.BuildpackLayers{ - "meta.buildpack.id": { - "meta.buildpack.version": { - API: api.MustParse("0.3"), - Order: dist.Order{{ - Group: []dist.BuildpackRef{{ - 
BuildpackInfo: dist.BuildpackInfo{ - ID: "child.buildpack.id", - Version: "child.buildpack.version", - }, - Optional: false, - }}, - }}, - LayerDiffID: diffIDForFile(t, metaBuildpackTar), - }, - }, - "child.buildpack.id": { - "child.buildpack.version": { - API: api.MustParse("0.3"), - Stacks: []dist.Stack{ - {ID: defaultBuilderStackID}, - }, - LayerDiffID: diffIDForFile(t, childBuildpackTar), - }, - }, - } + it("fails if buildpack dependency could not be fetched", func() { + metaBuildpackFolder := filepath.Join(tmpDir, "meta-buildpack") + err := os.Mkdir(metaBuildpackFolder, os.ModePerm) + h.AssertNil(t, err) - md := buildpack.Metadata{ - BuildpackInfo: dist.BuildpackInfo{ - ID: "meta.buildpack.id", - Version: "meta.buildpack.version", - }, - Stacks: []dist.Stack{ - {ID: defaultBuilderStackID}, - }, - } + err = os.WriteFile(filepath.Join(metaBuildpackFolder, "buildpack.toml"), []byte(` +api = "0.2" + +[buildpack] + id = "local/meta-bp" + version = "local-meta-bp-version" + name = "Local Meta-Buildpack" + +[[order]] +[[order.group]] +id = "local/meta-bp-dep" +version = "local-meta-bp-version" + `), 0644) + h.AssertNil(t, err) + + err = os.WriteFile(filepath.Join(metaBuildpackFolder, "package.toml"), []byte(` +[buildpack] +uri = "." 
+ +[[dependencies]] +uri = "../meta-buildpack-dependency" + +[[dependencies]] +uri = "../not-a-valid-dependency" + `), 0644) + h.AssertNil(t, err) + + metaBuildpackDependencyFolder := filepath.Join(tmpDir, "meta-buildpack-dependency") + err = os.Mkdir(metaBuildpackDependencyFolder, os.ModePerm) + h.AssertNil(t, err) - fakePackage = fakes.NewImage("example.com/some/package", "", nil) - h.AssertNil(t, dist.SetLabel(fakePackage, "io.buildpacks.buildpack.layers", bpLayers)) - h.AssertNil(t, dist.SetLabel(fakePackage, "io.buildpacks.buildpackage.metadata", md)) + err = os.WriteFile(filepath.Join(metaBuildpackDependencyFolder, "buildpack.toml"), []byte(` +api = "0.2" - h.AssertNil(t, fakePackage.AddLayer(metaBuildpackTar)) - h.AssertNil(t, fakePackage.AddLayer(childBuildpackTar)) +[buildpack] + id = "local/meta-bp-dep" + version = "local-meta-bp-version" + name = "Local Meta-Buildpack Dependency" +[[stacks]] + id = "*" + `), 0644) + h.AssertNil(t, err) + + err = subject.Build(context.TODO(), BuildOptions{ + Image: "some/app", + Builder: defaultBuilderName, + ClearCache: true, + Buildpacks: []string{metaBuildpackFolder}, + }) + h.AssertError(t, err, fmt.Sprintf("fetching package.toml dependencies (path='%s')", filepath.Join(metaBuildpackFolder, "package.toml"))) + h.AssertError(t, err, "fetching dependencies (uri='../not-a-valid-dependency',image='')") + }) + }) + + when("buildpackage image is used", func() { + var fakePackage *fakes.Image + + it.Before(func() { + fakePackage = makeFakePackage(t, tmpDir, defaultBuilderStackID) fakeImageFetcher.LocalImages[fakePackage.Name()] = fakePackage }) @@ -1077,12 +1162,12 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { bldr, err := builder.FromImage(defaultBuilderImage) h.AssertNil(t, err) h.AssertEq(t, bldr.Order(), dist.Order{ - {Group: []dist.BuildpackRef{ - {BuildpackInfo: dist.BuildpackInfo{ID: "meta.buildpack.id", Version: "meta.buildpack.version"}}, + {Group: []dist.ModuleRef{ + {ModuleInfo: dist.ModuleInfo{ID: 
"meta.buildpack.id", Version: "meta.buildpack.version"}}, }}, // Child buildpacks should not be added to order }) - h.AssertEq(t, bldr.Buildpacks(), []dist.BuildpackInfo{ + h.AssertEq(t, bldr.Buildpacks(), []dist.ModuleInfo{ { ID: "buildpack.1.id", Version: "buildpack.1.version", @@ -1100,6 +1185,8 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { Version: "child.buildpack.version", }, }) + args := fakeImageFetcher.FetchCalls[fakePackage.Name()] + h.AssertEq(t, args.Target.ValuesAsPlatform(), "linux/amd64") }) it("fails when no metadata label on package", func() { @@ -1144,6 +1231,29 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { ) }) + when("from project descriptor", func() { + when("id - no version is provided", func() { + it("resolves version", func() { + h.AssertNil(t, subject.Build(context.TODO(), BuildOptions{ + Image: "some/app", + Builder: defaultBuilderName, + ClearCache: true, + ProjectDescriptor: projectTypes.Descriptor{ + Build: projectTypes.Build{Buildpacks: []projectTypes.Buildpack{{ID: "buildpack.1.id"}}}, + }, + })) + h.AssertEq(t, fakeLifecycle.Opts.Builder.Name(), defaultBuilderImage.Name()) + + assertOrderEquals(`[[order]] + + [[order.group]] + id = "buildpack.1.id" + version = "buildpack.1.version" +`) + }) + }) + }) + when("buildpacks include URIs", func() { var buildpackTgz string @@ -1172,19 +1282,19 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { h.AssertEq(t, fakeLifecycle.Opts.Builder.Name(), defaultBuilderImage.Name()) bldr, err := builder.FromImage(defaultBuilderImage) h.AssertNil(t, err) - buildpack1Info := dist.BuildpackInfo{ID: "buildpack.1.id", Version: "buildpack.1.version"} - buildpack2Info := dist.BuildpackInfo{ID: "buildpack.2.id", Version: "buildpack.2.version"} - dirBuildpackInfo := dist.BuildpackInfo{ID: "bp.one", Version: "1.2.3", Homepage: "http://one.buildpack"} - tgzBuildpackInfo := dist.BuildpackInfo{ID: "some-other-buildpack-id", Version: "some-other-buildpack-version"} + buildpack1Info := 
dist.ModuleInfo{ID: "buildpack.1.id", Version: "buildpack.1.version"} + buildpack2Info := dist.ModuleInfo{ID: "buildpack.2.id", Version: "buildpack.2.version"} + dirBuildpackInfo := dist.ModuleInfo{ID: "bp.one", Version: "1.2.3", Homepage: "http://one.buildpack"} + tgzBuildpackInfo := dist.ModuleInfo{ID: "some-other-buildpack-id", Version: "some-other-buildpack-version"} h.AssertEq(t, bldr.Order(), dist.Order{ - {Group: []dist.BuildpackRef{ - {BuildpackInfo: buildpack1Info}, - {BuildpackInfo: buildpack2Info}, - {BuildpackInfo: dirBuildpackInfo}, - {BuildpackInfo: tgzBuildpackInfo}, + {Group: []dist.ModuleRef{ + {ModuleInfo: buildpack1Info}, + {ModuleInfo: buildpack2Info}, + {ModuleInfo: dirBuildpackInfo}, + {ModuleInfo: tgzBuildpackInfo}, }}, }) - h.AssertEq(t, bldr.Buildpacks(), []dist.BuildpackInfo{ + h.AssertEq(t, bldr.Buildpacks(), []dist.ModuleInfo{ buildpack1Info, buildpack2Info, dirBuildpackInfo, @@ -1192,7 +1302,7 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { }) }) - when("uri is a http url", func() { + when("uri is an http url", func() { var server *ghttp.Server it.Before(func() { @@ -1223,13 +1333,13 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { bldr, err := builder.FromImage(defaultBuilderImage) h.AssertNil(t, err) h.AssertEq(t, bldr.Order(), dist.Order{ - {Group: []dist.BuildpackRef{ - {BuildpackInfo: dist.BuildpackInfo{ID: "buildpack.1.id", Version: "buildpack.1.version"}}, - {BuildpackInfo: dist.BuildpackInfo{ID: "buildpack.2.id", Version: "buildpack.2.version"}}, - {BuildpackInfo: dist.BuildpackInfo{ID: "some-other-buildpack-id", Version: "some-other-buildpack-version"}}, + {Group: []dist.ModuleRef{ + {ModuleInfo: dist.ModuleInfo{ID: "buildpack.1.id", Version: "buildpack.1.version"}}, + {ModuleInfo: dist.ModuleInfo{ID: "buildpack.2.id", Version: "buildpack.2.version"}}, + {ModuleInfo: dist.ModuleInfo{ID: "some-other-buildpack-id", Version: "some-other-buildpack-version"}}, }}, }) - h.AssertEq(t, bldr.Buildpacks(), 
[]dist.BuildpackInfo{ + h.AssertEq(t, bldr.Buildpacks(), []dist.ModuleInfo{ {ID: "buildpack.1.id", Version: "buildpack.1.version"}, {ID: "buildpack.2.id", Version: "buildpack.2.version"}, {ID: "some-other-buildpack-id", Version: "some-other-buildpack-version"}, @@ -1255,11 +1365,85 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { bldr, err := builder.FromImage(defaultBuilderImage) h.AssertNil(t, err) h.AssertEq(t, bldr.Order(), dist.Order{ - {Group: []dist.BuildpackRef{ - {BuildpackInfo: dist.BuildpackInfo{ID: "some-other-buildpack-id", Version: "some-other-buildpack-version"}}, + {Group: []dist.ModuleRef{ + {ModuleInfo: dist.ModuleInfo{ID: "some-other-buildpack-id", Version: "some-other-buildpack-version"}}, + }}, + }) + h.AssertEq(t, bldr.Buildpacks(), []dist.ModuleInfo{ + {ID: "buildpack.1.id", Version: "buildpack.1.version"}, + {ID: "buildpack.2.id", Version: "buildpack.2.version"}, + {ID: "some-other-buildpack-id", Version: "some-other-buildpack-version"}, + }) + }) + + it("adds the pre buildpack from the project descriptor", func() { + err := subject.Build(context.TODO(), BuildOptions{ + Image: "some/app", + Builder: defaultBuilderName, + ClearCache: true, + ProjectDescriptor: projectTypes.Descriptor{ + Build: projectTypes.Build{ + Pre: projectTypes.GroupAddition{ + Buildpacks: []projectTypes.Buildpack{{ + URI: server.URL(), + }}, + }, + }, + }, + }) + + h.AssertNil(t, err) + h.AssertEq(t, fakeLifecycle.Opts.Builder.Name(), defaultBuilderImage.Name()) + bldr, err := builder.FromImage(defaultBuilderImage) + h.AssertNil(t, err) + h.AssertEq(t, bldr.Order(), dist.Order{ + {Group: []dist.ModuleRef{ + {ModuleInfo: dist.ModuleInfo{ID: "some-other-buildpack-id", Version: "some-other-buildpack-version"}}, + {ModuleInfo: dist.ModuleInfo{ID: "buildpack.1.id", Version: "buildpack.1.version"}}, + }}, + {Group: []dist.ModuleRef{ + {ModuleInfo: dist.ModuleInfo{ID: "some-other-buildpack-id", Version: "some-other-buildpack-version"}}, + {ModuleInfo: 
dist.ModuleInfo{ID: "buildpack.2.id", Version: "buildpack.2.version"}}, + }}, + }) + h.AssertEq(t, bldr.Buildpacks(), []dist.ModuleInfo{ + {ID: "buildpack.1.id", Version: "buildpack.1.version"}, + {ID: "buildpack.2.id", Version: "buildpack.2.version"}, + {ID: "some-other-buildpack-id", Version: "some-other-buildpack-version"}, + }) + }) + + it("adds the post buildpack from the project descriptor", func() { + err := subject.Build(context.TODO(), BuildOptions{ + Image: "some/app", + Builder: defaultBuilderName, + ClearCache: true, + ProjectDescriptor: projectTypes.Descriptor{ + Build: projectTypes.Build{ + Post: projectTypes.GroupAddition{ + Buildpacks: []projectTypes.Buildpack{{ + URI: server.URL(), + }}, + }, + }, + }, + }) + + h.AssertNil(t, err) + h.AssertEq(t, fakeLifecycle.Opts.Builder.Name(), defaultBuilderImage.Name()) + bldr, err := builder.FromImage(defaultBuilderImage) + h.AssertNil(t, err) + h.AssertEq(t, bldr.Order(), dist.Order{ + {Group: []dist.ModuleRef{ + {ModuleInfo: dist.ModuleInfo{ID: "buildpack.1.id", Version: "buildpack.1.version"}}, + {ModuleInfo: dist.ModuleInfo{ID: "some-other-buildpack-id", Version: "some-other-buildpack-version"}}, + }}, + {Group: []dist.ModuleRef{ + {ModuleInfo: dist.ModuleInfo{ID: "buildpack.2.id", Version: "buildpack.2.version"}}, + {ModuleInfo: dist.ModuleInfo{ID: "some-other-buildpack-id", Version: "some-other-buildpack-version"}}, }}, }) - h.AssertEq(t, bldr.Buildpacks(), []dist.BuildpackInfo{ + h.AssertEq(t, bldr.Buildpacks(), []dist.ModuleInfo{ {ID: "buildpack.1.id", Version: "buildpack.1.version"}, {ID: "buildpack.2.id", Version: "buildpack.2.version"}, {ID: "some-other-buildpack-id", Version: "some-other-buildpack-version"}, @@ -1267,22 +1451,114 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { }) }) + when("pre and post buildpacks", func() { + it("added from the project descriptor", func() { + err := subject.Build(context.TODO(), BuildOptions{ + Image: "some/app", + Builder: defaultBuilderName, + 
ClearCache: true, + ProjectDescriptor: projectTypes.Descriptor{ + Build: projectTypes.Build{ + Pre: projectTypes.GroupAddition{ + Buildpacks: []projectTypes.Buildpack{{ID: "buildpack.2.id", Version: "buildpack.2.version"}}, + }, + Post: projectTypes.GroupAddition{ + Buildpacks: []projectTypes.Buildpack{{ID: "buildpack.2.id", Version: "buildpack.2.version"}}, + }, + }, + }, + }) + + h.AssertNil(t, err) + h.AssertEq(t, fakeLifecycle.Opts.Builder.Name(), defaultBuilderImage.Name()) + bldr, err := builder.FromImage(defaultBuilderImage) + h.AssertNil(t, err) + h.AssertEq(t, bldr.Order(), dist.Order{ + {Group: []dist.ModuleRef{ + {ModuleInfo: dist.ModuleInfo{ID: "buildpack.2.id", Version: "buildpack.2.version"}}, + {ModuleInfo: dist.ModuleInfo{ID: "buildpack.1.id", Version: "buildpack.1.version"}}, + {ModuleInfo: dist.ModuleInfo{ID: "buildpack.2.id", Version: "buildpack.2.version"}}, + }}, + {Group: []dist.ModuleRef{ + {ModuleInfo: dist.ModuleInfo{ID: "buildpack.2.id", Version: "buildpack.2.version"}}, + {ModuleInfo: dist.ModuleInfo{ID: "buildpack.2.id", Version: "buildpack.2.version"}}, + {ModuleInfo: dist.ModuleInfo{ID: "buildpack.2.id", Version: "buildpack.2.version"}}, + }}, + }) + h.AssertEq(t, bldr.Buildpacks(), []dist.ModuleInfo{ + {ID: "buildpack.1.id", Version: "buildpack.1.version"}, + {ID: "buildpack.2.id", Version: "buildpack.2.version"}, + }) + }) + + it("not added from the project descriptor", func() { + err := subject.Build(context.TODO(), BuildOptions{ + Image: "some/app", + Builder: defaultBuilderName, + ClearCache: true, + Buildpacks: []string{ + "buildpack.1.id@buildpack.1.version", + }, + ProjectDescriptor: projectTypes.Descriptor{ + Build: projectTypes.Build{ + Pre: projectTypes.GroupAddition{ + Buildpacks: []projectTypes.Buildpack{{ID: "some-other-buildpack-id", Version: "some-other-buildpack-version"}}, + }, + Post: projectTypes.GroupAddition{ + Buildpacks: []projectTypes.Buildpack{{ID: "yet-other-buildpack-id", Version: 
"yet-other-buildpack-version"}}, + }, + }, + }, + }) + + h.AssertNil(t, err) + h.AssertEq(t, fakeLifecycle.Opts.Builder.Name(), defaultBuilderImage.Name()) + bldr, err := builder.FromImage(defaultBuilderImage) + h.AssertNil(t, err) + h.AssertEq(t, bldr.Order(), dist.Order{ + {Group: []dist.ModuleRef{ + {ModuleInfo: dist.ModuleInfo{ID: "buildpack.1.id", Version: "buildpack.1.version"}}, + }}, + }) + h.AssertEq(t, bldr.Buildpacks(), []dist.ModuleInfo{ + {ID: "buildpack.1.id", Version: "buildpack.1.version"}, + {ID: "buildpack.2.id", Version: "buildpack.2.version"}, + }) + }) + }) + when("added buildpack's mixins are not satisfied", func() { it.Before(func() { h.AssertNil(t, defaultBuilderImage.SetLabel("io.buildpacks.stack.mixins", `["mixinX", "build:mixinY"]`)) h.AssertNil(t, fakeDefaultRunImage.SetLabel("io.buildpacks.stack.mixins", `["mixinX", "run:mixinZ"]`)) }) - it("returns an error", func() { - err := subject.Build(context.TODO(), BuildOptions{ + it("succeeds", func() { + h.AssertNil(t, subject.Build(context.TODO(), BuildOptions{ Image: "some/app", Builder: defaultBuilderName, Buildpacks: []string{ buildpackTgz, // requires mixinA, build:mixinB, run:mixinC }, + })) + }) + + when("platform API < 0.12", func() { + it.Before(func() { + setAPIs(t, defaultBuilderImage, []string{"0.8"}, []string{"0.11"}) }) - h.AssertError(t, err, "validating stack mixins: buildpack 'some-other-buildpack-id@some-other-buildpack-version' requires missing mixin(s): build:mixinB, mixinA, run:mixinC") + it("returns an error", func() { + err := subject.Build(context.TODO(), BuildOptions{ + Image: "some/app", + Builder: defaultBuilderName, + Buildpacks: []string{ + buildpackTgz, // requires mixinA, build:mixinB, run:mixinC + }, + }) + + h.AssertError(t, err, "validating stack mixins: buildpack 'some-other-buildpack-id@some-other-buildpack-version' requires missing mixin(s): build:mixinB, mixinA, run:mixinC") + }) }) }) @@ -1293,7 +1569,7 @@ func testBuild(t *testing.T, when spec.G, it 
spec.S) { it.Before(func() { var err error - tmpDir, err = ioutil.TempDir("", "project-desc") + tmpDir, err = os.MkdirTemp("", "project-desc") h.AssertNil(t, err) }) @@ -1326,11 +1602,11 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { bldr, err := builder.FromImage(defaultBuilderImage) h.AssertNil(t, err) h.AssertEq(t, bldr.Order(), dist.Order{ - {Group: []dist.BuildpackRef{ - {BuildpackInfo: dist.BuildpackInfo{ID: "my/inline", Version: "0.0.0"}}, + {Group: []dist.ModuleRef{ + {ModuleInfo: dist.ModuleInfo{ID: "my/inline", Version: "0.0.0"}}, }}, }) - h.AssertEq(t, bldr.Buildpacks(), []dist.BuildpackInfo{ + h.AssertEq(t, bldr.Buildpacks(), []dist.ModuleInfo{ {ID: "buildpack.1.id", Version: "buildpack.1.version"}, {ID: "buildpack.2.id", Version: "buildpack.2.version"}, {ID: "my/inline", Version: "0.0.0"}, @@ -1362,11 +1638,11 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { bldr, err := builder.FromImage(defaultBuilderImage) h.AssertNil(t, err) h.AssertEq(t, bldr.Order(), dist.Order{ - {Group: []dist.BuildpackRef{ - {BuildpackInfo: dist.BuildpackInfo{ID: "my/inline", Version: "1.0.0-my-version"}}, + {Group: []dist.ModuleRef{ + {ModuleInfo: dist.ModuleInfo{ID: "my/inline", Version: "1.0.0-my-version"}}, }}, }) - h.AssertEq(t, bldr.Buildpacks(), []dist.BuildpackInfo{ + h.AssertEq(t, bldr.Buildpacks(), []dist.ModuleInfo{ {ID: "buildpack.1.id", Version: "buildpack.1.version"}, {ID: "buildpack.2.id", Version: "buildpack.2.version"}, {ID: "my/inline", Version: "1.0.0-my-version"}, @@ -1412,7 +1688,7 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { ProjectDescriptorBaseDir: tmpDir, }) - h.AssertEq(t, "Invalid buildpack defined in project descriptor", err.Error()) + h.AssertEq(t, "Invalid buildpack definition", err.Error()) }) it("ignores script if there is a URI", func() { @@ -1451,7 +1727,7 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { it.Before(func() { var err error - tmpDir, err = ioutil.TempDir("", "registry") + tmpDir, err = 
os.MkdirTemp("", "registry") h.AssertNil(t, err) packHome = filepath.Join(tmpDir, ".pack") @@ -1476,17 +1752,17 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { h.AssertNil(t, err) childBuildpackTar := ifakes.CreateBuildpackTar(t, tmpDir, dist.BuildpackDescriptor{ - API: api.MustParse("0.3"), - Info: dist.BuildpackInfo{ + WithAPI: api.MustParse("0.3"), + WithInfo: dist.ModuleInfo{ ID: "example/foo", Version: "1.0.0", }, - Stacks: []dist.Stack{ + WithStacks: []dist.Stack{ {ID: defaultBuilderStackID}, }, }) - bpLayers := dist.BuildpackLayers{ + bpLayers := dist.ModuleLayers{ "example/foo": { "1.0.0": { API: api.MustParse("0.3"), @@ -1499,7 +1775,7 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { } md := buildpack.Metadata{ - BuildpackInfo: dist.BuildpackInfo{ + ModuleInfo: dist.ModuleInfo{ ID: "example/foo", Version: "1.0.0", }, @@ -1538,11 +1814,11 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { bldr, err := builder.FromImage(defaultBuilderImage) h.AssertNil(t, err) h.AssertEq(t, bldr.Order(), dist.Order{ - {Group: []dist.BuildpackRef{ - {BuildpackInfo: dist.BuildpackInfo{ID: "example/foo", Version: "1.0.0"}}, + {Group: []dist.ModuleRef{ + {ModuleInfo: dist.ModuleInfo{ID: "example/foo", Version: "1.0.0"}}, }}, }) - h.AssertEq(t, bldr.Buildpacks(), []dist.BuildpackInfo{ + h.AssertEq(t, bldr.Buildpacks(), []dist.ModuleInfo{ {ID: "buildpack.1.id", Version: "buildpack.1.version"}, {ID: "buildpack.2.id", Version: "buildpack.2.version"}, {ID: "example/foo", Version: "1.0.0"}, @@ -1552,30 +1828,116 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { }) }) - when("ProjectDescriptor", func() { - when("project metadata", func() { - when("not experimental", func() { - it("does not set project source", func() { - err := subject.Build(context.TODO(), BuildOptions{ - Image: "some/app", - Builder: defaultBuilderName, - ClearCache: true, - ProjectDescriptor: projectTypes.Descriptor{ - Project: projectTypes.Project{ - Version: "1.2.3", - SourceURL: 
"https://example.com", - }, - }, - }) + when("Extensions option", func() { + it.Before(func() { + subject.experimental = true + defaultBuilderImage.SetLabel("io.buildpacks.buildpack.order-extensions", `[{"group":[{"id":"extension.1.id","version":"extension.1.version"}]}, {"group":[{"id":"extension.2.id","version":"extension.2.version"}]}]`) + defaultWindowsBuilderImage.SetLabel("io.buildpacks.buildpack.order-extensions", `[{"group":[{"id":"extension.1.id","version":"extension.1.version"}]}, {"group":[{"id":"extension.2.id","version":"extension.2.version"}]}]`) + }) - h.AssertNil(t, err) - h.AssertNil(t, fakeLifecycle.Opts.ProjectMetadata.Source) - }) - }) + assertOrderEquals := func(content string) { + t.Helper() - when("is experimental", func() { - it.Before(func() { - subject.experimental = true + orderLayer, err := defaultBuilderImage.FindLayerWithPath("/cnb/order.toml") + h.AssertNil(t, err) + h.AssertOnTarEntry(t, orderLayer, "/cnb/order.toml", h.ContentEquals(content)) + } + + it("builder order-extensions is overwritten", func() { + additionalEx := ifakes.CreateExtensionTar(t, tmpDir, dist.ExtensionDescriptor{ + WithAPI: api.MustParse("0.7"), + WithInfo: dist.ModuleInfo{ + ID: "extension.add.1.id", + Version: "extension.add.1.version", + }, + }) + + h.AssertNil(t, subject.Build(context.TODO(), BuildOptions{ + Image: "some/app", + Builder: defaultBuilderName, + ClearCache: true, + Extensions: []string{additionalEx}, + })) + h.AssertEq(t, fakeLifecycle.Opts.Builder.Name(), defaultBuilderImage.Name()) + + assertOrderEquals(`[[order]] + + [[order.group]] + id = "buildpack.1.id" + version = "buildpack.1.version" + +[[order]] + + [[order.group]] + id = "buildpack.2.id" + version = "buildpack.2.version" + +[[order-extensions]] + + [[order-extensions.group]] + id = "extension.add.1.id" + version = "extension.add.1.version" +`) + }) + + when("id - no version is provided", func() { + it("resolves version", func() { + h.AssertNil(t, subject.Build(context.TODO(), 
BuildOptions{ + Image: "some/app", + Builder: defaultBuilderName, + ClearCache: true, + Extensions: []string{"extension.1.id"}, + })) + h.AssertEq(t, fakeLifecycle.Opts.Builder.Name(), defaultBuilderImage.Name()) + + assertOrderEquals(`[[order]] + + [[order.group]] + id = "buildpack.1.id" + version = "buildpack.1.version" + +[[order]] + + [[order.group]] + id = "buildpack.2.id" + version = "buildpack.2.version" + +[[order-extensions]] + + [[order-extensions.group]] + id = "extension.1.id" + version = "extension.1.version" +`) + }) + }) + }) + + //TODO: "all buildpacks are added to ephemeral builder" test after extractPackaged() is completed. + + when("ProjectDescriptor", func() { + when("project metadata", func() { + when("not experimental", func() { + it("does not set project source", func() { + err := subject.Build(context.TODO(), BuildOptions{ + Image: "some/app", + Builder: defaultBuilderName, + ClearCache: true, + ProjectDescriptor: projectTypes.Descriptor{ + Project: projectTypes.Project{ + Version: "1.2.3", + SourceURL: "https://example.com", + }, + }, + }) + + h.AssertNil(t, err) + h.AssertNil(t, fakeLifecycle.Opts.ProjectMetadata.Source) + }) + }) + + when("is experimental", func() { + it.Before(func() { + subject.experimental = true }) when("missing information", func() { @@ -1607,7 +1969,7 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { h.AssertNil(t, err) h.AssertNotNil(t, fakeLifecycle.Opts.ProjectMetadata.Source) - h.AssertEq(t, fakeLifecycle.Opts.ProjectMetadata.Source, &platform.ProjectSource{ + h.AssertEq(t, fakeLifecycle.Opts.ProjectMetadata.Source, &files.ProjectSource{ Type: "project", Version: map[string]interface{}{"declared": "1.2.3"}, Metadata: map[string]interface{}{"url": "https://example.com"}, @@ -1651,6 +2013,7 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { defaultRunImageName, "0.3.0", newLinuxImage, + false, ) h.AssertNil(t, builderWithoutLifecycleImageOrCreator.SetLabel("io.buildpacks.stack.mixins", `["mixinA", 
"build:mixinB", "mixinX", "build:mixinY"]`)) fakeImageFetcher.LocalImages[builderWithoutLifecycleImageOrCreator.Name()] = builderWithoutLifecycleImageOrCreator @@ -1670,16 +2033,19 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { })) h.AssertEq(t, fakeLifecycle.Opts.Publish, true) - args := fakeImageFetcher.FetchCalls["default/run"] - h.AssertEq(t, args.Daemon, false) - - args = fakeImageFetcher.FetchCalls[defaultBuilderName] + args := fakeImageFetcher.FetchCalls[defaultBuilderName] h.AssertEq(t, args.Daemon, true) + + args = fakeImageFetcher.FetchCalls["default/run"] + h.AssertEq(t, args.Daemon, false) + h.AssertEq(t, args.Target.ValuesAsPlatform(), "linux/amd64") }) when("builder is untrusted", func() { when("lifecycle image is available", func() { it("uses the 5 phases with the lifecycle image", func() { + origLifecyleName := fakeLifecycleImage.Name() + h.AssertNil(t, subject.Build(context.TODO(), BuildOptions{ Image: "some/app", Builder: defaultBuilderName, @@ -1687,12 +2053,23 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { TrustBuilder: func(string) bool { return false }, })) h.AssertEq(t, fakeLifecycle.Opts.UseCreator, false) - h.AssertEq(t, fakeLifecycle.Opts.LifecycleImage, fakeLifecycleImage.Name()) - - args := fakeImageFetcher.FetchCalls[fakeLifecycleImage.Name()] + h.AssertContains(t, fakeLifecycle.Opts.LifecycleImage, "pack.local/lifecycle") + args := fakeImageFetcher.FetchCalls[origLifecyleName] + h.AssertNotNil(t, args) h.AssertEq(t, args.Daemon, true) h.AssertEq(t, args.PullPolicy, image.PullAlways) - h.AssertEq(t, args.Platform, "linux/amd64") + h.AssertEq(t, args.Target.ValuesAsPlatform(), "linux/amd64") + }) + it("parses the versions correctly", func() { + fakeLifecycleImage.SetLabel("io.buildpacks.lifecycle.apis", "{\"platform\":{\"deprecated\":[\"0.1\",\"0.2\",\"0.3\",\"0.4\",\"0.5\",\"0.6\"],\"supported\":[\"0.7\",\"0.8\",\"0.9\",\"0.10\",\"0.11\",\"0.12\"]}}") + + h.AssertNil(t, subject.Build(context.TODO(), BuildOptions{ + 
Image: "some/app", + Builder: defaultBuilderName, + Publish: true, + TrustBuilder: func(string) bool { return false }, + })) + h.AssertSliceContainsInOrder(t, fakeLifecycle.Opts.LifecycleApis, "0.1", "0.2", "0.3", "0.4", "0.5", "0.6", "0.7", "0.8", "0.9", "0.10", "0.11", "0.12") }) }) @@ -1722,6 +2099,109 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { args := fakeImageFetcher.FetchCalls[fakeLifecycleImage.Name()] h.AssertNil(t, args) }) + + when("additional buildpacks were added", func() { + it("uses creator when additional buildpacks are provided and TrustExtraBuildpacks is set", func() { + additionalBP := ifakes.CreateBuildpackTar(t, tmpDir, dist.BuildpackDescriptor{ + WithAPI: api.MustParse("0.3"), + WithInfo: dist.ModuleInfo{ + ID: "buildpack.add.1.id", + Version: "buildpack.add.1.version", + }, + WithStacks: []dist.Stack{{ID: defaultBuilderStackID}}, + WithOrder: nil, + }) + + h.AssertNil(t, subject.Build(context.TODO(), BuildOptions{ + Image: "some/app", + Builder: defaultBuilderName, + Publish: true, + TrustBuilder: func(string) bool { return true }, + TrustExtraBuildpacks: true, + Buildpacks: []string{additionalBP}, + })) + h.AssertEq(t, fakeLifecycle.Opts.UseCreator, true) + }) + + it("uses the 5 phases with the lifecycle image", func() { + additionalBP := ifakes.CreateBuildpackTar(t, tmpDir, dist.BuildpackDescriptor{ + WithAPI: api.MustParse("0.3"), + WithInfo: dist.ModuleInfo{ + ID: "buildpack.add.1.id", + Version: "buildpack.add.1.version", + }, + WithStacks: []dist.Stack{{ID: defaultBuilderStackID}}, + WithOrder: nil, + }) + + h.AssertNil(t, subject.Build(context.TODO(), BuildOptions{ + Image: "some/app", + Builder: defaultBuilderName, + Publish: true, + TrustBuilder: func(string) bool { return true }, + Buildpacks: []string{additionalBP}, + })) + h.AssertEq(t, fakeLifecycle.Opts.UseCreator, false) + h.AssertEq(t, fakeLifecycle.Opts.LifecycleImage, fakeLifecycleImage.Name()) + + h.AssertContains(t, outBuf.String(), "Builder is trusted but 
additional modules were added; using the untrusted (5 phases) build flow") + }) + + when("from project descriptor", func() { + it("uses the 5 phases with the lifecycle image", func() { + additionalBP := ifakes.CreateBuildpackTar(t, tmpDir, dist.BuildpackDescriptor{ + WithAPI: api.MustParse("0.3"), + WithInfo: dist.ModuleInfo{ + ID: "buildpack.add.1.id", + Version: "buildpack.add.1.version", + }, + WithStacks: []dist.Stack{{ID: defaultBuilderStackID}}, + WithOrder: nil, + }) + + h.AssertNil(t, subject.Build(context.TODO(), BuildOptions{ + Image: "some/app", + Builder: defaultBuilderName, + Publish: true, + TrustBuilder: func(string) bool { return true }, + ProjectDescriptor: projectTypes.Descriptor{Build: projectTypes.Build{ + Buildpacks: []projectTypes.Buildpack{{ + URI: additionalBP, + }}, + }}, + })) + h.AssertEq(t, fakeLifecycle.Opts.UseCreator, false) + h.AssertEq(t, fakeLifecycle.Opts.LifecycleImage, fakeLifecycleImage.Name()) + + h.AssertContains(t, outBuf.String(), "Builder is trusted but additional modules were added; using the untrusted (5 phases) build flow") + }) + + when("inline buildpack", func() { + it("uses the creator with the provided builder", func() { + h.AssertNil(t, subject.Build(context.TODO(), BuildOptions{ + Image: "some/app", + Builder: defaultBuilderName, + Publish: true, + TrustBuilder: func(string) bool { return true }, + ProjectDescriptor: projectTypes.Descriptor{Build: projectTypes.Build{ + Buildpacks: []projectTypes.Buildpack{{ + ID: "buildpack.add.1.id", + Version: "buildpack.add.1.version", + Script: projectTypes.Script{ + API: "0.10", + Inline: "echo hello", + }, + }}, + }}, + })) + h.AssertEq(t, fakeLifecycle.Opts.UseCreator, true) + + args := fakeImageFetcher.FetchCalls[fakeLifecycleImage.Name()] + h.AssertNil(t, args) + }) + }) + }) + }) }) when("lifecycle doesn't support creator", func() { @@ -1764,6 +2244,7 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { when("builder is untrusted", func() { when("lifecycle image is 
available", func() { it("uses the 5 phases with the lifecycle image", func() { + origLifecyleName := fakeLifecycleImage.Name() h.AssertNil(t, subject.Build(context.TODO(), BuildOptions{ Image: "some/app", Builder: defaultBuilderName, @@ -1771,24 +2252,12 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { TrustBuilder: func(string) bool { return false }, })) h.AssertEq(t, fakeLifecycle.Opts.UseCreator, false) - h.AssertEq(t, fakeLifecycle.Opts.LifecycleImage, fakeLifecycleImage.Name()) - - args := fakeImageFetcher.FetchCalls[fakeLifecycleImage.Name()] + h.AssertContains(t, fakeLifecycle.Opts.LifecycleImage, "pack.local/lifecycle") + args := fakeImageFetcher.FetchCalls[origLifecyleName] + h.AssertNotNil(t, args) h.AssertEq(t, args.Daemon, true) h.AssertEq(t, args.PullPolicy, image.PullAlways) - h.AssertEq(t, args.Platform, "linux/amd64") - }) - - it("suggests that being untrusted may be the root of a failure", func() { - subject.lifecycleExecutor = &executeFailsLifecycle{} - err := subject.Build(context.TODO(), BuildOptions{ - Image: "some/app", - Builder: defaultBuilderName, - Publish: false, - TrustBuilder: func(string) bool { return false }, - }) - - h.AssertError(t, err, "may be the result of using an untrusted builder") + h.AssertEq(t, args.Target.ValuesAsPlatform(), "linux/amd64") }) }) @@ -1840,6 +2309,84 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { }) }) + when("Platform option", func() { + var fakePackage imgutil.Image + + it.Before(func() { + fakePackage = makeFakePackage(t, tmpDir, defaultBuilderStackID) + fakeImageFetcher.LocalImages[fakePackage.Name()] = fakePackage + }) + + when("provided", func() { + it("uses the provided platform to pull the builder, run image, packages, and lifecycle image", func() { + h.AssertNil(t, subject.Build(context.TODO(), BuildOptions{ + Image: "some/app", + Builder: defaultBuilderName, + Buildpacks: []string{ + "example.com/some/package", + }, + Platform: "linux/arm64", + PullPolicy: image.PullAlways, + 
})) + + args := fakeImageFetcher.FetchCalls[defaultBuilderName] + h.AssertEq(t, args.Daemon, true) + h.AssertEq(t, args.PullPolicy, image.PullAlways) + h.AssertEq(t, args.Target.ValuesAsPlatform(), "linux/arm64") + + args = fakeImageFetcher.FetchCalls["default/run"] + h.AssertEq(t, args.Daemon, true) + h.AssertEq(t, args.PullPolicy, image.PullAlways) + h.AssertEq(t, args.Target.ValuesAsPlatform(), "linux/arm64") + + args = fakeImageFetcher.FetchCalls[fakePackage.Name()] + h.AssertEq(t, args.Daemon, true) + h.AssertEq(t, args.PullPolicy, image.PullAlways) + h.AssertEq(t, args.Target.ValuesAsPlatform(), "linux/arm64") + + args = fakeImageFetcher.FetchCalls[fmt.Sprintf("%s:%s", cfg.DefaultLifecycleImageRepo, builder.DefaultLifecycleVersion)] + h.AssertEq(t, args.Daemon, true) + h.AssertEq(t, args.PullPolicy, image.PullAlways) + h.AssertEq(t, args.Target.ValuesAsPlatform(), "linux/arm64") + }) + }) + + when("not provided", func() { + it("defaults to builder os/arch", func() { + // defaultBuilderImage has linux/amd64 + + h.AssertNil(t, subject.Build(context.TODO(), BuildOptions{ + Image: "some/app", + Builder: defaultBuilderName, + Buildpacks: []string{ + "example.com/some/package", + }, + PullPolicy: image.PullAlways, + })) + + args := fakeImageFetcher.FetchCalls[defaultBuilderName] + h.AssertEq(t, args.Daemon, true) + h.AssertEq(t, args.PullPolicy, image.PullAlways) + h.AssertEq(t, args.Target, (*dist.Target)(nil)) + + args = fakeImageFetcher.FetchCalls["default/run"] + h.AssertEq(t, args.Daemon, true) + h.AssertEq(t, args.PullPolicy, image.PullAlways) + h.AssertEq(t, args.Target.ValuesAsPlatform(), "linux/amd64") + + args = fakeImageFetcher.FetchCalls[fakePackage.Name()] + h.AssertEq(t, args.Daemon, true) + h.AssertEq(t, args.PullPolicy, image.PullAlways) + h.AssertEq(t, args.Target.ValuesAsPlatform(), "linux/amd64") + + args = fakeImageFetcher.FetchCalls[fmt.Sprintf("%s:%s", cfg.DefaultLifecycleImageRepo, builder.DefaultLifecycleVersion)] + h.AssertEq(t, 
args.Daemon, true) + h.AssertEq(t, args.PullPolicy, image.PullAlways) + h.AssertEq(t, args.Target.ValuesAsPlatform(), "linux/amd64") + }) + }) + }) + when("PullPolicy", func() { when("never", func() { it("uses the local builder and run images without updating", func() { @@ -1857,10 +2404,42 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { h.AssertEq(t, args.Daemon, true) h.AssertEq(t, args.PullPolicy, image.PullNever) - args = fakeImageFetcher.FetchCalls["buildpacksio/lifecycle:0.13.3"] + args = fakeImageFetcher.FetchCalls[fmt.Sprintf("%s:%s", cfg.DefaultLifecycleImageRepo, builder.DefaultLifecycleVersion)] h.AssertEq(t, args.Daemon, true) h.AssertEq(t, args.PullPolicy, image.PullNever) - h.AssertEq(t, args.Platform, "linux/amd64") + h.AssertEq(t, args.Target.ValuesAsPlatform(), "linux/amd64") + }) + }) + + when("containerized pack", func() { + it.Before(func() { + RunningInContainer = func() bool { + return true + } + }) + + when("--pull-policy=always", func() { + it("does not warn", func() { + h.AssertNil(t, subject.Build(context.TODO(), BuildOptions{ + Image: "some/app", + Builder: defaultBuilderName, + PullPolicy: image.PullAlways, + })) + + h.AssertNotContains(t, outBuf.String(), "failing to pull build inputs from a remote registry is insecure") + }) + }) + + when("not --pull-policy=always", func() { + it("warns", func() { + h.AssertNil(t, subject.Build(context.TODO(), BuildOptions{ + Image: "some/app", + Builder: defaultBuilderName, + PullPolicy: image.PullNever, + })) + + h.AssertContains(t, outBuf.String(), "failing to pull build inputs from a remote registry is insecure") + }) }) }) @@ -2001,6 +2580,9 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { }, nil, nil, + nil, + nil, + dist.System{}, newLinuxImage, ) @@ -2051,6 +2633,9 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { }, nil, nil, + nil, + nil, + dist.System{}, newLinuxImage, ) @@ -2105,6 +2690,9 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { }, nil, nil, + 
nil, + nil, + dist.System{}, newLinuxImage, ) @@ -2161,6 +2749,9 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { }, nil, nil, + nil, + nil, + dist.System{}, newLinuxImage, ) @@ -2183,6 +2774,40 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { }) }) }) + + when("use creator with extensions", func() { + when("lifecycle is old", func() { + it("false", func() { + oldLifecycleBuilder := newFakeBuilderImage(t, tmpDir, "example.com/old-lifecycle-builder:tag", defaultBuilderStackID, defaultRunImageName, "0.18.0", newLinuxImage, false) + defer oldLifecycleBuilder.Cleanup() + fakeImageFetcher.LocalImages[oldLifecycleBuilder.Name()] = oldLifecycleBuilder + + h.AssertNil(t, subject.Build(context.TODO(), BuildOptions{ + Image: "some/app", + Builder: oldLifecycleBuilder.Name(), + TrustBuilder: func(string) bool { return true }, + })) + + h.AssertEq(t, fakeLifecycle.Opts.UseCreatorWithExtensions, false) + }) + }) + + when("lifecycle is new", func() { + it("true", func() { + newLifecycleBuilder := newFakeBuilderImage(t, tmpDir, "example.com/new-lifecycle-builder:tag", defaultBuilderStackID, defaultRunImageName, "0.19.0", newLinuxImage, false) + defer newLifecycleBuilder.Cleanup() + fakeImageFetcher.LocalImages[newLifecycleBuilder.Name()] = newLifecycleBuilder + + h.AssertNil(t, subject.Build(context.TODO(), BuildOptions{ + Image: "some/app", + Builder: newLifecycleBuilder.Name(), + TrustBuilder: func(string) bool { return true }, + })) + + h.AssertEq(t, fakeLifecycle.Opts.UseCreatorWithExtensions, true) + }) + }) + }) }) when("validating mixins", func() { @@ -2192,13 +2817,26 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { h.AssertNil(t, fakeDefaultRunImage.SetLabel("io.buildpacks.stack.mixins", `["mixinB"]`)) }) - it("returns an error", func() { - err := subject.Build(context.TODO(), BuildOptions{ + it("succeeds", func() { + h.AssertNil(t, subject.Build(context.TODO(), BuildOptions{ Image: "some/app", Builder: defaultBuilderName, + })) + }) + + 
when("platform API < 0.12", func() { + it.Before(func() { + setAPIs(t, defaultBuilderImage, []string{"0.8"}, []string{"0.11"}) }) - h.AssertError(t, err, "validating stack mixins: 'default/run' missing required mixin(s): mixinA") + it("returns an error", func() { + err := subject.Build(context.TODO(), BuildOptions{ + Image: "some/app", + Builder: defaultBuilderName, + }) + + h.AssertError(t, err, "validating stack mixins: 'default/run' missing required mixin(s): mixinA") + }) }) }) @@ -2208,13 +2846,26 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { h.AssertNil(t, fakeDefaultRunImage.SetLabel("io.buildpacks.stack.mixins", "")) }) - it("returns an error", func() { - err := subject.Build(context.TODO(), BuildOptions{ + it("succeeds", func() { + h.AssertNil(t, subject.Build(context.TODO(), BuildOptions{ Image: "some/app", Builder: defaultBuilderName, + })) + }) + + when("platform API < 0.12", func() { + it.Before(func() { + setAPIs(t, defaultBuilderImage, []string{"0.8"}, []string{"0.11"}) }) - h.AssertError(t, err, "validating stack mixins: buildpack 'buildpack.1.id@buildpack.1.version' requires missing mixin(s): build:mixinY, mixinX, run:mixinZ") + it("returns an error", func() { + err := subject.Build(context.TODO(), BuildOptions{ + Image: "some/app", + Builder: defaultBuilderName, + }) + + h.AssertError(t, err, "validating stack mixins: buildpack 'buildpack.1.id@buildpack.1.version' requires missing mixin(s): build:mixinY, mixinX, run:mixinZ") + }) }) }) }) @@ -2272,7 +2923,11 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { Volumes: []string{":::"}, }, }) - h.AssertError(t, err, `platform volume ":::" has invalid format: invalid volume specification: ':::'`) + if runtime.GOOS == "darwin" { + h.AssertError(t, err, `platform volume ":::" has invalid format: invalid spec: :::: empty section between colons`) + } else { + h.AssertError(t, err, `platform volume ":::" has invalid format: invalid volume specification: ':::'`) + } }) }) @@ -2284,6 
+2939,8 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { "/cnb/nested", "/layers", "/layers/nested", + "/workspace", + "/workspace/bindings", } { p := p it(fmt.Sprintf("warns when mounting to '%s'", p), func() { @@ -2308,7 +2965,7 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { }) when("linux container", func() { it("drive is transformed", func() { - dir, _ := ioutil.TempDir("", "pack-test-mount") + dir, _ := os.MkdirTemp("", "pack-test-mount") volume := fmt.Sprintf("%v:/x", dir) err := subject.Build(context.TODO(), BuildOptions{ Image: "some/app", @@ -2356,7 +3013,7 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { when("mounting onto cnb spec'd dir", func() { for _, p := range []string{ - `/cnb`, `/cnb/buildpacks`, `/layers`, + `/cnb`, `/cnb/buildpacks`, `/layers`, `/workspace`, } { p := p it(fmt.Sprintf("warns when mounting to '%s'", p), func() { @@ -2377,7 +3034,7 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { }) when("windows container", func() { it("drive is mounted", func() { - dir, _ := ioutil.TempDir("", "pack-test-mount") + dir, _ := os.MkdirTemp("", "pack-test-mount") volume := fmt.Sprintf("%v:c:\\x", dir) err := subject.Build(context.TODO(), BuildOptions{ Image: "some/app", @@ -2426,7 +3083,7 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { when("mounting onto cnb spec'd dir", func() { for _, p := range []string{ - `c:\cnb`, `c:\cnb\buildpacks`, `c:\layers`, + `c:\cnb`, `c:\cnb\buildpacks`, `c:\layers`, `c:\workspace`, } { p := p it(fmt.Sprintf("warns when mounting to '%s'", p), func() { @@ -2507,9 +3164,301 @@ func testBuild(t *testing.T, when spec.G, it spec.S) { h.AssertEq(t, fakeLifecycle.Opts.SBOMDestinationDir, "some-destination-dir") }) }) + + when("report destination dir option", func() { + it("passthroughs to lifecycle", func() { + h.AssertNil(t, subject.Build(context.TODO(), BuildOptions{ + Builder: defaultBuilderName, + Image: "example.com/some/repo:tag", + ReportDestinationDir: 
"a-destination-dir", + })) + h.AssertEq(t, fakeLifecycle.Opts.ReportDestinationDir, "a-destination-dir") + }) + }) + + when("there are extensions", func() { + withExtensionsLabel = true + + when("default configuration", func() { + it("succeeds", func() { + err := subject.Build(context.TODO(), BuildOptions{ + Image: "some/app", + Builder: defaultBuilderName, + }) + + h.AssertNil(t, err) + h.AssertEq(t, fakeLifecycle.Opts.BuilderImage, defaultBuilderName) + }) + }) + + when("os", func() { + when("windows", func() { + it.Before(func() { + h.SkipIf(t, runtime.GOOS != "windows", "Skipped on non-windows") + }) + + it("errors", func() { + err := subject.Build(context.TODO(), BuildOptions{ + Image: "some/app", + Builder: defaultWindowsBuilderName, + }) + + h.AssertNotNil(t, err) + }) + }) + + when("linux", func() { + it("succeeds", func() { + err := subject.Build(context.TODO(), BuildOptions{ + Image: "some/app", + Builder: defaultBuilderName, + }) + + h.AssertNil(t, err) + h.AssertEq(t, fakeLifecycle.Opts.BuilderImage, defaultBuilderName) + }) + }) + }) + + when("pull policy", func() { + when("always", func() { + it("succeeds", func() { + err := subject.Build(context.TODO(), BuildOptions{ + Image: "some/app", + Builder: defaultBuilderName, + PullPolicy: image.PullAlways, + }) + + h.AssertNil(t, err) + h.AssertEq(t, fakeLifecycle.Opts.BuilderImage, defaultBuilderName) + }) + }) + + when("other", func() { + it("errors", func() { + err := subject.Build(context.TODO(), BuildOptions{ + Image: "some/app", + Builder: defaultBuilderName, + PullPolicy: image.PullNever, + }) + + h.AssertNotNil(t, err) + }) + }) + }) + }) + + when("export to OCI layout", func() { + var ( + inputImageReference, inputPreviousImageReference InputImageReference + layoutConfig *LayoutConfig + hostImagePath, hostPreviousImagePath, hostRunImagePath string + ) + + it.Before(func() { + h.SkipIf(t, runtime.GOOS == "windows", "skip on windows") + + remoteRunImage := fakes.NewImage("default/run", "", nil) + 
h.AssertNil(t, remoteRunImage.SetLabel("io.buildpacks.stack.id", defaultBuilderStackID)) + h.AssertNil(t, remoteRunImage.SetLabel("io.buildpacks.stack.mixins", `["mixinA", "mixinX", "run:mixinZ"]`)) + fakeImageFetcher.RemoteImages[remoteRunImage.Name()] = remoteRunImage + + hostImagePath = filepath.Join(tmpDir, "my-app") + inputImageReference = ParseInputImageReference(fmt.Sprintf("oci:%s", hostImagePath)) + layoutConfig = &LayoutConfig{ + InputImage: inputImageReference, + LayoutRepoDir: filepath.Join(tmpDir, "local-repo"), + } + }) + + when("previous image is not provided", func() { + when("sparse is false", func() { + it("saves run-image locally in oci layout and mount volumes", func() { + h.AssertNil(t, subject.Build(context.TODO(), BuildOptions{ + Image: inputImageReference.Name(), + Builder: defaultBuilderName, + LayoutConfig: layoutConfig, + })) + + args := fakeImageFetcher.FetchCalls["default/run"] + h.AssertEq(t, args.LayoutOption.Sparse, false) + h.AssertContains(t, args.LayoutOption.Path, layoutConfig.LayoutRepoDir) + + h.AssertEq(t, fakeLifecycle.Opts.Layout, true) + // verify the host path are mounted as volumes + h.AssertSliceContainsMatch(t, fakeLifecycle.Opts.Volumes, hostImagePath, hostRunImagePath) + }) + }) + + when("sparse is true", func() { + it.Before(func() { + layoutConfig.Sparse = true + }) + + it("saves run-image locally (no layers) in oci layout and mount volumes", func() { + h.AssertNil(t, subject.Build(context.TODO(), BuildOptions{ + Image: inputImageReference.Name(), + Builder: defaultBuilderName, + LayoutConfig: layoutConfig, + })) + + args := fakeImageFetcher.FetchCalls["default/run"] + h.AssertEq(t, args.LayoutOption.Sparse, true) + h.AssertContains(t, args.LayoutOption.Path, layoutConfig.LayoutRepoDir) + + h.AssertEq(t, fakeLifecycle.Opts.Layout, true) + // verify the host path are mounted as volumes + h.AssertSliceContainsMatch(t, fakeLifecycle.Opts.Volumes, hostImagePath, hostRunImagePath) + }) + }) + }) + + when("previous image 
is provided", func() { + it.Before(func() { + hostPreviousImagePath = filepath.Join(tmpDir, "my-previous-app") + inputPreviousImageReference = ParseInputImageReference(fmt.Sprintf("oci:%s", hostPreviousImagePath)) + layoutConfig.PreviousInputImage = inputPreviousImageReference + }) + + it("mount previous image volume", func() { + h.AssertNil(t, subject.Build(context.TODO(), BuildOptions{ + Image: inputImageReference.Name(), + PreviousImage: inputPreviousImageReference.Name(), + Builder: defaultBuilderName, + LayoutConfig: layoutConfig, + })) + + h.AssertEq(t, fakeLifecycle.Opts.Layout, true) + // verify the host path are mounted as volumes + h.AssertSliceContainsMatch(t, fakeLifecycle.Opts.Volumes, hostImagePath, hostPreviousImagePath, hostRunImagePath) + }) + }) + }) + + when("there are system buildpacks", func() { + assertSystemEquals := func(content string) { + t.Helper() + + systemLayer, err := builderImageWithSystem.FindLayerWithPath("/cnb/system.toml") + h.AssertNil(t, err) + h.AssertOnTarEntry(t, systemLayer, "/cnb/system.toml", h.ContentEquals(content)) + } + + it("uses the system buildpacks defined in the builder", func() { + h.AssertNil(t, subject.Build(context.TODO(), BuildOptions{ + Image: "some/app", + Builder: builderImageWithSystemName, + })) + h.AssertEq(t, fakeLifecycle.Opts.Builder.Name(), builderImageWithSystem.Name()) + h.AssertTrue(t, len(fakeLifecycle.Opts.Builder.System().Pre.Buildpacks) == 1) + h.AssertTrue(t, len(fakeLifecycle.Opts.Builder.System().Post.Buildpacks) == 1) + assertSystemEquals(`[system] + [system.pre] + + [[system.pre.buildpacks]] + id = "buildpack.1.id" + version = "buildpack.1.version" + [system.post] + + [[system.post.buildpacks]] + id = "buildpack.2.id" + version = "buildpack.2.version" +`) + }) + + it("removes system buildpacks from builder when --disable-system-buildpacks", func() { + h.AssertNil(t, subject.Build(context.TODO(), BuildOptions{ + Image: "some/app", + Builder: builderImageWithSystemName, + 
DisableSystemBuildpacks: true, + })) + h.AssertEq(t, fakeLifecycle.Opts.Builder.Name(), builderImageWithSystem.Name()) + h.AssertTrue(t, len(fakeLifecycle.Opts.Builder.System().Pre.Buildpacks) == 0) + h.AssertTrue(t, len(fakeLifecycle.Opts.Builder.System().Post.Buildpacks) == 0) + }) + }) }) } +func makeFakePackage(t *testing.T, tmpDir string, stackID string) *fakes.Image { + metaBuildpackTar := ifakes.CreateBuildpackTar(t, tmpDir, dist.BuildpackDescriptor{ + WithAPI: api.MustParse("0.3"), + WithInfo: dist.ModuleInfo{ + ID: "meta.buildpack.id", + Version: "meta.buildpack.version", + Homepage: "http://meta.buildpack", + }, + WithStacks: nil, + WithOrder: dist.Order{{ + Group: []dist.ModuleRef{{ + ModuleInfo: dist.ModuleInfo{ + ID: "child.buildpack.id", + Version: "child.buildpack.version", + }, + Optional: false, + }}, + }}, + }) + + childBuildpackTar := ifakes.CreateBuildpackTar(t, tmpDir, dist.BuildpackDescriptor{ + WithAPI: api.MustParse("0.3"), + WithInfo: dist.ModuleInfo{ + ID: "child.buildpack.id", + Version: "child.buildpack.version", + Homepage: "http://child.buildpack", + }, + WithStacks: []dist.Stack{ + {ID: stackID}, + }, + }) + + bpLayers := dist.ModuleLayers{ + "meta.buildpack.id": { + "meta.buildpack.version": { + API: api.MustParse("0.3"), + Order: dist.Order{{ + Group: []dist.ModuleRef{{ + ModuleInfo: dist.ModuleInfo{ + ID: "child.buildpack.id", + Version: "child.buildpack.version", + }, + Optional: false, + }}, + }}, + LayerDiffID: diffIDForFile(t, metaBuildpackTar), + }, + }, + "child.buildpack.id": { + "child.buildpack.version": { + API: api.MustParse("0.3"), + Stacks: []dist.Stack{ + {ID: stackID}, + }, + LayerDiffID: diffIDForFile(t, childBuildpackTar), + }, + }, + } + + md := buildpack.Metadata{ + ModuleInfo: dist.ModuleInfo{ + ID: "meta.buildpack.id", + Version: "meta.buildpack.version", + }, + Stacks: []dist.Stack{ + {ID: stackID}, + }, + } + + fakePackage := fakes.NewImage("example.com/some/package", "", nil) + h.AssertNil(t, 
dist.SetLabel(fakePackage, "io.buildpacks.buildpack.layers", bpLayers)) + h.AssertNil(t, dist.SetLabel(fakePackage, "io.buildpacks.buildpackage.metadata", md)) + + h.AssertNil(t, fakePackage.AddLayer(metaBuildpackTar)) + h.AssertNil(t, fakePackage.AddLayer(childBuildpackTar)) + + return fakePackage +} + func diffIDForFile(t *testing.T, path string) string { file, err := os.Open(path) h.AssertNil(t, err) @@ -2535,7 +3484,38 @@ func newWindowsImage(name, topLayerSha string, identifier imgutil.Identifier) *f return result } -func newFakeBuilderImage(t *testing.T, tmpDir, builderName, defaultBuilderStackID, runImageName, lifecycleVersion string, osImageCreator ifakes.FakeImageCreator) *fakes.Image { +func newFakeBuilderImage(t *testing.T, tmpDir, builderName, defaultBuilderStackID, runImageName, lifecycleVersion string, osImageCreator ifakes.FakeImageCreator, withSystem bool) *fakes.Image { + var supportedBuildpackAPIs builder.APISet + for _, v := range api.Buildpack.Supported { + supportedBuildpackAPIs = append(supportedBuildpackAPIs, v) + } + var supportedPlatformAPIs builder.APISet + for _, v := range api.Platform.Supported { + supportedPlatformAPIs = append(supportedPlatformAPIs, v) + } + + system := dist.System{} + if withSystem { + system.Pre.Buildpacks = append(system.Pre.Buildpacks, []dist.ModuleRef{ + { + Optional: false, + ModuleInfo: dist.ModuleInfo{ + ID: "buildpack.1.id", + Version: "buildpack.1.version", + }, + }, + }...) + system.Post.Buildpacks = append(system.Post.Buildpacks, []dist.ModuleRef{ + { + Optional: false, + ModuleInfo: dist.ModuleInfo{ + ID: "buildpack.2.id", + Version: "buildpack.2.version", + }, + }, + }...) 
+ } + return ifakes.NewFakeBuilderImage(t, tmpDir, builderName, @@ -2543,10 +3523,14 @@ func newFakeBuilderImage(t *testing.T, tmpDir, builderName, defaultBuilderStackI "1234", "5678", builder.Metadata{ - Buildpacks: []dist.BuildpackInfo{ + Buildpacks: []dist.ModuleInfo{ {ID: "buildpack.1.id", Version: "buildpack.1.version"}, {ID: "buildpack.2.id", Version: "buildpack.2.version"}, }, + Extensions: []dist.ModuleInfo{ + {ID: "extension.1.id", Version: "extension.1.version"}, + {ID: "extension.2.id", Version: "extension.2.version"}, + }, Stack: builder.StackMetadata{ RunImage: builder.RunImageMetadata{ Image: runImageName, @@ -2564,15 +3548,15 @@ func newFakeBuilderImage(t *testing.T, tmpDir, builderName, defaultBuilderStackI }, APIs: builder.LifecycleAPIs{ Buildpack: builder.APIVersions{ - Supported: builder.APISet{api.MustParse("0.2"), api.MustParse("0.3"), api.MustParse("0.4")}, + Supported: supportedBuildpackAPIs, }, Platform: builder.APIVersions{ - Supported: builder.APISet{api.MustParse("0.3"), api.MustParse("0.4")}, + Supported: supportedPlatformAPIs, }, }, }, }, - dist.BuildpackLayers{ + dist.ModuleLayers{ "buildpack.1.id": { "buildpack.1.version": { API: api.MustParse("0.3"), @@ -2597,29 +3581,75 @@ func newFakeBuilderImage(t *testing.T, tmpDir, builderName, defaultBuilderStackI }, }, dist.Order{{ - Group: []dist.BuildpackRef{{ - BuildpackInfo: dist.BuildpackInfo{ + Group: []dist.ModuleRef{{ + ModuleInfo: dist.ModuleInfo{ ID: "buildpack.1.id", Version: "buildpack.1.version", }, }}, }, { - Group: []dist.BuildpackRef{{ - BuildpackInfo: dist.BuildpackInfo{ + Group: []dist.ModuleRef{{ + ModuleInfo: dist.ModuleInfo{ ID: "buildpack.2.id", Version: "buildpack.2.version", }, }}, }}, + dist.ModuleLayers{ + "extension.1.id": { + "extension.1.version": { + API: api.MustParse("0.3"), + }, + }, + "extension.2.id": { + "extension.2.version": { + API: api.MustParse("0.3"), + }, + }, + }, + dist.Order{{ + Group: []dist.ModuleRef{{ + ModuleInfo: dist.ModuleInfo{ + ID: 
"extension.1.id", + Version: "extension.1.version", + }, + }}, + }, { + Group: []dist.ModuleRef{{ + ModuleInfo: dist.ModuleInfo{ + ID: "extension.2.id", + Version: "extension.2.version", + }, + }}, + }}, + system, osImageCreator, ) } -type executeFailsLifecycle struct { - Opts build.LifecycleOptions -} - -func (f *executeFailsLifecycle) Execute(_ context.Context, opts build.LifecycleOptions) error { - f.Opts = opts - return errors.New("") +func setAPIs(t *testing.T, image *fakes.Image, buildpackAPIs []string, platformAPIs []string) { + builderMDLabelName := "io.buildpacks.builder.metadata" + var supportedBuildpackAPIs builder.APISet + for _, v := range buildpackAPIs { + supportedBuildpackAPIs = append(supportedBuildpackAPIs, api.MustParse(v)) + } + var supportedPlatformAPIs builder.APISet + for _, v := range platformAPIs { + supportedPlatformAPIs = append(supportedPlatformAPIs, api.MustParse(v)) + } + builderMDLabel, err := image.Label(builderMDLabelName) + h.AssertNil(t, err) + var builderMD builder.Metadata + h.AssertNil(t, json.Unmarshal([]byte(builderMDLabel), &builderMD)) + builderMD.Lifecycle.APIs = builder.LifecycleAPIs{ + Buildpack: builder.APIVersions{ + Supported: supportedBuildpackAPIs, + }, + Platform: builder.APIVersions{ + Supported: supportedPlatformAPIs, + }, + } + builderMDLabelBytes, err := json.Marshal(&builderMD) + h.AssertNil(t, err) + h.AssertNil(t, image.SetLabel(builderMDLabelName, string(builderMDLabelBytes))) } diff --git a/pkg/client/client.go b/pkg/client/client.go index b3f076d624..97f5bb8fd4 100644 --- a/pkg/client/client.go +++ b/pkg/client/client.go @@ -1,11 +1,11 @@ /* -Package client provides all the functionally provided by pack as a library through a go api. +Package client provides all the functionality provided by pack as a library through a go api. -Prerequisites +# Prerequisites In order to use most functionality, you will need an OCI runtime such as Docker or podman installed. 
-References +# References This package provides functionality to create and manipulate all artifacts outlined in the Cloud Native Buildpacks specification. An introduction to these artifacts and their usage can be found at https://buildpacks.io/docs/. @@ -22,25 +22,36 @@ import ( "github.com/buildpacks/imgutil" "github.com/buildpacks/imgutil/local" "github.com/buildpacks/imgutil/remote" - dockerClient "github.com/docker/docker/client" "github.com/google/go-containerregistry/pkg/authn" + dockerClient "github.com/moby/moby/client" "github.com/pkg/errors" - "github.com/buildpacks/pack" "github.com/buildpacks/pack/internal/build" iconfig "github.com/buildpacks/pack/internal/config" "github.com/buildpacks/pack/internal/style" "github.com/buildpacks/pack/pkg/blob" "github.com/buildpacks/pack/pkg/buildpack" + "github.com/buildpacks/pack/pkg/dist" "github.com/buildpacks/pack/pkg/image" + "github.com/buildpacks/pack/pkg/index" "github.com/buildpacks/pack/pkg/logging" ) -//go:generate mockgen -package testmocks -destination ../testmocks/mock_docker_client.go github.com/docker/docker/client CommonAPIClient +const ( + // Env variable to set the root folder for manifest list local storage + xdgRuntimePath = "XDG_RUNTIME_DIR" +) + +var ( + // Version is the version of `pack`. It is injected at compile time. + Version = "0.0.0" +) + +//go:generate mockgen -package testmocks -destination ../testmocks/mock_docker_client.go github.com/moby/moby/client APIClient //go:generate mockgen -package testmocks -destination ../testmocks/mock_image_fetcher.go github.com/buildpacks/pack/pkg/client ImageFetcher -// ImageFetcher is an interface representing the ability to fetch local and images. +// ImageFetcher is an interface representing the ability to fetch local and remote images. type ImageFetcher interface { // Fetch fetches an image by resolving it both remotely and locally depending on provided parameters. 
// The pull behavior is dictated by the pullPolicy, which can have the following behavior @@ -52,6 +63,18 @@ type ImageFetcher interface { // PullIfNotPresent and daemon = false, gives us the same behavior as PullAlways. // There is a single invalid configuration, PullNever and daemon = false, this will always fail. Fetch(ctx context.Context, name string, options image.FetchOptions) (imgutil.Image, error) + + // CheckReadAccess verifies if an image is accessible with read permissions + // When FetchOptions.Daemon is true and the image doesn't exist in the daemon, + // the behavior is dictated by the pull policy, which can have the following behavior + // - PullNever: returns false + // - PullAlways Or PullIfNotPresent: it will check read access for the remote image. + // When FetchOptions.Daemon is false it will check read access for the remote image. + CheckReadAccess(repo string, options image.FetchOptions) bool + + // FetchForPlatform fetches an image and resolves it to a platform-specific digest before fetching. + // This ensures that multi-platform images are always resolved to the correct platform-specific manifest. + FetchForPlatform(ctx context.Context, name string, options image.FetchOptions) (imgutil.Image, error) } //go:generate mockgen -package testmocks -destination ../testmocks/mock_blob_downloader.go github.com/buildpacks/pack/pkg/client BlobDownloader @@ -69,7 +92,23 @@ type BlobDownloader interface { type ImageFactory interface { // NewImage initializes an image object with required settings so that it // can be written either locally or to a registry. 
- NewImage(repoName string, local bool, imageOS string) (imgutil.Image, error) + NewImage(repoName string, local bool, target dist.Target) (imgutil.Image, error) +} + +//go:generate mockgen -package testmocks -destination ../testmocks/mock_index_factory.go github.com/buildpacks/pack/pkg/client IndexFactory + +// IndexFactory is an interface representing the ability to create a ImageIndex/ManifestList. +type IndexFactory interface { + // Exists return true if the given index exits in the local storage + Exists(repoName string) bool + // CreateIndex creates ManifestList locally + CreateIndex(repoName string, opts ...imgutil.IndexOption) (imgutil.ImageIndex, error) + // LoadIndex loads ManifestList from local storage with the given name + LoadIndex(reponame string, opts ...imgutil.IndexOption) (imgutil.ImageIndex, error) + // FetchIndex fetches ManifestList from Registry with the given name + FetchIndex(name string, opts ...imgutil.IndexOption) (imgutil.ImageIndex, error) + // FindIndex will find Index locally then on remote + FindIndex(name string, opts ...imgutil.IndexOption) (imgutil.ImageIndex, error) } //go:generate mockgen -package testmocks -destination ../testmocks/mock_buildpack_downloader.go github.com/buildpacks/pack/pkg/client BuildpackDownloader @@ -77,7 +116,7 @@ type ImageFactory interface { // BuildpackDownloader is an interface for downloading and extracting buildpacks from various sources type BuildpackDownloader interface { // Download parses a buildpack URI and downloads the buildpack and any dependencies buildpacks from the appropriate source - Download(ctx context.Context, buildpackURI string, opts buildpack.DownloadOptions) (buildpack.Buildpack, []buildpack.Buildpack, error) + Download(ctx context.Context, buildpackURI string, opts buildpack.DownloadOptions) (buildpack.BuildModule, []buildpack.BuildModule, error) } // Client is an orchestration object, it contains all parameters needed to @@ -85,11 +124,12 @@ type BuildpackDownloader interface { 
// All settings on this object should be changed through ClientOption functions. type Client struct { logger logging.Logger - docker dockerClient.CommonAPIClient + docker DockerClient keychain authn.Keychain imageFactory ImageFactory imageFetcher ImageFetcher + indexFactory IndexFactory downloader BlobDownloader lifecycleExecutor LifecycleExecutor buildpackDownloader BuildpackDownloader @@ -99,6 +139,47 @@ type Client struct { version string } +func (c *Client) processSystem(system dist.System, buildpacks []buildpack.BuildModule, disableSystem bool) (dist.System, error) { + if disableSystem { + return dist.System{}, nil + } + + if len(buildpacks) == 0 { + return system, nil + } + + resolved := dist.System{} + + // Create a map of available buildpacks for faster lookup + availableBPs := make(map[string]bool) + for _, bp := range buildpacks { + bpInfo := bp.Descriptor().Info() + availableBPs[bpInfo.ID+"@"+bpInfo.Version] = true + } + + // Process pre-buildpacks + for _, preBp := range system.Pre.Buildpacks { + key := preBp.ID + "@" + preBp.Version + if availableBPs[key] { + resolved.Pre.Buildpacks = append(resolved.Pre.Buildpacks, preBp) + } else if !preBp.Optional { + return dist.System{}, errors.Errorf("required system buildpack %s@%s is not available", preBp.ID, preBp.Version) + } + } + + // Process post-buildpacks + for _, postBp := range system.Post.Buildpacks { + key := postBp.ID + "@" + postBp.Version + if availableBPs[key] { + resolved.Post.Buildpacks = append(resolved.Post.Buildpacks, postBp) + } else if !postBp.Optional { + return dist.System{}, errors.Errorf("required system buildpack %s@%s is not available", postBp.ID, postBp.Version) + } + } + + return resolved, nil +} + // Option is a type of function that mutate settings on the client. // Values in these functions are set through currying. 
type Option func(c *Client) @@ -117,6 +198,13 @@ func WithImageFactory(f ImageFactory) Option { } } +// WithIndexFactory supply your own index factory +func WithIndexFactory(f IndexFactory) Option { + return func(c *Client) { + c.indexFactory = f + } +} + // WithFetcher supply your own Fetcher. // A Fetcher retrieves both local and remote images to make them available. func WithFetcher(f ImageFetcher) Option { @@ -151,7 +239,7 @@ func WithCacheDir(path string) Option { } // WithDockerClient supply your own docker client. -func WithDockerClient(docker dockerClient.CommonAPIClient) Option { +func WithDockerClient(docker DockerClient) Option { return func(c *Client) { c.docker = docker } @@ -183,7 +271,7 @@ const DockerAPIVersion = "1.38" // NewClient allocates and returns a Client configured with the specified options. func NewClient(opts ...Option) (*Client, error) { client := &Client{ - version: pack.Version, + version: Version, keychain: authn.DefaultKeychain, } @@ -197,9 +285,8 @@ func NewClient(opts ...Option) (*Client, error) { if client.docker == nil { var err error - client.docker, err = dockerClient.NewClientWithOpts( + client.docker, err = dockerClient.New( dockerClient.FromEnv, - dockerClient.WithVersion(DockerAPIVersion), ) if err != nil { return nil, errors.Wrap(err, "creating docker client") @@ -225,6 +312,18 @@ func NewClient(opts ...Option) (*Client, error) { } } + if client.indexFactory == nil { + packHome, err := iconfig.PackHome() + if err != nil { + return nil, errors.Wrap(err, "getting pack home") + } + indexRootStoragePath := filepath.Join(packHome, "manifests") + if xdgPath, ok := os.LookupEnv(xdgRuntimePath); ok { + indexRootStoragePath = xdgPath + } + client.indexFactory = index.NewIndexFactory(client.keychain, indexRootStoragePath) + } + if client.buildpackDownloader == nil { client.buildpackDownloader = buildpack.NewDownloader( client.logger, @@ -260,12 +359,18 @@ func (r *registryResolver) Resolve(registryName, bpName string) (string, 
error) } type imageFactory struct { - dockerClient dockerClient.CommonAPIClient + dockerClient local.DockerClient keychain authn.Keychain } -func (f *imageFactory) NewImage(repoName string, daemon bool, imageOS string) (imgutil.Image, error) { - platform := imgutil.Platform{OS: imageOS} +func (f *imageFactory) NewImage(repoName string, daemon bool, target dist.Target) (imgutil.Image, error) { + platform := imgutil.Platform{OS: target.OS, Architecture: target.Arch, Variant: target.ArchVariant} + + if len(target.Distributions) > 0 { + // We need to set platform distribution information so that it will be reflected in the image config. + // We assume the given target's distributions were already expanded, we should be dealing with just 1 distribution name and version. + platform.OSVersion = target.Distributions[0].Version + } if daemon { return local.NewImage(repoName, f.dockerClient, local.WithDefaultPlatform(platform)) diff --git a/pkg/client/client_test.go b/pkg/client/client_test.go index 0b40ec0a8f..4d3b476dc4 100644 --- a/pkg/client/client_test.go +++ b/pkg/client/client_test.go @@ -2,14 +2,19 @@ package client import ( "bytes" + "io" "os" "testing" - dockerClient "github.com/docker/docker/client" + "github.com/buildpacks/lifecycle/api" + "github.com/golang/mock/gomock" + dockerClient "github.com/moby/moby/client" "github.com/sclevine/spec" "github.com/sclevine/spec/report" + "github.com/buildpacks/pack/pkg/buildpack" + "github.com/buildpacks/pack/pkg/dist" "github.com/buildpacks/pack/pkg/logging" "github.com/buildpacks/pack/pkg/testmocks" h "github.com/buildpacks/pack/testhelpers" @@ -87,7 +92,7 @@ func testClient(t *testing.T, when spec.G, it spec.S) { when("#WithDockerClient", func() { it("uses docker client provided", func() { - docker, err := dockerClient.NewClientWithOpts( + docker, err := dockerClient.New( dockerClient.FromEnv, ) h.AssertNil(t, err) @@ -122,4 +127,247 @@ func testClient(t *testing.T, when spec.G, it spec.S) { h.AssertEq(t, 
cl.registryMirrors, registryMirrors) }) }) + + when("#processSystem", func() { + var ( + subject *Client + mockController *gomock.Controller + availableBPs []buildpack.BuildModule + systemBuildpacks dist.System + ) + + it.Before(func() { + mockController = gomock.NewController(t) + subject = &Client{} + + // Create mock buildpack modules + availableBPs = []buildpack.BuildModule{ + &mockBuildModule{id: "example/pre-bp", version: "1.0.0"}, + &mockBuildModule{id: "example/post-bp", version: "2.0.0"}, + &mockBuildModule{id: "example/optional-bp", version: "3.0.0"}, + } + }) + + it.After(func() { + mockController.Finish() + }) + + when("disableSystem is true", func() { + it("returns empty system", func() { + systemBuildpacks = dist.System{ + Pre: dist.SystemBuildpacks{ + Buildpacks: []dist.ModuleRef{ + {ModuleInfo: dist.ModuleInfo{ID: "example/pre-bp", Version: "1.0.0"}, Optional: false}, + }, + }, + } + + result, err := subject.processSystem(systemBuildpacks, availableBPs, true) + h.AssertNil(t, err) + h.AssertEq(t, len(result.Pre.Buildpacks), 0) + h.AssertEq(t, len(result.Post.Buildpacks), 0) + }) + }) + + when("no buildpacks are available", func() { + it("returns the original system", func() { + systemBuildpacks = dist.System{ + Pre: dist.SystemBuildpacks{ + Buildpacks: []dist.ModuleRef{ + {ModuleInfo: dist.ModuleInfo{ID: "example/pre-bp", Version: "1.0.0"}, Optional: false}, + }, + }, + } + + result, err := subject.processSystem(systemBuildpacks, []buildpack.BuildModule{}, false) + h.AssertNil(t, err) + h.AssertEq(t, result, systemBuildpacks) + }) + }) + + when("all required system buildpacks are available", func() { + it("returns resolved system with all buildpacks", func() { + systemBuildpacks = dist.System{ + Pre: dist.SystemBuildpacks{ + Buildpacks: []dist.ModuleRef{ + {ModuleInfo: dist.ModuleInfo{ID: "example/pre-bp", Version: "1.0.0"}, Optional: false}, + }, + }, + Post: dist.SystemBuildpacks{ + Buildpacks: []dist.ModuleRef{ + {ModuleInfo: dist.ModuleInfo{ID: 
"example/post-bp", Version: "2.0.0"}, Optional: false}, + }, + }, + } + + result, err := subject.processSystem(systemBuildpacks, availableBPs, false) + h.AssertNil(t, err) + h.AssertEq(t, len(result.Pre.Buildpacks), 1) + h.AssertEq(t, result.Pre.Buildpacks[0].ID, "example/pre-bp") + h.AssertEq(t, len(result.Post.Buildpacks), 1) + h.AssertEq(t, result.Post.Buildpacks[0].ID, "example/post-bp") + }) + }) + + when("required system buildpack is missing", func() { + it("returns an error for missing pre-buildpack", func() { + systemBuildpacks = dist.System{ + Pre: dist.SystemBuildpacks{ + Buildpacks: []dist.ModuleRef{ + {ModuleInfo: dist.ModuleInfo{ID: "missing/pre-bp", Version: "1.0.0"}, Optional: false}, + }, + }, + } + + _, err := subject.processSystem(systemBuildpacks, availableBPs, false) + h.AssertError(t, err, "required system buildpack missing/pre-bp@1.0.0 is not available") + }) + + it("returns an error for missing post-buildpack", func() { + systemBuildpacks = dist.System{ + Post: dist.SystemBuildpacks{ + Buildpacks: []dist.ModuleRef{ + {ModuleInfo: dist.ModuleInfo{ID: "missing/post-bp", Version: "1.0.0"}, Optional: false}, + }, + }, + } + + _, err := subject.processSystem(systemBuildpacks, availableBPs, false) + h.AssertError(t, err, "required system buildpack missing/post-bp@1.0.0 is not available") + }) + }) + + when("optional system buildpack is missing", func() { + it("ignores missing optional pre-buildpack", func() { + systemBuildpacks = dist.System{ + Pre: dist.SystemBuildpacks{ + Buildpacks: []dist.ModuleRef{ + {ModuleInfo: dist.ModuleInfo{ID: "example/pre-bp", Version: "1.0.0"}, Optional: false}, + {ModuleInfo: dist.ModuleInfo{ID: "missing/optional-bp", Version: "1.0.0"}, Optional: true}, + }, + }, + } + + result, err := subject.processSystem(systemBuildpacks, availableBPs, false) + h.AssertNil(t, err) + h.AssertEq(t, len(result.Pre.Buildpacks), 1) + h.AssertEq(t, result.Pre.Buildpacks[0].ID, "example/pre-bp") + }) + + it("ignores missing optional 
post-buildpack", func() { + systemBuildpacks = dist.System{ + Post: dist.SystemBuildpacks{ + Buildpacks: []dist.ModuleRef{ + {ModuleInfo: dist.ModuleInfo{ID: "example/post-bp", Version: "2.0.0"}, Optional: false}, + {ModuleInfo: dist.ModuleInfo{ID: "missing/optional-bp", Version: "1.0.0"}, Optional: true}, + }, + }, + } + + result, err := subject.processSystem(systemBuildpacks, availableBPs, false) + h.AssertNil(t, err) + h.AssertEq(t, len(result.Post.Buildpacks), 1) + h.AssertEq(t, result.Post.Buildpacks[0].ID, "example/post-bp") + }) + }) + + when("mix of available and missing buildpacks", func() { + it("includes available buildpacks and reports error for required missing ones", func() { + systemBuildpacks = dist.System{ + Pre: dist.SystemBuildpacks{ + Buildpacks: []dist.ModuleRef{ + {ModuleInfo: dist.ModuleInfo{ID: "example/pre-bp", Version: "1.0.0"}, Optional: false}, + {ModuleInfo: dist.ModuleInfo{ID: "missing/required-bp", Version: "1.0.0"}, Optional: false}, + }, + }, + Post: dist.SystemBuildpacks{ + Buildpacks: []dist.ModuleRef{ + {ModuleInfo: dist.ModuleInfo{ID: "example/post-bp", Version: "2.0.0"}, Optional: true}, + {ModuleInfo: dist.ModuleInfo{ID: "missing/optional-bp", Version: "1.0.0"}, Optional: true}, + }, + }, + } + + _, err := subject.processSystem(systemBuildpacks, availableBPs, false) + h.AssertError(t, err, "required system buildpack missing/required-bp@1.0.0 is not available") + }) + }) + + when("buildpack version mismatch", func() { + it("requires exact version match", func() { + systemBuildpacks = dist.System{ + Pre: dist.SystemBuildpacks{ + Buildpacks: []dist.ModuleRef{ + {ModuleInfo: dist.ModuleInfo{ID: "example/pre-bp", Version: "2.0.0"}, Optional: false}, // wrong version + }, + }, + } + + _, err := subject.processSystem(systemBuildpacks, availableBPs, false) + h.AssertError(t, err, "required system buildpack example/pre-bp@2.0.0 is not available") + }) + }) + }) +} + +// Mock implementations for testing purpuse + +// mockDescriptor is a 
mock implementation of buildpack.Descriptor +type mockDescriptor struct { + info dist.ModuleInfo +} + +func (m *mockDescriptor) API() *api.Version { + return nil +} + +func (m *mockDescriptor) EnsureStackSupport(stackID string, providedMixins []string, validateRunStageMixins bool) error { + return nil +} + +func (m *mockDescriptor) EnsureTargetSupport(os, arch, distroName, distroVersion string) error { + return nil +} + +func (m *mockDescriptor) EscapedID() string { + return m.info.ID +} + +func (m *mockDescriptor) Info() dist.ModuleInfo { + return m.info +} + +func (m *mockDescriptor) Kind() string { + return buildpack.KindBuildpack +} + +func (m *mockDescriptor) Order() dist.Order { + return nil +} + +func (m *mockDescriptor) Stacks() []dist.Stack { + return nil +} + +func (m *mockDescriptor) Targets() []dist.Target { + return nil +} + +// mockBuildModule is a mock implementation of buildpack.BuildModule for testing +type mockBuildModule struct { + id string + version string +} + +func (m *mockBuildModule) Descriptor() buildpack.Descriptor { + return &mockDescriptor{ + info: dist.ModuleInfo{ + ID: m.id, + Version: m.version, + }, + } +} + +func (m *mockBuildModule) Open() (io.ReadCloser, error) { + return nil, nil } diff --git a/pkg/client/common.go b/pkg/client/common.go index b6e0e8ec29..a019d416df 100644 --- a/pkg/client/common.go +++ b/pkg/client/common.go @@ -1,24 +1,42 @@ package client import ( + "context" "errors" "fmt" + "github.com/buildpacks/imgutil" "github.com/google/go-containerregistry/pkg/name" "github.com/buildpacks/pack/internal/builder" "github.com/buildpacks/pack/internal/config" "github.com/buildpacks/pack/internal/registry" "github.com/buildpacks/pack/internal/style" + "github.com/buildpacks/pack/pkg/image" "github.com/buildpacks/pack/pkg/logging" ) +func (c *Client) addManifestToIndex(ctx context.Context, repoName string, index imgutil.ImageIndex) error { + imageRef, err := name.ParseReference(repoName, name.WeakValidation) + if err != nil 
{ + return fmt.Errorf("'%s' is not a valid manifest reference: %s", style.Symbol(repoName), err) + } + + imageToAdd, err := c.imageFetcher.Fetch(ctx, imageRef.Name(), image.FetchOptions{Daemon: false}) + if err != nil { + return err + } + + index.AddManifest(imageToAdd.UnderlyingImage()) + return nil +} + func (c *Client) parseTagReference(imageName string) (name.Reference, error) { if imageName == "" { return nil, errors.New("image is a required parameter") } if _, err := name.ParseReference(imageName, name.WeakValidation); err != nil { - return nil, err + return nil, fmt.Errorf("'%s' is not a valid tag reference: %s", imageName, err) } ref, err := name.NewTag(imageName, name.WeakValidation) if err != nil { @@ -28,7 +46,7 @@ func (c *Client) parseTagReference(imageName string) (name.Reference, error) { return ref, nil } -func (c *Client) resolveRunImage(runImage, imgRegistry, bldrRegistry string, stackInfo builder.StackMetadata, additionalMirrors map[string][]string, publish bool) string { +func (c *Client) resolveRunImage(runImage, imgRegistry, bldrRegistry string, runImageMetadata builder.RunImageMetadata, additionalMirrors map[string][]string, publish bool, options image.FetchOptions) string { if runImage != "" { c.logger.Debugf("Using provided run-image %s", style.Symbol(runImage)) return runImage @@ -41,15 +59,17 @@ func (c *Client) resolveRunImage(runImage, imgRegistry, bldrRegistry string, sta runImageName := getBestRunMirror( preferredRegistry, - stackInfo.RunImage.Image, - stackInfo.RunImage.Mirrors, - additionalMirrors[stackInfo.RunImage.Image], + runImageMetadata.Image, + runImageMetadata.Mirrors, + additionalMirrors[runImageMetadata.Image], + c.imageFetcher, + options, ) switch { - case runImageName == stackInfo.RunImage.Image: + case runImageName == runImageMetadata.Image: c.logger.Debugf("Selected run image %s", style.Symbol(runImageName)) - case contains(stackInfo.RunImage.Mirrors, runImageName): + case contains(runImageMetadata.Mirrors, 
runImageName): c.logger.Debugf("Selected run image mirror %s", style.Symbol(runImageName)) default: c.logger.Debugf("Selected run image mirror %s from local config", style.Symbol(runImageName)) @@ -107,25 +127,33 @@ func contains(slc []string, v string) bool { return false } -func getBestRunMirror(registry string, runImage string, mirrors []string, preferredMirrors []string) string { - var runImageList []string - runImageList = append(runImageList, preferredMirrors...) - runImageList = append(runImageList, runImage) - runImageList = append(runImageList, mirrors...) - +func getBestRunMirror(registry string, runImage string, mirrors []string, preferredMirrors []string, fetcher ImageFetcher, options image.FetchOptions) string { + runImageList := filterImageList(append(append(append([]string{}, preferredMirrors...), runImage), mirrors...), fetcher, options) for _, img := range runImageList { ref, err := name.ParseReference(img, name.WeakValidation) if err != nil { continue } - if ref.Context().RegistryStr() == registry { + if reg := ref.Context().RegistryStr(); reg == registry { return img } } - if len(preferredMirrors) > 0 { - return preferredMirrors[0] + if len(runImageList) > 0 { + return runImageList[0] } return runImage } + +func filterImageList(imageList []string, fetcher ImageFetcher, options image.FetchOptions) []string { + var accessibleImages []string + + for i, img := range imageList { + if fetcher.CheckReadAccess(img, options) { + accessibleImages = append(accessibleImages, imageList[i]) + } + } + + return accessibleImages +} diff --git a/pkg/client/common_test.go b/pkg/client/common_test.go index 732c324b73..65376c87e4 100644 --- a/pkg/client/common_test.go +++ b/pkg/client/common_test.go @@ -4,12 +4,17 @@ import ( "bytes" "testing" + "github.com/buildpacks/lifecycle/auth" + "github.com/golang/mock/gomock" + "github.com/google/go-containerregistry/pkg/authn" "github.com/heroku/color" "github.com/sclevine/spec" "github.com/sclevine/spec/report" 
"github.com/buildpacks/pack/internal/builder" + "github.com/buildpacks/pack/pkg/image" "github.com/buildpacks/pack/pkg/logging" + "github.com/buildpacks/pack/pkg/testmocks" h "github.com/buildpacks/pack/testhelpers" ) @@ -25,6 +30,7 @@ func testCommon(t *testing.T, when spec.G, it spec.S) { subject *Client outBuf bytes.Buffer logger logging.Logger + keychain authn.Keychain runImageName string defaultRegistry string defaultMirror string @@ -32,13 +38,17 @@ func testCommon(t *testing.T, when spec.G, it spec.S) { gcrRunMirror string stackInfo builder.StackMetadata assert = h.NewAssertionManager(t) + publish bool + err error ) it.Before(func() { logger = logging.NewLogWithWriters(&outBuf, &outBuf) - var err error - subject, err = NewClient(WithLogger(logger)) + keychain, err = auth.DefaultKeychain("pack-test/dummy") + h.AssertNil(t, err) + + subject, err = NewClient(WithLogger(logger), WithKeychain(keychain)) assert.Nil(err) defaultRegistry = "default.registry.io" @@ -57,16 +67,29 @@ func testCommon(t *testing.T, when spec.G, it spec.S) { }) when("passed specific run image", func() { + it.Before(func() { + publish = false + }) + it("selects that run image", func() { runImgFlag := "flag/passed-run-image" - runImageName := subject.resolveRunImage(runImgFlag, defaultRegistry, "", stackInfo, nil, false) + runImageName = subject.resolveRunImage(runImgFlag, defaultRegistry, "", stackInfo.RunImage, nil, publish, image.FetchOptions{Daemon: !publish, PullPolicy: image.PullAlways}) assert.Equal(runImageName, runImgFlag) }) }) - when("publish is true", func() { + when("desirable run-image are accessible", func() { + it.Before(func() { + publish = true + mockController := gomock.NewController(t) + mockFetcher := testmocks.NewMockImageFetcher(mockController) + mockFetcher.EXPECT().CheckReadAccess(gomock.Any(), gomock.Any()).Return(true).AnyTimes() + subject, err = NewClient(WithLogger(logger), WithKeychain(keychain), WithFetcher(mockFetcher)) + h.AssertNil(t, err) + }) + 
it("defaults to run-image in registry publishing to", func() { - runImageName := subject.resolveRunImage("", gcrRegistry, defaultRegistry, stackInfo, nil, true) + runImageName = subject.resolveRunImage("", gcrRegistry, defaultRegistry, stackInfo.RunImage, nil, publish, image.FetchOptions{}) assert.Equal(runImageName, gcrRunMirror) }) @@ -74,7 +97,7 @@ func testCommon(t *testing.T, when spec.G, it spec.S) { configMirrors := map[string][]string{ runImageName: {defaultRegistry + "/unique-run-img"}, } - runImageName := subject.resolveRunImage("", defaultRegistry, "", stackInfo, configMirrors, true) + runImageName = subject.resolveRunImage("", defaultRegistry, "", stackInfo.RunImage, configMirrors, publish, image.FetchOptions{}) assert.NotEqual(runImageName, defaultMirror) assert.Equal(runImageName, defaultRegistry+"/unique-run-img") }) @@ -83,36 +106,46 @@ func testCommon(t *testing.T, when spec.G, it spec.S) { configMirrors := map[string][]string{ runImageName: {defaultRegistry + "/unique-run-img"}, } - runImageName := subject.resolveRunImage("", "test.registry.io", "", stackInfo, configMirrors, true) + runImageName = subject.resolveRunImage("", "test.registry.io", "", stackInfo.RunImage, configMirrors, publish, image.FetchOptions{}) assert.NotEqual(runImageName, defaultMirror) assert.Equal(runImageName, defaultRegistry+"/unique-run-img") }) }) - // If publish is false, we are using the local daemon, and want to match to the builder registry - when("publish is false", func() { - it("defaults to run-image in registry publishing to", func() { - runImageName := subject.resolveRunImage("", gcrRegistry, defaultRegistry, stackInfo, nil, false) + when("desirable run-images are not accessible", func() { + it.Before(func() { + publish = true + + mockController := gomock.NewController(t) + mockFetcher := testmocks.NewMockImageFetcher(mockController) + mockFetcher.EXPECT().CheckReadAccess(gcrRunMirror, gomock.Any()).Return(false) + 
mockFetcher.EXPECT().CheckReadAccess(stackInfo.RunImage.Image, gomock.Any()).Return(false) + mockFetcher.EXPECT().CheckReadAccess(defaultMirror, gomock.Any()).Return(true) + + subject, err = NewClient(WithLogger(logger), WithKeychain(keychain), WithFetcher(mockFetcher)) + h.AssertNil(t, err) + }) + + it("selects the first accessible run-image", func() { + runImageName = subject.resolveRunImage("", gcrRegistry, defaultRegistry, stackInfo.RunImage, nil, publish, image.FetchOptions{}) assert.Equal(runImageName, defaultMirror) - assert.NotEqual(runImageName, gcrRunMirror) }) + }) - it("prefers config defined run image mirror to stack defined run image mirror", func() { - configMirrors := map[string][]string{ - runImageName: {defaultRegistry + "/unique-run-img"}, + when("desirable run-image are empty", func() { + it.Before(func() { + publish = false + stackInfo = builder.StackMetadata{ + RunImage: builder.RunImageMetadata{ + Image: "stack/run-image", + }, } - runImageName := subject.resolveRunImage("", gcrRegistry, defaultRegistry, stackInfo, configMirrors, false) - assert.NotEqual(runImageName, defaultMirror) - assert.Equal(runImageName, defaultRegistry+"/unique-run-img") }) - it("returns a config mirror if no match to target registry", func() { - configMirrors := map[string][]string{ - runImageName: {defaultRegistry + "/unique-run-img"}, - } - runImageName := subject.resolveRunImage("", defaultRegistry, "test.registry.io", stackInfo, configMirrors, false) - assert.NotEqual(runImageName, defaultMirror) - assert.Equal(runImageName, defaultRegistry+"/unique-run-img") + it("selects the builder run-image", func() { + // issue: https://github.com/buildpacks/pack/issues/2078 + runImageName = subject.resolveRunImage("", "", "", stackInfo.RunImage, nil, publish, image.FetchOptions{}) + assert.Equal(runImageName, "stack/run-image") }) }) }) diff --git a/pkg/client/create_builder.go b/pkg/client/create_builder.go index 34bca4b5ee..860639dea4 100644 --- 
a/pkg/client/create_builder.go +++ b/pkg/client/create_builder.go @@ -1,18 +1,29 @@ package client import ( + "archive/tar" "context" "fmt" + "io" + OS "os" + "path/filepath" + "sort" + "strings" + + "github.com/buildpacks/pack/internal/name" "github.com/Masterminds/semver" "github.com/buildpacks/imgutil" "github.com/pkg/errors" + "golang.org/x/text/cases" + "golang.org/x/text/language" pubbldr "github.com/buildpacks/pack/builder" "github.com/buildpacks/pack/internal/builder" "github.com/buildpacks/pack/internal/paths" "github.com/buildpacks/pack/internal/style" "github.com/buildpacks/pack/pkg/buildpack" + "github.com/buildpacks/pack/pkg/dist" "github.com/buildpacks/pack/pkg/image" ) @@ -25,6 +36,12 @@ type CreateBuilderOptions struct { // Name of the builder. BuilderName string + // BuildConfigEnv for Builder + BuildConfigEnv map[string]string + + // Map of labels to add to the Buildpack + Labels map[string]string + // Configuration that defines the functionality a builder provides. Config pubbldr.Config @@ -32,104 +49,205 @@ type CreateBuilderOptions struct { // Requires BuilderName to be a valid registry location. Publish bool + // Append [os]-[arch] suffix to the image tag when publishing a multi-arch to a registry + // Requires Publish to be true + AppendImageNameSuffix bool + // Buildpack registry name. Defines where all registry buildpacks will be pulled from. Registry string // Strategy for updating images before a build. PullPolicy image.PullPolicy + + // List of modules to be flattened + Flatten buildpack.FlattenModuleInfos + + // Target platforms to build builder images for + Targets []dist.Target + + // Temporary directory to use for downloading lifecycle images. + TempDirectory string + + // Additional image tags to push to, each will contain contents identical to Image + AdditionalTags []string } // CreateBuilder creates and saves a builder image to a registry with the provided options. 
// If any configuration is invalid, it will error and exit without creating any images. func (c *Client) CreateBuilder(ctx context.Context, opts CreateBuilderOptions) error { - if err := c.validateConfig(ctx, opts); err != nil { + targets, err := c.processBuilderCreateTargets(ctx, opts) + if err != nil { return err } - bldr, err := c.createBaseBuilder(ctx, opts) + if len(targets) == 0 { + _, err = c.createBuilderTarget(ctx, opts, nil, false) + if err != nil { + return err + } + } else { + var digests []string + multiArch := len(targets) > 1 && opts.Publish + + for _, target := range targets { + digest, err := c.createBuilderTarget(ctx, opts, &target, multiArch) + if err != nil { + return err + } + digests = append(digests, digest) + } + + if multiArch && len(digests) > 1 { + return c.CreateManifest(ctx, CreateManifestOptions{ + IndexRepoName: opts.BuilderName, + RepoNames: digests, + Publish: true, + }) + } + } + + return nil +} + +func (c *Client) createBuilderTarget(ctx context.Context, opts CreateBuilderOptions, target *dist.Target, multiArch bool) (string, error) { + if err := c.validateConfig(ctx, opts, target); err != nil { + return "", err + } + + bldr, err := c.createBaseBuilder(ctx, opts, target, multiArch) if err != nil { - return errors.Wrap(err, "failed to create builder") + return "", errors.Wrap(err, "failed to create builder") } if err := c.addBuildpacksToBuilder(ctx, opts, bldr); err != nil { - return errors.Wrap(err, "failed to add buildpacks to builder") + return "", errors.Wrap(err, "failed to add buildpacks to builder") + } + + if err := c.addExtensionsToBuilder(ctx, opts, bldr); err != nil { + return "", errors.Wrap(err, "failed to add extensions to builder") } bldr.SetOrder(opts.Config.Order) - bldr.SetStack(opts.Config.Stack) + bldr.SetOrderExtensions(opts.Config.OrderExtensions) + bldr.SetSystem(opts.Config.System) + + if opts.Config.Stack.ID != "" { + bldr.SetStack(opts.Config.Stack) + } + bldr.SetRunImage(opts.Config.Run) + 
bldr.SetBuildConfigEnv(opts.BuildConfigEnv) - return bldr.Save(c.logger, builder.CreatorMetadata{Version: c.version}) + err = bldr.Save(c.logger, builder.CreatorMetadata{Version: c.version}, opts.AdditionalTags...) + if err != nil { + return "", err + } + + if multiArch { + // We need to keep the identifier to create the image index + id, err := bldr.Image().Identifier() + if err != nil { + return "", errors.Wrapf(err, "determining image manifest digest") + } + return id.String(), nil + } + return "", nil } -func (c *Client) validateConfig(ctx context.Context, opts CreateBuilderOptions) error { +func (c *Client) validateConfig(ctx context.Context, opts CreateBuilderOptions, target *dist.Target) error { if err := pubbldr.ValidateConfig(opts.Config); err != nil { return errors.Wrap(err, "invalid builder config") } - if err := c.validateRunImageConfig(ctx, opts); err != nil { + if err := c.validateRunImageConfig(ctx, opts, target); err != nil { return errors.Wrap(err, "invalid run image config") } return nil } -func (c *Client) validateRunImageConfig(ctx context.Context, opts CreateBuilderOptions) error { +func (c *Client) validateRunImageConfig(ctx context.Context, opts CreateBuilderOptions, target *dist.Target) error { var runImages []imgutil.Image - for _, i := range append([]string{opts.Config.Stack.RunImage}, opts.Config.Stack.RunImageMirrors...) { - if !opts.Publish { - img, err := c.imageFetcher.Fetch(ctx, i, image.FetchOptions{Daemon: true, PullPolicy: opts.PullPolicy}) + for _, r := range opts.Config.Run.Images { + for _, i := range append([]string{r.Image}, r.Mirrors...) 
{ + if !opts.Publish { + img, err := c.imageFetcher.Fetch(ctx, i, image.FetchOptions{Daemon: true, PullPolicy: opts.PullPolicy, Target: target}) + if err != nil { + if errors.Cause(err) != image.ErrNotFound { + return errors.Wrap(err, "failed to fetch image") + } + } else { + runImages = append(runImages, img) + continue + } + } + + img, err := c.imageFetcher.Fetch(ctx, i, image.FetchOptions{Daemon: false, PullPolicy: opts.PullPolicy, Target: target}) if err != nil { if errors.Cause(err) != image.ErrNotFound { return errors.Wrap(err, "failed to fetch image") } + c.logger.Warnf("run image %s is not accessible", style.Symbol(i)) } else { runImages = append(runImages, img) - continue - } - } - - img, err := c.imageFetcher.Fetch(ctx, i, image.FetchOptions{Daemon: false, PullPolicy: opts.PullPolicy}) - if err != nil { - if errors.Cause(err) != image.ErrNotFound { - return errors.Wrap(err, "failed to fetch image") } - c.logger.Warnf("run image %s is not accessible", style.Symbol(i)) - } else { - runImages = append(runImages, img) } } for _, img := range runImages { - stackID, err := img.Label("io.buildpacks.stack.id") - if err != nil { - return errors.Wrap(err, "failed to label image") - } + if opts.Config.Stack.ID != "" { + stackID, err := img.Label("io.buildpacks.stack.id") + if err != nil { + return errors.Wrap(err, "failed to label image") + } - if stackID != opts.Config.Stack.ID { - return fmt.Errorf( - "stack %s from builder config is incompatible with stack %s from run image %s", - style.Symbol(opts.Config.Stack.ID), - style.Symbol(stackID), - style.Symbol(img.Name()), - ) + if stackID != opts.Config.Stack.ID { + return fmt.Errorf( + "stack %s from builder config is incompatible with stack %s from run image %s", + style.Symbol(opts.Config.Stack.ID), + style.Symbol(stackID), + style.Symbol(img.Name()), + ) + } } } return nil } -func (c *Client) createBaseBuilder(ctx context.Context, opts CreateBuilderOptions) (*builder.Builder, error) { - baseImage, err := 
c.imageFetcher.Fetch(ctx, opts.Config.Stack.BuildImage, image.FetchOptions{Daemon: !opts.Publish, PullPolicy: opts.PullPolicy}) +func (c *Client) createBaseBuilder(ctx context.Context, opts CreateBuilderOptions, target *dist.Target, multiArch bool) (*builder.Builder, error) { + baseImage, err := c.imageFetcher.Fetch(ctx, opts.Config.Build.Image, image.FetchOptions{Daemon: !opts.Publish, PullPolicy: opts.PullPolicy, Target: target}) if err != nil { return nil, errors.Wrap(err, "fetch build image") } c.logger.Debugf("Creating builder %s from build-image %s", style.Symbol(opts.BuilderName), style.Symbol(baseImage.Name())) - bldr, err := builder.New(baseImage, opts.BuilderName) + + var builderOpts []builder.BuilderOption + if opts.Flatten != nil && len(opts.Flatten.FlattenModules()) > 0 { + builderOpts = append(builderOpts, builder.WithFlattened(opts.Flatten)) + } + if len(opts.Labels) > 0 { + builderOpts = append(builderOpts, builder.WithLabels(opts.Labels)) + } + + builderName := opts.BuilderName + if multiArch && opts.AppendImageNameSuffix { + builderName, err = name.AppendSuffix(builderName, *target) + if err != nil { + return nil, errors.Wrap(err, "invalid image name") + } + } + + bldr, err := builder.New(baseImage, builderName, builderOpts...) 
if err != nil { return nil, errors.Wrap(err, "invalid build-image") } + architecture, err := baseImage.Architecture() + if err != nil { + return nil, errors.Wrap(err, "lookup image Architecture") + } + os, err := baseImage.OS() if err != nil { return nil, errors.Wrap(err, "lookup image OS") @@ -141,7 +259,7 @@ func (c *Client) createBaseBuilder(ctx context.Context, opts CreateBuilderOption bldr.SetDescription(opts.Config.Description) - if bldr.StackID != opts.Config.Stack.ID { + if opts.Config.Stack.ID != "" && bldr.StackID != opts.Config.Stack.ID { return nil, fmt.Errorf( "stack %s from builder config is incompatible with stack %s from build image", style.Symbol(opts.Config.Stack.ID), @@ -149,17 +267,23 @@ func (c *Client) createBaseBuilder(ctx context.Context, opts CreateBuilderOption ) } - lifecycle, err := c.fetchLifecycle(ctx, opts.Config.Lifecycle, opts.RelativeBaseDir, os) + lifecycle, err := c.fetchLifecycle(ctx, opts, os, architecture) if err != nil { return nil, errors.Wrap(err, "fetch lifecycle") } + // Validate lifecycle version for image extensions + if err := c.validateLifecycleVersion(opts.Config, lifecycle); err != nil { + return nil, err + } bldr.SetLifecycle(lifecycle) + bldr.SetBuildConfigEnv(opts.BuildConfigEnv) return bldr, nil } -func (c *Client) fetchLifecycle(ctx context.Context, config pubbldr.LifecycleConfig, relativeBaseDir, os string) (builder.Lifecycle, error) { +func (c *Client) fetchLifecycle(ctx context.Context, opts CreateBuilderOptions, os string, architecture string) (builder.Lifecycle, error) { + config := opts.Config.Lifecycle if config.Version != "" && config.URI != "" { return nil, errors.Errorf( "%s can only declare %s or %s, not both", @@ -170,20 +294,25 @@ func (c *Client) fetchLifecycle(ctx context.Context, config pubbldr.LifecycleCon var uri string var err error switch { + case buildpack.HasDockerLocator(config.URI): + uri, err = c.uriFromLifecycleImage(ctx, opts.TempDirectory, config) + if err != nil { + return nil, 
errors.Wrap(err, "Could not parse uri from lifecycle image") + } case config.Version != "": v, err := semver.NewVersion(config.Version) if err != nil { return nil, errors.Wrapf(err, "%s must be a valid semver", style.Symbol("lifecycle.version")) } - uri = uriFromLifecycleVersion(*v, os) + uri = c.uriFromLifecycleVersion(*v, os, architecture) case config.URI != "": - uri, err = paths.FilePathToURI(config.URI, relativeBaseDir) + uri, err = paths.FilePathToURI(config.URI, opts.RelativeBaseDir) if err != nil { return nil, err } default: - uri = uriFromLifecycleVersion(*semver.MustParse(builder.DefaultLifecycleVersion), os) + uri = c.uriFromLifecycleVersion(*semver.MustParse(builder.DefaultLifecycleVersion), os, architecture) } blob, err := c.downloader.Download(ctx, uri) @@ -201,72 +330,260 @@ func (c *Client) fetchLifecycle(ctx context.Context, config pubbldr.LifecycleCon func (c *Client) addBuildpacksToBuilder(ctx context.Context, opts CreateBuilderOptions, bldr *builder.Builder) error { for _, b := range opts.Config.Buildpacks { - c.logger.Debugf("Looking up buildpack %s", style.Symbol(b.DisplayString())) - - imageOS, err := bldr.Image().OS() - if err != nil { - return errors.Wrapf(err, "getting OS from %s", style.Symbol(bldr.Image().Name())) + if err := c.addConfig(ctx, buildpack.KindBuildpack, b, opts, bldr); err != nil { + return err } + } + return nil +} - mainBP, depBPs, err := c.buildpackDownloader.Download(ctx, b.URI, buildpack.DownloadOptions{ - RegistryName: opts.Registry, - ImageOS: imageOS, - RelativeBaseDir: opts.RelativeBaseDir, - Daemon: !opts.Publish, - PullPolicy: opts.PullPolicy, - ImageName: b.ImageName, - }) - if err != nil { - return errors.Wrap(err, "downloading buildpack") +func (c *Client) addExtensionsToBuilder(ctx context.Context, opts CreateBuilderOptions, bldr *builder.Builder) error { + for _, e := range opts.Config.Extensions { + if err := c.addConfig(ctx, buildpack.KindExtension, e, opts, bldr); err != nil { + return err } + } + return 
nil +} - err = validateBuildpack(mainBP, b.URI, b.ID, b.Version) - if err != nil { - return errors.Wrap(err, "invalid buildpack") - } +func (c *Client) addConfig(ctx context.Context, kind string, config pubbldr.ModuleConfig, opts CreateBuilderOptions, bldr *builder.Builder) error { + c.logger.Debugf("Looking up %s %s", kind, style.Symbol(config.DisplayString())) - bpDesc := mainBP.Descriptor() - for _, deprecatedAPI := range bldr.LifecycleDescriptor().APIs.Buildpack.Deprecated { - if deprecatedAPI.Equal(bpDesc.API) { - c.logger.Warnf("Buildpack %s is using deprecated Buildpacks API version %s", style.Symbol(bpDesc.Info.FullName()), style.Symbol(bpDesc.API.String())) - break - } - } + builderOS, err := bldr.Image().OS() + if err != nil { + return errors.Wrapf(err, "getting builder OS") + } + builderArch, err := bldr.Image().Architecture() + if err != nil { + return errors.Wrapf(err, "getting builder architecture") + } + + target := &dist.Target{OS: builderOS, Arch: builderArch} + c.logger.Debugf("Downloading buildpack for platform: %s", target.ValuesAsPlatform()) + + mainBP, depBPs, err := c.buildpackDownloader.Download(ctx, config.URI, buildpack.DownloadOptions{ + Daemon: !opts.Publish, + ImageName: config.ImageName, + ModuleKind: kind, + PullPolicy: opts.PullPolicy, + RegistryName: opts.Registry, + RelativeBaseDir: opts.RelativeBaseDir, + Target: target, + }) + if err != nil { + return errors.Wrapf(err, "downloading %s", kind) + } + err = validateModule(kind, mainBP, config.URI, config.ID, config.Version) + if err != nil { + return errors.Wrapf(err, "invalid %s", kind) + } - for _, bp := range append([]buildpack.Buildpack{mainBP}, depBPs...) 
{ - bldr.AddBuildpack(bp) + bpDesc := mainBP.Descriptor() + for _, deprecatedAPI := range bldr.LifecycleDescriptor().APIs.Buildpack.Deprecated { + if deprecatedAPI.Equal(bpDesc.API()) { + c.logger.Warnf( + "%s %s is using deprecated Buildpacks API version %s", + cases.Title(language.AmericanEnglish).String(kind), + style.Symbol(bpDesc.Info().FullName()), + style.Symbol(bpDesc.API().String()), + ) + break } } + // Fixes 1453 + sort.Slice(depBPs, func(i, j int) bool { + compareID := strings.Compare(depBPs[i].Descriptor().Info().ID, depBPs[j].Descriptor().Info().ID) + if compareID == 0 { + return strings.Compare(depBPs[i].Descriptor().Info().Version, depBPs[j].Descriptor().Info().Version) <= 0 + } + return compareID < 0 + }) + + switch kind { + case buildpack.KindBuildpack: + bldr.AddBuildpacks(mainBP, depBPs) + case buildpack.KindExtension: + // Extensions can't be composite + bldr.AddExtension(mainBP) + default: + return fmt.Errorf("unknown module kind: %s", kind) + } return nil } -func validateBuildpack(bp buildpack.Buildpack, source, expectedID, expectedBPVersion string) error { - if expectedID != "" && bp.Descriptor().Info.ID != expectedID { +func (c *Client) processBuilderCreateTargets(ctx context.Context, opts CreateBuilderOptions) ([]dist.Target, error) { + var targets []dist.Target + + if len(opts.Targets) > 0 { + if opts.Publish { + targets = opts.Targets + } else { + // find a target that matches the daemon + daemonTarget, err := c.daemonTarget(ctx, opts.Targets) + if err != nil { + return targets, err + } + targets = append(targets, daemonTarget) + } + } + return targets, nil +} + +func validateModule(kind string, module buildpack.BuildModule, source, expectedID, expectedVersion string) error { + info := module.Descriptor().Info() + if expectedID != "" && info.ID != expectedID { return fmt.Errorf( - "buildpack from URI %s has ID %s which does not match ID %s from builder config", + "%s from URI %s has ID %s which does not match ID %s from builder config", 
+ kind, style.Symbol(source), - style.Symbol(bp.Descriptor().Info.ID), + style.Symbol(info.ID), style.Symbol(expectedID), ) } - if expectedBPVersion != "" && bp.Descriptor().Info.Version != expectedBPVersion { + if expectedVersion != "" && info.Version != expectedVersion { return fmt.Errorf( - "buildpack from URI %s has version %s which does not match version %s from builder config", + "%s from URI %s has version %s which does not match version %s from builder config", + kind, style.Symbol(source), - style.Symbol(bp.Descriptor().Info.Version), - style.Symbol(expectedBPVersion), + style.Symbol(info.Version), + style.Symbol(expectedVersion), ) } return nil } -func uriFromLifecycleVersion(version semver.Version, os string) string { +func (c *Client) uriFromLifecycleVersion(version semver.Version, os string, architecture string) string { + arch := "x86-64" + if os == "windows" { - return fmt.Sprintf("https://github.com/buildpacks/lifecycle/releases/download/v%s/lifecycle-v%s+windows.x86-64.tgz", version.String(), version.String()) + return fmt.Sprintf("https://github.com/buildpacks/lifecycle/releases/download/v%s/lifecycle-v%s+windows.%s.tgz", version.String(), version.String(), arch) } - return fmt.Sprintf("https://github.com/buildpacks/lifecycle/releases/download/v%s/lifecycle-v%s+linux.x86-64.tgz", version.String(), version.String()) + if builder.SupportedLinuxArchitecture(architecture) { + arch = architecture + } else { + // FIXME: this should probably be an error case in the future, see https://github.com/buildpacks/pack/issues/2163 + c.logger.Warnf("failed to find a lifecycle binary for requested architecture %s, defaulting to %s", style.Symbol(architecture), style.Symbol(arch)) + } + + return fmt.Sprintf("https://github.com/buildpacks/lifecycle/releases/download/v%s/lifecycle-v%s+linux.%s.tgz", version.String(), version.String(), arch) +} + +func stripTopDirAndWrite(layerReader io.ReadCloser, outputPath string) (*OS.File, error) { + file, err := 
OS.Create(outputPath) + if err != nil { + return nil, err + } + + tarWriter := tar.NewWriter(file) + tarReader := tar.NewReader(layerReader) + tarReader.Next() + + for { + header, err := tarReader.Next() + if err == io.EOF { + break + } + if err != nil { + return nil, err + } + + pathSep := string(OS.PathSeparator) + cnbPrefix := fmt.Sprintf("%scnb%s", pathSep, pathSep) + newHeader := *header + newHeader.Name = strings.TrimPrefix(header.Name, cnbPrefix) + + if err := tarWriter.WriteHeader(&newHeader); err != nil { + return nil, err + } + + if _, err := io.Copy(tarWriter, tarReader); err != nil { + return nil, err + } + } + + return file, nil +} + +func (c *Client) uriFromLifecycleImage(ctx context.Context, basePath string, config pubbldr.LifecycleConfig) (uri string, err error) { + var lifecycleImage imgutil.Image + imageName := buildpack.ParsePackageLocator(config.URI) + c.logger.Debugf("Downloading lifecycle image: %s", style.Symbol(imageName)) + + lifecycleImage, err = c.imageFetcher.Fetch(ctx, imageName, image.FetchOptions{Daemon: false}) + if err != nil { + return "", err + } + + lifecyclePath := filepath.Join(basePath, "lifecycle.tar") + underlyingImage := lifecycleImage.UnderlyingImage() + if underlyingImage == nil { + return "", errors.New("lifecycle image has no underlying image") + } + + layers, err := underlyingImage.Layers() + if err != nil { + return "", err + } + + if len(layers) == 0 { + return "", errors.New("lifecycle image has no layers") + } + + // Assume the last layer has the lifecycle + lifecycleLayer := layers[len(layers)-1] + + layerReader, err := lifecycleLayer.Uncompressed() + if err != nil { + return "", err + } + defer layerReader.Close() + + file, err := stripTopDirAndWrite(layerReader, lifecyclePath) + if err != nil { + return "", err + } + + defer file.Close() + + uri, err = paths.FilePathToURI(lifecyclePath, "") + if err != nil { + return "", err + } + return uri, err +} + +func hasExtensions(builderConfig pubbldr.Config) bool { + 
return len(builderConfig.Extensions) > 0 || len(builderConfig.OrderExtensions) > 0 +} + +func (c *Client) validateLifecycleVersion(builderConfig pubbldr.Config, lifecycle builder.Lifecycle) error { + if !hasExtensions(builderConfig) { + return nil + } + + descriptor := lifecycle.Descriptor() + + // Extensions are stable starting from Platform API 0.13 + // Check the latest supported Platform API version + if len(descriptor.APIs.Platform.Supported) == 0 { + // No Platform API information available, skip validation + return nil + } + + platformAPI := descriptor.APIs.Platform.Supported.Latest() + if platformAPI.LessThan("0.13") { + if !c.experimental { + return errors.Errorf( + "builder config contains image extensions, but the lifecycle Platform API version (%s) is older than 0.13; "+ + "support for image extensions with Platform API < 0.13 is currently experimental", + platformAPI.String(), + ) + } + } + + return nil } diff --git a/pkg/client/create_builder_test.go b/pkg/client/create_builder_test.go index 45a16b9437..664b1bf948 100644 --- a/pkg/client/create_builder_test.go +++ b/pkg/client/create_builder_test.go @@ -4,16 +4,20 @@ import ( "bytes" "context" "fmt" - "io/ioutil" "os" "path/filepath" + "strings" "testing" + "github.com/google/go-containerregistry/pkg/name" + "github.com/google/go-containerregistry/pkg/v1/tarball" + "github.com/buildpacks/imgutil/fakes" "github.com/buildpacks/lifecycle/api" - "github.com/docker/docker/api/types" "github.com/golang/mock/gomock" "github.com/heroku/color" + mobysystem "github.com/moby/moby/api/types/system" + dockerclient "github.com/moby/moby/client" "github.com/pkg/errors" "github.com/sclevine/spec" "github.com/sclevine/spec/report" @@ -49,7 +53,7 @@ func testCreateBuilder(t *testing.T, when spec.G, it spec.S) { mockBuildpackDownloader *testmocks.MockBuildpackDownloader mockImageFactory *testmocks.MockImageFactory mockImageFetcher *testmocks.MockImageFetcher - mockDockerClient *testmocks.MockCommonAPIClient + 
mockDockerClient *testmocks.MockAPIClient fakeBuildImage *fakes.Image fakeRunImage *fakes.Image fakeRunImageMirror *fakes.Image @@ -68,16 +72,37 @@ func testCreateBuilder(t *testing.T, when spec.G, it spec.S) { mockImageFetcher.EXPECT().Fetch(gomock.Any(), "some/build-image", gomock.Any()).Return(fakeBuildImage, nil) } - var createBuildpack = func(descriptor dist.BuildpackDescriptor) buildpack.Buildpack { + var prepareExtensions = func() { + // Extensions require Platform API >= 0.13 + opts.Config.Lifecycle.URI = "file:///some-lifecycle-platform-0-13" + opts.Config.Extensions = []pubbldr.ModuleConfig{ + { + ModuleInfo: dist.ModuleInfo{ID: "ext.one", Version: "1.2.3", Homepage: "http://one.extension"}, + ImageOrURI: dist.ImageOrURI{ + BuildpackURI: dist.BuildpackURI{ + URI: "https://example.fake/ext-one.tgz", + }, + }, + }, + } + opts.Config.OrderExtensions = []dist.OrderEntry{{ + Group: []dist.ModuleRef{ + {ModuleInfo: dist.ModuleInfo{ID: "ext.one", Version: "1.2.3"}, Optional: true}, + }}, + } + } + + var createBuildpack = func(descriptor dist.BuildpackDescriptor) buildpack.BuildModule { buildpack, err := ifakes.NewFakeBuildpack(descriptor, 0644) h.AssertNil(t, err) return buildpack } + var shouldCallBuildpackDownloaderWith = func(uri string, buildpackDownloadOptions buildpack.DownloadOptions) { buildpack := createBuildpack(dist.BuildpackDescriptor{ - API: api.MustParse("0.3"), - Info: dist.BuildpackInfo{ID: "example/foo", Version: "1.1.0"}, - Stacks: []dist.Stack{{ID: "some.stack.id"}}, + WithAPI: api.MustParse("0.3"), + WithInfo: dist.ModuleInfo{ID: "example/foo", Version: "1.1.0"}, + WithStacks: []dist.Stack{{ID: "some.stack.id"}}, }) mockBuildpackDownloader.EXPECT().Download(gomock.Any(), uri, gomock.Any()).Return(buildpack, nil, nil) } @@ -88,7 +113,7 @@ func testCreateBuilder(t *testing.T, when spec.G, it spec.S) { mockDownloader = testmocks.NewMockBlobDownloader(mockController) mockImageFetcher = testmocks.NewMockImageFetcher(mockController) 
mockImageFactory = testmocks.NewMockImageFactory(mockController) - mockDockerClient = testmocks.NewMockCommonAPIClient(mockController) + mockDockerClient = testmocks.NewMockAPIClient(mockController) mockBuildpackDownloader = testmocks.NewMockBuildpackDownloader(mockController) fakeBuildImage = fakes.NewImage("some/build-image", "", nil) @@ -105,13 +130,19 @@ func testCreateBuilder(t *testing.T, when spec.G, it spec.S) { exampleBuildpackBlob := blob.NewBlob(filepath.Join("testdata", "buildpack")) mockDownloader.EXPECT().Download(gomock.Any(), "https://example.fake/bp-one.tgz").Return(exampleBuildpackBlob, nil).AnyTimes() + exampleExtensionBlob := blob.NewBlob(filepath.Join("testdata", "extension")) + mockDownloader.EXPECT().Download(gomock.Any(), "https://example.fake/ext-one.tgz").Return(exampleExtensionBlob, nil).AnyTimes() mockDownloader.EXPECT().Download(gomock.Any(), "some/buildpack/dir").Return(blob.NewBlob(filepath.Join("testdata", "buildpack")), nil).AnyTimes() mockDownloader.EXPECT().Download(gomock.Any(), "file:///some-lifecycle").Return(blob.NewBlob(filepath.Join("testdata", "lifecycle", "platform-0.4")), nil).AnyTimes() - mockDownloader.EXPECT().Download(gomock.Any(), "file:///some-lifecycle-platform-0-1").Return(blob.NewBlob(filepath.Join("testdata", "lifecycle-platform-0.1")), nil).AnyTimes() + mockDownloader.EXPECT().Download(gomock.Any(), "file:///some-lifecycle-platform-0-1").Return(blob.NewBlob(filepath.Join("testdata", "lifecycle", "platform-0.3")), nil).AnyTimes() + mockDownloader.EXPECT().Download(gomock.Any(), "file:///some-lifecycle-platform-0-13").Return(blob.NewBlob(filepath.Join("testdata", "lifecycle", "platform-0.13")), nil).AnyTimes() - bp, err := buildpack.FromRootBlob(exampleBuildpackBlob, archive.DefaultTarWriterFactory()) + bp, err := buildpack.FromBuildpackRootBlob(exampleBuildpackBlob, archive.DefaultTarWriterFactory(), nil) h.AssertNil(t, err) mockBuildpackDownloader.EXPECT().Download(gomock.Any(), 
"https://example.fake/bp-one.tgz", gomock.Any()).Return(bp, nil, nil).AnyTimes() + ext, err := buildpack.FromExtensionRootBlob(exampleExtensionBlob, archive.DefaultTarWriterFactory(), nil) + h.AssertNil(t, err) + mockBuildpackDownloader.EXPECT().Download(gomock.Any(), "https://example.fake/ext-one.tgz", gomock.Any()).Return(ext, nil, nil).AnyTimes() subject, err = client.NewClient( client.WithLogger(logger), @@ -123,16 +154,16 @@ func testCreateBuilder(t *testing.T, when spec.G, it spec.S) { ) h.AssertNil(t, err) - mockDockerClient.EXPECT().Info(context.TODO()).Return(types.Info{OSType: "linux"}, nil).AnyTimes() + mockDockerClient.EXPECT().Info(context.TODO(), gomock.Any()).Return(dockerclient.SystemInfoResult{Info: mobysystem.Info{OSType: "linux"}}, nil).AnyTimes() opts = client.CreateBuilderOptions{ RelativeBaseDir: "/", BuilderName: "some/builder", Config: pubbldr.Config{ Description: "Some description", - Buildpacks: []pubbldr.BuildpackConfig{ + Buildpacks: []pubbldr.ModuleConfig{ { - BuildpackInfo: dist.BuildpackInfo{ID: "bp.one", Version: "1.2.3", Homepage: "http://one.buildpack"}, + ModuleInfo: dist.ModuleInfo{ID: "bp.one", Version: "1.2.3", Homepage: "http://one.buildpack"}, ImageOrURI: dist.ImageOrURI{ BuildpackURI: dist.BuildpackURI{ URI: "https://example.fake/bp-one.tgz", @@ -141,15 +172,21 @@ func testCreateBuilder(t *testing.T, when spec.G, it spec.S) { }, }, Order: []dist.OrderEntry{{ - Group: []dist.BuildpackRef{ - {BuildpackInfo: dist.BuildpackInfo{ID: "bp.one", Version: "1.2.3"}, Optional: false}, + Group: []dist.ModuleRef{ + {ModuleInfo: dist.ModuleInfo{ID: "bp.one", Version: "1.2.3"}, Optional: false}, }}, }, Stack: pubbldr.StackConfig{ - ID: "some.stack.id", - BuildImage: "some/build-image", - RunImage: "some/run-image", - RunImageMirrors: []string{"localhost:5000/some/run-image"}, + ID: "some.stack.id", + }, + Run: pubbldr.RunConfig{ + Images: []pubbldr.RunImageConfig{{ + Image: "some/run-image", + Mirrors: 
[]string{"localhost:5000/some/run-image"}, + }}, + }, + Build: pubbldr.BuildConfig{ + Image: "some/build-image", }, Lifecycle: pubbldr.LifecycleConfig{URI: "file:///some-lifecycle"}, }, @@ -157,7 +194,7 @@ func testCreateBuilder(t *testing.T, when spec.G, it spec.S) { PullPolicy: image.PullAlways, } - tmpDir, err = ioutil.TempDir("", "create-builder-test") + tmpDir, err = os.MkdirTemp("", "create-builder-test") h.AssertNil(t, err) }) @@ -180,17 +217,19 @@ func testCreateBuilder(t *testing.T, when spec.G, it spec.S) { } when("validating the builder config", func() { - it("should fail when the stack ID is empty", func() { + it("should not fail when the stack ID is empty", func() { opts.Config.Stack.ID = "" + prepareFetcherWithBuildImage() + prepareFetcherWithRunImages() err := subject.CreateBuilder(context.TODO(), opts) - h.AssertError(t, err, "stack.id is required") + h.AssertNil(t, err) }) it("should fail when the stack ID from the builder config does not match the stack ID from the build image", func() { - mockImageFetcher.EXPECT().Fetch(gomock.Any(), "some/build-image", image.FetchOptions{Daemon: true, PullPolicy: image.PullAlways}).Return(fakeBuildImage, nil) h.AssertNil(t, fakeBuildImage.SetLabel("io.buildpacks.stack.id", "other.stack.id")) + prepareFetcherWithBuildImage() prepareFetcherWithRunImages() err := subject.CreateBuilder(context.TODO(), opts) @@ -198,20 +237,56 @@ func testCreateBuilder(t *testing.T, when spec.G, it spec.S) { h.AssertError(t, err, "stack 'some.stack.id' from builder config is incompatible with stack 'other.stack.id' from build image") }) - it("should fail when the build image is empty", func() { + it("should not fail when the stack is empty", func() { + opts.Config.Stack.ID = "" + opts.Config.Stack.BuildImage = "" + opts.Config.Stack.RunImage = "" + prepareFetcherWithBuildImage() + prepareFetcherWithRunImages() + + err := subject.CreateBuilder(context.TODO(), opts) + + h.AssertNil(t, err) + }) + + it("should fail when the run images 
and stack are empty", func() { opts.Config.Stack.BuildImage = "" + opts.Config.Stack.RunImage = "" + + opts.Config.Run = pubbldr.RunConfig{} err := subject.CreateBuilder(context.TODO(), opts) - h.AssertError(t, err, "stack.build-image is required") + h.AssertError(t, err, "run.images are required") }) - it("should fail when the run image is empty", func() { + it("should fail when the run images image and stack are empty", func() { + opts.Config.Stack.BuildImage = "" opts.Config.Stack.RunImage = "" + opts.Config.Run = pubbldr.RunConfig{ + Images: []pubbldr.RunImageConfig{{}}, + } + + err := subject.CreateBuilder(context.TODO(), opts) + + h.AssertError(t, err, "run.images.image is required") + }) + + it("should fail if stack and run image are different", func() { + opts.Config.Stack.RunImage = "some-other-stack-run-image" + + err := subject.CreateBuilder(context.TODO(), opts) + + h.AssertError(t, err, "run.images and stack.run-image do not match") + }) + + it("should fail if stack and build image are different", func() { + opts.Config.Stack.BuildImage = "some-other-stack-build-image" + err := subject.CreateBuilder(context.TODO(), opts) - h.AssertError(t, err, "stack.run-image is required") + h.AssertError(t, err, "build.image and stack.build-image do not match") }) it("should fail when lifecycle version is not a semver", func() { @@ -255,6 +330,28 @@ func testCreateBuilder(t *testing.T, when spec.G, it spec.S) { h.AssertError(t, err, "buildpack from URI 'https://example.fake/bp-one.tgz' has version '1.2.3' which does not match version '0.0.0' from builder config") }) + + it("should fail when extension ID does not match downloaded extension", func() { + prepareFetcherWithBuildImage() + prepareFetcherWithRunImages() + prepareExtensions() + opts.Config.Extensions[0].ID = "does.not.match" + + err := subject.CreateBuilder(context.TODO(), opts) + + h.AssertError(t, err, "extension from URI 'https://example.fake/ext-one.tgz' has ID 'ext.one' which does not match ID 
'does.not.match' from builder config") + }) + + it("should fail when extension version does not match downloaded extension", func() { + prepareFetcherWithBuildImage() + prepareFetcherWithRunImages() + prepareExtensions() + opts.Config.Extensions[0].Version = "0.0.0" + + err := subject.CreateBuilder(context.TODO(), opts) + + h.AssertError(t, err, "extension from URI 'https://example.fake/ext-one.tgz' has version '1.2.3' which does not match version '0.0.0' from builder config") + }) }) when("validating the run image config", func() { @@ -360,6 +457,8 @@ func testCreateBuilder(t *testing.T, when spec.G, it spec.S) { when("windows containers", func() { when("experimental enabled", func() { it("succeeds", func() { + opts.Config.Extensions = nil // TODO: downloading extensions doesn't work yet; to be implemented in https://github.com/buildpacks/pack/issues/1489 + opts.Config.OrderExtensions = nil // TODO: downloading extensions doesn't work yet; to be implemented in https://github.com/buildpacks/pack/issues/1489 packClientWithExperimental, err := client.NewClient( client.WithLogger(logger), client.WithDownloader(mockDownloader), @@ -384,7 +483,7 @@ func testCreateBuilder(t *testing.T, when spec.G, it spec.S) { prepareFetcherWithRunImages() h.AssertNil(t, fakeBuildImage.SetOS("windows")) - mockImageFetcher.EXPECT().Fetch(gomock.Any(), "some/build-image", image.FetchOptions{Daemon: true, PullPolicy: image.PullAlways}).Return(fakeBuildImage, nil) + mockImageFetcher.EXPECT().Fetch(gomock.Any(), "some/build-image", gomock.Any()).Return(fakeBuildImage, nil) err := subject.CreateBuilder(context.TODO(), opts) h.AssertError(t, err, "failed to create builder: Windows containers support is currently experimental.") @@ -425,6 +524,77 @@ func testCreateBuilder(t *testing.T, when spec.G, it spec.S) { }) }) + when("validating lifecycle Platform API for extensions", func() { + when("lifecycle supports Platform API >= 0.13", func() { + it("should allow extensions without experimental 
flag", func() { + // Uses default lifecycle which has Platform API 0.13 + prepareFetcherWithBuildImage() + prepareFetcherWithRunImages() + opts.Config.Lifecycle.URI = "file:///some-lifecycle-platform-0-13" + + err := subject.CreateBuilder(context.TODO(), opts) + h.AssertNil(t, err) + }) + }) + + when("lifecycle supports Platform API < 0.13", func() { + when("experimental flag is not set", func() { + it("should fail when builder has extensions", func() { + prepareFetcherWithBuildImage() + prepareFetcherWithRunImages() + prepareExtensions() + // Override to use lifecycle with Platform API 0.3 (< 0.13) for this test + opts.Config.Lifecycle.URI = "file:///some-lifecycle" + + err := subject.CreateBuilder(context.TODO(), opts) + h.AssertError(t, err, "support for image extensions with Platform API < 0.13 is currently experimental") + }) + }) + + when("experimental flag is set", func() { + it("should succeed when builder has extensions", func() { + packClientWithExperimental, err := client.NewClient( + client.WithLogger(logger), + client.WithDownloader(mockDownloader), + client.WithImageFactory(mockImageFactory), + client.WithFetcher(mockImageFetcher), + client.WithDockerClient(mockDockerClient), + client.WithBuildpackDownloader(mockBuildpackDownloader), + client.WithExperimental(true), + ) + h.AssertNil(t, err) + + prepareFetcherWithBuildImage() + prepareFetcherWithRunImages() + prepareExtensions() + // Remove buildpacks to avoid API compatibility issues + opts.Config.Buildpacks = nil + opts.Config.Order = nil + // Override to use lifecycle with Platform API 0.3 (< 0.13) for this test + opts.Config.Lifecycle.URI = "file:///some-lifecycle" + + err = packClientWithExperimental.CreateBuilder(context.TODO(), opts) + h.AssertNil(t, err) + }) + }) + }) + + when("builder has no extensions", func() { + it("should succeed regardless of Platform API version", func() { + prepareFetcherWithBuildImage() + prepareFetcherWithRunImages() + // Remove extensions from config + 
opts.Config.Extensions = nil + opts.Config.OrderExtensions = nil + // Use lifecycle with Platform API 0.3 (< 0.13) + opts.Config.Lifecycle.URI = "file:///some-lifecycle" + + err := subject.CreateBuilder(context.TODO(), opts) + h.AssertNil(t, err) + }) + }) + }) + when("only lifecycle version is provided", func() { it("should download from predetermined uri", func() { prepareFetcherWithBuildImage() @@ -443,8 +613,28 @@ func testCreateBuilder(t *testing.T, when spec.G, it spec.S) { h.AssertNil(t, err) }) + it("should download from predetermined uri for arm64", func() { + prepareFetcherWithBuildImage() + prepareFetcherWithRunImages() + opts.Config.Lifecycle.URI = "" + opts.Config.Lifecycle.Version = "3.4.5" + h.AssertNil(t, fakeBuildImage.SetArchitecture("arm64")) + + mockDownloader.EXPECT().Download( + gomock.Any(), + "https://github.com/buildpacks/lifecycle/releases/download/v3.4.5/lifecycle-v3.4.5+linux.arm64.tgz", + ).Return( + blob.NewBlob(filepath.Join("testdata", "lifecycle", "platform-0.4")), nil, + ) + + err := subject.CreateBuilder(context.TODO(), opts) + h.AssertNil(t, err) + }) + when("windows", func() { it("should download from predetermined uri", func() { + opts.Config.Extensions = nil // TODO: downloading extensions doesn't work yet; to be implemented in https://github.com/buildpacks/pack/issues/1489 + opts.Config.OrderExtensions = nil // TODO: downloading extensions doesn't work yet; to be implemented in https://github.com/buildpacks/pack/issues/1489 packClientWithExperimental, err := client.NewClient( client.WithLogger(logger), client.WithDownloader(mockDownloader), @@ -495,8 +685,32 @@ func testCreateBuilder(t *testing.T, when spec.G, it spec.S) { h.AssertNil(t, err) }) + it("should download default lifecycle on arm64", func() { + prepareFetcherWithBuildImage() + prepareFetcherWithRunImages() + opts.Config.Lifecycle.URI = "" + opts.Config.Lifecycle.Version = "" + h.AssertNil(t, fakeBuildImage.SetArchitecture("arm64")) + + 
mockDownloader.EXPECT().Download( + gomock.Any(), + fmt.Sprintf( + "https://github.com/buildpacks/lifecycle/releases/download/v%s/lifecycle-v%s+linux.arm64.tgz", + builder.DefaultLifecycleVersion, + builder.DefaultLifecycleVersion, + ), + ).Return( + blob.NewBlob(filepath.Join("testdata", "lifecycle", "platform-0.4")), nil, + ) + + err := subject.CreateBuilder(context.TODO(), opts) + h.AssertNil(t, err) + }) + when("windows", func() { it("should download default lifecycle", func() { + opts.Config.Extensions = nil // TODO: downloading extensions doesn't work yet; to be implemented in https://github.com/buildpacks/pack/issues/1489 + opts.Config.OrderExtensions = nil // TODO: downloading extensions doesn't work yet; to be implemented in https://github.com/buildpacks/pack/issues/1489 packClientWithExperimental, err := client.NewClient( client.WithLogger(logger), client.WithDownloader(mockDownloader), @@ -529,6 +743,129 @@ func testCreateBuilder(t *testing.T, when spec.G, it spec.S) { }) }) + when("lifecycle URI is a docker image", func() { + var lifecycleImageName = "buildpacksio/lifecycle:latest" + + setupFakeLifecycleImage := func() *h.FakeWithUnderlyingImage { + // Write the tar content to a file in tmpDir + lifecycleLayerPath := filepath.Join("testdata", "lifecycle", "lifecycle.tar") + lifecycleTag, err := name.NewTag(lifecycleImageName) + h.AssertNil(t, err) + + v1LifecycleImage, err := tarball.ImageFromPath(lifecycleLayerPath, &lifecycleTag) + h.AssertNil(t, err) + + return h.NewFakeWithUnderlyingV1Image(lifecycleImageName, nil, v1LifecycleImage) + } + + it("should download lifecycle from docker registry", func() { + prepareFetcherWithBuildImage() + prepareFetcherWithRunImages() + + fakeLifecycleImage := setupFakeLifecycleImage() + + opts.Config.Lifecycle.URI = "docker://" + lifecycleImageName + opts.Config.Lifecycle.Version = "" + opts.RelativeBaseDir = tmpDir + + // Expect the image fetcher to fetch the lifecycle image + 
mockImageFetcher.EXPECT().Fetch(gomock.Any(), lifecycleImageName, image.FetchOptions{Daemon: false}).Return(fakeLifecycleImage, nil) + + // Create the expected lifecycle.tar file that will be referenced + lifecyclePath := filepath.Join(tmpDir, "lifecycle.tar") + + // The downloader will be called with the extracted lifecycle tar + mockDownloader.EXPECT().Download(gomock.Any(), gomock.Any()).DoAndReturn(func(ctx context.Context, uri string) (blob.Blob, error) { + // The URI should be a file:// URI pointing to lifecycle.tar + h.AssertTrue(t, strings.Contains(uri, "lifecycle.tar")) + + // Write a minimal lifecycle tar for the test + f, err := os.Create(lifecyclePath) + h.AssertNil(t, err) + defer f.Close() + + // Copy the test lifecycle content + testLifecycle := blob.NewBlob(filepath.Join("testdata", "lifecycle", "platform-0.4")) + return testLifecycle, nil + }) + + err := subject.CreateBuilder(context.TODO(), opts) + h.AssertNil(t, err) + }) + + it("should handle docker URI without docker:// prefix", func() { + prepareFetcherWithBuildImage() + prepareFetcherWithRunImages() + + fakeLifecycleImage := setupFakeLifecycleImage() + + opts.Config.Lifecycle.URI = "docker:/" + lifecycleImageName + opts.Config.Lifecycle.Version = "" + opts.RelativeBaseDir = tmpDir + + // Expect the image fetcher to fetch the lifecycle image + mockImageFetcher.EXPECT().Fetch(gomock.Any(), lifecycleImageName, image.FetchOptions{Daemon: false}).Return(fakeLifecycleImage, nil) + + // The downloader will be called with the extracted lifecycle tar + mockDownloader.EXPECT().Download(gomock.Any(), gomock.Any()).DoAndReturn(func(ctx context.Context, uri string) (blob.Blob, error) { + testLifecycle := blob.NewBlob(filepath.Join("testdata", "lifecycle", "platform-0.4")) + return testLifecycle, nil + }) + + err := subject.CreateBuilder(context.TODO(), opts) + h.AssertNil(t, err) + }) + + when("fetching lifecycle image fails", func() { + it("should return an error", func() { + 
prepareFetcherWithBuildImage() + prepareFetcherWithRunImages() + + opts.Config.Lifecycle.URI = "docker://" + lifecycleImageName + opts.Config.Lifecycle.Version = "" + opts.RelativeBaseDir = tmpDir + + // Expect the image fetcher to fail + mockImageFetcher.EXPECT().Fetch(gomock.Any(), lifecycleImageName, image.FetchOptions{Daemon: false}).Return(nil, errors.New("failed to fetch image")) + + err := subject.CreateBuilder(context.TODO(), opts) + h.AssertError(t, err, "Could not parse uri from lifecycle image") + }) + }) + + when("lifecycle image has no layers", func() { + it("should return an error", func() { + prepareFetcherWithBuildImage() + prepareFetcherWithRunImages() + + opts.Config.Lifecycle.URI = "docker://" + lifecycleImageName + opts.Config.Lifecycle.Version = "" + opts.RelativeBaseDir = tmpDir + + // Create an image with no layers + emptyLifecycleImage := fakes.NewImage(lifecycleImageName, "", nil) + + mockImageFetcher.EXPECT().Fetch(gomock.Any(), lifecycleImageName, image.FetchOptions{Daemon: false}).Return(emptyLifecycleImage, nil) + + err := subject.CreateBuilder(context.TODO(), opts) + h.AssertError(t, err, "Could not parse uri from lifecycle image") + }) + }) + + when("both lifecycle URI and version are provided", func() { + it("should return an error", func() { + prepareFetcherWithBuildImage() + prepareFetcherWithRunImages() + + opts.Config.Lifecycle.URI = "docker://" + lifecycleImageName + opts.Config.Lifecycle.Version = "1.2.3" + + err := subject.CreateBuilder(context.TODO(), opts) + h.AssertError(t, err, "'lifecycle' can only declare 'version' or 'uri', not both") + }) + }) + }) + when("buildpack mixins are not satisfied", func() { it("should return an error", func() { prepareFetcherWithBuildImage() @@ -561,17 +898,39 @@ func testCreateBuilder(t *testing.T, when spec.G, it spec.S) { bldr := successfullyCreateBuilder() - bpInfo := dist.BuildpackInfo{ + bpInfo := dist.ModuleInfo{ ID: "bp.one", Version: "1.2.3", Homepage: "http://one.buildpack", } - 
h.AssertEq(t, bldr.Buildpacks(), []dist.BuildpackInfo{bpInfo}) + h.AssertEq(t, bldr.Buildpacks(), []dist.ModuleInfo{bpInfo}) bpInfo.Homepage = "" h.AssertEq(t, bldr.Order(), dist.Order{{ - Group: []dist.BuildpackRef{{ - BuildpackInfo: bpInfo, - Optional: false, + Group: []dist.ModuleRef{{ + ModuleInfo: bpInfo, + Optional: false, + }}, + }}) + }) + + it("should set extensions and order-extensions metadata", func() { + prepareFetcherWithBuildImage() + prepareFetcherWithRunImages() + prepareExtensions() + + bldr := successfullyCreateBuilder() + + extInfo := dist.ModuleInfo{ + ID: "ext.one", + Version: "1.2.3", + Homepage: "http://one.extension", + } + h.AssertEq(t, bldr.Extensions(), []dist.ModuleInfo{extInfo}) + extInfo.Homepage = "" + h.AssertEq(t, bldr.OrderExtensions(), dist.Order{{ + Group: []dist.ModuleRef{{ + ModuleInfo: extInfo, + Optional: false, // extensions are always optional }}, }}) }) @@ -602,35 +961,143 @@ func testCreateBuilder(t *testing.T, when spec.G, it spec.S) { //nolint:staticcheck h.AssertEq(t, bldr.LifecycleDescriptor().API.PlatformVersion.String(), "0.2") h.AssertEq(t, bldr.LifecycleDescriptor().APIs.Buildpack.Deprecated.AsStrings(), []string{"0.2", "0.3"}) - h.AssertEq(t, bldr.LifecycleDescriptor().APIs.Buildpack.Supported.AsStrings(), []string{"0.2", "0.3", "0.4"}) + h.AssertEq(t, bldr.LifecycleDescriptor().APIs.Buildpack.Supported.AsStrings(), []string{"0.2", "0.3", "0.4", "0.9"}) h.AssertEq(t, bldr.LifecycleDescriptor().APIs.Platform.Deprecated.AsStrings(), []string{"0.2"}) h.AssertEq(t, bldr.LifecycleDescriptor().APIs.Platform.Supported.AsStrings(), []string{"0.3", "0.4"}) }) - it("should warn when deprecated Buildpack API version used", func() { + it("should warn when deprecated Buildpack API version is used", func() { prepareFetcherWithBuildImage() prepareFetcherWithRunImages() + prepareExtensions() bldr := successfullyCreateBuilder() h.AssertEq(t, bldr.LifecycleDescriptor().APIs.Buildpack.Deprecated.AsStrings(), []string{"0.2", 
"0.3"}) h.AssertContains(t, out.String(), fmt.Sprintf("Buildpack %s is using deprecated Buildpacks API version %s", style.Symbol("bp.one@1.2.3"), style.Symbol("0.3"))) + h.AssertContains(t, out.String(), fmt.Sprintf("Extension %s is using deprecated Buildpacks API version %s", style.Symbol("ext.one@1.2.3"), style.Symbol("0.3"))) }) it("shouldn't warn when Buildpack API version used isn't deprecated", func() { prepareFetcherWithBuildImage() prepareFetcherWithRunImages() + prepareExtensions() opts.Config.Buildpacks[0].URI = "https://example.fake/bp-one-with-api-4.tgz" + opts.Config.Extensions[0].URI = "https://example.fake/ext-one-with-api-9.tgz" buildpackBlob := blob.NewBlob(filepath.Join("testdata", "buildpack-api-0.4")) - buildpack, err := buildpack.FromRootBlob(buildpackBlob, archive.DefaultTarWriterFactory()) + bp, err := buildpack.FromBuildpackRootBlob(buildpackBlob, archive.DefaultTarWriterFactory(), nil) h.AssertNil(t, err) - mockBuildpackDownloader.EXPECT().Download(gomock.Any(), "https://example.fake/bp-one-with-api-4.tgz", gomock.Any()).Return(buildpack, nil, nil) + mockBuildpackDownloader.EXPECT().Download(gomock.Any(), "https://example.fake/bp-one-with-api-4.tgz", gomock.Any()).Return(bp, nil, nil) + + extensionBlob := blob.NewBlob(filepath.Join("testdata", "extension-api-0.9")) + extension, err := buildpack.FromExtensionRootBlob(extensionBlob, archive.DefaultTarWriterFactory(), nil) + h.AssertNil(t, err) + mockBuildpackDownloader.EXPECT().Download(gomock.Any(), "https://example.fake/ext-one-with-api-9.tgz", gomock.Any()).Return(extension, nil, nil) bldr := successfullyCreateBuilder() h.AssertEq(t, bldr.LifecycleDescriptor().APIs.Buildpack.Deprecated.AsStrings(), []string{"0.2", "0.3"}) h.AssertNotContains(t, out.String(), "is using deprecated Buildpacks API version") }) + + it("should set labels", func() { + opts.Labels = map[string]string{"test.label.one": "1", "test.label.two": "2"} + prepareFetcherWithBuildImage() + prepareFetcherWithRunImages() + + 
err := subject.CreateBuilder(context.TODO(), opts) + h.AssertNil(t, err) + + imageLabels, err := fakeBuildImage.Labels() + h.AssertNil(t, err) + h.AssertEq(t, imageLabels["test.label.one"], "1") + h.AssertEq(t, imageLabels["test.label.two"], "2") + }) + + when("Buildpack dependencies are provided", func() { + var ( + bp1v1 buildpack.BuildModule + bp1v2 buildpack.BuildModule + bp2v1 buildpack.BuildModule + bp2v2 buildpack.BuildModule + fakeLayerImage *h.FakeAddedLayerImage + err error + ) + it.Before(func() { + fakeLayerImage = &h.FakeAddedLayerImage{Image: fakeBuildImage} + }) + + var prepareBuildpackDependencies = func() []buildpack.BuildModule { + bp1v1Blob := blob.NewBlob(filepath.Join("testdata", "buildpack-non-deterministic", "buildpack-1-version-1")) + bp1v2Blob := blob.NewBlob(filepath.Join("testdata", "buildpack-non-deterministic", "buildpack-1-version-2")) + bp2v1Blob := blob.NewBlob(filepath.Join("testdata", "buildpack-non-deterministic", "buildpack-2-version-1")) + bp2v2Blob := blob.NewBlob(filepath.Join("testdata", "buildpack-non-deterministic", "buildpack-2-version-2")) + + bp1v1, err = buildpack.FromBuildpackRootBlob(bp1v1Blob, archive.DefaultTarWriterFactory(), nil) + h.AssertNil(t, err) + + bp1v2, err = buildpack.FromBuildpackRootBlob(bp1v2Blob, archive.DefaultTarWriterFactory(), nil) + h.AssertNil(t, err) + + bp2v1, err = buildpack.FromBuildpackRootBlob(bp2v1Blob, archive.DefaultTarWriterFactory(), nil) + h.AssertNil(t, err) + + bp2v2, err = buildpack.FromBuildpackRootBlob(bp2v2Blob, archive.DefaultTarWriterFactory(), nil) + h.AssertNil(t, err) + + return []buildpack.BuildModule{bp2v2, bp2v1, bp1v1, bp1v2} + } + + var successfullyCreateDeterministicBuilder = func() { + t.Helper() + + err := subject.CreateBuilder(context.TODO(), opts) + h.AssertNil(t, err) + h.AssertEq(t, fakeLayerImage.IsSaved(), true) + } + + it("should add dependencies buildpacks layers order by ID and version", func() { + mockImageFetcher.EXPECT().Fetch(gomock.Any(), 
"some/build-image", gomock.Any()).Return(fakeLayerImage, nil) + prepareFetcherWithRunImages() + prepareExtensions() + opts.Config.Buildpacks[0].URI = "https://example.fake/bp-one-with-api-4.tgz" + opts.Config.Extensions[0].URI = "https://example.fake/ext-one-with-api-9.tgz" + bpDependencies := prepareBuildpackDependencies() + + buildpackBlob := blob.NewBlob(filepath.Join("testdata", "buildpack-api-0.4")) + bp, err := buildpack.FromBuildpackRootBlob(buildpackBlob, archive.DefaultTarWriterFactory(), nil) + h.AssertNil(t, err) + mockBuildpackDownloader.EXPECT().Download(gomock.Any(), "https://example.fake/bp-one-with-api-4.tgz", gomock.Any()).DoAndReturn( + func(ctx context.Context, buildpackURI string, opts buildpack.DownloadOptions) (buildpack.BuildModule, []buildpack.BuildModule, error) { + // test options + h.AssertEq(t, opts.Target.ValuesAsPlatform(), "linux/amd64") + return bp, bpDependencies, nil + }) + + extensionBlob := blob.NewBlob(filepath.Join("testdata", "extension-api-0.9")) + extension, err := buildpack.FromExtensionRootBlob(extensionBlob, archive.DefaultTarWriterFactory(), nil) + h.AssertNil(t, err) + mockBuildpackDownloader.EXPECT().Download(gomock.Any(), "https://example.fake/ext-one-with-api-9.tgz", gomock.Any()).DoAndReturn( + func(ctx context.Context, buildpackURI string, opts buildpack.DownloadOptions) (buildpack.BuildModule, []buildpack.BuildModule, error) { + // test options + h.AssertEq(t, opts.Target.ValuesAsPlatform(), "linux/amd64") + return extension, nil, nil + }) + + successfullyCreateDeterministicBuilder() + + layers := fakeLayerImage.AddedLayersOrder() + // Main buildpack + 4 dependencies + 1 extension + h.AssertEq(t, len(layers), 6) + + // [0] bp.one.1.2.3.tar - main buildpack + h.AssertTrue(t, strings.Contains(layers[1], h.LayerFileName(bp1v1))) + h.AssertTrue(t, strings.Contains(layers[2], h.LayerFileName(bp1v2))) + h.AssertTrue(t, strings.Contains(layers[3], h.LayerFileName(bp2v1))) + h.AssertTrue(t, strings.Contains(layers[4], 
h.LayerFileName(bp2v2))) + // [5] ext.one.1.2.3.tar - extension + }) + }) }) it("supports directory buildpacks", func() { @@ -641,7 +1108,7 @@ func testCreateBuilder(t *testing.T, when spec.G, it spec.S) { opts.Config.Buildpacks[0].URI = directoryPath buildpackBlob := blob.NewBlob(directoryPath) - buildpack, err := buildpack.FromRootBlob(buildpackBlob, archive.DefaultTarWriterFactory()) + buildpack, err := buildpack.FromBuildpackRootBlob(buildpackBlob, archive.DefaultTarWriterFactory(), nil) h.AssertNil(t, err) mockBuildpackDownloader.EXPECT().Download(gomock.Any(), directoryPath, gomock.Any()).Return(buildpack, nil, nil) @@ -649,6 +1116,23 @@ func testCreateBuilder(t *testing.T, when spec.G, it spec.S) { h.AssertNil(t, err) }) + it("supports directory extensions", func() { + prepareFetcherWithBuildImage() + prepareFetcherWithRunImages() + prepareExtensions() + opts.RelativeBaseDir = "" + directoryPath := "testdata/extension" + opts.Config.Extensions[0].URI = directoryPath + + extensionBlob := blob.NewBlob(directoryPath) + extension, err := buildpack.FromExtensionRootBlob(extensionBlob, archive.DefaultTarWriterFactory(), nil) + h.AssertNil(t, err) + mockBuildpackDownloader.EXPECT().Download(gomock.Any(), directoryPath, gomock.Any()).Return(extension, nil, nil) + + err = subject.CreateBuilder(context.TODO(), opts) + h.AssertNil(t, err) + }) + when("package file", func() { it.Before(func() { fileURI := func(path string) (original, uri string) { @@ -673,7 +1157,7 @@ func testCreateBuilder(t *testing.T, when spec.G, it spec.S) { buildpack, _, err := buildpack.BuildpacksFromOCILayoutBlob(blob.NewBlob(cnbFile)) h.AssertNil(t, err) mockBuildpackDownloader.EXPECT().Download(gomock.Any(), cnbFile, gomock.Any()).Return(buildpack, nil, nil).AnyTimes() - opts.Config.Buildpacks = []pubbldr.BuildpackConfig{{ + opts.Config.Buildpacks = []pubbldr.ModuleConfig{{ ImageOrURI: dist.ImageOrURI{BuildpackURI: dist.BuildpackURI{URI: cnbFile}}, }} }) @@ -683,17 +1167,17 @@ func 
testCreateBuilder(t *testing.T, when spec.G, it spec.S) { prepareFetcherWithRunImages() bldr := successfullyCreateBuilder() - bpInfo := dist.BuildpackInfo{ + bpInfo := dist.ModuleInfo{ ID: "bp.one", Version: "1.2.3", Homepage: "http://one.buildpack", } - h.AssertEq(t, bldr.Buildpacks(), []dist.BuildpackInfo{bpInfo}) + h.AssertEq(t, bldr.Buildpacks(), []dist.ModuleInfo{bpInfo}) bpInfo.Homepage = "" h.AssertEq(t, bldr.Order(), dist.Order{{ - Group: []dist.BuildpackRef{{ - BuildpackInfo: bpInfo, - Optional: false, + Group: []dist.ModuleRef{{ + ModuleInfo: bpInfo, + Optional: false, }}, }}) }) @@ -711,7 +1195,7 @@ func testCreateBuilder(t *testing.T, when spec.G, it spec.S) { opts.Registry = "some-registry" opts.Config.Buildpacks = append( opts.Config.Buildpacks, - pubbldr.BuildpackConfig{ + pubbldr.ModuleConfig{ ImageOrURI: dist.ImageOrURI{ BuildpackURI: dist.BuildpackURI{ URI: "urn:cnb:registry:example/foo@1.1.0", @@ -726,6 +1210,245 @@ func testCreateBuilder(t *testing.T, when spec.G, it spec.S) { }) }) }) + + when("flatten option is set", func() { + /* 1 + * / \ + * 2 3 + * / \ + * 4 5 + * / \ + * 6 7 + */ + var ( + fakeLayerImage *h.FakeAddedLayerImage + err error + ) + + var successfullyCreateFlattenBuilder = func() { + t.Helper() + + err := subject.CreateBuilder(context.TODO(), opts) + h.AssertNil(t, err) + h.AssertEq(t, fakeLayerImage.IsSaved(), true) + } + + it.Before(func() { + fakeLayerImage = &h.FakeAddedLayerImage{Image: fakeBuildImage} + mockImageFetcher.EXPECT().Fetch(gomock.Any(), "some/build-image", gomock.Any()).Return(fakeLayerImage, nil) + + var depBPs []buildpack.BuildModule + blob1 := blob.NewBlob(filepath.Join("testdata", "buildpack-flatten", "buildpack-1")) + for i := 2; i <= 7; i++ { + b := blob.NewBlob(filepath.Join("testdata", "buildpack-flatten", fmt.Sprintf("buildpack-%d", i))) + bp, err := buildpack.FromBuildpackRootBlob(b, archive.DefaultTarWriterFactory(), nil) + h.AssertNil(t, err) + depBPs = append(depBPs, bp) + } + 
mockDownloader.EXPECT().Download(gomock.Any(), "https://example.fake/flatten-bp-1.tgz").Return(blob1, nil).AnyTimes() + + bp, err := buildpack.FromBuildpackRootBlob(blob1, archive.DefaultTarWriterFactory(), nil) + h.AssertNil(t, err) + mockBuildpackDownloader.EXPECT().Download(gomock.Any(), "https://example.fake/flatten-bp-1.tgz", gomock.Any()).Return(bp, depBPs, nil).AnyTimes() + + opts = client.CreateBuilderOptions{ + RelativeBaseDir: "/", + BuilderName: "some/builder", + Config: pubbldr.Config{ + Description: "Some description", + Buildpacks: []pubbldr.ModuleConfig{ + { + ModuleInfo: dist.ModuleInfo{ID: "flatten/bp-1", Version: "1", Homepage: "http://buildpack-1"}, + ImageOrURI: dist.ImageOrURI{ + BuildpackURI: dist.BuildpackURI{ + URI: "https://example.fake/flatten-bp-1.tgz", + }, + }, + }, + }, + Order: []dist.OrderEntry{{ + Group: []dist.ModuleRef{ + {ModuleInfo: dist.ModuleInfo{ID: "flatten/bp-2", Version: "2"}, Optional: false}, + {ModuleInfo: dist.ModuleInfo{ID: "flatten/bp-4", Version: "4"}, Optional: false}, + {ModuleInfo: dist.ModuleInfo{ID: "flatten/bp-6", Version: "6"}, Optional: false}, + {ModuleInfo: dist.ModuleInfo{ID: "flatten/bp-7", Version: "7"}, Optional: false}, + }}, + }, + Stack: pubbldr.StackConfig{ + ID: "some.stack.id", + }, + Run: pubbldr.RunConfig{ + Images: []pubbldr.RunImageConfig{{ + Image: "some/run-image", + Mirrors: []string{"localhost:5000/some/run-image"}, + }}, + }, + Build: pubbldr.BuildConfig{ + Image: "some/build-image", + }, + Lifecycle: pubbldr.LifecycleConfig{URI: "file:///some-lifecycle"}, + }, + Publish: false, + PullPolicy: image.PullAlways, + } + }) + + when("flatten all", func() { + it("creates 1 layer for all buildpacks", func() { + prepareFetcherWithRunImages() + opts.Flatten, err = buildpack.ParseFlattenBuildModules([]string{"flatten/bp-1@1,flatten/bp-2@2,flatten/bp-4@4,flatten/bp-6@6,flatten/bp-7@7,flatten/bp-3@3,flatten/bp-5@5"}) + h.AssertNil(t, err) + + successfullyCreateFlattenBuilder() + + layers := 
fakeLayerImage.AddedLayersOrder() + + h.AssertEq(t, len(layers), 1) + }) + }) + + when("only some modules are flattened", func() { + it("creates 1 layer for buildpacks [1,2,3,4,5,6] and 1 layer for buildpack [7]", func() { + prepareFetcherWithRunImages() + opts.Flatten, err = buildpack.ParseFlattenBuildModules([]string{"flatten/bp-1@1,flatten/bp-2@2,flatten/bp-4@4,flatten/bp-6@6,flatten/bp-3@3,flatten/bp-5@5"}) + h.AssertNil(t, err) + + successfullyCreateFlattenBuilder() + + layers := fakeLayerImage.AddedLayersOrder() + h.AssertEq(t, len(layers), 2) + }) + + it("creates 1 layer for buildpacks [1,2,3] and 1 layer for [4,5,6] and 1 layer for [7]", func() { + prepareFetcherWithRunImages() + opts.Flatten, err = buildpack.ParseFlattenBuildModules([]string{"flatten/bp-1@1,flatten/bp-2@2,flatten/bp-3@3", "flatten/bp-4@4,flatten/bp-6@6,flatten/bp-5@5"}) + h.AssertNil(t, err) + + successfullyCreateFlattenBuilder() + + layers := fakeLayerImage.AddedLayersOrder() + h.AssertEq(t, len(layers), 3) + }) + }) + }) + + when("daemon target selection for multi-platform builders", func() { + when("publish is false", func() { + when("daemon is linux/amd64", func() { + it.Before(func() { + mockDockerClient.EXPECT().ServerVersion(gomock.Any(), gomock.Any()).Return(dockerclient.ServerVersionResult{ + Os: "linux", + Arch: "amd64", + }, nil).AnyTimes() + }) + + when("multiple targets are provided", func() { + it("selects the matching OS and architecture", func() { + prepareFetcherWithBuildImage() + prepareFetcherWithRunImages() + + opts.Targets = []dist.Target{ + {OS: "linux", Arch: "arm64"}, + {OS: "linux", Arch: "amd64"}, // should match + {OS: "windows", Arch: "amd64"}, + } + + h.AssertNil(t, subject.CreateBuilder(context.TODO(), opts)) + + // Verify that only one image was created (for the matching target) + h.AssertEq(t, fakeBuildImage.IsSaved(), true) + }) + }) + + when("no exact architecture match exists", func() { + it("returns error", func() { + opts.Targets = []dist.Target{ + {OS: 
"linux", Arch: "arm64"}, + {OS: "linux", Arch: "arm"}, + } + + err := subject.CreateBuilder(context.TODO(), opts) + h.AssertError(t, err, "could not find a target that matches daemon os=linux and architecture=amd64") + }) + }) + + when("target with empty architecture exists", func() { + it("selects the OS-only match", func() { + prepareFetcherWithBuildImage() + prepareFetcherWithRunImages() + + opts.Targets = []dist.Target{ + {OS: "linux", Arch: "arm64"}, + {OS: "linux", Arch: ""}, // should match + {OS: "windows", Arch: "amd64"}, + } + + h.AssertNil(t, subject.CreateBuilder(context.TODO(), opts)) + + // Verify that the builder was created + h.AssertEq(t, fakeBuildImage.IsSaved(), true) + }) + }) + }) + + when("daemon is linux/arm64", func() { + it.Before(func() { + mockDockerClient.EXPECT().ServerVersion(gomock.Any(), gomock.Any()).Return(dockerclient.ServerVersionResult{ + Os: "linux", + Arch: "arm64", + }, nil).AnyTimes() + }) + + when("targets are ordered with amd64 first", func() { + it("selects arm64 even when amd64 appears first", func() { + prepareFetcherWithBuildImage() + prepareFetcherWithRunImages() + + opts.Targets = []dist.Target{ + {OS: "linux", Arch: "amd64"}, // appears first + {OS: "linux", Arch: "arm64"}, // should match + {OS: "windows", Arch: "arm64"}, + } + + h.AssertNil(t, subject.CreateBuilder(context.TODO(), opts)) + + // Verify that the builder was created + h.AssertEq(t, fakeBuildImage.IsSaved(), true) + }) + }) + + when("only amd64 targets available", func() { + it("returns error", func() { + opts.Targets = []dist.Target{ + {OS: "linux", Arch: "amd64"}, + {OS: "windows", Arch: "amd64"}, + } + + err := subject.CreateBuilder(context.TODO(), opts) + h.AssertError(t, err, "could not find a target that matches daemon os=linux and architecture=arm64") + }) + }) + }) + + when("empty targets list", func() { + it("creates builder without calling daemonTarget", func() { + prepareFetcherWithBuildImage() + prepareFetcherWithRunImages() + + // Empty 
targets should use the default behavior + opts.Targets = []dist.Target{} + + // ServerVersion should NOT be called for empty targets + mockDockerClient.EXPECT().ServerVersion(gomock.Any(), gomock.Any()).Times(0) + + h.AssertNil(t, subject.CreateBuilder(context.TODO(), opts)) + + // Verify that the builder was created + h.AssertEq(t, fakeBuildImage.IsSaved(), true) + }) + }) + }) + }) }) } diff --git a/pkg/client/docker.go b/pkg/client/docker.go new file mode 100644 index 0000000000..9a6ebf3fe5 --- /dev/null +++ b/pkg/client/docker.go @@ -0,0 +1,32 @@ +package client + +import ( + "context" + "io" + + dockerClient "github.com/moby/moby/client" +) + +// DockerClient is the subset of client.APIClient which required by this package +type DockerClient interface { + ImageHistory(ctx context.Context, image string, opts ...dockerClient.ImageHistoryOption) (dockerClient.ImageHistoryResult, error) + ImageInspect(ctx context.Context, image string, opts ...dockerClient.ImageInspectOption) (dockerClient.ImageInspectResult, error) + ImageTag(ctx context.Context, options dockerClient.ImageTagOptions) (dockerClient.ImageTagResult, error) + ImageLoad(ctx context.Context, input io.Reader, opts ...dockerClient.ImageLoadOption) (dockerClient.ImageLoadResult, error) + ImageSave(ctx context.Context, images []string, opts ...dockerClient.ImageSaveOption) (dockerClient.ImageSaveResult, error) + ImageRemove(ctx context.Context, image string, options dockerClient.ImageRemoveOptions) (dockerClient.ImageRemoveResult, error) + ImagePull(ctx context.Context, ref string, options dockerClient.ImagePullOptions) (dockerClient.ImagePullResponse, error) + Info(ctx context.Context, options dockerClient.InfoOptions) (dockerClient.SystemInfoResult, error) + ServerVersion(ctx context.Context, options dockerClient.ServerVersionOptions) (dockerClient.ServerVersionResult, error) + VolumeRemove(ctx context.Context, volumeID string, options dockerClient.VolumeRemoveOptions) (dockerClient.VolumeRemoveResult, 
error) + ContainerCreate(ctx context.Context, options dockerClient.ContainerCreateOptions) (dockerClient.ContainerCreateResult, error) + CopyFromContainer(ctx context.Context, containerID string, options dockerClient.CopyFromContainerOptions) (dockerClient.CopyFromContainerResult, error) + ContainerInspect(ctx context.Context, containerID string, options dockerClient.ContainerInspectOptions) (dockerClient.ContainerInspectResult, error) + ContainerRemove(ctx context.Context, container string, options dockerClient.ContainerRemoveOptions) (dockerClient.ContainerRemoveResult, error) + CopyToContainer(ctx context.Context, container string, options dockerClient.CopyToContainerOptions) (dockerClient.CopyToContainerResult, error) + ContainerWait(ctx context.Context, containerID string, options dockerClient.ContainerWaitOptions) dockerClient.ContainerWaitResult + ContainerAttach(ctx context.Context, container string, options dockerClient.ContainerAttachOptions) (dockerClient.ContainerAttachResult, error) + ContainerStart(ctx context.Context, container string, options dockerClient.ContainerStartOptions) (dockerClient.ContainerStartResult, error) + NetworkCreate(ctx context.Context, name string, options dockerClient.NetworkCreateOptions) (dockerClient.NetworkCreateResult, error) + NetworkRemove(ctx context.Context, networkID string, options dockerClient.NetworkRemoveOptions) (dockerClient.NetworkRemoveResult, error) +} diff --git a/pkg/client/docker_context.go b/pkg/client/docker_context.go new file mode 100644 index 0000000000..26f40f6dc3 --- /dev/null +++ b/pkg/client/docker_context.go @@ -0,0 +1,146 @@ +package client + +import ( + "encoding/json" + "fmt" + "io" + "os" + "path/filepath" + + "github.com/pkg/errors" + + "github.com/opencontainers/go-digest" + + "github.com/buildpacks/pack/pkg/logging" +) + +const ( + dockerHostEnvVar = "DOCKER_HOST" + dockerConfigEnvVar = "DOCKER_CONFIG" + defaultDockerRootConfigDir = ".docker" + defaultDockerConfigFileName = "config.json" + 
+ dockerContextDirName = "contexts" + dockerContextMetaDirName = "meta" + dockerContextMetaFileName = "meta.json" + dockerContextEndpoint = "docker" + defaultDockerContext = "default" +) + +type configFile struct { + CurrentContext string `json:"currentContext,omitempty"` +} + +type endpoint struct { + Host string `json:",omitempty"` +} + +/* + Example Docker context file + { + "Name": "desktop-linux", + "dockerConfigMetadata": { + "Description": "Docker Desktop" + }, + "Endpoints": { + "docker": { + "Host": "unix:///Users/jbustamante/.docker/run/docker.sock", + "SkipTLSVerify": false + } + } + } +*/ +type dockerConfigMetadata struct { + Name string `json:",omitempty"` + Endpoints map[string]endpoint `json:"endpoints,omitempty"` +} + +func ProcessDockerContext(logger logging.Logger) error { + dockerHost := os.Getenv(dockerHostEnvVar) + if dockerHost == "" { + dockerConfigDir, err := configDir() + if err != nil { + return err + } + + logger.Debugf("looking for docker configuration file at: %s", dockerConfigDir) + configuration, err := readConfigFile(dockerConfigDir) + if err != nil { + return errors.Wrapf(err, "reading configuration file at '%s'", dockerConfigDir) + } + + if skip(configuration) { + logger.Debug("docker context is default or empty, skipping it") + return nil + } + + configMetaData, err := readConfigMetadata(dockerConfigDir, configuration.CurrentContext) + if err != nil { + return errors.Wrapf(err, "reading metadata for current context '%s' at '%s'", configuration.CurrentContext, dockerConfigDir) + } + + if dockerEndpoint, ok := configMetaData.Endpoints[dockerContextEndpoint]; ok { + os.Setenv(dockerHostEnvVar, dockerEndpoint.Host) + logger.Debugf("using docker context '%s' with endpoint = '%s'", configuration.CurrentContext, dockerEndpoint.Host) + } else { + logger.Warnf("docker endpoint doesn't exist for context '%s'", configuration.CurrentContext) + } + } else { + logger.Debugf("'%s=%s' environment variable is being used", dockerHostEnvVar, 
dockerHost) + } + return nil +} + +func configDir() (string, error) { + dir := os.Getenv(dockerConfigEnvVar) + if dir == "" { + home, err := os.UserHomeDir() + if err != nil { + return "", errors.Wrap(err, "determining user home directory") + } + dir = filepath.Join(home, defaultDockerRootConfigDir) + } + return dir, nil +} + +func readConfigFile(configDir string) (*configFile, error) { + filename := filepath.Join(configDir, defaultDockerConfigFileName) + config := &configFile{} + file, err := os.Open(filename) + if err != nil { + if os.IsNotExist(err) { + return &configFile{}, nil + } + return &configFile{}, err + } + defer file.Close() + if err := json.NewDecoder(file).Decode(config); err != nil && !errors.Is(err, io.EOF) { + return &configFile{}, err + } + return config, nil +} + +func readConfigMetadata(configDir string, context string) (dockerConfigMetadata, error) { + dockerContextDir := filepath.Join(configDir, dockerContextDirName) + metaFileName := filepath.Join(dockerContextDir, dockerContextMetaDirName, digest.FromString(context).Encoded(), dockerContextMetaFileName) + bytes, err := os.ReadFile(metaFileName) + if err != nil { + if errors.Is(err, os.ErrNotExist) { + return dockerConfigMetadata{}, fmt.Errorf("docker context '%s' not found", context) + } + return dockerConfigMetadata{}, err + } + var meta dockerConfigMetadata + if err := json.Unmarshal(bytes, &meta); err != nil { + return dockerConfigMetadata{}, fmt.Errorf("parsing %s: %v", metaFileName, err) + } + if meta.Name != context { + return dockerConfigMetadata{}, fmt.Errorf("context '%s' doesn't match metadata name '%s' at '%s'", context, meta.Name, metaFileName) + } + + return meta, nil +} + +func skip(configuration *configFile) bool { + return configuration == nil || configuration.CurrentContext == defaultDockerContext || configuration.CurrentContext == "" +} diff --git a/pkg/client/docker_context_test.go b/pkg/client/docker_context_test.go new file mode 100644 index 0000000000..3dc8416ea4 --- 
/dev/null +++ b/pkg/client/docker_context_test.go @@ -0,0 +1,202 @@ +package client_test + +import ( + "bytes" + "os" + "path/filepath" + "strings" + "testing" + + "github.com/heroku/color" + "github.com/sclevine/spec" + "github.com/sclevine/spec/report" + + "github.com/buildpacks/pack/pkg/client" + "github.com/buildpacks/pack/pkg/logging" + h "github.com/buildpacks/pack/testhelpers" +) + +func TestProcessDockerContext(t *testing.T) { + color.Disable(true) + defer color.Disable(false) + spec.Run(t, "processDockerContext", testProcessDockerContext, spec.Report(report.Terminal{})) +} + +const ( + rootFolder = "docker-context" + happyCase = "happy-cases" + errorCase = "error-cases" +) + +func testProcessDockerContext(t *testing.T, when spec.G, it spec.S) { + var ( + outBuf bytes.Buffer + logger logging.Logger + ) + + it.Before(func() { + logger = logging.NewLogWithWriters(&outBuf, &outBuf, logging.WithVerbose()) + }) + + when("env DOCKER_HOST is set", func() { + it.Before(func() { + os.Setenv("DOCKER_HOST", "some-value") + }) + + it("docker context process is skipped", func() { + err := client.ProcessDockerContext(logger) + h.AssertNil(t, err) + h.AssertContains(t, strings.TrimSpace(outBuf.String()), "'DOCKER_HOST=some-value' environment variable is being used") + }) + }) + + when("env DOCKER_HOST is empty", func() { + it.Before(func() { + os.Setenv("DOCKER_HOST", "") + }) + + when("config.json has currentContext", func() { + when("currentContext is default", func() { + it.Before(func() { + setDockerConfig(t, happyCase, "default-context") + }) + + it("docker context process is skip", func() { + err := client.ProcessDockerContext(logger) + h.AssertNil(t, err) + h.AssertContains(t, strings.TrimSpace(outBuf.String()), "docker context is default or empty, skipping it") + }) + }) + + when("currentContext is default but config doesn't exist", func() { + it.Before(func() { + setDockerConfig(t, errorCase, "empty-context") + }) + + it("throw an error", func() { + err := 
client.ProcessDockerContext(logger) + h.AssertNotNil(t, err) + h.AssertError(t, err, "docker context 'some-bad-context' not found") + }) + }) + + when("currentContext is not default", func() { + when("metadata has one endpoint", func() { + it.Before(func() { + setDockerConfig(t, happyCase, "custom-context") + }) + + it("docker endpoint host is being used", func() { + err := client.ProcessDockerContext(logger) + h.AssertNil(t, err) + h.AssertContains(t, outBuf.String(), "using docker context 'desktop-linux' with endpoint = 'unix:///Users/user/.docker/run/docker.sock'") + }) + }) + + when("metadata has more than one endpoint", func() { + it.Before(func() { + setDockerConfig(t, happyCase, "two-endpoints-context") + }) + + it("docker endpoint host is being used", func() { + err := client.ProcessDockerContext(logger) + h.AssertNil(t, err) + h.AssertContains(t, outBuf.String(), "using docker context 'desktop-linux' with endpoint = 'unix:///Users/user/.docker/run/docker.sock'") + }) + }) + + when("currentContext doesn't match metadata name", func() { + it.Before(func() { + setDockerConfig(t, errorCase, "current-context-does-not-match") + }) + + it("throw an error", func() { + err := client.ProcessDockerContext(logger) + h.AssertNotNil(t, err) + h.AssertError(t, err, "context 'desktop-linux' doesn't match metadata name 'bad-name'") + }) + }) + + when("metadata doesn't contain a docker endpoint", func() { + it.Before(func() { + setDockerConfig(t, errorCase, "docker-endpoint-does-not-exist") + }) + + it("writes a warn message into the log", func() { + err := client.ProcessDockerContext(logger) + h.AssertNil(t, err) + h.AssertContains(t, outBuf.String(), "docker endpoint doesn't exist for context 'desktop-linux'") + }) + }) + + when("metadata is invalid", func() { + it.Before(func() { + setDockerConfig(t, errorCase, "invalid-metadata") + }) + + it("throw an error", func() { + err := client.ProcessDockerContext(logger) + h.AssertNotNil(t, err) + h.AssertError(t, err, "reading 
metadata for current context 'desktop-linux'") + }) + }) + }) + }) + + when("config.json is invalid", func() { + it.Before(func() { + setDockerConfig(t, errorCase, "invalid-config") + }) + + it("throw an error", func() { + err := client.ProcessDockerContext(logger) + h.AssertNotNil(t, err) + h.AssertError(t, err, "reading configuration file") + }) + }) + + when("config.json doesn't have current context", func() { + it.Before(func() { + setDockerConfig(t, happyCase, "current-context-not-defined") + }) + + it("docker context process is skip", func() { + err := client.ProcessDockerContext(logger) + h.AssertNil(t, err) + h.AssertContains(t, strings.TrimSpace(outBuf.String()), "docker context is default or empty, skipping it") + }) + }) + + when("docker config folder doesn't exists", func() { + it.Before(func() { + setDockerConfig(t, errorCase, "no-docker-folder") + }) + + it("docker context process is skip", func() { + err := client.ProcessDockerContext(logger) + h.AssertNil(t, err) + h.AssertContains(t, strings.TrimSpace(outBuf.String()), "docker context is default or empty, skipping it") + }) + }) + + when("config.json config doesn't exists", func() { + it.Before(func() { + setDockerConfig(t, errorCase, "config-does-not-exist") + }) + + it("docker context process is skip", func() { + err := client.ProcessDockerContext(logger) + h.AssertNil(t, err) + h.AssertContains(t, strings.TrimSpace(outBuf.String()), "docker context is default or empty, skipping it") + }) + }) + }) +} + +func setDockerConfig(t *testing.T, test, context string) { + t.Helper() + contextDir, err := filepath.Abs(filepath.Join("testdata", rootFolder, test, context)) + h.AssertNil(t, err) + err = os.Setenv("DOCKER_CONFIG", contextDir) + h.AssertNil(t, err) +} diff --git a/pkg/client/download_sbom.go b/pkg/client/download_sbom.go index 26a4a8e09a..0d54e162a2 100644 --- a/pkg/client/download_sbom.go +++ b/pkg/client/download_sbom.go @@ -5,6 +5,7 @@ import ( "github.com/buildpacks/lifecycle/layers" 
"github.com/buildpacks/lifecycle/platform" + "github.com/buildpacks/lifecycle/platform/files" "github.com/pkg/errors" "github.com/buildpacks/pack/pkg/dist" @@ -18,7 +19,7 @@ type DownloadSBOMOptions struct { // Deserialize just the subset of fields we need to avoid breaking changes type sbomMetadata struct { - BOM *platform.LayerMetadata `json:"sbom" toml:"sbom"` + BOM *files.LayerMetadata `json:"sbom" toml:"sbom"` } func (s *sbomMetadata) isMissing() bool { @@ -39,13 +40,14 @@ func (c *Client) DownloadSBOM(name string, options DownloadSBOMOptions) error { img, err := c.imageFetcher.Fetch(context.Background(), name, image.FetchOptions{Daemon: options.Daemon, PullPolicy: image.PullNever}) if err != nil { if errors.Cause(err) == image.ErrNotFound { + c.logger.Warnf("if the image is saved on a registry run with the flag '--remote', for example: 'pack sbom download --remote %s'", name) return errors.Wrapf(image.ErrNotFound, "image '%s' cannot be found", name) } return err } var sbomMD sbomMetadata - if _, err := dist.GetLabel(img, platform.LayerMetadataLabel, &sbomMD); err != nil { + if _, err := dist.GetLabel(img, platform.LifecycleMetadataLabel, &sbomMD); err != nil { return err } diff --git a/pkg/client/download_sbom_test.go b/pkg/client/download_sbom_test.go index f1978423d1..3fd60efdf2 100644 --- a/pkg/client/download_sbom_test.go +++ b/pkg/client/download_sbom_test.go @@ -6,7 +6,6 @@ import ( "encoding/hex" "errors" "fmt" - "io/ioutil" "os" "path/filepath" "testing" @@ -34,7 +33,7 @@ func testDownloadSBOM(t *testing.T, when spec.G, it spec.S) { var ( subject *Client mockImageFetcher *testmocks.MockImageFetcher - mockDockerClient *testmocks.MockCommonAPIClient + mockDockerClient *testmocks.MockAPIClient mockController *gomock.Controller out bytes.Buffer ) @@ -42,7 +41,7 @@ func testDownloadSBOM(t *testing.T, when spec.G, it spec.S) { it.Before(func() { mockController = gomock.NewController(t) mockImageFetcher = testmocks.NewMockImageFetcher(mockController) - 
mockDockerClient = testmocks.NewMockCommonAPIClient(mockController) + mockDockerClient = testmocks.NewMockAPIClient(mockController) var err error subject, err = NewClient(WithLogger(logging.NewLogWithWriters(&out, &out)), WithFetcher(mockImageFetcher), WithDockerClient(mockDockerClient)) @@ -62,17 +61,17 @@ func testDownloadSBOM(t *testing.T, when spec.G, it spec.S) { it.Before(func() { var err error - tmpDir, err = ioutil.TempDir("", "pack.download.sbom.test.") + tmpDir, err = os.MkdirTemp("", "pack.download.sbom.test.") h.AssertNil(t, err) - f, err := ioutil.TempFile("", "pack.download.sbom.test.") + f, err := os.CreateTemp("", "pack.download.sbom.test.") h.AssertNil(t, err) tmpFile = f.Name() err = archive.CreateSingleFileTar(tmpFile, "sbom", "some-sbom-content") h.AssertNil(t, err) - data, err := ioutil.ReadFile(tmpFile) + data, err := os.ReadFile(tmpFile) h.AssertNil(t, err) hsh := sha256.New() @@ -102,7 +101,7 @@ func testDownloadSBOM(t *testing.T, when spec.G, it spec.S) { err := subject.DownloadSBOM("some/image", DownloadSBOMOptions{Daemon: true, DestinationDir: tmpDir}) h.AssertNil(t, err) - contents, err := ioutil.ReadFile(filepath.Join(tmpDir, "sbom")) + contents, err := os.ReadFile(filepath.Join(tmpDir, "sbom")) h.AssertNil(t, err) h.AssertEq(t, string(contents), "some-sbom-content") @@ -114,7 +113,11 @@ func testDownloadSBOM(t *testing.T, when spec.G, it spec.S) { mockImageFetcher.EXPECT().Fetch(gomock.Any(), "some/non-existent-image", image.FetchOptions{Daemon: true, PullPolicy: image.PullNever}).Return(nil, image.ErrNotFound) err := subject.DownloadSBOM("some/non-existent-image", DownloadSBOMOptions{Daemon: true, DestinationDir: ""}) - h.AssertError(t, err, "image 'some/non-existent-image' cannot be found") + expectedError := fmt.Sprintf("image '%s' cannot be found", "some/non-existent-image") + h.AssertError(t, err, expectedError) + + expectedMessage := fmt.Sprintf("Warning: if the image is saved on a registry run with the flag '--remote', for 
example: 'pack sbom download --remote %s'", "some/non-existent-image") + h.AssertContains(t, out.String(), expectedMessage) }) }) diff --git a/pkg/client/example_build_test.go b/pkg/client/example_build_test.go index 4f01f16263..839df8ba59 100644 --- a/pkg/client/example_build_test.go +++ b/pkg/client/example_build_test.go @@ -1,5 +1,4 @@ //go:build !windows && example -// +build !windows,example package client_test @@ -31,7 +30,7 @@ func Example_build() { // initialize our options buildOpts := client.BuildOptions{ Image: "pack-lib-test-image:0.0.1", - Builder: "cnbs/sample-builder:bionic", + Builder: "cnbs/sample-builder:noble", AppPath: appPath, TrustBuilder: func(string) bool { return true }, } diff --git a/pkg/client/example_buildpack_downloader_test.go b/pkg/client/example_buildpack_downloader_test.go index b9ac7083b1..c4e24f7787 100644 --- a/pkg/client/example_buildpack_downloader_test.go +++ b/pkg/client/example_buildpack_downloader_test.go @@ -1,5 +1,4 @@ //go:build !windows && example -// +build !windows,example package client_test @@ -48,7 +47,7 @@ var _ client.BuildpackDownloader = (*bpDownloader)(nil) type bpDownloader struct{} -func (f *bpDownloader) Download(ctx context.Context, buildpackURI string, opts buildpack.DownloadOptions) (buildpack.Buildpack, []buildpack.Buildpack, error) { +func (f *bpDownloader) Download(ctx context.Context, buildpackURI string, opts buildpack.DownloadOptions) (buildpack.BuildModule, []buildpack.BuildModule, error) { fmt.Println("custom buildpack downloader called") return nil, nil, errors.New("not implemented") } diff --git a/pkg/client/example_fetcher_test.go b/pkg/client/example_fetcher_test.go index e8cf56ce65..11abb6e897 100644 --- a/pkg/client/example_fetcher_test.go +++ b/pkg/client/example_fetcher_test.go @@ -1,5 +1,4 @@ //go:build !windows && example -// +build !windows,example package client_test @@ -53,3 +52,12 @@ func (f *fetcher) Fetch(_ context.Context, imageName string, _ image.FetchOption 
fmt.Println("custom fetcher called") return nil, errors.New("not implemented") } + +func (f *fetcher) FetchForPlatform(_ context.Context, imageName string, _ image.FetchOptions) (imgutil.Image, error) { + fmt.Println("custom fetcher called") + return nil, errors.New("not implemented") +} + +func (f *fetcher) CheckReadAccess(_ string, _ image.FetchOptions) bool { + return true +} diff --git a/pkg/client/input_image_reference.go b/pkg/client/input_image_reference.go new file mode 100644 index 0000000000..d276a4bc85 --- /dev/null +++ b/pkg/client/input_image_reference.go @@ -0,0 +1,101 @@ +package client + +import ( + "os" + "path/filepath" + "runtime" + "strings" + + "github.com/pkg/errors" +) + +type InputImageReference interface { + Name() string + Layout() bool + FullName() (string, error) +} + +type defaultInputImageReference struct { + name string +} + +type layoutInputImageReference struct { + name string +} + +func ParseInputImageReference(input string) InputImageReference { + if strings.HasPrefix(input, "oci:") { + imageNameParsed := strings.SplitN(input, ":", 2) + return &layoutInputImageReference{ + name: imageNameParsed[1], + } + } + return &defaultInputImageReference{ + name: input, + } +} + +func (d *defaultInputImageReference) Name() string { + return d.name +} + +func (d *defaultInputImageReference) Layout() bool { + return false +} + +func (d *defaultInputImageReference) FullName() (string, error) { + return d.name, nil +} + +func (l *layoutInputImageReference) Name() string { + return filepath.Base(l.name) +} + +func (l *layoutInputImageReference) Layout() bool { + return true +} + +func (l *layoutInputImageReference) FullName() (string, error) { + var ( + fullImagePath string + err error + ) + + path := parsePath(l.name) + + if fullImagePath, err = filepath.EvalSymlinks(path); err != nil { + if !os.IsNotExist(err) { + return "", errors.Wrap(err, "evaluate symlink") + } else { + fullImagePath = path + } + } + + if fullImagePath, err = 
filepath.Abs(fullImagePath); err != nil { + return "", errors.Wrap(err, "resolve absolute path") + } + + return fullImagePath, nil +} + +func parsePath(path string) string { + var result string + if filepath.IsAbs(path) && runtime.GOOS == "windows" { + dir, fileWithTag := filepath.Split(path) + file := removeTag(fileWithTag) + result = filepath.Join(dir, file) + } else { + result = removeTag(path) + } + return result +} + +func removeTag(path string) string { + result := path + if strings.Contains(path, ":") { + split := strings.SplitN(path, ":", 2) + // do not include the tag in the path + result = split[0] + } + return result +} diff --git a/pkg/client/input_image_reference_test.go b/pkg/client/input_image_reference_test.go new file mode 100644 index 0000000000..0b0857cff0 --- /dev/null +++ b/pkg/client/input_image_reference_test.go @@ -0,0 +1,91 @@ +package client + +import ( + "fmt" + "os" + "path/filepath" + "testing" + + "github.com/heroku/color" + "github.com/sclevine/spec" + "github.com/sclevine/spec/report" + + h "github.com/buildpacks/pack/testhelpers" +) + +func TestInputImageReference(t *testing.T) { + color.Disable(true) + defer color.Disable(false) + spec.Run(t, "InputImageReference", testInputImageReference, spec.Parallel(), spec.Report(report.Terminal{})) +} + +func testInputImageReference(t *testing.T, when spec.G, it spec.S) { + var defaultImageReference, layoutImageReference InputImageReference + + it.Before(func() { + defaultImageReference = ParseInputImageReference("busybox") + layoutImageReference = ParseInputImageReference("oci:my-app") + }) + + when("#ParseInputImageReference", func() { + when("oci layout image reference is not provided", func() { + it("default implementation is returned", func() { + h.AssertEq(t, defaultImageReference.Layout(), false) + h.AssertEq(t, defaultImageReference.Name(), "busybox") + + fullName, err := defaultImageReference.FullName() + h.AssertNil(t, err) + h.AssertEq(t, fullName, "busybox") + }) + }) + + 
when("oci layout image reference is provided", func() { + it("layout implementation is returned", func() { + h.AssertTrue(t, layoutImageReference.Layout()) + h.AssertEq(t, layoutImageReference.Name(), "my-app") + }) + }) + }) + + when("#FullName", func() { + when("oci layout image reference is provided", func() { + when("not absolute path provided", func() { + it("it will be joined with the current working directory", func() { + fullPath, err := layoutImageReference.FullName() + h.AssertNil(t, err) + + currentWorkingDir, err := os.Getwd() + h.AssertNil(t, err) + + expectedPath := filepath.Join(currentWorkingDir, layoutImageReference.Name()) + h.AssertEq(t, fullPath, expectedPath) + }) + }) + + when("absolute path provided", func() { + var ( + fullPath, expectedFullPath, tmpDir string + err error + ) + + it.Before(func() { + tmpDir, err = os.MkdirTemp("", "pack.input.image.reference.test") + h.AssertNil(t, err) + expectedFullPath = filepath.Join(tmpDir, "my-app") + layoutImageReference = ParseInputImageReference(fmt.Sprintf("oci:%s", expectedFullPath)) + }) + + it.After(func() { + err = os.RemoveAll(tmpDir) + h.AssertNil(t, err) + }) + + it("it must returned the path provided", func() { + fullPath, err = layoutImageReference.FullName() + h.AssertNil(t, err) + h.AssertEq(t, fullPath, expectedFullPath) + }) + }) + }) + }) +} diff --git a/pkg/client/inspect_builder.go b/pkg/client/inspect_builder.go index 8a22249f3d..75eb542ec0 100644 --- a/pkg/client/inspect_builder.go +++ b/pkg/client/inspect_builder.go @@ -22,14 +22,10 @@ type BuilderInfo struct { Mixins []string // RunImage provided by the builder. - RunImage string - - // List of all run image mirrors a builder will use to provide - // the RunImage. - RunImageMirrors []string + RunImages []pubbldr.RunImageConfig // All buildpacks included within the builder. - Buildpacks []dist.BuildpackInfo + Buildpacks []dist.ModuleInfo // Detailed ordering of buildpacks and nested buildpacks where depth is specified. 
Order pubbldr.DetectionOrder @@ -37,7 +33,7 @@ type BuilderInfo struct { // Listing of all buildpack layers in a builder. // All elements in the Buildpacks variable are represented in this // object. - BuildpackLayers dist.BuildpackLayers + BuildpackLayers dist.ModuleLayers // Lifecycle provides the following API versioning information for a builder: // - Lifecycle Version used in this builder, @@ -48,6 +44,12 @@ type BuilderInfo struct { // Name and Version information from tooling used // to produce this builder. CreatedBy builder.CreatorMetadata + + // All extensions included within the builder. + Extensions []dist.ModuleInfo + + // Detailed ordering of extensions. + OrderExtensions pubbldr.DetectionOrder } // BuildpackInfoKey contains all information needed to determine buildpack equivalence. @@ -95,12 +97,13 @@ func (c *Client) InspectBuilder(name string, daemon bool, modifiers ...BuilderIn Description: info.Description, Stack: info.StackID, Mixins: info.Mixins, - RunImage: info.RunImage, - RunImageMirrors: info.RunImageMirrors, + RunImages: info.RunImages, Buildpacks: info.Buildpacks, Order: info.Order, BuildpackLayers: info.BuildpackLayers, Lifecycle: info.Lifecycle, CreatedBy: info.CreatedBy, + Extensions: info.Extensions, + OrderExtensions: info.OrderExtensions, }, nil } diff --git a/pkg/client/inspect_builder_test.go b/pkg/client/inspect_builder_test.go index abb2629827..60a155aac8 100644 --- a/pkg/client/inspect_builder_test.go +++ b/pkg/client/inspect_builder_test.go @@ -140,7 +140,15 @@ func testInspectBuilder(t *testing.T, when spec.G, it spec.S) { "buildpack": {"deprecated": ["0.1"], "supported": ["1.2", "1.3"]}, "platform": {"deprecated": [], "supported": ["2.3", "2.4"]} }}, - "createdBy": {"name": "pack", "version": "1.2.3"} + "createdBy": {"name": "pack", "version": "1.2.3"}, + "images": [ + { + "image": "some/run-image", + "mirrors": [ + "gcr.io/some/default" + ] + } + ] }`)) assert.Succeeds(builderImage.SetLabel( @@ -221,12 +229,11 @@ func 
testInspectBuilder(t *testing.T, when spec.G, it spec.S) { assert.Nil(err) want := BuilderInfo{ - Description: "Some description", - Stack: "test.stack.id", - Mixins: []string{"mixinOne", "mixinThree", "build:mixinTwo", "build:mixinFour"}, - RunImage: "some/run-image", - RunImageMirrors: []string{"gcr.io/some/default"}, - Buildpacks: []dist.BuildpackInfo{ + Description: "Some description", + Stack: "test.stack.id", + Mixins: []string{"mixinOne", "mixinThree", "build:mixinTwo", "build:mixinFour"}, + RunImages: []pubbldr.RunImageConfig{{Image: "some/run-image", Mirrors: []string{"gcr.io/some/default"}}}, + Buildpacks: []dist.ModuleInfo{ { ID: "test.bp.one", Version: "test.bp.one.version", @@ -247,36 +254,36 @@ func testInspectBuilder(t *testing.T, when spec.G, it spec.S) { { GroupDetectionOrder: pubbldr.DetectionOrder{ { - BuildpackRef: dist.BuildpackRef{ - BuildpackInfo: dist.BuildpackInfo{ID: "test.nested", Version: "test.nested.version"}, - Optional: false, + ModuleRef: dist.ModuleRef{ + ModuleInfo: dist.ModuleInfo{ID: "test.nested", Version: "test.nested.version"}, + Optional: false, }, }, { - BuildpackRef: dist.BuildpackRef{ - BuildpackInfo: dist.BuildpackInfo{ID: "test.bp.two"}, - Optional: true, + ModuleRef: dist.ModuleRef{ + ModuleInfo: dist.ModuleInfo{ID: "test.bp.two"}, + Optional: true, }, }, }, }, }, - BuildpackLayers: map[string]map[string]dist.BuildpackLayerInfo{ + BuildpackLayers: map[string]map[string]dist.ModuleLayerInfo{ "test.nested": { "test.nested.version": { API: apiVersion, Order: dist.Order{ { - Group: []dist.BuildpackRef{ + Group: []dist.ModuleRef{ { - BuildpackInfo: dist.BuildpackInfo{ + ModuleInfo: dist.ModuleInfo{ ID: "test.bp.one", Version: "test.bp.one.version", }, Optional: false, }, { - BuildpackInfo: dist.BuildpackInfo{ + ModuleInfo: dist.ModuleInfo{ ID: "test.bp.two", Version: "test.bp.two.version", }, @@ -357,22 +364,22 @@ func testInspectBuilder(t *testing.T, when spec.G, it spec.S) { { GroupDetectionOrder: pubbldr.DetectionOrder{ 
{ - BuildpackRef: dist.BuildpackRef{ - BuildpackInfo: dist.BuildpackInfo{ID: "test.nested", Version: "test.nested.version"}, - Optional: false, + ModuleRef: dist.ModuleRef{ + ModuleInfo: dist.ModuleInfo{ID: "test.nested", Version: "test.nested.version"}, + Optional: false, }, GroupDetectionOrder: pubbldr.DetectionOrder{ { - BuildpackRef: dist.BuildpackRef{ - BuildpackInfo: dist.BuildpackInfo{ + ModuleRef: dist.ModuleRef{ + ModuleInfo: dist.ModuleInfo{ ID: "test.bp.one", Version: "test.bp.one.version", }, }, }, { - BuildpackRef: dist.BuildpackRef{ - BuildpackInfo: dist.BuildpackInfo{ + ModuleRef: dist.ModuleRef{ + ModuleInfo: dist.ModuleInfo{ ID: "test.bp.two", Version: "test.bp.two.version", }, @@ -381,9 +388,9 @@ func testInspectBuilder(t *testing.T, when spec.G, it spec.S) { }, }, { - BuildpackRef: dist.BuildpackRef{ - BuildpackInfo: dist.BuildpackInfo{ID: "test.bp.two"}, - Optional: true, + ModuleRef: dist.ModuleRef{ + ModuleInfo: dist.ModuleInfo{ID: "test.bp.two"}, + Optional: true, }, }, }, @@ -396,6 +403,8 @@ func testInspectBuilder(t *testing.T, when spec.G, it spec.S) { }) }) }) + + // TODO add test case when builder is flattened }) } }) diff --git a/pkg/client/inspect_buildpack.go b/pkg/client/inspect_buildpack.go index 84f89ce813..defe00b3d5 100644 --- a/pkg/client/inspect_buildpack.go +++ b/pkg/client/inspect_buildpack.go @@ -15,9 +15,9 @@ import ( type BuildpackInfo struct { BuildpackMetadata buildpack.Metadata - Buildpacks []dist.BuildpackInfo + Buildpacks []dist.ModuleInfo Order dist.Order - BuildpackLayers dist.BuildpackLayers + BuildpackLayers dist.ModuleLayers Location buildpack.LocatorType } @@ -36,11 +36,11 @@ func (iw ImgWrapper) Label(name string) (string, error) { } func (c *Client) InspectBuildpack(opts InspectBuildpackOptions) (*BuildpackInfo, error) { - locatorType, err := buildpack.GetLocatorType(opts.BuildpackName, "", []dist.BuildpackInfo{}) + locatorType, err := buildpack.GetLocatorType(opts.BuildpackName, "", []dist.ModuleInfo{}) if 
err != nil { return nil, err } - var layersMd dist.BuildpackLayers + var layersMd dist.ModuleLayers var buildpackMd buildpack.Metadata switch locatorType { @@ -66,57 +66,57 @@ func (c *Client) InspectBuildpack(opts InspectBuildpackOptions) (*BuildpackInfo, }, nil } -func metadataFromRegistry(client *Client, name, registry string) (buildpackMd buildpack.Metadata, layersMd dist.BuildpackLayers, err error) { +func metadataFromRegistry(client *Client, name, registry string) (buildpackMd buildpack.Metadata, layersMd dist.ModuleLayers, err error) { registryCache, err := getRegistry(client.logger, registry) if err != nil { - return buildpack.Metadata{}, dist.BuildpackLayers{}, fmt.Errorf("invalid registry %s: %q", registry, err) + return buildpack.Metadata{}, dist.ModuleLayers{}, fmt.Errorf("invalid registry %s: %q", registry, err) } registryBp, err := registryCache.LocateBuildpack(name) if err != nil { - return buildpack.Metadata{}, dist.BuildpackLayers{}, fmt.Errorf("unable to find %s in registry: %q", style.Symbol(name), err) + return buildpack.Metadata{}, dist.ModuleLayers{}, fmt.Errorf("unable to find %s in registry: %q", style.Symbol(name), err) } buildpackMd, layersMd, err = metadataFromImage(client, registryBp.Address, false) if err != nil { - return buildpack.Metadata{}, dist.BuildpackLayers{}, fmt.Errorf("error pulling registry specified image: %s", err) + return buildpack.Metadata{}, dist.ModuleLayers{}, fmt.Errorf("error pulling registry specified image: %s", err) } return buildpackMd, layersMd, nil } -func metadataFromArchive(downloader BlobDownloader, path string) (buildpackMd buildpack.Metadata, layersMd dist.BuildpackLayers, err error) { +func metadataFromArchive(downloader BlobDownloader, path string) (buildpackMd buildpack.Metadata, layersMd dist.ModuleLayers, err error) { imgBlob, err := downloader.Download(context.Background(), path) if err != nil { - return buildpack.Metadata{}, dist.BuildpackLayers{}, fmt.Errorf("unable to download archive: %q", err) 
+ return buildpack.Metadata{}, dist.ModuleLayers{}, fmt.Errorf("unable to download archive: %q", err) } config, err := buildpack.ConfigFromOCILayoutBlob(imgBlob) if err != nil { - return buildpack.Metadata{}, dist.BuildpackLayers{}, fmt.Errorf("unable to fetch config from buildpack blob: %q", err) + return buildpack.Metadata{}, dist.ModuleLayers{}, fmt.Errorf("unable to fetch config from buildpack blob: %q", err) } wrapper := ImgWrapper{config} if _, err := dist.GetLabel(wrapper, dist.BuildpackLayersLabel, &layersMd); err != nil { - return buildpack.Metadata{}, dist.BuildpackLayers{}, err + return buildpack.Metadata{}, dist.ModuleLayers{}, err } if _, err := dist.GetLabel(wrapper, buildpack.MetadataLabel, &buildpackMd); err != nil { - return buildpack.Metadata{}, dist.BuildpackLayers{}, err + return buildpack.Metadata{}, dist.ModuleLayers{}, err } return buildpackMd, layersMd, nil } -func metadataFromImage(client *Client, name string, daemon bool) (buildpackMd buildpack.Metadata, layersMd dist.BuildpackLayers, err error) { +func metadataFromImage(client *Client, name string, daemon bool) (buildpackMd buildpack.Metadata, layersMd dist.ModuleLayers, err error) { imageName := buildpack.ParsePackageLocator(name) img, err := client.imageFetcher.Fetch(context.Background(), imageName, image.FetchOptions{Daemon: daemon, PullPolicy: image.PullNever}) if err != nil { - return buildpack.Metadata{}, dist.BuildpackLayers{}, err + return buildpack.Metadata{}, dist.ModuleLayers{}, err } if _, err := dist.GetLabel(img, dist.BuildpackLayersLabel, &layersMd); err != nil { - return buildpack.Metadata{}, dist.BuildpackLayers{}, fmt.Errorf("unable to get image label %s: %q", dist.BuildpackLayersLabel, err) + return buildpack.Metadata{}, dist.ModuleLayers{}, fmt.Errorf("unable to get image label %s: %q", dist.BuildpackLayersLabel, err) } if _, err := dist.GetLabel(img, buildpack.MetadataLabel, &buildpackMd); err != nil { - return buildpack.Metadata{}, dist.BuildpackLayers{}, 
fmt.Errorf("unable to get image label %s: %q", buildpack.MetadataLabel, err) + return buildpack.Metadata{}, dist.ModuleLayers{}, fmt.Errorf("unable to get image label %s: %q", buildpack.MetadataLabel, err) } return buildpackMd, layersMd, nil } @@ -124,22 +124,22 @@ func metadataFromImage(client *Client, name string, daemon bool) (buildpackMd bu func extractOrder(buildpackMd buildpack.Metadata) dist.Order { return dist.Order{ { - Group: []dist.BuildpackRef{ + Group: []dist.ModuleRef{ { - BuildpackInfo: buildpackMd.BuildpackInfo, + ModuleInfo: buildpackMd.ModuleInfo, }, }, }, } } -func extractBuildpacks(layersMd dist.BuildpackLayers) []dist.BuildpackInfo { - result := []dist.BuildpackInfo{} - buildpackSet := map[*dist.BuildpackInfo]bool{} +func extractBuildpacks(layersMd dist.ModuleLayers) []dist.ModuleInfo { + result := []dist.ModuleInfo{} + buildpackSet := map[*dist.ModuleInfo]bool{} for buildpackID, buildpackMap := range layersMd { for version, layerInfo := range buildpackMap { - bp := dist.BuildpackInfo{ + bp := dist.ModuleInfo{ ID: buildpackID, Name: layerInfo.Name, Version: version, diff --git a/pkg/client/inspect_buildpack_test.go b/pkg/client/inspect_buildpack_test.go index 07e2cafcae..c434e1ac06 100644 --- a/pkg/client/inspect_buildpack_test.go +++ b/pkg/client/inspect_buildpack_test.go @@ -4,9 +4,9 @@ import ( "archive/tar" "bytes" "fmt" - "io/ioutil" "os" "path/filepath" + "runtime" "testing" "github.com/buildpacks/imgutil/fakes" @@ -184,14 +184,14 @@ func testInspectBuildpack(t *testing.T, when spec.G, it spec.S) { apiVersion, err = api.NewVersion("0.2") h.AssertNil(t, err) - tmpDir, err = ioutil.TempDir("", "inspectBuildpack") + tmpDir, err = os.MkdirTemp("", "inspectBuildpack") h.AssertNil(t, err) buildpackPath = filepath.Join(tmpDir, "buildpackTarFile.tar") expectedInfo = &client.BuildpackInfo{ BuildpackMetadata: buildpack.Metadata{ - BuildpackInfo: dist.BuildpackInfo{ + ModuleInfo: dist.ModuleInfo{ ID: "some/top-buildpack", Version: "0.0.1", Name: 
"top", @@ -202,7 +202,7 @@ func testInspectBuildpack(t *testing.T, when spec.G, it spec.S) { {ID: "io.buildpacks.stacks.second-stack"}, }, }, - Buildpacks: []dist.BuildpackInfo{ + Buildpacks: []dist.ModuleInfo{ { ID: "some/first-inner-buildpack", Version: "1.0.0", @@ -227,9 +227,9 @@ func testInspectBuildpack(t *testing.T, when spec.G, it spec.S) { }, Order: dist.Order{ { - Group: []dist.BuildpackRef{ + Group: []dist.ModuleRef{ { - BuildpackInfo: dist.BuildpackInfo{ + ModuleInfo: dist.ModuleInfo{ ID: "some/top-buildpack", Version: "0.0.1", Name: "top", @@ -240,7 +240,7 @@ func testInspectBuildpack(t *testing.T, when spec.G, it spec.S) { }, }, }, - BuildpackLayers: dist.BuildpackLayers{ + BuildpackLayers: dist.ModuleLayers{ "some/first-inner-buildpack": { "1.0.0": { API: apiVersion, @@ -250,16 +250,16 @@ func testInspectBuildpack(t *testing.T, when spec.G, it spec.S) { }, Order: dist.Order{ { - Group: []dist.BuildpackRef{ + Group: []dist.ModuleRef{ { - BuildpackInfo: dist.BuildpackInfo{ + ModuleInfo: dist.ModuleInfo{ ID: "some/first-inner-buildpack", Version: "1.0.0", }, Optional: false, }, { - BuildpackInfo: dist.BuildpackInfo{ + ModuleInfo: dist.ModuleInfo{ ID: "some/second-inner-buildpack", Version: "3.0.0", }, @@ -268,9 +268,9 @@ func testInspectBuildpack(t *testing.T, when spec.G, it spec.S) { }, }, { - Group: []dist.BuildpackRef{ + Group: []dist.ModuleRef{ { - BuildpackInfo: dist.BuildpackInfo{ + ModuleInfo: dist.ModuleInfo{ ID: "some/second-inner-buildpack", Version: "3.0.0", }, @@ -308,16 +308,16 @@ func testInspectBuildpack(t *testing.T, when spec.G, it spec.S) { API: apiVersion, Order: dist.Order{ { - Group: []dist.BuildpackRef{ + Group: []dist.ModuleRef{ { - BuildpackInfo: dist.BuildpackInfo{ + ModuleInfo: dist.ModuleInfo{ ID: "some/first-inner-buildpack", Version: "1.0.0", }, Optional: false, }, { - BuildpackInfo: dist.BuildpackInfo{ + ModuleInfo: dist.ModuleInfo{ ID: "some/second-inner-buildpack", Version: "2.0.0", }, @@ -326,9 +326,9 @@ func 
testInspectBuildpack(t *testing.T, when spec.G, it spec.S) { }, }, { - Group: []dist.BuildpackRef{ + Group: []dist.ModuleRef{ { - BuildpackInfo: dist.BuildpackInfo{ + ModuleInfo: dist.ModuleInfo{ ID: "some/first-inner-buildpack", Version: "1.0.0", }, @@ -348,7 +348,10 @@ func testInspectBuildpack(t *testing.T, when spec.G, it spec.S) { it.After(func() { mockController.Finish() - h.AssertNil(t, os.RemoveAll(tmpDir)) + err := os.RemoveAll(tmpDir) + if runtime.GOOS != "windows" { + h.AssertNil(t, err) + } }) when("inspect-buildpack", func() { @@ -394,7 +397,10 @@ func testInspectBuildpack(t *testing.T, when spec.G, it spec.S) { h.AssertEq(t, info, expectedInfo) }) + + // TODO add test case when buildpack is flattened }) + when("inspecting local buildpack archive", func() { it.Before(func() { expectedInfo.Location = buildpack.URILocator @@ -414,6 +420,8 @@ func testInspectBuildpack(t *testing.T, when spec.G, it spec.S) { h.AssertEq(t, info, expectedInfo) }) + + // TODO add test case when buildpack is flattened }) when("inspecting an image", func() { @@ -493,7 +501,7 @@ func testInspectBuildpack(t *testing.T, when spec.G, it spec.S) { when("archive is not a buildpack", func() { it.Before(func() { invalidBuildpackPath := filepath.Join(tmpDir, "fake-buildpack-path") - h.AssertNil(t, ioutil.WriteFile(invalidBuildpackPath, []byte("not a buildpack"), os.ModePerm)) + h.AssertNil(t, os.WriteFile(invalidBuildpackPath, []byte("not a buildpack"), os.ModePerm)) mockDownloader.EXPECT().Download(gomock.Any(), "https://invalid/buildpack").Return(blob.NewBlob(invalidBuildpackPath), nil) }) diff --git a/pkg/client/inspect_extension.go b/pkg/client/inspect_extension.go new file mode 100644 index 0000000000..77df989cd9 --- /dev/null +++ b/pkg/client/inspect_extension.go @@ -0,0 +1,73 @@ +package client + +import ( + "context" + "fmt" + + "github.com/buildpacks/pack/pkg/buildpack" + "github.com/buildpacks/pack/pkg/dist" + "github.com/buildpacks/pack/pkg/image" +) + +type ExtensionInfo 
struct { + Extension dist.ModuleInfo + Location buildpack.LocatorType +} + +type InspectExtensionOptions struct { + ExtensionName string + Daemon bool +} + +func (c *Client) InspectExtension(opts InspectExtensionOptions) (*ExtensionInfo, error) { + locatorType, err := buildpack.GetLocatorType(opts.ExtensionName, "", []dist.ModuleInfo{}) + if err != nil { + return nil, err + } + var layerMd dist.ModuleLayers + + layerMd, err = metadataOfExtensionFromImage(c, opts.ExtensionName, opts.Daemon) + + if err != nil { + return nil, err + } + + if len(layerMd) != 1 { + return nil, fmt.Errorf("expected 1 extension, got %d", len(layerMd)) + } + + return &ExtensionInfo{ + Extension: extractExtension(layerMd), + Location: locatorType, + }, nil +} + +func metadataOfExtensionFromImage(client *Client, name string, daemon bool) (layerMd dist.ModuleLayers, err error) { + imageName := buildpack.ParsePackageLocator(name) + img, err := client.imageFetcher.Fetch(context.Background(), imageName, image.FetchOptions{Daemon: daemon, PullPolicy: image.PullNever}) + if err != nil { + return dist.ModuleLayers{}, err + } + + if _, err := dist.GetLabel(img, dist.ExtensionLayersLabel, &layerMd); err != nil { + return dist.ModuleLayers{}, fmt.Errorf("unable to get image label %s: %q", dist.ExtensionLayersLabel, err) + } + + return layerMd, nil +} + +func extractExtension(layerMd dist.ModuleLayers) dist.ModuleInfo { + result := dist.ModuleInfo{} + for extensionID, extensionMap := range layerMd { + for version, layerInfo := range extensionMap { + ex := dist.ModuleInfo{ + ID: extensionID, + Name: layerInfo.Name, + Version: version, + Homepage: layerInfo.Homepage, + } + result = ex + } + } + return result +} diff --git a/pkg/client/inspect_extension_test.go b/pkg/client/inspect_extension_test.go new file mode 100644 index 0000000000..1f83161f8e --- /dev/null +++ b/pkg/client/inspect_extension_test.go @@ -0,0 +1,160 @@ +package client_test + +import ( + "bytes" + "fmt" + "testing" + + 
"github.com/buildpacks/imgutil/fakes" + "github.com/golang/mock/gomock" + "github.com/heroku/color" + "github.com/pkg/errors" + "github.com/sclevine/spec" + "github.com/sclevine/spec/report" + + "github.com/buildpacks/pack/pkg/buildpack" + "github.com/buildpacks/pack/pkg/client" + "github.com/buildpacks/pack/pkg/dist" + "github.com/buildpacks/pack/pkg/image" + "github.com/buildpacks/pack/pkg/logging" + "github.com/buildpacks/pack/pkg/testmocks" + h "github.com/buildpacks/pack/testhelpers" +) + +const extensionMetadataTag = `{ + "id": "some/top-extension", + "version": "0.0.1", + "name": "top", + "homepage": "top-extension-homepage" +}` + +const extensionLayersTag = `{ + "some/top-extension":{ + "0.0.1":{ + "api":"0.2", + "homepage":"top-extension-homepage", + "name": "top" + } + } +}` + +func TestInspectExtension(t *testing.T) { + color.Disable(true) + defer color.Disable(false) + spec.Run(t, "InspectExtension", testInspectExtension, spec.Sequential(), spec.Report(report.Terminal{})) +} +func testInspectExtension(t *testing.T, when spec.G, it spec.S) { + var ( + subject *client.Client + mockImageFetcher *testmocks.MockImageFetcher + mockController *gomock.Controller + out bytes.Buffer + extensionImage *fakes.Image + expectedInfo *client.ExtensionInfo + ) + + it.Before(func() { + mockController = gomock.NewController(t) + mockImageFetcher = testmocks.NewMockImageFetcher(mockController) + + subject = &client.Client{} + client.WithLogger(logging.NewLogWithWriters(&out, &out))(subject) + client.WithFetcher(mockImageFetcher)(subject) + + extensionImage = fakes.NewImage("some/extension", "", nil) + h.AssertNil(t, extensionImage.SetLabel(dist.ExtensionMetadataLabel, extensionMetadataTag)) + h.AssertNil(t, extensionImage.SetLabel(dist.ExtensionLayersLabel, extensionLayersTag)) + + expectedInfo = &client.ExtensionInfo{ + Extension: dist.ModuleInfo{ + ID: "some/top-extension", + Version: "0.0.1", + Name: "top", + Homepage: "top-extension-homepage", + }, + } + }) + + 
it.After(func() { + mockController.Finish() + }) + + when("inspect-extension", func() { + when("inspecting an image", func() { + for _, useDaemon := range []bool{true, false} { + useDaemon := useDaemon + when(fmt.Sprintf("daemon is %t", useDaemon), func() { + it.Before(func() { + expectedInfo.Location = buildpack.PackageLocator + if useDaemon { + mockImageFetcher.EXPECT().Fetch(gomock.Any(), "some/extension", image.FetchOptions{Daemon: true, PullPolicy: image.PullNever}).Return(extensionImage, nil) + } else { + mockImageFetcher.EXPECT().Fetch(gomock.Any(), "some/extension", image.FetchOptions{Daemon: false, PullPolicy: image.PullNever}).Return(extensionImage, nil) + } + }) + + it("succeeds", func() { + inspectOptions := client.InspectExtensionOptions{ + ExtensionName: "docker://some/extension", + Daemon: useDaemon, + } + info, err := subject.InspectExtension(inspectOptions) + h.AssertNil(t, err) + + h.AssertEq(t, info, expectedInfo) + }) + }) + } + }) + }) + when("failure cases", func() { + when("invalid extension name", func() { + it.Before(func() { + mockImageFetcher.EXPECT().Fetch(gomock.Any(), "", image.FetchOptions{Daemon: false, PullPolicy: image.PullNever}).Return(nil, errors.Wrapf(image.ErrNotFound, "unable to handle locator")) + }) + it("returns an error", func() { + invalidExtensionName := "" + inspectOptions := client.InspectExtensionOptions{ + ExtensionName: invalidExtensionName, + } + _, err := subject.InspectExtension(inspectOptions) + + h.AssertError(t, err, "unable to handle locator") + h.AssertTrue(t, errors.Is(err, image.ErrNotFound)) + }) + }) + when("extension image", func() { + when("unable to fetch extension image", func() { + it.Before(func() { + mockImageFetcher.EXPECT().Fetch(gomock.Any(), "missing/extension", image.FetchOptions{Daemon: true, PullPolicy: image.PullNever}).Return(nil, errors.Wrapf(image.ErrNotFound, "big bad error")) + }) + it("returns an ErrNotFound error", func() { + inspectOptions := client.InspectExtensionOptions{ + 
ExtensionName: "docker://missing/extension", + Daemon: true, + } + _, err := subject.InspectExtension(inspectOptions) + h.AssertTrue(t, errors.Is(err, image.ErrNotFound)) + }) + }) + + when("image does not have extension metadata", func() { + it.Before(func() { + fakeImage := fakes.NewImage("empty", "", nil) + h.AssertNil(t, fakeImage.SetLabel(dist.ExtensionLayersLabel, ":::")) + mockImageFetcher.EXPECT().Fetch(gomock.Any(), "missing-metadata/extension", image.FetchOptions{Daemon: true, PullPolicy: image.PullNever}).Return(fakeImage, nil) + }) + it("returns an error", func() { + inspectOptions := client.InspectExtensionOptions{ + ExtensionName: "docker://missing-metadata/extension", + Daemon: true, + } + _, err := subject.InspectExtension(inspectOptions) + + h.AssertError(t, err, fmt.Sprintf("unable to get image label %s", dist.ExtensionLayersLabel)) + h.AssertFalse(t, errors.Is(err, image.ErrNotFound)) + }) + }) + }) + }) +} diff --git a/pkg/client/inspect_image.go b/pkg/client/inspect_image.go index c881dc20a3..84fa044301 100644 --- a/pkg/client/inspect_image.go +++ b/pkg/client/inspect_image.go @@ -8,6 +8,7 @@ import ( "github.com/buildpacks/lifecycle/buildpack" "github.com/buildpacks/lifecycle/launch" "github.com/buildpacks/lifecycle/platform" + "github.com/buildpacks/lifecycle/platform/files" "github.com/pkg/errors" "github.com/buildpacks/pack/pkg/dist" @@ -22,7 +23,11 @@ type ImageInfo struct { // List of buildpacks that passed detection, ran their build // phases and made a contribution to this image. - Buildpacks []buildpack.GroupBuildpack + Buildpacks []buildpack.GroupElement + + // List of extensions that passed detection, ran their generate + // phases and made a contribution to this image. + Extensions []buildpack.GroupElement // Base includes two references to the run image, // - the Run Image ID, @@ -38,7 +43,7 @@ type ImageInfo struct { // the first 1 to k layers all belong to the run image, // the last k+1 to n layers are added by buildpacks. 
// the sum of all of these is our app image. - Base platform.RunImageMetadata + Base files.RunImageForRebase // BOM or Bill of materials, contains dependency and // version information provided by each buildpack. @@ -46,10 +51,13 @@ type ImageInfo struct { // Stack includes the run image name, and a list of image mirrors, // where the run image is hosted. - Stack platform.StackMetadata + Stack files.Stack // Processes lists all processes contributed by buildpacks. Processes ProcessDetails + + // If the image can be rebased + Rebasable bool } // ProcessDetails is a collection of all start command metadata @@ -64,8 +72,8 @@ type ProcessDetails struct { // Deserialize just the subset of fields we need to avoid breaking changes type layersMetadata struct { - RunImage platform.RunImageMetadata `json:"runImage" toml:"run-image"` - Stack platform.StackMetadata `json:"stack" toml:"stack"` + RunImage files.RunImageForRebase `json:"runImage" toml:"run-image"` + Stack files.Stack `json:"stack" toml:"stack"` } const ( @@ -94,11 +102,11 @@ func (c *Client) InspectImage(name string, daemon bool) (*ImageInfo, error) { } var layersMd layersMetadata - if _, err := dist.GetLabel(img, platform.LayerMetadataLabel, &layersMd); err != nil { + if _, err := dist.GetLabel(img, platform.LifecycleMetadataLabel, &layersMd); err != nil { return nil, err } - var buildMD platform.BuildMetadata + var buildMD files.BuildMetadata if _, err := dist.GetLabel(img, platform.BuildMetadataLabel, &buildMD); err != nil { return nil, err } @@ -115,6 +123,11 @@ func (c *Client) InspectImage(name string, daemon bool) (*ImageInfo, error) { return nil, err } + rebasable, err := getRebasableLabel(img) + if err != nil { + return nil, err + } + platformAPI, err := img.Env(platformAPIEnv) if err != nil { return nil, errors.Wrap(err, "reading platform api") @@ -154,9 +167,17 @@ func (c *Client) InspectImage(name string, daemon bool) (*ImageInfo, error) { } } + workingDir, err := img.WorkingDir() + if err != nil { + 
return nil, errors.Wrap(err, "reading WorkingDir") + } + var processDetails ProcessDetails for _, proc := range buildMD.Processes { proc := proc + if proc.WorkingDirectory == "" { + proc.WorkingDirectory = workingDir + } if proc.Type == defaultProcessType { processDetails.DefaultProcess = &proc continue @@ -164,12 +185,47 @@ func (c *Client) InspectImage(name string, daemon bool) (*ImageInfo, error) { processDetails.OtherProcesses = append(processDetails.OtherProcesses, proc) } + var stackCompat files.Stack + if layersMd.RunImage.Image != "" { + stackCompat = layersMd.RunImage.ToStack() + } else { + stackCompat = layersMd.Stack + } + + if buildMD.Extensions != nil { + return &ImageInfo{ + StackID: stackID, + Stack: stackCompat, + Base: layersMd.RunImage, + BOM: buildMD.BOM, + Buildpacks: buildMD.Buildpacks, + Extensions: buildMD.Extensions, + Processes: processDetails, + Rebasable: rebasable, + }, nil + } + return &ImageInfo{ StackID: stackID, - Stack: layersMd.Stack, + Stack: stackCompat, Base: layersMd.RunImage, BOM: buildMD.BOM, Buildpacks: buildMD.Buildpacks, Processes: processDetails, + Rebasable: rebasable, }, nil } + +func getRebasableLabel(labeled dist.Labeled) (bool, error) { + var rebasableOutput bool + isPresent, err := dist.GetLabel(labeled, platform.RebasableLabel, &rebasableOutput) + if err != nil { + return false, err + } + + if !isPresent { + rebasableOutput = true + } + + return rebasableOutput, nil +} diff --git a/pkg/client/inspect_image_test.go b/pkg/client/inspect_image_test.go index b6184495ba..c70fdae4c0 100644 --- a/pkg/client/inspect_image_test.go +++ b/pkg/client/inspect_image_test.go @@ -9,8 +9,10 @@ import ( "github.com/buildpacks/imgutil/fakes" "github.com/buildpacks/lifecycle/launch" - "github.com/buildpacks/lifecycle/platform" + "github.com/buildpacks/lifecycle/platform/files" "github.com/golang/mock/gomock" + "github.com/google/go-cmp/cmp" + "github.com/google/go-cmp/cmp/cmpopts" "github.com/heroku/color" "github.com/sclevine/spec" 
"github.com/sclevine/spec/report" @@ -27,27 +29,38 @@ func TestInspectImage(t *testing.T) { spec.Run(t, "InspectImage", testInspectImage, spec.Parallel(), spec.Report(report.Terminal{})) } +// PlatformAPI should be ignored because it is not set in the metadata label +var ignorePlatformAPI = []cmp.Option{ + cmpopts.IgnoreFields(launch.Process{}, "PlatformAPI"), + cmpopts.IgnoreFields(launch.RawCommand{}, "PlatformAPI"), +} + func testInspectImage(t *testing.T, when spec.G, it spec.S) { var ( - subject *Client - mockImageFetcher *testmocks.MockImageFetcher - mockDockerClient *testmocks.MockCommonAPIClient - mockController *gomock.Controller - mockImage *testmocks.MockImage - out bytes.Buffer + subject *Client + mockImageFetcher *testmocks.MockImageFetcher + mockDockerClient *testmocks.MockAPIClient + mockController *gomock.Controller + mockImage *testmocks.MockImage + mockImageNoRebasable *testmocks.MockImage + mockImageRebasableWithoutLabel *testmocks.MockImage + mockImageWithExtension *testmocks.MockImage + out bytes.Buffer ) it.Before(func() { mockController = gomock.NewController(t) mockImageFetcher = testmocks.NewMockImageFetcher(mockController) - mockDockerClient = testmocks.NewMockCommonAPIClient(mockController) + mockDockerClient = testmocks.NewMockAPIClient(mockController) var err error subject, err = NewClient(WithLogger(logging.NewLogWithWriters(&out, &out)), WithFetcher(mockImageFetcher), WithDockerClient(mockDockerClient)) h.AssertNil(t, err) mockImage = testmocks.NewImage("some/image", "", nil) + h.AssertNil(t, mockImage.SetWorkingDir("/test-workdir")) h.AssertNil(t, mockImage.SetLabel("io.buildpacks.stack.id", "test.stack.id")) + h.AssertNil(t, mockImage.SetLabel("io.buildpacks.rebasable", "true")) h.AssertNil(t, mockImage.SetLabel( "io.buildpacks.lifecycle.metadata", `{ @@ -101,6 +114,195 @@ func testInspectImage(t *testing.T, when spec.G, it spec.S) { "launcher": { "version": "0.5.0" } +}`, + )) + + mockImageNoRebasable = 
testmocks.NewImage("some/imageNoRebasable", "", nil) + h.AssertNil(t, mockImageNoRebasable.SetWorkingDir("/test-workdir")) + h.AssertNil(t, mockImageNoRebasable.SetLabel("io.buildpacks.stack.id", "test.stack.id")) + h.AssertNil(t, mockImageNoRebasable.SetLabel("io.buildpacks.rebasable", "false")) + h.AssertNil(t, mockImageNoRebasable.SetLabel( + "io.buildpacks.lifecycle.metadata", + `{ + "stack": { + "runImage": { + "image": "some-run-image-no-rebasable", + "mirrors": [ + "some-mirror", + "other-mirror" + ] + } + }, + "runImage": { + "topLayer": "some-top-layer", + "reference": "some-run-image-reference" + } +}`, + )) + h.AssertNil(t, mockImageNoRebasable.SetLabel( + "io.buildpacks.build.metadata", + `{ + "bom": [ + { + "name": "some-bom-element" + } + ], + "buildpacks": [ + { + "id": "some-buildpack", + "version": "some-version" + }, + { + "id": "other-buildpack", + "version": "other-version" + } + ], + "processes": [ + { + "type": "other-process", + "command": "/other/process", + "args": ["opt", "1"], + "direct": true + }, + { + "type": "web", + "command": "/start/web-process", + "args": ["-p", "1234"], + "direct": false + } + ], + "launcher": { + "version": "0.5.0" + } +}`, + )) + + mockImageRebasableWithoutLabel = testmocks.NewImage("some/imageRebasableWithoutLabel", "", nil) + h.AssertNil(t, mockImageNoRebasable.SetWorkingDir("/test-workdir")) + h.AssertNil(t, mockImageNoRebasable.SetLabel("io.buildpacks.stack.id", "test.stack.id")) + h.AssertNil(t, mockImageNoRebasable.SetLabel( + "io.buildpacks.lifecycle.metadata", + `{ + "stack": { + "runImage": { + "image": "some-run-image-no-rebasable", + "mirrors": [ + "some-mirror", + "other-mirror" + ] + } + }, + "runImage": { + "topLayer": "some-top-layer", + "reference": "some-run-image-reference" + } +}`, + )) + h.AssertNil(t, mockImageNoRebasable.SetLabel( + "io.buildpacks.build.metadata", + `{ + "bom": [ + { + "name": "some-bom-element" + } + ], + "buildpacks": [ + { + "id": "some-buildpack", + "version": 
"some-version" + }, + { + "id": "other-buildpack", + "version": "other-version" + } + ], + "processes": [ + { + "type": "other-process", + "command": "/other/process", + "args": ["opt", "1"], + "direct": true + }, + { + "type": "web", + "command": "/start/web-process", + "args": ["-p", "1234"], + "direct": false + } + ], + "launcher": { + "version": "0.5.0" + } +}`, + )) + + mockImageWithExtension = testmocks.NewImage("some/imageWithExtension", "", nil) + h.AssertNil(t, mockImageWithExtension.SetWorkingDir("/test-workdir")) + h.AssertNil(t, mockImageWithExtension.SetLabel("io.buildpacks.stack.id", "test.stack.id")) + h.AssertNil(t, mockImageWithExtension.SetLabel("io.buildpacks.rebasable", "true")) + h.AssertNil(t, mockImageWithExtension.SetLabel( + "io.buildpacks.lifecycle.metadata", + `{ + "stack": { + "runImage": { + "image": "some-run-image", + "mirrors": [ + "some-mirror", + "other-mirror" + ] + } + }, + "runImage": { + "topLayer": "some-top-layer", + "reference": "some-run-image-reference" + } +}`, + )) + h.AssertNil(t, mockImageWithExtension.SetLabel( + "io.buildpacks.build.metadata", + `{ + "bom": [ + { + "name": "some-bom-element" + } + ], + "buildpacks": [ + { + "id": "some-buildpack", + "version": "some-version" + }, + { + "id": "other-buildpack", + "version": "other-version" + } + ], + "extensions": [ + { + "id": "some-extension", + "version": "some-version" + }, + { + "id": "other-extension", + "version": "other-version" + } + ], + "processes": [ + { + "type": "other-process", + "command": "/other/process", + "args": ["opt", "1"], + "direct": true + }, + { + "type": "web", + "command": "/start/web-process", + "args": ["-p", "1234"], + "direct": false + } + ], + "launcher": { + "version": "0.5.0" + } }`, )) }) @@ -115,9 +317,15 @@ func testInspectImage(t *testing.T, when spec.G, it spec.S) { when(fmt.Sprintf("daemon is %t", useDaemon), func() { it.Before(func() { if useDaemon { - mockImageFetcher.EXPECT().Fetch(gomock.Any(), "some/image", 
image.FetchOptions{Daemon: true, PullPolicy: image.PullNever}).Return(mockImage, nil) + mockImageFetcher.EXPECT().Fetch(gomock.Any(), "some/image", image.FetchOptions{Daemon: true, PullPolicy: image.PullNever}).Return(mockImage, nil).AnyTimes() + mockImageFetcher.EXPECT().Fetch(gomock.Any(), "some/imageNoRebasable", image.FetchOptions{Daemon: true, PullPolicy: image.PullNever}).Return(mockImageNoRebasable, nil).AnyTimes() + mockImageFetcher.EXPECT().Fetch(gomock.Any(), "some/imageRebasableWithoutLabel", image.FetchOptions{Daemon: true, PullPolicy: image.PullNever}).Return(mockImageRebasableWithoutLabel, nil).AnyTimes() + mockImageFetcher.EXPECT().Fetch(gomock.Any(), "some/imageWithExtension", image.FetchOptions{Daemon: true, PullPolicy: image.PullNever}).Return(mockImageWithExtension, nil).AnyTimes() } else { - mockImageFetcher.EXPECT().Fetch(gomock.Any(), "some/image", image.FetchOptions{Daemon: false, PullPolicy: image.PullNever}).Return(mockImage, nil) + mockImageFetcher.EXPECT().Fetch(gomock.Any(), "some/image", image.FetchOptions{Daemon: false, PullPolicy: image.PullNever}).Return(mockImage, nil).AnyTimes() + mockImageFetcher.EXPECT().Fetch(gomock.Any(), "some/imageNoRebasable", image.FetchOptions{Daemon: false, PullPolicy: image.PullNever}).Return(mockImageNoRebasable, nil).AnyTimes() + mockImageFetcher.EXPECT().Fetch(gomock.Any(), "some/imageRebasableWithoutLabel", image.FetchOptions{Daemon: false, PullPolicy: image.PullNever}).Return(mockImageRebasableWithoutLabel, nil).AnyTimes() + mockImageFetcher.EXPECT().Fetch(gomock.Any(), "some/imageWithExtension", image.FetchOptions{Daemon: false, PullPolicy: image.PullNever}).Return(mockImageWithExtension, nil).AnyTimes() } }) @@ -127,12 +335,51 @@ func testInspectImage(t *testing.T, when spec.G, it spec.S) { h.AssertEq(t, info.StackID, "test.stack.id") }) + it("returns the stack ID with extension", func() { + infoWithExtension, err := subject.InspectImage("some/imageWithExtension", useDaemon) + h.AssertNil(t, err) 
+ h.AssertEq(t, infoWithExtension.StackID, "test.stack.id") + }) + + it("returns the stack from runImage.Image if set", func() { + h.AssertNil(t, mockImage.SetLabel( + "io.buildpacks.lifecycle.metadata", + `{ + "runImage": { + "topLayer": "some-top-layer", + "reference": "some-run-image-reference", + "image": "is everything" + } +}`, + )) + info, err := subject.InspectImage("some/image", useDaemon) + h.AssertNil(t, err) + h.AssertEq(t, info.Stack, + files.Stack{RunImage: files.RunImageForExport{Image: "is everything"}}) + }) + it("returns the stack", func() { info, err := subject.InspectImage("some/image", useDaemon) h.AssertNil(t, err) h.AssertEq(t, info.Stack, - platform.StackMetadata{ - RunImage: platform.StackRunImageMetadata{ + files.Stack{ + RunImage: files.RunImageForExport{ + Image: "some-run-image", + Mirrors: []string{ + "some-mirror", + "other-mirror", + }, + }, + }, + ) + }) + + it("returns the stack with extension", func() { + infoWithExtension, err := subject.InspectImage("some/imageWithExtension", useDaemon) + h.AssertNil(t, err) + h.AssertEq(t, infoWithExtension.Stack, + files.Stack{ + RunImage: files.RunImageForExport{ Image: "some-run-image", Mirrors: []string{ "some-mirror", @@ -147,13 +394,48 @@ func testInspectImage(t *testing.T, when spec.G, it spec.S) { info, err := subject.InspectImage("some/image", useDaemon) h.AssertNil(t, err) h.AssertEq(t, info.Base, - platform.RunImageMetadata{ + files.RunImageForRebase{ TopLayer: "some-top-layer", Reference: "some-run-image-reference", }, ) }) + it("returns the base image with extension", func() { + infoWithExtension, err := subject.InspectImage("some/imageWithExtension", useDaemon) + h.AssertNil(t, err) + h.AssertEq(t, infoWithExtension.Base, + files.RunImageForRebase{ + TopLayer: "some-top-layer", + Reference: "some-run-image-reference", + }, + ) + }) + + it("returns the rebasable image", func() { + info, err := subject.InspectImage("some/image", useDaemon) + h.AssertNil(t, err) + h.AssertEq(t, 
info.Rebasable, true) + }) + + it("returns the rebasable image true if the label has not been set", func() { + info, err := subject.InspectImage("some/imageRebasableWithoutLabel", useDaemon) + h.AssertNil(t, err) + h.AssertEq(t, info.Rebasable, true) + }) + + it("returns the no rebasable image", func() { + info, err := subject.InspectImage("some/imageNoRebasable", useDaemon) + h.AssertNil(t, err) + h.AssertEq(t, info.Rebasable, false) + }) + + it("returns the rebasable image with Extension", func() { + infoRebasableWithExtension, err := subject.InspectImage("some/imageWithExtension", useDaemon) + h.AssertNil(t, err) + h.AssertEq(t, infoRebasableWithExtension.Rebasable, true) + }) + it("returns the BOM", func() { info, err := subject.InspectImage("some/image", useDaemon) h.AssertNil(t, err) @@ -163,6 +445,15 @@ func testInspectImage(t *testing.T, when spec.G, it spec.S) { h.AssertContains(t, string(rawBOM), `[{"name":"some-bom-element"`) }) + it("returns the BOM", func() { + infoWithExtension, err := subject.InspectImage("some/imageWithExtension", useDaemon) + h.AssertNil(t, err) + + rawBOM, err := json.Marshal(infoWithExtension.BOM) + h.AssertNil(t, err) + h.AssertContains(t, string(rawBOM), `[{"name":"some-bom-element"`) + }) + it("returns the buildpacks", func() { info, err := subject.InspectImage("some/image", useDaemon) h.AssertNil(t, err) @@ -174,6 +465,28 @@ func testInspectImage(t *testing.T, when spec.G, it spec.S) { h.AssertEq(t, info.Buildpacks[1].Version, "other-version") }) + it("returns the buildpacks with extension", func() { + infoWithExtension, err := subject.InspectImage("some/imageWithExtension", useDaemon) + h.AssertNil(t, err) + + h.AssertEq(t, len(infoWithExtension.Buildpacks), 2) + h.AssertEq(t, infoWithExtension.Buildpacks[0].ID, "some-buildpack") + h.AssertEq(t, infoWithExtension.Buildpacks[0].Version, "some-version") + h.AssertEq(t, infoWithExtension.Buildpacks[1].ID, "other-buildpack") + h.AssertEq(t, 
infoWithExtension.Buildpacks[1].Version, "other-version") + }) + + it("returns the extensions", func() { + infoWithExtension, err := subject.InspectImage("some/imageWithExtension", useDaemon) + h.AssertNil(t, err) + + h.AssertEq(t, len(infoWithExtension.Extensions), 2) + h.AssertEq(t, infoWithExtension.Extensions[0].ID, "some-extension") + h.AssertEq(t, infoWithExtension.Extensions[0].Version, "some-version") + h.AssertEq(t, infoWithExtension.Extensions[1].ID, "other-extension") + h.AssertEq(t, infoWithExtension.Extensions[1].Version, "other-version") + }) + it("returns the processes setting the web process as default", func() { info, err := subject.InspectImage("some/image", useDaemon) h.AssertNil(t, err) @@ -181,21 +494,23 @@ func testInspectImage(t *testing.T, when spec.G, it spec.S) { h.AssertEq(t, info.Processes, ProcessDetails{ DefaultProcess: &launch.Process{ - Type: "web", - Command: "/start/web-process", - Args: []string{"-p", "1234"}, - Direct: false, + Type: "web", + Command: launch.RawCommand{Entries: []string{"/start/web-process"}}, + Args: []string{"-p", "1234"}, + Direct: false, + WorkingDirectory: "/test-workdir", }, OtherProcesses: []launch.Process{ { - Type: "other-process", - Command: "/other/process", - Args: []string{"opt", "1"}, - Direct: true, + Type: "other-process", + Command: launch.RawCommand{Entries: []string{"/other/process"}}, + Args: []string{"opt", "1"}, + Direct: true, + WorkingDirectory: "/test-workdir", }, }, }, - ) + ignorePlatformAPI...) 
}) when("Platform API < 0.4", func() { @@ -211,21 +526,23 @@ func testInspectImage(t *testing.T, when spec.G, it spec.S) { h.AssertEq(t, info.Processes, ProcessDetails{ DefaultProcess: &launch.Process{ - Type: "other-process", - Command: "/other/process", - Args: []string{"opt", "1"}, - Direct: true, + Type: "other-process", + Command: launch.RawCommand{Entries: []string{"/other/process"}}, + Args: []string{"opt", "1"}, + Direct: true, + WorkingDirectory: "/test-workdir", }, OtherProcesses: []launch.Process{ { - Type: "web", - Command: "/start/web-process", - Args: []string{"-p", "1234"}, - Direct: false, + Type: "web", + Command: launch.RawCommand{Entries: []string{"/start/web-process"}}, + Args: []string{"-p", "1234"}, + Direct: false, + WorkingDirectory: "/test-workdir", }, }, }, - ) + ignorePlatformAPI...) }) }) @@ -243,20 +560,22 @@ func testInspectImage(t *testing.T, when spec.G, it spec.S) { DefaultProcess: nil, OtherProcesses: []launch.Process{ { - Type: "other-process", - Command: "/other/process", - Args: []string{"opt", "1"}, - Direct: true, + Type: "other-process", + Command: launch.RawCommand{Entries: []string{"/other/process"}}, + Args: []string{"opt", "1"}, + Direct: true, + WorkingDirectory: "/test-workdir", }, { - Type: "web", - Command: "/start/web-process", - Args: []string{"-p", "1234"}, - Direct: false, + Type: "web", + Command: launch.RawCommand{Entries: []string{"/start/web-process"}}, + Args: []string{"-p", "1234"}, + Direct: false, + WorkingDirectory: "/test-workdir", }, }, }, - ) + ignorePlatformAPI...) 
}) }) @@ -283,18 +602,19 @@ func testInspectImage(t *testing.T, when spec.G, it spec.S) { DefaultProcess: nil, OtherProcesses: []launch.Process{ { - Type: "other-process", - Command: "/other/process", - Args: []string{"opt", "1"}, - Direct: true, + Type: "other-process", + Command: launch.RawCommand{Entries: []string{"/other/process"}}, + Args: []string{"opt", "1"}, + Direct: true, + WorkingDirectory: "/test-workdir", }, }, }, - ) + ignorePlatformAPI...) }) }) - when("Platform API >= 0.4", func() { + when("Platform API >= 0.4 and <= 0.8", func() { it.Before(func() { h.AssertNil(t, mockImage.SetEnv("CNB_PLATFORM_API", "0.4")) }) @@ -326,20 +646,22 @@ func testInspectImage(t *testing.T, when spec.G, it spec.S) { DefaultProcess: nil, OtherProcesses: []launch.Process{ { - Type: "other-process", - Command: "/other/process", - Args: []string{"opt", "1"}, - Direct: true, + Type: "other-process", + Command: launch.RawCommand{Entries: []string{"/other/process"}}, + Args: []string{"opt", "1"}, + Direct: true, + WorkingDirectory: "/test-workdir", }, { - Type: "web", - Command: "/start/web-process", - Args: []string{"-p", "1234"}, - Direct: false, + Type: "web", + Command: launch.RawCommand{Entries: []string{"/start/web-process"}}, + Args: []string{"-p", "1234"}, + Direct: false, + WorkingDirectory: "/test-workdir", }, }, }, - ) + ignorePlatformAPI...) 
}) }) @@ -357,21 +679,23 @@ func testInspectImage(t *testing.T, when spec.G, it spec.S) { h.AssertEq(t, info.Processes, ProcessDetails{ DefaultProcess: &launch.Process{ - Type: "web", - Command: "/start/web-process", - Args: []string{"-p", "1234"}, - Direct: false, + Type: "web", + Command: launch.RawCommand{Entries: []string{"/start/web-process"}}, + Args: []string{"-p", "1234"}, + Direct: false, + WorkingDirectory: "/test-workdir", }, OtherProcesses: []launch.Process{ { - Type: "other-process", - Command: "/other/process", - Args: []string{"opt", "1"}, - Direct: true, + Type: "other-process", + Command: launch.RawCommand{Entries: []string{"/other/process"}}, + Args: []string{"opt", "1"}, + Direct: true, + WorkingDirectory: "/test-workdir", }, }, }, - ) + ignorePlatformAPI...) }) }) @@ -389,20 +713,22 @@ func testInspectImage(t *testing.T, when spec.G, it spec.S) { DefaultProcess: nil, OtherProcesses: []launch.Process{ { - Type: "other-process", - Command: "/other/process", - Args: []string{"opt", "1"}, - Direct: true, + Type: "other-process", + Command: launch.RawCommand{Entries: []string{"/other/process"}}, + Args: []string{"opt", "1"}, + Direct: true, + WorkingDirectory: "/test-workdir", }, { - Type: "web", - Command: "/start/web-process", - Args: []string{"-p", "1234"}, - Direct: false, + Type: "web", + Command: launch.RawCommand{Entries: []string{"/start/web-process"}}, + Args: []string{"-p", "1234"}, + Direct: false, + WorkingDirectory: "/test-workdir", }, }, }, - ) + ignorePlatformAPI...) }) }) @@ -432,14 +758,15 @@ func testInspectImage(t *testing.T, when spec.G, it spec.S) { DefaultProcess: nil, OtherProcesses: []launch.Process{ { - Type: "other-process", - Command: "/other/process", - Args: []string{"opt", "1"}, - Direct: true, + Type: "other-process", + Command: launch.RawCommand{Entries: []string{"/other/process"}}, + Args: []string{"opt", "1"}, + Direct: true, + WorkingDirectory: "/test-workdir", }, }, }, - ) + ignorePlatformAPI...) 
}) }) @@ -456,20 +783,22 @@ func testInspectImage(t *testing.T, when spec.G, it spec.S) { DefaultProcess: nil, OtherProcesses: []launch.Process{ { - Type: "other-process", - Command: "/other/process", - Args: []string{"opt", "1"}, - Direct: true, + Type: "other-process", + Command: launch.RawCommand{Entries: []string{"/other/process"}}, + Args: []string{"opt", "1"}, + Direct: true, + WorkingDirectory: "/test-workdir", }, { - Type: "web", - Command: "/start/web-process", - Args: []string{"-p", "1234"}, - Direct: false, + Type: "web", + Command: launch.RawCommand{Entries: []string{"/start/web-process"}}, + Args: []string{"-p", "1234"}, + Direct: false, + WorkingDirectory: "/test-workdir", }, }, }, - ) + ignorePlatformAPI...) }) }) @@ -485,20 +814,22 @@ func testInspectImage(t *testing.T, when spec.G, it spec.S) { DefaultProcess: nil, OtherProcesses: []launch.Process{ { - Type: "other-process", - Command: "/other/process", - Args: []string{"opt", "1"}, - Direct: true, + Type: "other-process", + Command: launch.RawCommand{Entries: []string{"/other/process"}}, + Args: []string{"opt", "1"}, + Direct: true, + WorkingDirectory: "/test-workdir", }, { - Type: "web", - Command: "/start/web-process", - Args: []string{"-p", "1234"}, - Direct: false, + Type: "web", + Command: launch.RawCommand{Entries: []string{"/start/web-process"}}, + Args: []string{"-p", "1234"}, + Direct: false, + WorkingDirectory: "/test-workdir", }, }, }, - ) + ignorePlatformAPI...) 
}) }) @@ -512,25 +843,95 @@ func testInspectImage(t *testing.T, when spec.G, it spec.S) { h.AssertEq(t, info.Processes, ProcessDetails{ DefaultProcess: &launch.Process{ - Type: "other-process", - Command: "/other/process", - Args: []string{"opt", "1"}, - Direct: true, + Type: "other-process", + Command: launch.RawCommand{Entries: []string{"/other/process"}}, + Args: []string{"opt", "1"}, + Direct: true, + WorkingDirectory: "/test-workdir", }, OtherProcesses: []launch.Process{ { - Type: "web", - Command: "/start/web-process", - Args: []string{"-p", "1234"}, - Direct: false, + Type: "web", + Command: launch.RawCommand{Entries: []string{"/start/web-process"}}, + Args: []string{"-p", "1234"}, + Direct: false, + WorkingDirectory: "/test-workdir", }, }, }, - ) + ignorePlatformAPI...) }) }) }) }) + + when("Platform API > 0.8", func() { + when("working-dir is set", func() { + it("returns process with working directory if available", func() { + h.AssertNil(t, mockImage.SetLabel( + "io.buildpacks.build.metadata", + `{ + "processes": [ + { + "type": "other-process", + "command": "/other/process", + "args": ["opt", "1"], + "direct": true, + "working-dir": "/other-workdir" + } + ] + }`, + )) + + info, err := subject.InspectImage("some/image", useDaemon) + h.AssertNil(t, err) + fmt.Print(info) + + h.AssertEq(t, info.Processes, + ProcessDetails{ + DefaultProcess: nil, + OtherProcesses: []launch.Process{ + { + Type: "other-process", + Command: launch.RawCommand{Entries: []string{"/other/process"}}, + Args: []string{"opt", "1"}, + Direct: true, + WorkingDirectory: "/other-workdir", + }, + }, + }, + ignorePlatformAPI...) 
+ }) + }) + + when("working-dir is not set", func() { + it("returns process with working directory from image", func() { + info, err := subject.InspectImage("some/image", useDaemon) + h.AssertNil(t, err) + + h.AssertEq(t, info.Processes, + ProcessDetails{ + DefaultProcess: &launch.Process{ + Type: "web", + Command: launch.RawCommand{Entries: []string{"/start/web-process"}}, + Args: []string{"-p", "1234"}, + Direct: false, + WorkingDirectory: "/test-workdir", + }, + OtherProcesses: []launch.Process{ + { + Type: "other-process", + Command: launch.RawCommand{Entries: []string{"/other/process"}}, + Args: []string{"opt", "1"}, + Direct: true, + WorkingDirectory: "/test-workdir", + }, + }, + }, + ignorePlatformAPI...) + }) + }) + }) }) } }) @@ -561,7 +962,7 @@ func testInspectImage(t *testing.T, when spec.G, it spec.S) { Return(fakes.NewImage("missing/labels", "", nil), nil) info, err := subject.InspectImage("missing/labels", true) h.AssertNil(t, err) - h.AssertEq(t, info, &ImageInfo{}) + h.AssertEq(t, info, &ImageInfo{Rebasable: true}, ignorePlatformAPI...) 
}) }) @@ -614,7 +1015,7 @@ func testInspectImage(t *testing.T, when spec.G, it spec.S) { info, err := subject.InspectImage("old/image", true) h.AssertNil(t, err) h.AssertEq(t, info.Base, - platform.RunImageMetadata{ + files.RunImageForRebase{ TopLayer: "some-top-layer", Reference: "", }, diff --git a/pkg/client/lifecycle.tar b/pkg/client/lifecycle.tar new file mode 100644 index 0000000000..c631f0b2c5 Binary files /dev/null and b/pkg/client/lifecycle.tar differ diff --git a/pkg/client/manifest_add.go b/pkg/client/manifest_add.go new file mode 100644 index 0000000000..e465549711 --- /dev/null +++ b/pkg/client/manifest_add.go @@ -0,0 +1,35 @@ +package client + +import ( + "context" + "fmt" + + "github.com/buildpacks/pack/internal/style" +) + +type ManifestAddOptions struct { + // Image index we want to update + IndexRepoName string + + // Name of image we wish to add into the image index + RepoName string +} + +// AddManifest implements commands.PackClient. +func (c *Client) AddManifest(ctx context.Context, opts ManifestAddOptions) (err error) { + idx, err := c.indexFactory.LoadIndex(opts.IndexRepoName) + if err != nil { + return err + } + + if err = c.addManifestToIndex(ctx, opts.RepoName, idx); err != nil { + return err + } + + if err = idx.SaveDir(); err != nil { + return fmt.Errorf("failed to save manifest list %s to local storage: %w", style.Symbol(opts.RepoName), err) + } + + c.logger.Infof("Successfully added image %s to index", style.Symbol(opts.RepoName)) + return nil +} diff --git a/pkg/client/manifest_add_test.go b/pkg/client/manifest_add_test.go new file mode 100644 index 0000000000..12b20c60db --- /dev/null +++ b/pkg/client/manifest_add_test.go @@ -0,0 +1,195 @@ +package client + +import ( + "bytes" + "context" + "errors" + "os" + "path/filepath" + "testing" + + "github.com/buildpacks/imgutil" + "github.com/golang/mock/gomock" + "github.com/google/go-containerregistry/pkg/authn" + "github.com/heroku/color" + "github.com/sclevine/spec" + 
"github.com/sclevine/spec/report" + + ifakes "github.com/buildpacks/pack/internal/fakes" + "github.com/buildpacks/pack/pkg/logging" + "github.com/buildpacks/pack/pkg/testmocks" + h "github.com/buildpacks/pack/testhelpers" +) + +func TestAddManifest(t *testing.T) { + color.Disable(true) + defer color.Disable(false) + + spec.Run(t, "build", testAddManifest, spec.Report(report.Terminal{})) +} + +func testAddManifest(t *testing.T, when spec.G, it spec.S) { + var ( + mockController *gomock.Controller + mockIndexFactory *testmocks.MockIndexFactory + fakeImageFetcher *ifakes.FakeImageFetcher + out bytes.Buffer + logger logging.Logger + subject *Client + err error + tmpDir string + ) + + it.Before(func() { + fakeImageFetcher = ifakes.NewFakeImageFetcher() + logger = logging.NewLogWithWriters(&out, &out, logging.WithVerbose()) + mockController = gomock.NewController(t) + mockIndexFactory = testmocks.NewMockIndexFactory(mockController) + + tmpDir, err = os.MkdirTemp("", "add-manifest-test") + h.AssertNil(t, err) + os.Setenv("XDG_RUNTIME_DIR", tmpDir) + + subject, err = NewClient( + WithLogger(logger), + WithFetcher(fakeImageFetcher), + WithIndexFactory(mockIndexFactory), + WithExperimental(true), + WithKeychain(authn.DefaultKeychain), + ) + h.AssertSameInstance(t, mockIndexFactory, subject.indexFactory) + h.AssertNil(t, err) + + // Create a remote image to be fetched when adding to the image index + fakeImage := h.NewFakeWithRandomUnderlyingV1Image(t, "pack/image", nil) + fakeImageFetcher.RemoteImages["index.docker.io/pack/image:latest"] = fakeImage + }) + it.After(func() { + mockController.Finish() + os.RemoveAll(tmpDir) + }) + + when("#AddManifest", func() { + when("index doesn't exist", func() { + it.Before(func() { + mockIndexFactory.EXPECT().LoadIndex(gomock.Any(), gomock.Any()).Return(nil, errors.New("index not found locally")) + }) + + it("should return an error", func() { + err = subject.AddManifest( + context.TODO(), + ManifestAddOptions{ + IndexRepoName: 
"pack/none-existent-index", + RepoName: "pack/image", + }, + ) + h.AssertError(t, err, "index not found locally") + }) + }) + + when("index exists", func() { + var ( + indexPath string + indexRepoName string + ) + + when("no errors on save", func() { + when("valid manifest is provided", func() { + it.Before(func() { + indexRepoName = h.NewRandomIndexRepoName() + indexPath = filepath.Join(tmpDir, imgutil.MakeFileSafeName(indexRepoName)) + // Initialize the Index with 2 image manifest + idx := h.RandomCNBIndex(t, indexRepoName, 1, 2) + h.AssertNil(t, idx.SaveDir()) + mockIndexFactory.EXPECT().LoadIndex(gomock.Eq(indexRepoName), gomock.Any()).Return(idx, nil) + }) + + it("adds the given image", func() { + err = subject.AddManifest( + context.TODO(), + ManifestAddOptions{ + IndexRepoName: indexRepoName, + RepoName: "pack/image", + }, + ) + h.AssertNil(t, err) + h.AssertContains(t, out.String(), "Successfully added image 'pack/image' to index") + + // We expect one more manifest to be added + index := h.ReadIndexManifest(t, indexPath) + h.AssertEq(t, len(index.Manifests), 3) + }) + }) + + when("invalid manifest reference name is used", func() { + it.Before(func() { + indexRepoName = h.NewRandomIndexRepoName() + indexPath = filepath.Join(tmpDir, imgutil.MakeFileSafeName(indexRepoName)) + // Initialize the Index with 2 image manifest + idx := h.RandomCNBIndex(t, indexRepoName, 1, 2) + mockIndexFactory.EXPECT().LoadIndex(gomock.Eq(indexRepoName), gomock.Any()).Return(idx, nil) + }) + + it("errors a message", func() { + err = subject.AddManifest( + context.TODO(), + ManifestAddOptions{ + IndexRepoName: indexRepoName, + RepoName: "pack@@image", + }, + ) + h.AssertNotNil(t, err) + h.AssertError(t, err, "is not a valid manifest reference") + }) + }) + + when("when manifest reference doesn't exist in the registry", func() { + it.Before(func() { + indexRepoName = h.NewRandomIndexRepoName() + indexPath = filepath.Join(tmpDir, imgutil.MakeFileSafeName(indexRepoName)) + // 
Initialize the Index with 2 image manifest + idx := h.RandomCNBIndex(t, indexRepoName, 1, 2) + mockIndexFactory.EXPECT().LoadIndex(gomock.Eq(indexRepoName), gomock.Any()).Return(idx, nil) + }) + + it("it errors a message", func() { + err = subject.AddManifest( + context.TODO(), + ManifestAddOptions{ + IndexRepoName: indexRepoName, + RepoName: "pack/image-not-found", + }, + ) + h.AssertNotNil(t, err) + h.AssertError(t, err, "does not exist in registry") + }) + }) + }) + + when("errors on save", func() { + it.Before(func() { + indexRepoName = h.NewRandomIndexRepoName() + cnbIdx := h.NewMockImageIndex(t, indexRepoName, 1, 2) + cnbIdx.ErrorOnSave = true + mockIndexFactory. + EXPECT(). + LoadIndex(gomock.Eq(indexRepoName), gomock.Any()). + Return(cnbIdx, nil). + AnyTimes() + }) + + it("errors when the manifest list couldn't be saved locally", func() { + err = subject.AddManifest( + context.TODO(), + ManifestAddOptions{ + IndexRepoName: indexRepoName, + RepoName: "pack/image", + }, + ) + h.AssertNotNil(t, err) + h.AssertError(t, err, "failed to save manifest list") + }) + }) + }) + }) +} diff --git a/pkg/client/manifest_annotate.go b/pkg/client/manifest_annotate.go new file mode 100644 index 0000000000..f5d375c587 --- /dev/null +++ b/pkg/client/manifest_annotate.go @@ -0,0 +1,87 @@ +package client + +import ( + "context" + "fmt" + + "github.com/google/go-containerregistry/pkg/name" + + "github.com/buildpacks/pack/internal/style" + "github.com/buildpacks/pack/pkg/image" +) + +type ManifestAnnotateOptions struct { + // Image index we want to update + IndexRepoName string + + // Name of image within the index that we wish to update + RepoName string + + // 'os' of the image we wish to update in the image index + OS string + + // 'architecture' of the image we wish to update in the image index + OSArch string + + // 'os variant' of the image we wish to update in the image index + OSVariant string + + // 'annotations' of the image we wish to update in the image index + 
Annotations map[string]string +} + +// AnnotateManifest implements commands.PackClient. +func (c *Client) AnnotateManifest(ctx context.Context, opts ManifestAnnotateOptions) error { + idx, err := c.indexFactory.LoadIndex(opts.IndexRepoName) + if err != nil { + return err + } + + imageRef, err := name.ParseReference(opts.RepoName, name.WeakValidation) + if err != nil { + return fmt.Errorf("'%s' is not a valid image reference: %s", opts.RepoName, err) + } + + imageToAnnotate, err := c.imageFetcher.Fetch(ctx, imageRef.Name(), image.FetchOptions{Daemon: false}) + if err != nil { + return err + } + + hash, err := imageToAnnotate.Identifier() + if err != nil { + return err + } + + digest, err := name.NewDigest(hash.String()) + if err != nil { + return err + } + + if opts.OS != "" { + if err = idx.SetOS(digest, opts.OS); err != nil { + return fmt.Errorf("failed to set the 'os' for %s: %w", style.Symbol(opts.RepoName), err) + } + } + if opts.OSArch != "" { + if err = idx.SetArchitecture(digest, opts.OSArch); err != nil { + return fmt.Errorf("failed to set the 'arch' for %s: %w", style.Symbol(opts.RepoName), err) + } + } + if opts.OSVariant != "" { + if err = idx.SetVariant(digest, opts.OSVariant); err != nil { + return fmt.Errorf("failed to set the 'os variant' for %s: %w", style.Symbol(opts.RepoName), err) + } + } + if len(opts.Annotations) != 0 { + if err = idx.SetAnnotations(digest, opts.Annotations); err != nil { + return fmt.Errorf("failed to set the 'annotations' for %s: %w", style.Symbol(opts.RepoName), err) + } + } + + if err = idx.SaveDir(); err != nil { + return fmt.Errorf("failed to save manifest list %s to local storage: %w", style.Symbol(opts.RepoName), err) + } + + c.logger.Infof("Successfully annotated image %s in index %s", style.Symbol(opts.RepoName), style.Symbol(opts.IndexRepoName)) + return nil +} diff --git a/pkg/client/manifest_annotate_test.go b/pkg/client/manifest_annotate_test.go new file mode 100644 index 0000000000..4557accb06 --- /dev/null +++ 
b/pkg/client/manifest_annotate_test.go @@ -0,0 +1,284 @@ +package client + +import ( + "bytes" + "context" + "os" + "testing" + + "github.com/buildpacks/imgutil" + "github.com/golang/mock/gomock" + "github.com/google/go-containerregistry/pkg/authn" + "github.com/google/go-containerregistry/pkg/name" + "github.com/heroku/color" + "github.com/pkg/errors" + "github.com/sclevine/spec" + "github.com/sclevine/spec/report" + + ifakes "github.com/buildpacks/pack/internal/fakes" + "github.com/buildpacks/pack/pkg/logging" + "github.com/buildpacks/pack/pkg/testmocks" + h "github.com/buildpacks/pack/testhelpers" +) + +const invalidDigest = "sha256:d4707523ce6e12afdbe9a3be5ad69027150a834870ca0933baf7516dd1fe0f56" + +func TestAnnotateManifest(t *testing.T) { + color.Disable(true) + defer color.Disable(false) + spec.Run(t, "build", testAnnotateManifest, spec.Sequential(), spec.Report(report.Terminal{})) +} + +func testAnnotateManifest(t *testing.T, when spec.G, it spec.S) { + var ( + mockController *gomock.Controller + mockIndexFactory *testmocks.MockIndexFactory + fakeImageFetcher *ifakes.FakeImageFetcher + out bytes.Buffer + logger logging.Logger + subject *Client + err error + tmpDir string + ) + + it.Before(func() { + fakeImageFetcher = ifakes.NewFakeImageFetcher() + logger = logging.NewLogWithWriters(&out, &out, logging.WithVerbose()) + mockController = gomock.NewController(t) + mockIndexFactory = testmocks.NewMockIndexFactory(mockController) + + tmpDir, err = os.MkdirTemp("", "annotate-manifest-test") + h.AssertNil(t, err) + os.Setenv("XDG_RUNTIME_DIR", tmpDir) + + subject, err = NewClient( + WithLogger(logger), + WithFetcher(fakeImageFetcher), + WithIndexFactory(mockIndexFactory), + WithExperimental(true), + WithKeychain(authn.DefaultKeychain), + ) + h.AssertSameInstance(t, mockIndexFactory, subject.indexFactory) + h.AssertNil(t, err) + }) + it.After(func() { + mockController.Finish() + os.RemoveAll(tmpDir) + }) + + when("#AnnotateManifest", func() { + var ( + digest 
name.Digest + idx imgutil.ImageIndex + indexRepoName string + ) + when("index doesn't exist", func() { + it.Before(func() { + indexRepoName = h.NewRandomIndexRepoName() + mockIndexFactory.EXPECT().LoadIndex(gomock.Any(), gomock.Any()).Return(nil, errors.New("index not found locally")) + }) + + it("should return an error", func() { + err = subject.AnnotateManifest( + context.TODO(), + ManifestAnnotateOptions{ + IndexRepoName: indexRepoName, + RepoName: "pack/image", + }, + ) + h.AssertEq(t, err.Error(), "index not found locally") + }) + }) + + when("index exists", func() { + when("no errors on save", func() { + when("OS is given", func() { + it.Before(func() { + indexRepoName = h.NewRandomIndexRepoName() + idx, digest = h.RandomCNBIndexAndDigest(t, indexRepoName, 1, 2) + mockIndexFactory.EXPECT().LoadIndex(gomock.Eq(indexRepoName), gomock.Any()).Return(idx, nil) + fakeImage := h.NewFakeWithRandomUnderlyingV1Image(t, "pack/image", digest) + fakeImageFetcher.RemoteImages[digest.Name()] = fakeImage + }) + + it("should set OS for given image", func() { + err = subject.AnnotateManifest( + context.TODO(), + ManifestAnnotateOptions{ + IndexRepoName: indexRepoName, + RepoName: digest.Name(), + OS: "some-os", + }, + ) + h.AssertNil(t, err) + + os, err := idx.OS(digest) + h.AssertNil(t, err) + h.AssertEq(t, os, "some-os") + }) + }) + when("Arch is given", func() { + it.Before(func() { + indexRepoName = h.NewRandomIndexRepoName() + idx, digest = h.RandomCNBIndexAndDigest(t, indexRepoName, 1, 2) + mockIndexFactory.EXPECT().LoadIndex(gomock.Eq(indexRepoName), gomock.Any()).Return(idx, nil) + fakeImage := h.NewFakeWithRandomUnderlyingV1Image(t, "pack/image", digest) + fakeImageFetcher.RemoteImages[digest.Name()] = fakeImage + }) + + it("should set Arch for given image", func() { + err = subject.AnnotateManifest( + context.TODO(), + ManifestAnnotateOptions{ + IndexRepoName: indexRepoName, + RepoName: digest.Name(), + OSArch: "some-arch", + }, + ) + h.AssertNil(t, err) + + arch, 
err := idx.Architecture(digest) + h.AssertNil(t, err) + h.AssertEq(t, arch, "some-arch") + }) + }) + when("OS Variant is given", func() { + it.Before(func() { + indexRepoName = h.NewRandomIndexRepoName() + idx, digest = h.RandomCNBIndexAndDigest(t, indexRepoName, 1, 2) + mockIndexFactory.EXPECT().LoadIndex(gomock.Eq(indexRepoName), gomock.Any()).Return(idx, nil) + fakeImage := h.NewFakeWithRandomUnderlyingV1Image(t, "pack/image", digest) + fakeImageFetcher.RemoteImages[digest.Name()] = fakeImage + }) + + it("should set Variant for given image", func() { + err = subject.AnnotateManifest( + context.TODO(), + ManifestAnnotateOptions{ + IndexRepoName: indexRepoName, + RepoName: digest.Name(), + OSVariant: "some-variant", + }, + ) + h.AssertNil(t, err) + + variant, err := idx.Variant(digest) + h.AssertNil(t, err) + h.AssertEq(t, variant, "some-variant") + }) + }) + when("Annotations are given", func() { + it.Before(func() { + indexRepoName = h.NewRandomIndexRepoName() + idx, digest = h.RandomCNBIndexAndDigest(t, indexRepoName, 1, 2) + mockIndexFactory.EXPECT().LoadIndex(gomock.Eq(indexRepoName), gomock.Any()).Return(idx, nil) + fakeImage := h.NewFakeWithRandomUnderlyingV1Image(t, "pack/image", digest) + fakeImageFetcher.RemoteImages[digest.Name()] = fakeImage + }) + + it("should set Annotations for given image", func() { + err = subject.AnnotateManifest( + context.TODO(), + ManifestAnnotateOptions{ + IndexRepoName: indexRepoName, + RepoName: digest.Name(), + Annotations: map[string]string{"some-key": "some-value"}, + }, + ) + h.AssertNil(t, err) + + annos, err := idx.Annotations(digest) + h.AssertNil(t, err) + h.AssertEq(t, annos, map[string]string{"some-key": "some-value"}) + }) + + it("should save the annotated index", func() { + var ( + fakeOS = "some-os" + fakeArch = "some-arch" + fakeVariant = "some-variant" + fakeAnnotations = map[string]string{"some-key": "some-value"} + ) + + err = subject.AnnotateManifest( + context.TODO(), + ManifestAnnotateOptions{ + 
IndexRepoName: indexRepoName, + RepoName: digest.Name(), + OS: fakeOS, + OSArch: fakeArch, + OSVariant: fakeVariant, + Annotations: fakeAnnotations, + }, + ) + h.AssertNil(t, err) + + err = idx.SaveDir() + h.AssertNil(t, err) + + os, err := idx.OS(digest) + h.AssertNil(t, err) + h.AssertEq(t, os, fakeOS) + + arch, err := idx.Architecture(digest) + h.AssertNil(t, err) + h.AssertEq(t, arch, fakeArch) + + variant, err := idx.Variant(digest) + h.AssertNil(t, err) + h.AssertEq(t, variant, fakeVariant) + + annos, err := idx.Annotations(digest) + h.AssertNil(t, err) + h.AssertEq(t, annos, fakeAnnotations) + }) + }) + }) + }) + + when("image does not exist with given digest", func() { + var nonExistentDigest string + + it.Before(func() { + indexRepoName = h.NewRandomIndexRepoName() + idx = h.RandomCNBIndex(t, indexRepoName, 1, 2) + nonExistentDigest = "busybox@" + invalidDigest + mockIndexFactory.EXPECT().LoadIndex(gomock.Eq(indexRepoName), gomock.Any()).Return(idx, nil) + }) + + it("errors for Arch", func() { + err = subject.AnnotateManifest( + context.TODO(), + ManifestAnnotateOptions{ + IndexRepoName: indexRepoName, + RepoName: nonExistentDigest, + OSArch: "some-arch", + }, + ) + h.AssertNotNil(t, err) + }) + it("errors for Variant", func() { + err = subject.AnnotateManifest( + context.TODO(), + ManifestAnnotateOptions{ + IndexRepoName: indexRepoName, + RepoName: nonExistentDigest, + OSVariant: "some-variant", + }, + ) + h.AssertNotNil(t, err) + }) + it("errors for Annotations", func() { + err = subject.AnnotateManifest( + context.TODO(), + ManifestAnnotateOptions{ + IndexRepoName: indexRepoName, + RepoName: nonExistentDigest, + Annotations: map[string]string{"some-key": "some-value"}, + }, + ) + h.AssertNotNil(t, err) + }) + }) + }) +} diff --git a/pkg/client/manifest_create.go b/pkg/client/manifest_create.go new file mode 100644 index 0000000000..9790c5fce5 --- /dev/null +++ b/pkg/client/manifest_create.go @@ -0,0 +1,82 @@ +package client + +import ( + "context" + 
"fmt" + + "github.com/buildpacks/imgutil" + "github.com/google/go-containerregistry/pkg/v1/types" + + "github.com/buildpacks/pack/internal/style" +) + +type CreateManifestOptions struct { + // Image index we want to create + IndexRepoName string + + // Name of images we wish to add into the image index + RepoNames []string + + // Media type of the index + Format types.MediaType + + // true if we want to publish to an insecure registry + Insecure bool + + // true if we want to push the index to a registry after creating + Publish bool +} + +// CreateManifest implements commands.PackClient. +func (c *Client) CreateManifest(ctx context.Context, opts CreateManifestOptions) (err error) { + ops := parseOptsToIndexOptions(opts) + + if c.indexFactory.Exists(opts.IndexRepoName) { + return fmt.Errorf("manifest list '%s' already exists in local storage; use 'pack manifest remove' to "+ + "remove it before creating a new manifest list with the same name", style.Symbol(opts.IndexRepoName)) + } + + index, err := c.indexFactory.CreateIndex(opts.IndexRepoName, ops...) 
+ if err != nil { + return err + } + + for _, repoName := range opts.RepoNames { + if err = c.addManifestToIndex(ctx, repoName, index); err != nil { + return err + } + } + + if opts.Publish { + // push to a registry without saving a local copy + ops = append(ops, imgutil.WithPurge(true)) + if err = index.Push(ops...); err != nil { + return err + } + + c.logger.Infof("Successfully pushed manifest list %s to registry", style.Symbol(opts.IndexRepoName)) + return nil + } + + if err = index.SaveDir(); err != nil { + return fmt.Errorf("manifest list %s could not be saved to local storage: %w", style.Symbol(opts.IndexRepoName), err) + } + + c.logger.Infof("Successfully created manifest list %s", style.Symbol(opts.IndexRepoName)) + return nil +} + +func parseOptsToIndexOptions(opts CreateManifestOptions) (idxOpts []imgutil.IndexOption) { + if opts.Insecure { + return []imgutil.IndexOption{ + imgutil.WithMediaType(opts.Format), + imgutil.WithInsecure(), + } + } + if opts.Format == "" { + opts.Format = types.OCIImageIndex + } + return []imgutil.IndexOption{ + imgutil.WithMediaType(opts.Format), + } +} diff --git a/pkg/client/manifest_create_test.go b/pkg/client/manifest_create_test.go new file mode 100644 index 0000000000..f7c724a04a --- /dev/null +++ b/pkg/client/manifest_create_test.go @@ -0,0 +1,232 @@ +package client + +import ( + "bytes" + "context" + "os" + "path/filepath" + "testing" + + "github.com/buildpacks/imgutil" + "github.com/golang/mock/gomock" + "github.com/google/go-containerregistry/pkg/authn" + "github.com/google/go-containerregistry/pkg/v1/types" + "github.com/heroku/color" + "github.com/sclevine/spec" + "github.com/sclevine/spec/report" + + ifakes "github.com/buildpacks/pack/internal/fakes" + "github.com/buildpacks/pack/pkg/logging" + "github.com/buildpacks/pack/pkg/testmocks" + h "github.com/buildpacks/pack/testhelpers" +) + +func TestCreateManifest(t *testing.T) { + color.Disable(true) + defer color.Disable(false) + spec.Run(t, "build", 
testCreateManifest, spec.Report(report.Terminal{})) +} + +func testCreateManifest(t *testing.T, when spec.G, it spec.S) { + var ( + mockController *gomock.Controller + mockIndexFactory *testmocks.MockIndexFactory + fakeImageFetcher *ifakes.FakeImageFetcher + out bytes.Buffer + logger logging.Logger + subject *Client + err error + tmpDir string + ) + + it.Before(func() { + fakeImageFetcher = ifakes.NewFakeImageFetcher() + logger = logging.NewLogWithWriters(&out, &out, logging.WithVerbose()) + mockController = gomock.NewController(t) + mockIndexFactory = testmocks.NewMockIndexFactory(mockController) + + tmpDir, err = os.MkdirTemp("", "add-manifest-test") + h.AssertNil(t, err) + os.Setenv("XDG_RUNTIME_DIR", tmpDir) + + subject, err = NewClient( + WithLogger(logger), + WithFetcher(fakeImageFetcher), + WithIndexFactory(mockIndexFactory), + WithExperimental(true), + WithKeychain(authn.DefaultKeychain), + ) + h.AssertSameInstance(t, mockIndexFactory, subject.indexFactory) + h.AssertNil(t, err) + }) + it.After(func() { + mockController.Finish() + h.AssertNil(t, os.RemoveAll(tmpDir)) + }) + + when("#CreateManifest", func() { + var indexRepoName string + when("index doesn't exist", func() { + var indexLocalPath string + + when("remote manifest is provided", func() { + it.Before(func() { + fakeImage := h.NewFakeWithRandomUnderlyingV1Image(t, "pack/image", nil) + fakeImageFetcher.RemoteImages["index.docker.io/library/busybox:1.36-musl"] = fakeImage + }) + + when("publish is false", func() { + it.Before(func() { + // We want to actually create an index, so no need to mock the index factory + subject, err = NewClient( + WithLogger(logger), + WithFetcher(fakeImageFetcher), + WithExperimental(true), + WithKeychain(authn.DefaultKeychain), + ) + }) + + when("no errors on save", func() { + it.Before(func() { + indexRepoName = h.NewRandomIndexRepoName() + indexLocalPath = filepath.Join(tmpDir, imgutil.MakeFileSafeName(indexRepoName)) + }) + + when("no media type is provided", func() { 
+ it("creates the index adding the manifest", func() { + err = subject.CreateManifest( + context.TODO(), + CreateManifestOptions{ + IndexRepoName: indexRepoName, + RepoNames: []string{"busybox:1.36-musl"}, + }, + ) + h.AssertNil(t, err) + index := h.ReadIndexManifest(t, indexLocalPath) + h.AssertEq(t, len(index.Manifests), 1) + // By default uses OCI media-types + h.AssertEq(t, index.MediaType, types.OCIImageIndex) + }) + }) + + when("media type is provided", func() { + it("creates the index adding the manifest", func() { + err = subject.CreateManifest( + context.TODO(), + CreateManifestOptions{ + IndexRepoName: indexRepoName, + RepoNames: []string{"busybox:1.36-musl"}, + Format: types.DockerManifestList, + }, + ) + h.AssertNil(t, err) + index := h.ReadIndexManifest(t, indexLocalPath) + h.AssertEq(t, len(index.Manifests), 1) + h.AssertEq(t, index.MediaType, types.DockerManifestList) + }) + }) + }) + }) + + when("publish is true", func() { + var index *h.MockImageIndex + + when("no errors on save", func() { + it.Before(func() { + indexRepoName = h.NewRandomIndexRepoName() + indexLocalPath = filepath.Join(tmpDir, imgutil.MakeFileSafeName(indexRepoName)) + + // index stub return to check if push operation was called + index = h.NewMockImageIndex(t, indexRepoName, 0, 0) + + // We need to mock the index factory to inject a stub index to be pushed. 
+ mockIndexFactory.EXPECT().Exists(gomock.Eq(indexRepoName)).Return(false) + mockIndexFactory.EXPECT().CreateIndex(gomock.Eq(indexRepoName), gomock.Any()).Return(index, nil) + }) + + it("creates the index adding the manifest and pushes it to the registry", func() { + err = subject.CreateManifest( + context.TODO(), + CreateManifestOptions{ + IndexRepoName: indexRepoName, + RepoNames: []string{"busybox:1.36-musl"}, + Publish: true, + }, + ) + h.AssertNil(t, err) + + // index is not saved locally and push it to the registry + h.AssertPathDoesNotExists(t, indexLocalPath) + h.AssertTrue(t, index.PushCalled) + h.AssertTrue(t, index.PurgeOption) + }) + }) + }) + }) + + when("no manifest is provided", func() { + when("no errors on save", func() { + it.Before(func() { + // We want to actually create an index, so no need to mock the index factory + subject, err = NewClient( + WithLogger(logger), + WithFetcher(fakeImageFetcher), + WithExperimental(true), + WithKeychain(authn.DefaultKeychain), + ) + + indexRepoName = h.NewRandomIndexRepoName() + indexLocalPath = filepath.Join(tmpDir, imgutil.MakeFileSafeName(indexRepoName)) + }) + + it("creates an empty index with OCI media-type", func() { + err = subject.CreateManifest( + context.TODO(), + CreateManifestOptions{ + IndexRepoName: indexRepoName, + Format: types.OCIImageIndex, + }, + ) + h.AssertNil(t, err) + index := h.ReadIndexManifest(t, indexLocalPath) + h.AssertEq(t, len(index.Manifests), 0) + h.AssertEq(t, index.MediaType, types.OCIImageIndex) + }) + + it("creates an empty index with Docker media-type", func() { + err = subject.CreateManifest( + context.TODO(), + CreateManifestOptions{ + IndexRepoName: indexRepoName, + Format: types.DockerManifestList, + }, + ) + h.AssertNil(t, err) + index := h.ReadIndexManifest(t, indexLocalPath) + h.AssertEq(t, len(index.Manifests), 0) + h.AssertEq(t, index.MediaType, types.DockerManifestList) + }) + }) + }) + }) + + when("index exists", func() { + it.Before(func() { + indexRepoName = 
h.NewRandomIndexRepoName() + + // mock the index factory to simulate the index exists + mockIndexFactory.EXPECT().Exists(gomock.Eq(indexRepoName)).AnyTimes().Return(true) + }) + + it("returns an error when index already exists", func() { + err = subject.CreateManifest( + context.TODO(), + CreateManifestOptions{ + IndexRepoName: indexRepoName, + }, + ) + h.AssertError(t, err, "already exists in local storage; use 'pack manifest remove' to remove it before creating a new manifest list with the same name") + }) + }) + }) +} diff --git a/pkg/client/manifest_inspect.go b/pkg/client/manifest_inspect.go new file mode 100644 index 0000000000..3cb4e3673c --- /dev/null +++ b/pkg/client/manifest_inspect.go @@ -0,0 +1,28 @@ +package client + +import ( + "fmt" + + "github.com/buildpacks/imgutil" +) + +// InspectManifest implements commands.PackClient. +func (c *Client) InspectManifest(indexRepoName string) error { + var ( + index imgutil.ImageIndex + indexStr string + err error + ) + + index, err = c.indexFactory.FindIndex(indexRepoName) + if err != nil { + return err + } + + if indexStr, err = index.Inspect(); err != nil { + return fmt.Errorf("failed to inspect manifest list '%s': %w", indexRepoName, err) + } + + c.logger.Info(indexStr) + return nil +} diff --git a/pkg/client/manifest_inspect_test.go b/pkg/client/manifest_inspect_test.go new file mode 100644 index 0000000000..34420849fd --- /dev/null +++ b/pkg/client/manifest_inspect_test.go @@ -0,0 +1,110 @@ +package client + +import ( + "bytes" + "encoding/json" + "testing" + + "github.com/buildpacks/imgutil" + "github.com/golang/mock/gomock" + "github.com/google/go-containerregistry/pkg/authn" + v1 "github.com/google/go-containerregistry/pkg/v1" + "github.com/google/go-containerregistry/pkg/v1/random" + "github.com/heroku/color" + "github.com/pkg/errors" + "github.com/sclevine/spec" + "github.com/sclevine/spec/report" + + "github.com/buildpacks/pack/pkg/logging" + "github.com/buildpacks/pack/pkg/testmocks" + h 
"github.com/buildpacks/pack/testhelpers" +) + +func TestInspectManifest(t *testing.T) { + color.Disable(true) + defer color.Disable(false) + spec.Run(t, "build", testInspectManifest, spec.Report(report.Terminal{})) +} + +func testInspectManifest(t *testing.T, when spec.G, it spec.S) { + var ( + mockController *gomock.Controller + mockIndexFactory *testmocks.MockIndexFactory + stdout bytes.Buffer + stderr bytes.Buffer + logger logging.Logger + subject *Client + err error + ) + + it.Before(func() { + logger = logging.NewLogWithWriters(&stdout, &stderr, logging.WithVerbose()) + mockController = gomock.NewController(t) + mockIndexFactory = testmocks.NewMockIndexFactory(mockController) + + subject, err = NewClient( + WithLogger(logger), + WithIndexFactory(mockIndexFactory), + WithExperimental(true), + WithKeychain(authn.DefaultKeychain), + ) + h.AssertSameInstance(t, mockIndexFactory, subject.indexFactory) + h.AssertSameInstance(t, subject.logger, logger) + h.AssertNil(t, err) + }) + it.After(func() { + mockController.Finish() + }) + + when("#InspectManifest", func() { + var indexRepoName string + + when("index doesn't exits", func() { + it.Before(func() { + indexRepoName = h.NewRandomIndexRepoName() + mockIndexFactory. + EXPECT(). 
+ FindIndex(gomock.Eq(indexRepoName), gomock.Any()).Return(nil, errors.New("index not found")) + }) + + it("should return an error when index not found", func() { + err = subject.InspectManifest(indexRepoName) + h.AssertEq(t, err.Error(), "index not found") + }) + }) + + when("index exists", func() { + var indexManifest *v1.IndexManifest + + it.Before(func() { + indexRepoName = h.NewRandomIndexRepoName() + idx := setUpIndex(t, indexRepoName, *mockIndexFactory) + indexManifest, err = idx.IndexManifest() + h.AssertNil(t, err) + }) + + it("should return formatted IndexManifest", func() { + err = subject.InspectManifest(indexRepoName) + h.AssertNil(t, err) + + printedIndex := &v1.IndexManifest{} + err = json.Unmarshal(stdout.Bytes(), printedIndex) + h.AssertEq(t, indexManifest, printedIndex) + }) + }) + }) +} + +func setUpIndex(t *testing.T, indexRepoName string, mockIndexFactory testmocks.MockIndexFactory) v1.ImageIndex { + randomUnderlyingIndex, err := random.Index(1024, 1, 2) + h.AssertNil(t, err) + + options := &imgutil.IndexOptions{ + BaseIndex: randomUnderlyingIndex, + } + idx, err := imgutil.NewCNBIndex(indexRepoName, *options) + h.AssertNil(t, err) + + mockIndexFactory.EXPECT().FindIndex(gomock.Eq(indexRepoName), gomock.Any()).Return(idx, nil) + return randomUnderlyingIndex +} diff --git a/pkg/client/manifest_push.go b/pkg/client/manifest_push.go new file mode 100644 index 0000000000..07defb8573 --- /dev/null +++ b/pkg/client/manifest_push.go @@ -0,0 +1,60 @@ +package client + +import ( + "fmt" + + "github.com/buildpacks/imgutil" + "github.com/google/go-containerregistry/pkg/v1/types" + + "github.com/buildpacks/pack/internal/style" +) + +type PushManifestOptions struct { + // Image index we want to update + IndexRepoName string + + // Index media-type + Format types.MediaType + + // true if we want to publish to an insecure registry + Insecure bool + + // true if we want the index to be deleted from local storage after pushing it + Purge bool +} + +// 
PushManifest implements commands.PackClient. +func (c *Client) PushManifest(opts PushManifestOptions) (err error) { + if opts.Format == "" { + opts.Format = types.OCIImageIndex + } + ops := parseOptions(opts) + + idx, err := c.indexFactory.LoadIndex(opts.IndexRepoName) + if err != nil { + return + } + + if err = idx.Push(ops...); err != nil { + return fmt.Errorf("failed to push manifest list %s: %w", style.Symbol(opts.IndexRepoName), err) + } + + if !opts.Purge { + c.logger.Infof("Successfully pushed manifest list %s to registry", style.Symbol(opts.IndexRepoName)) + return nil + } + + return idx.DeleteDir() +} + +func parseOptions(opts PushManifestOptions) (idxOptions []imgutil.IndexOption) { + if opts.Insecure { + idxOptions = append(idxOptions, imgutil.WithInsecure()) + } + + if opts.Purge { + idxOptions = append(idxOptions, imgutil.WithPurge(true)) + } + + return append(idxOptions, imgutil.WithMediaType(opts.Format)) +} diff --git a/pkg/client/manifest_push_test.go b/pkg/client/manifest_push_test.go new file mode 100644 index 0000000000..3a0752d5dd --- /dev/null +++ b/pkg/client/manifest_push_test.go @@ -0,0 +1,85 @@ +package client + +import ( + "bytes" + "os" + "testing" + + "github.com/golang/mock/gomock" + "github.com/google/go-containerregistry/pkg/authn" + "github.com/heroku/color" + "github.com/pkg/errors" + "github.com/sclevine/spec" + "github.com/sclevine/spec/report" + + "github.com/buildpacks/pack/pkg/logging" + "github.com/buildpacks/pack/pkg/testmocks" + h "github.com/buildpacks/pack/testhelpers" +) + +func TestPushManifest(t *testing.T) { + color.Disable(true) + defer color.Disable(false) + spec.Run(t, "build", testPushManifest, spec.Report(report.Terminal{})) +} + +func testPushManifest(t *testing.T, when spec.G, it spec.S) { + var ( + mockController *gomock.Controller + mockIndexFactory *testmocks.MockIndexFactory + out bytes.Buffer + logger logging.Logger + subject *Client + err error + tmpDir string + ) + it.Before(func() { + logger = 
logging.NewLogWithWriters(&out, &out, logging.WithVerbose()) + mockController = gomock.NewController(t) + mockIndexFactory = testmocks.NewMockIndexFactory(mockController) + + subject, err = NewClient( + WithLogger(logger), + WithIndexFactory(mockIndexFactory), + WithExperimental(true), + WithKeychain(authn.DefaultKeychain), + ) + h.AssertSameInstance(t, mockIndexFactory, subject.indexFactory) + h.AssertNil(t, err) + }) + it.After(func() { + mockController.Finish() + h.AssertNil(t, os.RemoveAll(tmpDir)) + }) + + when("#PushManifest", func() { + when("index exists locally", func() { + var index *h.MockImageIndex + + it.Before(func() { + index = h.NewMockImageIndex(t, "some-index", 1, 2) + mockIndexFactory.EXPECT().LoadIndex(gomock.Eq("some-index"), gomock.Any()).Return(index, nil) + }) + it("pushes the index to the registry", func() { + err = subject.PushManifest(PushManifestOptions{ + IndexRepoName: "some-index", + }) + h.AssertNil(t, err) + h.AssertTrue(t, index.PushCalled) + }) + }) + + when("index doesn't exist locally", func() { + it.Before(func() { + mockIndexFactory.EXPECT().LoadIndex(gomock.Any(), gomock.Any()).Return(nil, errors.New("ErrNoImageOrIndexFoundWithGivenDigest")) + }) + + it("errors with a message", func() { + err = subject.PushManifest(PushManifestOptions{ + IndexRepoName: "some-index", + }) + h.AssertNotNil(t, err) + }) + }) + }) +} diff --git a/pkg/client/manifest_remove.go b/pkg/client/manifest_remove.go new file mode 100644 index 0000000000..41a79865d2 --- /dev/null +++ b/pkg/client/manifest_remove.go @@ -0,0 +1,24 @@ +package client + +import "errors" + +// DeleteManifest implements commands.PackClient. 
+func (c *Client) DeleteManifest(names []string) error { + var allErrors error + for _, name := range names { + imgIndex, err := c.indexFactory.LoadIndex(name) + if err != nil { + allErrors = errors.Join(allErrors, err) + continue + } + + if err := imgIndex.DeleteDir(); err != nil { + allErrors = errors.Join(allErrors, err) + } + } + + if allErrors == nil { + c.logger.Info("Successfully deleted manifest list(s) from local storage") + } + return allErrors +} diff --git a/pkg/client/manifest_remove_test.go b/pkg/client/manifest_remove_test.go new file mode 100644 index 0000000000..5f31f2dfa3 --- /dev/null +++ b/pkg/client/manifest_remove_test.go @@ -0,0 +1,99 @@ +package client + +import ( + "bytes" + "os" + "path/filepath" + "testing" + + "github.com/buildpacks/imgutil" + "github.com/golang/mock/gomock" + "github.com/google/go-containerregistry/pkg/authn" + "github.com/heroku/color" + "github.com/pkg/errors" + "github.com/sclevine/spec" + "github.com/sclevine/spec/report" + + "github.com/buildpacks/pack/pkg/logging" + "github.com/buildpacks/pack/pkg/testmocks" + h "github.com/buildpacks/pack/testhelpers" +) + +func TestDeleteManifest(t *testing.T) { + color.Disable(true) + defer color.Disable(false) + spec.Run(t, "build", testDeleteManifest, spec.Report(report.Terminal{})) +} + +func testDeleteManifest(t *testing.T, when spec.G, it spec.S) { + var ( + mockController *gomock.Controller + mockIndexFactory *testmocks.MockIndexFactory + out bytes.Buffer + logger logging.Logger + subject *Client + err error + tmpDir string + ) + + it.Before(func() { + logger = logging.NewLogWithWriters(&out, &out, logging.WithVerbose()) + mockController = gomock.NewController(t) + mockIndexFactory = testmocks.NewMockIndexFactory(mockController) + + tmpDir, err = os.MkdirTemp("", "remove-manifest-test") + h.AssertNil(t, err) + os.Setenv("XDG_RUNTIME_DIR", tmpDir) + + subject, err = NewClient( + WithLogger(logger), + WithIndexFactory(mockIndexFactory), + WithExperimental(true), + 
WithKeychain(authn.DefaultKeychain), + ) + h.AssertSameInstance(t, mockIndexFactory, subject.indexFactory) + h.AssertNil(t, err) + }) + it.After(func() { + mockController.Finish() + h.AssertNil(t, os.RemoveAll(tmpDir)) + }) + + when("#DeleteManifest", func() { + var ( + indexPath string + indexRepoName string + ) + + when("index doesn't exists", func() { + it.Before(func() { + mockIndexFactory.EXPECT().LoadIndex(gomock.Any(), gomock.Any()).Return(nil, errors.New("index not found locally")) + }) + it("should return an error when index is already deleted", func() { + err = subject.DeleteManifest([]string{"pack/none-existent-index"}) + h.AssertNotNil(t, err) + }) + }) + + when("index exists", func() { + var idx imgutil.ImageIndex + + it.Before(func() { + indexRepoName = h.NewRandomIndexRepoName() + indexPath = filepath.Join(tmpDir, imgutil.MakeFileSafeName(indexRepoName)) + idx = h.RandomCNBIndex(t, indexRepoName, 1, 1) + mockIndexFactory.EXPECT().LoadIndex(gomock.Eq(indexRepoName), gomock.Any()).Return(idx, nil) + + // Let's write the index on disk + h.AssertNil(t, idx.SaveDir()) + }) + + it("should delete local index", func() { + err = subject.DeleteManifest([]string{indexRepoName}) + h.AssertNil(t, err) + h.AssertContains(t, out.String(), "Successfully deleted manifest list(s) from local storage") + h.AssertPathDoesNotExists(t, indexPath) + }) + }) + }) +} diff --git a/pkg/client/manifest_rm.go b/pkg/client/manifest_rm.go new file mode 100644 index 0000000000..98ac8a0926 --- /dev/null +++ b/pkg/client/manifest_rm.go @@ -0,0 +1,39 @@ +package client + +import ( + "errors" + "fmt" + + gccrName "github.com/google/go-containerregistry/pkg/name" +) + +// RemoveManifest implements commands.PackClient. 
+func (c *Client) RemoveManifest(name string, images []string) error { + var allErrors error + + imgIndex, err := c.indexFactory.LoadIndex(name) + if err != nil { + return err + } + + for _, image := range images { + ref, err := gccrName.NewDigest(image, gccrName.WeakValidation, gccrName.Insecure) + if err != nil { + allErrors = errors.Join(allErrors, fmt.Errorf("invalid instance '%s': %w", image, err)) + } + + if err = imgIndex.RemoveManifest(ref); err != nil { + allErrors = errors.Join(allErrors, err) + } + + if err = imgIndex.SaveDir(); err != nil { + allErrors = errors.Join(allErrors, err) + } + } + + if allErrors == nil { + c.logger.Infof("Successfully removed image(s) from index: '%s'", name) + } + + return allErrors +} diff --git a/pkg/client/manifest_rm_test.go b/pkg/client/manifest_rm_test.go new file mode 100644 index 0000000000..bc683c2063 --- /dev/null +++ b/pkg/client/manifest_rm_test.go @@ -0,0 +1,92 @@ +package client + +import ( + "bytes" + "os" + "path/filepath" + "testing" + + "github.com/buildpacks/imgutil" + "github.com/golang/mock/gomock" + "github.com/google/go-containerregistry/pkg/authn" + "github.com/google/go-containerregistry/pkg/name" + "github.com/heroku/color" + "github.com/sclevine/spec" + "github.com/sclevine/spec/report" + + "github.com/buildpacks/pack/pkg/logging" + "github.com/buildpacks/pack/pkg/testmocks" + h "github.com/buildpacks/pack/testhelpers" +) + +func TestRemoveManifest(t *testing.T) { + color.Disable(true) + defer color.Disable(false) + spec.Run(t, "build", testRemoveManifest, spec.Report(report.Terminal{})) +} + +func testRemoveManifest(t *testing.T, when spec.G, it spec.S) { + var ( + mockController *gomock.Controller + mockIndexFactory *testmocks.MockIndexFactory + out bytes.Buffer + logger logging.Logger + subject *Client + err error + tmpDir string + ) + + it.Before(func() { + logger = logging.NewLogWithWriters(&out, &out, logging.WithVerbose()) + mockController = gomock.NewController(t) + mockIndexFactory = 
testmocks.NewMockIndexFactory(mockController) + + tmpDir, err = os.MkdirTemp("", "rm-manifest-test") + h.AssertNil(t, err) + os.Setenv("XDG_RUNTIME_DIR", tmpDir) + + subject, err = NewClient( + WithLogger(logger), + WithIndexFactory(mockIndexFactory), + WithExperimental(true), + WithKeychain(authn.DefaultKeychain), + ) + h.AssertSameInstance(t, mockIndexFactory, subject.indexFactory) + h.AssertNil(t, err) + }) + it.After(func() { + mockController.Finish() + h.AssertNil(t, os.RemoveAll(tmpDir)) + }) + + when("#RemoveManifest", func() { + var ( + indexPath string + indexRepoName string + ) + + when("index exists", func() { + var digest name.Digest + var idx imgutil.ImageIndex + + it.Before(func() { + indexRepoName = h.NewRandomIndexRepoName() + indexPath = filepath.Join(tmpDir, imgutil.MakeFileSafeName(indexRepoName)) + + // Initialize the Index with 2 image manifest + idx, digest = h.RandomCNBIndexAndDigest(t, indexRepoName, 1, 2) + mockIndexFactory.EXPECT().LoadIndex(gomock.Eq(indexRepoName), gomock.Any()).Return(idx, nil) + }) + + it("should remove local index", func() { + err = subject.RemoveManifest(indexRepoName, []string{digest.Name()}) + h.AssertNil(t, err) + + // We expect one manifest after removing one of them + index := h.ReadIndexManifest(t, indexPath) + h.AssertEq(t, len(index.Manifests), 1) + h.AssertNotEq(t, index.Manifests[0].Digest.String(), digest.Name()) + }) + }) + }) +} diff --git a/pkg/client/new_buildpack.go b/pkg/client/new_buildpack.go index df85eb0168..d5648dbda7 100644 --- a/pkg/client/new_buildpack.go +++ b/pkg/client/new_buildpack.go @@ -2,7 +2,6 @@ package client import ( "context" - "io/ioutil" "os" "path/filepath" @@ -44,12 +43,15 @@ type NewBuildpackOptions struct { // version of the output buildpack artifact. 
Version string - // The stacks this buildpack will work with + // Deprecated: The stacks this buildpack will work with Stacks []dist.Stack + + // the targets this buildpack will work with + Targets []dist.Target } func (c *Client) NewBuildpack(ctx context.Context, opts NewBuildpackOptions) error { - err := createBuildpackTOML(opts.Path, opts.ID, opts.Version, opts.API, opts.Stacks, c) + err := createBuildpackTOML(opts.Path, opts.ID, opts.Version, opts.API, opts.Stacks, opts.Targets, c) if err != nil { return err } @@ -83,7 +85,7 @@ func createBinScript(path, name, contents string, c *Client) error { // The following line's comment is for gosec, it will ignore rule 306 in this case // G306: Expect WriteFile permissions to be 0600 or less /* #nosec G306 */ - err = ioutil.WriteFile(binFile, []byte(contents), 0755) + err = os.WriteFile(binFile, []byte(contents), 0755) if err != nil { return err } @@ -95,16 +97,17 @@ func createBinScript(path, name, contents string, c *Client) error { return nil } -func createBuildpackTOML(path, id, version, apiStr string, stacks []dist.Stack, c *Client) error { +func createBuildpackTOML(path, id, version, apiStr string, stacks []dist.Stack, targets []dist.Target, c *Client) error { api, err := api.NewVersion(apiStr) if err != nil { return err } buildpackTOML := dist.BuildpackDescriptor{ - API: api, - Stacks: stacks, - Info: dist.BuildpackInfo{ + WithAPI: api, + WithStacks: stacks, + WithTargets: targets, + WithInfo: dist.ModuleInfo{ ID: id, Version: version, }, diff --git a/pkg/client/new_buildpack_test.go b/pkg/client/new_buildpack_test.go index 084e904814..c2eb0eb6ed 100644 --- a/pkg/client/new_buildpack_test.go +++ b/pkg/client/new_buildpack_test.go @@ -2,8 +2,6 @@ package client_test import ( "context" - "fmt" - "io/ioutil" "os" "path/filepath" "runtime" @@ -34,7 +32,7 @@ func testNewBuildpack(t *testing.T, when spec.G, it spec.S) { it.Before(func() { var err error - tmpDir, err = ioutil.TempDir("", "new-buildpack-test") + tmpDir, 
err = os.MkdirTemp("", "new-buildpack-test") h.AssertNil(t, err) subject, err = client.NewClient() @@ -82,11 +80,11 @@ func testNewBuildpack(t *testing.T, when spec.G, it spec.S) { err = os.MkdirAll(filepath.Join(tmpDir, "bin"), 0755) h.AssertNil(t, err) - err = ioutil.WriteFile(filepath.Join(tmpDir, "buildpack.toml"), []byte("expected value"), 0655) + err = os.WriteFile(filepath.Join(tmpDir, "buildpack.toml"), []byte("expected value"), 0655) h.AssertNil(t, err) - err = ioutil.WriteFile(filepath.Join(tmpDir, "bin", "build"), []byte("expected value"), 0755) + err = os.WriteFile(filepath.Join(tmpDir, "bin", "build"), []byte("expected value"), 0755) h.AssertNil(t, err) - err = ioutil.WriteFile(filepath.Join(tmpDir, "bin", "detect"), []byte("expected value"), 0755) + err = os.WriteFile(filepath.Join(tmpDir, "bin", "detect"), []byte("expected value"), 0755) h.AssertNil(t, err) }) @@ -105,15 +103,15 @@ func testNewBuildpack(t *testing.T, when spec.G, it spec.S) { }) h.AssertNil(t, err) - content, err := ioutil.ReadFile(filepath.Join(tmpDir, "buildpack.toml")) + content, err := os.ReadFile(filepath.Join(tmpDir, "buildpack.toml")) h.AssertNil(t, err) h.AssertEq(t, content, []byte("expected value")) - content, err = ioutil.ReadFile(filepath.Join(tmpDir, "bin", "build")) + content, err = os.ReadFile(filepath.Join(tmpDir, "bin", "build")) h.AssertNil(t, err) h.AssertEq(t, content, []byte("expected value")) - content, err = ioutil.ReadFile(filepath.Join(tmpDir, "bin", "detect")) + content, err = os.ReadFile(filepath.Join(tmpDir, "bin", "detect")) h.AssertNil(t, err) h.AssertEq(t, content, []byte("expected value")) }) @@ -133,6 +131,5 @@ func assertBuildpackToml(t *testing.T, path string, id string) { h.AssertNil(t, err) defer f.Close() - fmt.Printf("%s\n", buildpackDescriptor) - h.AssertEq(t, buildpackDescriptor.Info.ID, "example/my-cnb") + h.AssertEq(t, buildpackDescriptor.Info().ID, "example/my-cnb") } diff --git a/pkg/client/package_buildpack.go 
b/pkg/client/package_buildpack.go index c26d27623c..4ef96d3aba 100644 --- a/pkg/client/package_buildpack.go +++ b/pkg/client/package_buildpack.go @@ -2,15 +2,21 @@ package client import ( "context" + "fmt" + "path/filepath" + "github.com/moby/moby/client" "github.com/pkg/errors" + "github.com/buildpacks/pack/internal/name" + pubbldpkg "github.com/buildpacks/pack/buildpackage" "github.com/buildpacks/pack/internal/layer" "github.com/buildpacks/pack/internal/paths" "github.com/buildpacks/pack/internal/style" "github.com/buildpacks/pack/pkg/blob" "github.com/buildpacks/pack/pkg/buildpack" + "github.com/buildpacks/pack/pkg/dist" "github.com/buildpacks/pack/pkg/image" ) @@ -44,12 +50,31 @@ type PackageBuildpackOptions struct { // specified in the Name variable. Publish bool + // Append [os]-[arch] suffix to the image tag when publishing a multi-arch to a registry + // Requires Publish to be true + AppendImageNameSuffix bool + // Strategy for updating images before packaging. PullPolicy image.PullPolicy // Name of the buildpack registry. Used to // add buildpacks to a package. Registry string + + // Flatten layers + Flatten bool + + // List of buildpack images to exclude from being flattened. + FlattenExclude []string + + // Map of labels to add to the Buildpack + Labels map[string]string + + // Target platforms to build packages for + Targets []dist.Target + + // Additional image tags to push to, each will contain contents identical to Image + AdditionalTags []string } // PackageBuildpack packages buildpack(s) into either an image or file. 
@@ -58,69 +83,148 @@ func (c *Client) PackageBuildpack(ctx context.Context, opts PackageBuildpackOpti opts.Format = FormatImage } - if opts.Config.Platform.OS == "windows" && !c.experimental { - return NewExperimentError("Windows buildpackage support is currently experimental.") + targets, err := c.processPackageBuildpackTargets(ctx, opts) + if err != nil { + return err + } + multiArch := len(targets) > 1 && (opts.Publish || opts.Format == FormatFile) + + var digests []string + targets = dist.ExpandTargetsDistributions(targets...) + for _, target := range targets { + digest, err := c.packageBuildpackTarget(ctx, opts, target, multiArch) + if err != nil { + return err + } + digests = append(digests, digest) } - err := c.validateOSPlatform(ctx, opts.Config.Platform.OS, opts.Publish, opts.Format) + if opts.Publish && len(digests) > 1 { + // Image Index must be created only when we pushed to registry + return c.CreateManifest(ctx, CreateManifestOptions{ + IndexRepoName: opts.Name, + RepoNames: digests, + Publish: true, + }) + } + + return nil +} + +func (c *Client) packageBuildpackTarget(ctx context.Context, opts PackageBuildpackOptions, target dist.Target, multiArch bool) (string, error) { + var digest string + if target.OS == "windows" && !c.experimental { + return "", NewExperimentError("Windows buildpackage support is currently experimental.") + } + + err := c.validateOSPlatform(ctx, target.OS, opts.Publish, opts.Format) if err != nil { - return err + return digest, err } - writerFactory, err := layer.NewWriterFactory(opts.Config.Platform.OS) + writerFactory, err := layer.NewWriterFactory(target.OS) if err != nil { - return errors.Wrap(err, "creating layer writer factory") + return digest, errors.Wrap(err, "creating layer writer factory") } - packageBuilder := buildpack.NewBuilder(c.imageFactory) + var packageBuilderOpts []buildpack.PackageBuilderOption + if opts.Flatten { + packageBuilderOpts = append(packageBuilderOpts, buildpack.DoNotFlatten(opts.FlattenExclude), 
+ buildpack.WithLayerWriterFactory(writerFactory), buildpack.WithLogger(c.logger)) + } + packageBuilder := buildpack.NewBuilder(c.imageFactory, packageBuilderOpts...) bpURI := opts.Config.Buildpack.URI if bpURI == "" { - return errors.New("buildpack URI must be provided") + return digest, errors.New("buildpack URI must be provided") + } + + if ok, platformRootFolder := buildpack.PlatformRootFolder(bpURI, target); ok { + bpURI = platformRootFolder } mainBlob, err := c.downloadBuildpackFromURI(ctx, bpURI, opts.RelativeBaseDir) if err != nil { - return err + return digest, err } - bp, err := buildpack.FromRootBlob(mainBlob, writerFactory) + bp, err := buildpack.FromBuildpackRootBlob(mainBlob, writerFactory, c.logger) if err != nil { - return errors.Wrapf(err, "creating buildpack from %s", style.Symbol(bpURI)) + return digest, errors.Wrapf(err, "creating buildpack from %s", style.Symbol(bpURI)) } packageBuilder.SetBuildpack(bp) + platform := target.ValuesAsPlatform() + for _, dep := range opts.Config.Dependencies { - var depBPs []buildpack.Buildpack + if multiArch { + locatorType, err := buildpack.GetLocatorType(dep.URI, opts.RelativeBaseDir, []dist.ModuleInfo{}) + if err != nil { + return digest, err + } + if locatorType == buildpack.URILocator { + // When building a composite multi-platform buildpack all the dependencies must be pushed to a registry + return digest, errors.New(fmt.Sprintf("uri %s is not allowed when creating a composite multi-platform buildpack; push your dependencies to a registry and use 'docker://' instead", style.Symbol(dep.URI))) + } + } + + c.logger.Debugf("Downloading buildpack dependency for platform %s", platform) mainBP, deps, err := c.buildpackDownloader.Download(ctx, dep.URI, buildpack.DownloadOptions{ RegistryName: opts.Registry, RelativeBaseDir: opts.RelativeBaseDir, - ImageOS: opts.Config.Platform.OS, ImageName: dep.ImageName, Daemon: !opts.Publish, PullPolicy: opts.PullPolicy, + Target: &target, }) - if err != nil { - return 
errors.Wrapf(err, "packaging dependencies (uri=%s,image=%s)", style.Symbol(dep.URI), style.Symbol(dep.ImageName)) + return digest, errors.Wrapf(err, "packaging dependencies (uri=%s,image=%s)", style.Symbol(dep.URI), style.Symbol(dep.ImageName)) } - depBPs = append([]buildpack.Buildpack{mainBP}, deps...) - for _, depBP := range depBPs { - packageBuilder.AddDependency(depBP) - } + packageBuilder.AddDependencies(mainBP, deps) } switch opts.Format { case FormatFile: - return packageBuilder.SaveAsFile(opts.Name, opts.Config.Platform.OS) + name := opts.Name + if multiArch { + extension := filepath.Ext(name) + origFileName := name[:len(name)-len(filepath.Ext(name))] + if target.Arch != "" { + name = fmt.Sprintf("%s-%s-%s%s", origFileName, target.OS, target.Arch, extension) + } else { + name = fmt.Sprintf("%s-%s%s", origFileName, target.OS, extension) + } + } + err = packageBuilder.SaveAsFile(name, target, opts.Labels) + if err != nil { + return digest, err + } case FormatImage: - _, err = packageBuilder.SaveAsImage(opts.Name, opts.Publish, opts.Config.Platform.OS) - return errors.Wrapf(err, "saving image") + packageName := opts.Name + if multiArch && opts.AppendImageNameSuffix { + packageName, err = name.AppendSuffix(packageName, target) + if err != nil { + return "", errors.Wrap(err, "invalid image name") + } + } + img, err := packageBuilder.SaveAsImage(packageName, opts.Publish, target, opts.Labels, opts.AdditionalTags...) 
+ if err != nil { + return digest, errors.Wrapf(err, "saving image") + } + if multiArch { + // We need to keep the identifier to create the image index + id, err := img.Identifier() + if err != nil { + return digest, errors.Wrapf(err, "determining image manifest digest") + } + digest = id.String() + } default: - return errors.Errorf("unknown format: %s", style.Symbol(opts.Format)) + return digest, errors.Errorf("unknown format: %s", style.Symbol(opts.Format)) } + return digest, nil } func (c *Client) downloadBuildpackFromURI(ctx context.Context, uri, relativeBaseDir string) (blob.Blob, error) { @@ -139,19 +243,55 @@ func (c *Client) downloadBuildpackFromURI(ctx context.Context, uri, relativeBase return blob, nil } +func (c *Client) processPackageBuildpackTargets(ctx context.Context, opts PackageBuildpackOptions) ([]dist.Target, error) { + var targets []dist.Target + if len(opts.Targets) > 0 { + // when exporting to the daemon, we need to select just one target + if !opts.Publish && opts.Format == FormatImage { + daemonTarget, err := c.daemonTarget(ctx, opts.Targets) + if err != nil { + return targets, err + } + targets = append(targets, daemonTarget) + } else { + targets = opts.Targets + } + } else { + targets = append(targets, dist.Target{OS: opts.Config.Platform.OS}) + } + return targets, nil +} + func (c *Client) validateOSPlatform(ctx context.Context, os string, publish bool, format string) error { if publish || format == FormatFile { return nil } - info, err := c.docker.Info(ctx) + result, err := c.docker.Info(ctx, client.InfoOptions{}) if err != nil { return err } - if info.OSType != os { - return errors.Errorf("invalid %s specified: DOCKER_OS is %s", style.Symbol("platform.os"), style.Symbol(info.OSType)) + if result.Info.OSType != os { + return errors.Errorf("invalid %s specified: DOCKER_OS is %s", style.Symbol("platform.os"), style.Symbol(result.Info.OSType)) } return nil } + +// daemonTarget returns a target that matches with the given daemon os/arch 
+func (c *Client) daemonTarget(ctx context.Context, targets []dist.Target) (dist.Target, error) { + serverResult, err := c.docker.ServerVersion(ctx, client.ServerVersionOptions{}) + if err != nil { + return dist.Target{}, err + } + + for _, t := range targets { + if t.Arch != "" && t.OS == serverResult.Os && t.Arch == serverResult.Arch { + return t, nil + } else if t.Arch == "" && t.OS == serverResult.Os { + return t, nil + } + } + return dist.Target{}, errors.Errorf("could not find a target that matches daemon os=%s and architecture=%s", serverResult.Os, serverResult.Arch) +} diff --git a/pkg/client/package_buildpack_test.go b/pkg/client/package_buildpack_test.go index cd47a61180..dd94f0946b 100644 --- a/pkg/client/package_buildpack_test.go +++ b/pkg/client/package_buildpack_test.go @@ -4,7 +4,6 @@ import ( "bytes" "context" "fmt" - "io/ioutil" "os" "path/filepath" "testing" @@ -12,12 +11,16 @@ import ( "github.com/buildpacks/imgutil" "github.com/buildpacks/imgutil/fakes" "github.com/buildpacks/lifecycle/api" - "github.com/docker/docker/api/types" "github.com/golang/mock/gomock" + "github.com/google/go-containerregistry/pkg/name" "github.com/heroku/color" + mobysystem "github.com/moby/moby/api/types/system" + dockerclient "github.com/moby/moby/client" "github.com/sclevine/spec" "github.com/sclevine/spec/report" + "github.com/buildpacks/pack/pkg/archive" + pubbldpkg "github.com/buildpacks/pack/buildpackage" cfg "github.com/buildpacks/pack/internal/config" ifakes "github.com/buildpacks/pack/internal/fakes" @@ -45,7 +48,8 @@ func testPackageBuildpack(t *testing.T, when spec.G, it spec.S) { mockDownloader *testmocks.MockBlobDownloader mockImageFactory *testmocks.MockImageFactory mockImageFetcher *testmocks.MockImageFetcher - mockDockerClient *testmocks.MockCommonAPIClient + mockDockerClient *testmocks.MockAPIClient + mockIndexFactory *testmocks.MockIndexFactory out bytes.Buffer ) @@ -54,7 +58,8 @@ func testPackageBuildpack(t *testing.T, when spec.G, it spec.S) { 
mockDownloader = testmocks.NewMockBlobDownloader(mockController) mockImageFactory = testmocks.NewMockImageFactory(mockController) mockImageFetcher = testmocks.NewMockImageFetcher(mockController) - mockDockerClient = testmocks.NewMockCommonAPIClient(mockController) + mockDockerClient = testmocks.NewMockAPIClient(mockController) + mockIndexFactory = testmocks.NewMockIndexFactory(mockController) var err error subject, err = client.NewClient( @@ -63,6 +68,7 @@ func testPackageBuildpack(t *testing.T, when spec.G, it spec.S) { client.WithImageFactory(mockImageFactory), client.WithFetcher(mockImageFetcher), client.WithDockerClient(mockDockerClient), + client.WithIndexFactory(mockIndexFactory), ) h.AssertNil(t, err) }) @@ -72,7 +78,7 @@ func testPackageBuildpack(t *testing.T, when spec.G, it spec.S) { }) createBuildpack := func(descriptor dist.BuildpackDescriptor) string { - bp, err := ifakes.NewFakeBuildpackBlob(descriptor, 0644) + bp, err := ifakes.NewFakeBuildpackBlob(&descriptor, 0644) h.AssertNil(t, err) url := fmt.Sprintf("https://example.com/bp.%s.tgz", h.RandString(12)) mockDownloader.EXPECT().Download(gomock.Any(), url).Return(bp, nil).AnyTimes() @@ -136,15 +142,15 @@ func testPackageBuildpack(t *testing.T, when spec.G, it spec.S) { dependencyPath := "http://example.com/flawed.file" mockDownloader.EXPECT().Download(gomock.Any(), dependencyPath).Return(blob.NewBlob("no-file.txt"), nil).AnyTimes() - mockDockerClient.EXPECT().Info(context.TODO()).Return(types.Info{OSType: "linux"}, nil).AnyTimes() + mockDockerClient.EXPECT().Info(context.TODO(), gomock.Any()).Return(dockerclient.SystemInfoResult{Info: mobysystem.Info{OSType: "linux"}}, nil).AnyTimes() packageDescriptor := dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: dist.BuildpackInfo{ID: "bp.1", Version: "1.2.3"}, - Order: dist.Order{{ - Group: []dist.BuildpackRef{{ - BuildpackInfo: dist.BuildpackInfo{ID: "bp.nested", Version: "2.3.4"}, - Optional: false, + WithAPI: api.MustParse("0.2"), + 
WithInfo: dist.ModuleInfo{ID: "bp.1", Version: "1.2.3"}, + WithOrder: dist.Order{{ + Group: []dist.ModuleRef{{ + ModuleInfo: dist.ModuleInfo{ID: "bp.nested", Version: "2.3.4"}, + Optional: false, }}, }}, } @@ -169,8 +175,8 @@ func testPackageBuildpack(t *testing.T, when spec.G, it spec.S) { when("simple package for both OS formats (experimental only)", func() { it("creates package image based on daemon OS", func() { for _, daemonOS := range []string{"linux", "windows"} { - localMockDockerClient := testmocks.NewMockCommonAPIClient(mockController) - localMockDockerClient.EXPECT().Info(context.TODO()).Return(types.Info{OSType: daemonOS}, nil).AnyTimes() + localMockDockerClient := testmocks.NewMockAPIClient(mockController) + localMockDockerClient.EXPECT().Info(context.TODO(), gomock.Any()).Return(dockerclient.SystemInfoResult{Info: mobysystem.Info{OSType: daemonOS}}, nil).AnyTimes() packClientWithExperimental, err := client.NewClient( client.WithDockerClient(localMockDockerClient), @@ -181,7 +187,7 @@ func testPackageBuildpack(t *testing.T, when spec.G, it spec.S) { h.AssertNil(t, err) fakeImage := fakes.NewImage("basic/package-"+h.RandString(12), "", nil) - mockImageFactory.EXPECT().NewImage(fakeImage.Name(), true, daemonOS).Return(fakeImage, nil) + mockImageFactory.EXPECT().NewImage(fakeImage.Name(), true, dist.Target{OS: daemonOS}).Return(fakeImage, nil) fakeBlob := blob.NewBlob(filepath.Join("testdata", "empty-file")) bpURL := fmt.Sprintf("https://example.com/bp.%s.tgz", h.RandString(12)) @@ -193,9 +199,9 @@ func testPackageBuildpack(t *testing.T, when spec.G, it spec.S) { Config: pubbldpkg.Config{ Platform: dist.Platform{OS: daemonOS}, Buildpack: dist.BuildpackURI{URI: createBuildpack(dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: dist.BuildpackInfo{ID: "bp.basic", Version: "2.3.4"}, - Stacks: []dist.Stack{{ID: "some.stack.id"}}, + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ID: "bp.basic", Version: "2.3.4"}, + WithStacks: 
[]dist.Stack{{ID: "some.stack.id"}}, })}, }, PullPolicy: image.PullNever, @@ -204,7 +210,7 @@ func testPackageBuildpack(t *testing.T, when spec.G, it spec.S) { }) it("fails without experimental on Windows daemons", func() { - windowsMockDockerClient := testmocks.NewMockCommonAPIClient(mockController) + windowsMockDockerClient := testmocks.NewMockAPIClient(mockController) packClientWithoutExperimental, err := client.NewClient( client.WithDockerClient(windowsMockDockerClient), @@ -223,8 +229,8 @@ func testPackageBuildpack(t *testing.T, when spec.G, it spec.S) { }) it("fails for mismatched platform and daemon os", func() { - windowsMockDockerClient := testmocks.NewMockCommonAPIClient(mockController) - windowsMockDockerClient.EXPECT().Info(context.TODO()).Return(types.Info{OSType: "windows"}, nil).AnyTimes() + windowsMockDockerClient := testmocks.NewMockAPIClient(mockController) + windowsMockDockerClient.EXPECT().Info(context.TODO(), gomock.Any()).Return(dockerclient.SystemInfoResult{Info: mobysystem.Info{OSType: "windows"}}, nil).AnyTimes() packClientWithoutExperimental, err := client.NewClient( client.WithDockerClient(windowsMockDockerClient), @@ -249,18 +255,18 @@ func testPackageBuildpack(t *testing.T, when spec.G, it spec.S) { it.Before(func() { nestedPackage = fakes.NewImage("nested/package-"+h.RandString(12), "", nil) - mockImageFactory.EXPECT().NewImage(nestedPackage.Name(), false, "linux").Return(nestedPackage, nil) + mockImageFactory.EXPECT().NewImage(nestedPackage.Name(), false, dist.Target{OS: "linux"}).Return(nestedPackage, nil) - mockDockerClient.EXPECT().Info(context.TODO()).Return(types.Info{OSType: "linux"}, nil).AnyTimes() + mockDockerClient.EXPECT().Info(context.TODO(), gomock.Any()).Return(dockerclient.SystemInfoResult{Info: mobysystem.Info{OSType: "linux"}}, nil).AnyTimes() h.AssertNil(t, subject.PackageBuildpack(context.TODO(), client.PackageBuildpackOptions{ Name: nestedPackage.Name(), Config: pubbldpkg.Config{ Platform: dist.Platform{OS: 
"linux"}, Buildpack: dist.BuildpackURI{URI: createBuildpack(dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: dist.BuildpackInfo{ID: "bp.nested", Version: "2.3.4"}, - Stacks: []dist.Stack{{ID: "some.stack.id"}}, + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ID: "bp.nested", Version: "2.3.4"}, + WithStacks: []dist.Stack{{ID: "some.stack.id"}}, })}, }, Publish: true, @@ -269,22 +275,22 @@ func testPackageBuildpack(t *testing.T, when spec.G, it spec.S) { }) shouldFetchNestedPackage := func(demon bool, pull image.PullPolicy) { - mockImageFetcher.EXPECT().Fetch(gomock.Any(), nestedPackage.Name(), image.FetchOptions{Daemon: demon, PullPolicy: pull}).Return(nestedPackage, nil) + mockImageFetcher.EXPECT().Fetch(gomock.Any(), nestedPackage.Name(), image.FetchOptions{Daemon: demon, PullPolicy: pull, Target: &dist.Target{OS: "linux"}}).Return(nestedPackage, nil) } shouldNotFindNestedPackageWhenCallingImageFetcherWith := func(demon bool, pull image.PullPolicy) { - mockImageFetcher.EXPECT().Fetch(gomock.Any(), nestedPackage.Name(), image.FetchOptions{Daemon: demon, PullPolicy: pull}).Return(nil, image.ErrNotFound) + mockImageFetcher.EXPECT().Fetch(gomock.Any(), nestedPackage.Name(), image.FetchOptions{Daemon: demon, PullPolicy: pull, Target: &dist.Target{OS: "linux"}}).Return(nil, image.ErrNotFound) } shouldCreateLocalPackage := func() imgutil.Image { img := fakes.NewImage("some/package-"+h.RandString(12), "", nil) - mockImageFactory.EXPECT().NewImage(img.Name(), true, "linux").Return(img, nil) + mockImageFactory.EXPECT().NewImage(img.Name(), true, dist.Target{OS: "linux"}).Return(img, nil) return img } shouldCreateRemotePackage := func() *fakes.Image { img := fakes.NewImage("some/package-"+h.RandString(12), "", nil) - mockImageFactory.EXPECT().NewImage(img.Name(), false, "linux").Return(img, nil) + mockImageFactory.EXPECT().NewImage(img.Name(), false, dist.Target{OS: "linux"}).Return(img, nil) return img } @@ -298,12 +304,12 @@ func 
testPackageBuildpack(t *testing.T, when spec.G, it spec.S) { Config: pubbldpkg.Config{ Platform: dist.Platform{OS: "linux"}, Buildpack: dist.BuildpackURI{URI: createBuildpack(dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: dist.BuildpackInfo{ID: "bp.1", Version: "1.2.3"}, - Order: dist.Order{{ - Group: []dist.BuildpackRef{{ - BuildpackInfo: dist.BuildpackInfo{ID: "bp.nested", Version: "2.3.4"}, - Optional: false, + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ID: "bp.1", Version: "1.2.3"}, + WithOrder: dist.Order{{ + Group: []dist.ModuleRef{{ + ModuleInfo: dist.ModuleInfo{ID: "bp.nested", Version: "2.3.4"}, + Optional: false, }}, }}, })}, @@ -325,12 +331,12 @@ func testPackageBuildpack(t *testing.T, when spec.G, it spec.S) { Config: pubbldpkg.Config{ Platform: dist.Platform{OS: "linux"}, Buildpack: dist.BuildpackURI{URI: createBuildpack(dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: dist.BuildpackInfo{ID: "bp.1", Version: "1.2.3"}, - Order: dist.Order{{ - Group: []dist.BuildpackRef{{ - BuildpackInfo: dist.BuildpackInfo{ID: "bp.nested", Version: "2.3.4"}, - Optional: false, + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ID: "bp.1", Version: "1.2.3"}, + WithOrder: dist.Order{{ + Group: []dist.ModuleRef{{ + ModuleInfo: dist.ModuleInfo{ID: "bp.nested", Version: "2.3.4"}, + Optional: false, }}, }}, })}, @@ -352,12 +358,12 @@ func testPackageBuildpack(t *testing.T, when spec.G, it spec.S) { Config: pubbldpkg.Config{ Platform: dist.Platform{OS: "linux"}, Buildpack: dist.BuildpackURI{URI: createBuildpack(dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: dist.BuildpackInfo{ID: "bp.1", Version: "1.2.3"}, - Order: dist.Order{{ - Group: []dist.BuildpackRef{{ - BuildpackInfo: dist.BuildpackInfo{ID: "bp.nested", Version: "2.3.4"}, - Optional: false, + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ID: "bp.1", Version: "1.2.3"}, + WithOrder: dist.Order{{ + Group: []dist.ModuleRef{{ + ModuleInfo: 
dist.ModuleInfo{ID: "bp.nested", Version: "2.3.4"}, + Optional: false, }}, }}, })}, @@ -378,9 +384,9 @@ func testPackageBuildpack(t *testing.T, when spec.G, it spec.S) { Config: pubbldpkg.Config{ Platform: dist.Platform{OS: "linux"}, Buildpack: dist.BuildpackURI{URI: createBuildpack(dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: dist.BuildpackInfo{ID: "bp.1", Version: "1.2.3"}, - Stacks: []dist.Stack{{ID: "some.stack.id"}}, + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ID: "bp.1", Version: "1.2.3"}, + WithStacks: []dist.Stack{{ID: "some.stack.id"}}, })}, Dependencies: []dist.ImageOrURI{{ImageRef: dist.ImageRef{ImageName: nestedPackage.Name()}}}, }, @@ -394,18 +400,18 @@ func testPackageBuildpack(t *testing.T, when spec.G, it spec.S) { when("nested package is not a valid package", func() { it("should error", func() { notPackageImage := fakes.NewImage("not/package", "", nil) - mockImageFetcher.EXPECT().Fetch(gomock.Any(), notPackageImage.Name(), image.FetchOptions{Daemon: true, PullPolicy: image.PullAlways}).Return(notPackageImage, nil) + mockImageFetcher.EXPECT().Fetch(gomock.Any(), notPackageImage.Name(), image.FetchOptions{Daemon: true, PullPolicy: image.PullAlways, Target: &dist.Target{OS: "linux"}}).Return(notPackageImage, nil) - mockDockerClient.EXPECT().Info(context.TODO()).Return(types.Info{OSType: "linux"}, nil).AnyTimes() + mockDockerClient.EXPECT().Info(context.TODO(), gomock.Any()).Return(dockerclient.SystemInfoResult{Info: mobysystem.Info{OSType: "linux"}}, nil).AnyTimes() h.AssertError(t, subject.PackageBuildpack(context.TODO(), client.PackageBuildpackOptions{ Name: "some/package", Config: pubbldpkg.Config{ Platform: dist.Platform{OS: "linux"}, Buildpack: dist.BuildpackURI{URI: createBuildpack(dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: dist.BuildpackInfo{ID: "bp.1", Version: "1.2.3"}, - Stacks: []dist.Stack{{ID: "some.stack.id"}}, + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ID: "bp.1", 
Version: "1.2.3"}, + WithStacks: []dist.Stack{{ID: "some.stack.id"}}, })}, Dependencies: []dist.ImageOrURI{{ImageRef: dist.ImageRef{ImageName: notPackageImage.Name()}}}, }, @@ -414,18 +420,800 @@ func testPackageBuildpack(t *testing.T, when spec.G, it spec.S) { }), "extracting buildpacks from 'not/package': could not find label 'io.buildpacks.buildpackage.metadata'") }) }) + + when("flatten option is set", func() { + /* 1 + * / \ + * 2 3 + * / \ + * 4 5 + * / \ + * 6 7 + */ + var ( + fakeLayerImage *h.FakeAddedLayerImage + opts client.PackageBuildpackOptions + mockBuildpackDownloader *testmocks.MockBuildpackDownloader + ) + + var successfullyCreateFlattenPackage = func() { + t.Helper() + err := subject.PackageBuildpack(context.TODO(), opts) + h.AssertNil(t, err) + h.AssertEq(t, fakeLayerImage.IsSaved(), true) + } + + it.Before(func() { + mockBuildpackDownloader = testmocks.NewMockBuildpackDownloader(mockController) + + var err error + subject, err = client.NewClient( + client.WithLogger(logging.NewLogWithWriters(&out, &out)), + client.WithDownloader(mockDownloader), + client.WithImageFactory(mockImageFactory), + client.WithFetcher(mockImageFetcher), + client.WithDockerClient(mockDockerClient), + client.WithBuildpackDownloader(mockBuildpackDownloader), + ) + h.AssertNil(t, err) + + mockDockerClient.EXPECT().Info(context.TODO(), gomock.Any()).Return(dockerclient.SystemInfoResult{Info: mobysystem.Info{OSType: "linux"}}, nil).AnyTimes() + + name := "basic/package-" + h.RandString(12) + fakeImage := fakes.NewImage(name, "", nil) + fakeLayerImage = &h.FakeAddedLayerImage{Image: fakeImage} + mockImageFactory.EXPECT().NewImage(fakeLayerImage.Name(), true, dist.Target{OS: "linux"}).Return(fakeLayerImage, nil) + mockImageFetcher.EXPECT().Fetch(gomock.Any(), name, gomock.Any()).Return(fakeLayerImage, nil).AnyTimes() + + blob1 := blob.NewBlob(filepath.Join("testdata", "buildpack-flatten", "buildpack-1")) + mockDownloader.EXPECT().Download(gomock.Any(), 
"https://example.fake/flatten-bp-1.tgz").Return(blob1, nil).AnyTimes() + bp, err := buildpack.FromBuildpackRootBlob(blob1, archive.DefaultTarWriterFactory(), nil) + h.AssertNil(t, err) + mockBuildpackDownloader.EXPECT().Download(gomock.Any(), "https://example.fake/flatten-bp-1.tgz", gomock.Any()).Return(bp, nil, nil).AnyTimes() + + // flatten buildpack 2 + blob2 := blob.NewBlob(filepath.Join("testdata", "buildpack-flatten", "buildpack-2")) + bp2, err := buildpack.FromBuildpackRootBlob(blob2, archive.DefaultTarWriterFactory(), nil) + h.AssertNil(t, err) + mockBuildpackDownloader.EXPECT().Download(gomock.Any(), "https://example.fake/flatten-bp-2.tgz", gomock.Any()).Return(bp2, nil, nil).AnyTimes() + + // flatten buildpack 3 + blob3 := blob.NewBlob(filepath.Join("testdata", "buildpack-flatten", "buildpack-3")) + bp3, err := buildpack.FromBuildpackRootBlob(blob3, archive.DefaultTarWriterFactory(), nil) + h.AssertNil(t, err) + + var depBPs []buildpack.BuildModule + for i := 4; i <= 7; i++ { + b := blob.NewBlob(filepath.Join("testdata", "buildpack-flatten", fmt.Sprintf("buildpack-%d", i))) + bp, err := buildpack.FromBuildpackRootBlob(b, archive.DefaultTarWriterFactory(), nil) + h.AssertNil(t, err) + depBPs = append(depBPs, bp) + } + mockBuildpackDownloader.EXPECT().Download(gomock.Any(), "https://example.fake/flatten-bp-3.tgz", gomock.Any()).Return(bp3, depBPs, nil).AnyTimes() + + opts = client.PackageBuildpackOptions{ + Format: client.FormatImage, + Name: fakeLayerImage.Name(), + Config: pubbldpkg.Config{ + Platform: dist.Platform{OS: "linux"}, + Buildpack: dist.BuildpackURI{URI: "https://example.fake/flatten-bp-1.tgz"}, + Dependencies: []dist.ImageOrURI{ + {BuildpackURI: dist.BuildpackURI{URI: "https://example.fake/flatten-bp-2.tgz"}}, + {BuildpackURI: dist.BuildpackURI{URI: "https://example.fake/flatten-bp-3.tgz"}}, + }, + }, + PullPolicy: image.PullNever, + Flatten: true, + } + }) + + when("flatten all", func() { + it("creates package image with all dependencies", 
func() { + successfullyCreateFlattenPackage() + + layers := fakeLayerImage.AddedLayersOrder() + h.AssertEq(t, len(layers), 1) + }) + + // TODO add test case for flatten all with --flatten-exclude + }) + }) + + when("multi-platform", func() { + var ( + index *h.MockImageIndex + indexLocalPath string + targets []dist.Target + bpPathURI string + repoName string + tmpDir string + err error + ) + + it.Before(func() { + tmpDir, err = os.MkdirTemp("", "package-buildpack-multi-platform") + h.AssertNil(t, err) + h.AssertNil(t, os.Setenv("XDG_RUNTIME_DIR", tmpDir)) + + repoName = "basic/multi-platform-package-" + h.RandString(12) + indexLocalPath = filepath.Join(tmpDir, imgutil.MakeFileSafeName(repoName)) + }) + + it.After(func() { + os.Remove(tmpDir) + }) + + when("simple buildpack", func() { + it.Before(func() { + // index stub returned to check if push operation was called + index = h.NewMockImageIndex(t, repoName, 0, 0) + + // We need to mock the index factory to inject a stub index to be pushed. 
+ mockIndexFactory.EXPECT().Exists(gomock.Eq(repoName)).Return(false) + mockIndexFactory.EXPECT().CreateIndex(gomock.Eq(repoName), gomock.Any()).Return(index, nil) + }) + + when("folder structure doesn't follow multi-platform convention", func() { + it.Before(func() { + destBpPath := filepath.Join("testdata", "buildpack-multi-platform", "buildpack-old-format") + bpPathURI, err = paths.FilePathToURI(destBpPath, "") + + prepareDownloadedBuildpackBlobAtURI(t, mockDownloader, destBpPath) + prepareExpectedMultiPlaformImages(t, mockImageFactory, mockImageFetcher, repoName, dist.Target{OS: "linux", Arch: "amd64"}, + expectedMultiPlatformImage{digest: newDigest(t, repoName, "sha256:b9d056b83bb6446fee29e89a7fcf10203c562c1f59586a6e2f39c903597bda34")}) + prepareExpectedMultiPlaformImages(t, mockImageFactory, mockImageFetcher, repoName, dist.Target{OS: "linux", Arch: "arm"}, + expectedMultiPlatformImage{digest: newDigest(t, repoName, "sha256:b9d056b83bb6446fee29e89a7fcf10203c562c1f59586a6e2f39c903597bda35")}) + }) + + it("creates a multi-platform buildpack and pushes it to a registry", func() { + // Define targets we want to package + targets = []dist.Target{{OS: "linux", Arch: "amd64"}, {OS: "linux", Arch: "arm"}} + + h.AssertNil(t, subject.PackageBuildpack(context.TODO(), client.PackageBuildpackOptions{ + Format: client.FormatImage, + Publish: true, + RelativeBaseDir: "", + Name: repoName, + Config: pubbldpkg.Config{ + Buildpack: dist.BuildpackURI{URI: bpPathURI}, + Targets: []dist.Target{}, + }, + Targets: targets, + PullPolicy: image.PullNever, + })) + + // index is not saved locally + h.AssertPathDoesNotExists(t, indexLocalPath) + + // Push operation was done + h.AssertTrue(t, index.PushCalled) + h.AssertTrue(t, index.PurgeOption) + + // index has the two expected manifests amd64 and arm + indexManifest, err := index.IndexManifest() + h.AssertNil(t, err) + h.AssertEq(t, len(indexManifest.Manifests), 2) + }) + }) + + when("folder structure follows multi-platform 
convention", func() { + when("os/arch is used", func() { + it.Before(func() { + destBpPath := filepath.Join("testdata", "buildpack-multi-platform", "buildpack-new-format") + + bpPathURI, err = paths.FilePathToURI(destBpPath, "") + h.AssertNil(t, err) + + prepareDownloadedBuildpackBlobAtURI(t, mockDownloader, filepath.Join(destBpPath, "linux", "amd64")) + prepareDownloadedBuildpackBlobAtURI(t, mockDownloader, filepath.Join(destBpPath, "linux", "arm")) + + prepareExpectedMultiPlaformImages(t, mockImageFactory, mockImageFetcher, repoName, dist.Target{OS: "linux", Arch: "amd64"}, + expectedMultiPlatformImage{digest: newDigest(t, repoName, "sha256:b9d056b83bb6446fee29e89a7fcf10203c562c1f59586a6e2f39c903597bda34")}) + + prepareExpectedMultiPlaformImages(t, mockImageFactory, mockImageFetcher, repoName, dist.Target{OS: "linux", Arch: "arm"}, + expectedMultiPlatformImage{digest: newDigest(t, repoName, "sha256:b9d056b83bb6446fee29e89a7fcf10203c562c1f59586a6e2f39c903597bda35")}) + }) + + it("creates a multi-platform buildpack and pushes it to a registry", func() { + // Define targets we want to package + targets = []dist.Target{{OS: "linux", Arch: "amd64"}, {OS: "linux", Arch: "arm"}} + + h.AssertNil(t, subject.PackageBuildpack(context.TODO(), client.PackageBuildpackOptions{ + Format: client.FormatImage, + Publish: true, + RelativeBaseDir: "", + Name: repoName, + Config: pubbldpkg.Config{ + Buildpack: dist.BuildpackURI{URI: bpPathURI}, + Targets: []dist.Target{}, + }, + Targets: targets, + PullPolicy: image.PullNever, + })) + + // index is not saved locally + h.AssertPathDoesNotExists(t, indexLocalPath) + + // Push operation was done + h.AssertTrue(t, index.PushCalled) + h.AssertTrue(t, index.PurgeOption) + + // index has the two expected manifests amd64 and arm + indexManifest, err := index.IndexManifest() + h.AssertNil(t, err) + h.AssertEq(t, len(indexManifest.Manifests), 2) + }) + }) + + when("os/arch/variant/name@version is used", func() { + it.Before(func() { + 
destBpPath := filepath.Join("testdata", "buildpack-multi-platform", "buildpack-new-format-with-versions") + + bpPathURI, err = paths.FilePathToURI(destBpPath, "") + h.AssertNil(t, err) + + prepareDownloadedBuildpackBlobAtURI(t, mockDownloader, filepath.Join(destBpPath, "linux", "amd64", "v5", "ubuntu@18.01")) + prepareDownloadedBuildpackBlobAtURI(t, mockDownloader, filepath.Join(destBpPath, "linux", "amd64", "v5", "ubuntu@21.01")) + prepareDownloadedBuildpackBlobAtURI(t, mockDownloader, filepath.Join(destBpPath, "linux", "arm", "v6", "ubuntu@18.01")) + prepareDownloadedBuildpackBlobAtURI(t, mockDownloader, filepath.Join(destBpPath, "linux", "arm", "v6", "ubuntu@21.01")) + + prepareExpectedMultiPlaformImages(t, mockImageFactory, mockImageFetcher, repoName, dist.Target{OS: "linux", Arch: "amd64", ArchVariant: "v5", Distributions: []dist.Distribution{ + {Name: "ubuntu", Version: "21.01"}}}, expectedMultiPlatformImage{digest: newDigest(t, repoName, "sha256:b9d056b83bb6446fee29e89a7fcf10203c562c1f59586a6e2f39c903597bda34")}) + + prepareExpectedMultiPlaformImages(t, mockImageFactory, mockImageFetcher, repoName, dist.Target{OS: "linux", Arch: "amd64", ArchVariant: "v5", Distributions: []dist.Distribution{ + {Name: "ubuntu", Version: "18.01"}}}, expectedMultiPlatformImage{digest: newDigest(t, repoName, "sha256:b9d056b83bb6446fee29e89a7fcf10203c562c1f59586a6e2f39c903597bda35")}) + + prepareExpectedMultiPlaformImages(t, mockImageFactory, mockImageFetcher, repoName, dist.Target{OS: "linux", Arch: "arm", ArchVariant: "v6", Distributions: []dist.Distribution{ + {Name: "ubuntu", Version: "18.01"}}}, expectedMultiPlatformImage{digest: newDigest(t, repoName, "sha256:b9d056b83bb6446fee29e89a7fcf10203c562c1f59586a6e2f39c903597bda36")}) + + prepareExpectedMultiPlaformImages(t, mockImageFactory, mockImageFetcher, repoName, dist.Target{OS: "linux", Arch: "arm", ArchVariant: "v6", Distributions: []dist.Distribution{ + {Name: "ubuntu", Version: "21.01"}}}, 
expectedMultiPlatformImage{digest: newDigest(t, repoName, "sha256:b9d056b83bb6446fee29e89a7fcf10203c562c1f59586a6e2f39c903597bda36")}) + }) + + it("creates a multi-platform buildpack and pushes it to a registry", func() { + // Define targets we want to package + targets = []dist.Target{{OS: "linux", Arch: "amd64", ArchVariant: "v5", + Distributions: []dist.Distribution{{Name: "ubuntu", Version: "18.01"}, {Name: "ubuntu", Version: "21.01"}}}, + {OS: "linux", Arch: "arm", ArchVariant: "v6", Distributions: []dist.Distribution{{Name: "ubuntu", Version: "18.01"}, {Name: "ubuntu", Version: "21.01"}}}} + + h.AssertNil(t, subject.PackageBuildpack(context.TODO(), client.PackageBuildpackOptions{ + Format: client.FormatImage, + Publish: true, + RelativeBaseDir: "", + Name: repoName, + Config: pubbldpkg.Config{ + Buildpack: dist.BuildpackURI{URI: bpPathURI}, + Targets: []dist.Target{}, + }, + Targets: targets, + PullPolicy: image.PullNever, + })) + + // index is not saved locally + h.AssertPathDoesNotExists(t, indexLocalPath) + + // Push operation was done + h.AssertTrue(t, index.PushCalled) + h.AssertTrue(t, index.PurgeOption) + + // index has the four expected manifests two for each architecture + indexManifest, err := index.IndexManifest() + h.AssertNil(t, err) + h.AssertEq(t, len(indexManifest.Manifests), 4) + }) + }) + }) + }) + + when("composite buildpack", func() { + var ( + target1 dist.Target + bp1URI string + target2 dist.Target + bp2URI string + ) + + it.Before(func() { + bp1URI = "localhost:3333/bp-1" + target1 = dist.Target{OS: "linux", Arch: "amd64"} + + bp2URI = "localhost:3333/bp-2" + target2 = dist.Target{OS: "linux", Arch: "arm"} + }) + + when("dependencies are saved on a registry", func() { + it.Before(func() { + // Check testdata/buildpack-multi-platform/buildpack-composite for configuration details + destBpPath := filepath.Join("testdata", "buildpack-multi-platform", "buildpack-composite") + + bpPathURI, err = paths.FilePathToURI(destBpPath, "") + 
h.AssertNil(t, err) + + prepareDownloadedBuildpackBlobAtURI(t, mockDownloader, destBpPath) + + indexAMD64Digest := newDigest(t, repoName, "sha256:b9d056b83bb6446fee29e89a7fcf10203c562c1f59586a6e2f39c903597bda40") + prepareRemoteMultiPlatformBuildpackPackage(t, mockImageFactory, mockImageFetcher, repoName, indexAMD64Digest, target1, []expectedMultiPlatformImage{ + {digest: newDigest(t, bp1URI, "sha256:b9d056b83bb6446fee29e89a7fcf10203c562c1f59586a6e2f39c903597bda34"), id: "samples/bp-1", version: "0.0.1", bpURI: bp1URI}, + {digest: newDigest(t, bp2URI, "sha256:b9d056b83bb6446fee29e89a7fcf10203c562c1f59586a6e2f39c903597bda35"), id: "samples/bp-2", version: "0.0.1", bpURI: bp2URI}, + }) + + indexARMDigest := newDigest(t, repoName, "sha256:b9d056b83bb6446fee29e89a7fcf10203c562c1f59586a6e2f39c903597bda41") + prepareRemoteMultiPlatformBuildpackPackage(t, mockImageFactory, mockImageFetcher, repoName, indexARMDigest, target2, []expectedMultiPlatformImage{ + {digest: newDigest(t, bp1URI, "sha256:b9d056b83bb6446fee29e89a7fcf10203c562c1f59586a6e2f39c903597bda36"), id: "samples/bp-1", version: "0.0.1", bpURI: bp1URI}, + {digest: newDigest(t, bp2URI, "sha256:b9d056b83bb6446fee29e89a7fcf10203c562c1f59586a6e2f39c903597bda37"), id: "samples/bp-2", version: "0.0.1", bpURI: bp2URI}, + }) + + // Define expected targets to package + targets = []dist.Target{target1, target2} + + // index stub returned to check if push operation was called + index = h.NewMockImageIndex(t, repoName, 0, 0) + + // We need to mock the index factory to inject a stub index to be pushed. 
+ mockIndexFactory.EXPECT().Exists(gomock.Eq(repoName)).Return(false) + mockIndexFactory.EXPECT().CreateIndex(gomock.Eq(repoName), gomock.Any()).Return(index, nil) + }) + + it("creates a multi-platform buildpack and pushes it to a registry", func() { + h.AssertNil(t, subject.PackageBuildpack(context.TODO(), client.PackageBuildpackOptions{ + Format: client.FormatImage, + Publish: true, + RelativeBaseDir: "", + Name: repoName, + Config: pubbldpkg.Config{ + Buildpack: dist.BuildpackURI{URI: bpPathURI}, + Dependencies: []dist.ImageOrURI{ + {BuildpackURI: dist.BuildpackURI{URI: bp1URI}}, + {BuildpackURI: dist.BuildpackURI{URI: bp2URI}}, + }, + Targets: []dist.Target{}, + }, + Targets: targets, + })) + + // index is not saved locally + h.AssertPathDoesNotExists(t, indexLocalPath) + + // Push operation was done + h.AssertTrue(t, index.PushCalled) + h.AssertTrue(t, index.PurgeOption) + + // index has the two expected manifests amd64 and arm + indexManifest, err := index.IndexManifest() + h.AssertNil(t, err) + h.AssertEq(t, len(indexManifest.Manifests), 2) + }) + }) + + when("dependencies are on disk", func() { + it.Before(func() { + // Check testdata/buildpack-multi-platform/buildpack-composite for configuration details + destBpPath := filepath.Join("testdata", "buildpack-multi-platform", "buildpack-composite-with-dependencies-on-disk") + + bpPathURI, err = paths.FilePathToURI(destBpPath, "") + h.AssertNil(t, err) + + prepareDownloadedBuildpackBlobAtURI(t, mockDownloader, destBpPath) + + bp1URI = filepath.Join("testdata", "buildpack-multi-platform", "buildpack-new-format") + + // Define expected targets to package + targets = []dist.Target{target1, target2} + }) + + it("errors with a message", func() { + // If dependencies point to a file or a URL like https://example.com/buildpack.tgz + // we will need to define some conventions to fetch by target + // The OCI registry already solved the problem, that's why we do not allow this path for now + err = 
subject.PackageBuildpack(context.TODO(), client.PackageBuildpackOptions{ + Format: client.FormatImage, + Publish: true, + RelativeBaseDir: "", + Name: repoName, + Config: pubbldpkg.Config{ + Buildpack: dist.BuildpackURI{URI: bpPathURI}, + Dependencies: []dist.ImageOrURI{ + {BuildpackURI: dist.BuildpackURI{URI: bp1URI}}, + }, + Targets: []dist.Target{}, + }, + Targets: targets, + }) + h.AssertNotNil(t, err) + h.AssertError(t, err, "is not allowed when creating a composite multi-platform buildpack; push your dependencies to a registry and use 'docker://' instead") + }) + }) + + when("daemon target selection", func() { + when("publish is false", func() { + when("daemon is linux/amd64", func() { + it.Before(func() { + mockDockerClient.EXPECT().ServerVersion(gomock.Any(), gomock.Any()).Return(dockerclient.ServerVersionResult{ + Os: "linux", + Arch: "amd64", + }, nil).AnyTimes() + }) + + when("targets include exact match", func() { + it("selects the exact OS and architecture match", func() { + // Prepare buildpack + destBpPath := filepath.Join("testdata", "buildpack-multi-platform", "buildpack-new-format") + bpPathURI, err = paths.FilePathToURI(destBpPath, "") + h.AssertNil(t, err) + // The code will check for platform-specific folder and download from there + prepareDownloadedBuildpackBlobAtURI(t, mockDownloader, filepath.Join(destBpPath, "linux", "amd64")) + + // Mock docker info for validateOSPlatform + mockDockerClient.EXPECT().Info(gomock.Any(), gomock.Any()).Return(dockerclient.SystemInfoResult{Info: mobysystem.Info{OSType: "linux"}}, nil) + + // Mock expectations for the selected target + fakeImage := fakes.NewImage(repoName, "", nil) + mockImageFactory.EXPECT().NewImage(repoName, true, dist.Target{OS: "linux", Arch: "amd64"}).Return(fakeImage, nil) + + targets := []dist.Target{ + {OS: "linux", Arch: "arm64"}, + {OS: "linux", Arch: "amd64"}, // exact match + {OS: "windows", Arch: "amd64"}, + } + + err = subject.PackageBuildpack(context.TODO(), 
client.PackageBuildpackOptions{ + Format: client.FormatImage, + Publish: false, + RelativeBaseDir: "", + Name: repoName, + Config: pubbldpkg.Config{ + Buildpack: dist.BuildpackURI{URI: bpPathURI}, + }, + Targets: targets, + PullPolicy: image.PullNever, + }) + h.AssertNil(t, err) + + // Verify the image was saved (indicates successful packaging) + h.AssertEq(t, fakeImage.IsSaved(), true) + }) + }) + + when("targets only have OS match with different architectures", func() { + it("returns error when no architecture matches", func() { + targets := []dist.Target{ + {OS: "linux", Arch: "arm64"}, + {OS: "linux", Arch: "arm"}, + {OS: "windows", Arch: "amd64"}, + } + + err := subject.PackageBuildpack(context.TODO(), client.PackageBuildpackOptions{ + Format: client.FormatImage, + Publish: false, + RelativeBaseDir: "", + Name: repoName, + Config: pubbldpkg.Config{ + Buildpack: dist.BuildpackURI{URI: "some-bp-uri"}, + }, + Targets: targets, + PullPolicy: image.PullNever, + }) + h.AssertError(t, err, "could not find a target that matches daemon os=linux and architecture=amd64") + }) + }) + + when("targets have OS match with empty architecture", func() { + it("selects the target with matching OS and empty architecture", func() { + // Prepare buildpack + destBpPath := filepath.Join("testdata", "buildpack") + bpPathURI, err = paths.FilePathToURI(destBpPath, "") + h.AssertNil(t, err) + prepareDownloadedBuildpackBlobAtURI(t, mockDownloader, destBpPath) + + // Mock docker info for validateOSPlatform + mockDockerClient.EXPECT().Info(gomock.Any(), gomock.Any()).Return(dockerclient.SystemInfoResult{Info: mobysystem.Info{OSType: "linux"}}, nil) + + // Mock expectations for the selected target + fakeImage := fakes.NewImage(repoName, "", nil) + mockImageFactory.EXPECT().NewImage(repoName, true, dist.Target{OS: "linux", Arch: ""}).Return(fakeImage, nil) + + targets := []dist.Target{ + {OS: "linux", Arch: "arm64"}, + {OS: "linux", Arch: ""}, // OS match with empty arch + {OS: "windows", 
Arch: "amd64"}, + } + + err = subject.PackageBuildpack(context.TODO(), client.PackageBuildpackOptions{ + Format: client.FormatImage, + Publish: false, + RelativeBaseDir: "", + Name: repoName, + Config: pubbldpkg.Config{ + Buildpack: dist.BuildpackURI{URI: bpPathURI}, + }, + Targets: targets, + PullPolicy: image.PullNever, + }) + h.AssertNil(t, err) + + // Verify the image was saved + h.AssertEq(t, fakeImage.IsSaved(), true) + }) + }) + + when("multiple targets match", func() { + it("selects the first exact match", func() { + // Prepare buildpack + destBpPath := filepath.Join("testdata", "buildpack-multi-platform", "buildpack-new-format") + bpPathURI, err = paths.FilePathToURI(destBpPath, "") + h.AssertNil(t, err) + // The code will check for platform-specific folder and download from there + prepareDownloadedBuildpackBlobAtURI(t, mockDownloader, filepath.Join(destBpPath, "linux", "amd64")) + + // Mock docker info for validateOSPlatform + mockDockerClient.EXPECT().Info(gomock.Any(), gomock.Any()).Return(dockerclient.SystemInfoResult{Info: mobysystem.Info{OSType: "linux"}}, nil) + + // Mock expectations for the selected target + fakeImage := fakes.NewImage(repoName, "", nil) + mockImageFactory.EXPECT().NewImage(repoName, true, dist.Target{OS: "linux", Arch: "amd64", ArchVariant: "v1"}).Return(fakeImage, nil) + + targets := []dist.Target{ + {OS: "linux", Arch: "arm64"}, + {OS: "linux", Arch: "amd64", ArchVariant: "v1"}, // first exact match + {OS: "linux", Arch: "amd64", ArchVariant: "v2"}, // second exact match + {OS: "linux", Arch: ""}, + } + + err = subject.PackageBuildpack(context.TODO(), client.PackageBuildpackOptions{ + Format: client.FormatImage, + Publish: false, + RelativeBaseDir: "", + Name: repoName, + Config: pubbldpkg.Config{ + Buildpack: dist.BuildpackURI{URI: bpPathURI}, + }, + Targets: targets, + PullPolicy: image.PullNever, + }) + h.AssertNil(t, err) + + // Verify the image was saved + h.AssertEq(t, fakeImage.IsSaved(), true) + }) + }) + }) + + 
when("daemon is linux/arm64", func() { + it.Before(func() { + mockDockerClient.EXPECT().ServerVersion(gomock.Any(), gomock.Any()).Return(dockerclient.ServerVersionResult{ + Os: "linux", + Arch: "arm64", + }, nil).AnyTimes() + }) + + when("targets are ordered with amd64 first", func() { + it("selects arm64 even when amd64 appears first", func() { + // Prepare buildpack + destBpPath := filepath.Join("testdata", "buildpack-multi-platform", "buildpack-new-format") + bpPathURI, err = paths.FilePathToURI(destBpPath, "") + h.AssertNil(t, err) + // The code will check for platform-specific folder and download from there + // Mock both paths as PlatformRootFolder returns /linux when it exists + prepareDownloadedBuildpackBlobAtURI(t, mockDownloader, filepath.Join(destBpPath, "linux")) + prepareDownloadedBuildpackBlobAtURI(t, mockDownloader, filepath.Join(destBpPath, "linux", "arm")) + + // Mock docker info for validateOSPlatform + mockDockerClient.EXPECT().Info(gomock.Any(), gomock.Any()).Return(dockerclient.SystemInfoResult{Info: mobysystem.Info{OSType: "linux"}}, nil) + + // Mock expectations for the selected target + fakeImage := fakes.NewImage(repoName, "", nil) + mockImageFactory.EXPECT().NewImage(repoName, true, dist.Target{OS: "linux", Arch: "arm64"}).Return(fakeImage, nil) + + targets := []dist.Target{ + {OS: "linux", Arch: "amd64"}, // appears first but wrong arch + {OS: "linux", Arch: "arm64"}, // exact match + {OS: "windows", Arch: "arm64"}, + } + + err = subject.PackageBuildpack(context.TODO(), client.PackageBuildpackOptions{ + Format: client.FormatImage, + Publish: false, + RelativeBaseDir: "", + Name: repoName, + Config: pubbldpkg.Config{ + Buildpack: dist.BuildpackURI{URI: bpPathURI}, + }, + Targets: targets, + PullPolicy: image.PullNever, + }) + h.AssertNil(t, err) + + // Verify the image was saved + h.AssertEq(t, fakeImage.IsSaved(), true) + }) + }) + + when("only amd64 targets available", func() { + it("returns error", func() { + targets := []dist.Target{ + 
{OS: "linux", Arch: "amd64"}, + {OS: "windows", Arch: "amd64"}, + } + + err := subject.PackageBuildpack(context.TODO(), client.PackageBuildpackOptions{ + Format: client.FormatImage, + Publish: false, + RelativeBaseDir: "", + Name: repoName, + Config: pubbldpkg.Config{ + Buildpack: dist.BuildpackURI{URI: "some-bp-uri"}, + }, + Targets: targets, + PullPolicy: image.PullNever, + }) + h.AssertError(t, err, "could not find a target that matches daemon os=linux and architecture=arm64") + }) + }) + }) + + when("daemon is windows/amd64", func() { + it.Before(func() { + mockDockerClient.EXPECT().ServerVersion(gomock.Any(), gomock.Any()).Return(dockerclient.ServerVersionResult{ + Os: "windows", + Arch: "amd64", + }, nil).AnyTimes() + }) + + when("targets include windows", func() { + it("selects windows/amd64", func() { + // Create a Windows-compatible client + windowsClient, err := client.NewClient( + client.WithDockerClient(mockDockerClient), + client.WithLogger(logging.NewLogWithWriters(&out, &out)), + client.WithDownloader(mockDownloader), + client.WithImageFactory(mockImageFactory), + client.WithIndexFactory(mockIndexFactory), + client.WithFetcher(mockImageFetcher), + client.WithExperimental(true), + ) + h.AssertNil(t, err) + + // Prepare buildpack + destBpPath := filepath.Join("testdata", "buildpack") + bpPathURI, err = paths.FilePathToURI(destBpPath, "") + h.AssertNil(t, err) + prepareDownloadedBuildpackBlobAtURI(t, mockDownloader, destBpPath) + + // Mock docker info for validateOSPlatform + mockDockerClient.EXPECT().Info(gomock.Any(), gomock.Any()).Return(dockerclient.SystemInfoResult{Info: mobysystem.Info{OSType: "windows"}}, nil) + + // Mock expectations for the selected target + fakeImage := fakes.NewImage(repoName, "", nil) + mockImageFactory.EXPECT().NewImage(repoName, true, dist.Target{OS: "windows", Arch: "amd64"}).Return(fakeImage, nil) + + targets := []dist.Target{ + {OS: "linux", Arch: "amd64"}, + {OS: "windows", Arch: "amd64"}, // exact match + {OS: 
"darwin", Arch: "amd64"}, + } + + err = windowsClient.PackageBuildpack(context.TODO(), client.PackageBuildpackOptions{ + Format: client.FormatImage, + Publish: false, + RelativeBaseDir: "", + Name: repoName, + Config: pubbldpkg.Config{ + Buildpack: dist.BuildpackURI{URI: bpPathURI}, + }, + Targets: targets, + PullPolicy: image.PullNever, + }) + h.AssertNil(t, err) + + // Verify the image was saved + h.AssertEq(t, fakeImage.IsSaved(), true) + }) + }) + }) + + when("targets with distributions", func() { + it.Before(func() { + mockDockerClient.EXPECT().ServerVersion(gomock.Any(), gomock.Any()).Return(dockerclient.ServerVersionResult{ + Os: "linux", + Arch: "amd64", + }, nil).AnyTimes() + }) + + it("selects target ignoring distributions", func() { + // Prepare buildpack + destBpPath := filepath.Join("testdata", "buildpack-multi-platform", "buildpack-new-format") + bpPathURI, err = paths.FilePathToURI(destBpPath, "") + h.AssertNil(t, err) + prepareDownloadedBuildpackBlobAtURI(t, mockDownloader, filepath.Join(destBpPath, "linux", "amd64")) + + // Mock docker info for validateOSPlatform + mockDockerClient.EXPECT().Info(gomock.Any(), gomock.Any()).Return(dockerclient.SystemInfoResult{Info: mobysystem.Info{OSType: "linux"}}, nil) + + // Mock expectations for the selected target + fakeImage := fakes.NewImage(repoName, "", nil) + mockImageFactory.EXPECT().NewImage(repoName, true, dist.Target{ + OS: "linux", + Arch: "amd64", + Distributions: []dist.Distribution{ + {Name: "ubuntu", Version: "22.04"}, + }, + }).Return(fakeImage, nil) + + targets := []dist.Target{ + { + OS: "linux", + Arch: "amd64", + Distributions: []dist.Distribution{ + {Name: "ubuntu", Version: "22.04"}, + }, + }, + } + + err = subject.PackageBuildpack(context.TODO(), client.PackageBuildpackOptions{ + Format: client.FormatImage, + Publish: false, + RelativeBaseDir: "", + Name: repoName, + Config: pubbldpkg.Config{ + Buildpack: dist.BuildpackURI{URI: bpPathURI}, + }, + Targets: targets, + PullPolicy: 
image.PullNever, + }) + h.AssertNil(t, err) + + // Verify the image was saved + h.AssertEq(t, fakeImage.IsSaved(), true) + }) + }) + + when("empty targets list", func() { + it("uses default behavior without calling daemonTarget", func() { + // Prepare buildpack + bpPathURI, err = paths.FilePathToURI(filepath.Join("testdata", "buildpack"), "") + h.AssertNil(t, err) + prepareDownloadedBuildpackBlobAtURI(t, mockDownloader, filepath.Join("testdata", "buildpack")) + + // Mock expectations - ServerVersion should NOT be called + // as daemonTarget is not invoked for empty targets + mockDockerClient.EXPECT().Info(gomock.Any(), gomock.Any()).Return(dockerclient.SystemInfoResult{Info: mobysystem.Info{OSType: "linux"}}, nil) + fakeImage := fakes.NewImage(repoName, "", nil) + mockImageFactory.EXPECT().NewImage(repoName, true, dist.Target{OS: "linux"}).Return(fakeImage, nil) + + err = subject.PackageBuildpack(context.TODO(), client.PackageBuildpackOptions{ + Format: client.FormatImage, + Publish: false, + RelativeBaseDir: "", + Name: repoName, + Config: pubbldpkg.Config{ + Platform: dist.Platform{OS: "linux"}, + Buildpack: dist.BuildpackURI{URI: bpPathURI}, + }, + Targets: []dist.Target{}, // empty targets + PullPolicy: image.PullNever, + }) + h.AssertNil(t, err) + + // Verify the image was saved + h.AssertEq(t, fakeImage.IsSaved(), true) + }) + }) + }) + }) + }) + }) }) when("FormatFile", func() { when("simple package for both OS formats (experimental only)", func() { it("creates package image in either OS format", func() { - tmpDir, err := ioutil.TempDir("", "package-buildpack") + tmpDir, err := os.MkdirTemp("", "package-buildpack") h.AssertNil(t, err) defer os.Remove(tmpDir) for _, imageOS := range []string{"linux", "windows"} { - localMockDockerClient := testmocks.NewMockCommonAPIClient(mockController) - localMockDockerClient.EXPECT().Info(context.TODO()).Return(types.Info{OSType: imageOS}, nil).AnyTimes() + localMockDockerClient := 
testmocks.NewMockAPIClient(mockController) + localMockDockerClient.EXPECT().Info(context.TODO(), gomock.Any()).Return(dockerclient.SystemInfoResult{Info: mobysystem.Info{OSType: imageOS}}, nil).AnyTimes() packClientWithExperimental, err := client.NewClient( client.WithDockerClient(localMockDockerClient), @@ -446,9 +1234,9 @@ func testPackageBuildpack(t *testing.T, when spec.G, it spec.S) { Config: pubbldpkg.Config{ Platform: dist.Platform{OS: imageOS}, Buildpack: dist.BuildpackURI{URI: createBuildpack(dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: dist.BuildpackInfo{ID: "bp.basic", Version: "2.3.4"}, - Stacks: []dist.Stack{{ID: "some.stack.id"}}, + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ID: "bp.basic", Version: "2.3.4"}, + WithStacks: []dist.Stack{{ID: "some.stack.id"}}, })}, }, PullPolicy: image.PullNever, @@ -468,23 +1256,23 @@ func testPackageBuildpack(t *testing.T, when spec.G, it spec.S) { it.Before(func() { childDescriptor = dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: dist.BuildpackInfo{ID: "bp.nested", Version: "2.3.4"}, - Stacks: []dist.Stack{{ID: "some.stack.id"}}, + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ID: "bp.nested", Version: "2.3.4"}, + WithStacks: []dist.Stack{{ID: "some.stack.id"}}, } packageDescriptor = dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: dist.BuildpackInfo{ID: "bp.1", Version: "1.2.3"}, - Order: dist.Order{{ - Group: []dist.BuildpackRef{{ - BuildpackInfo: dist.BuildpackInfo{ID: "bp.nested", Version: "2.3.4"}, - Optional: false, + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ID: "bp.1", Version: "1.2.3"}, + WithOrder: dist.Order{{ + Group: []dist.ModuleRef{{ + ModuleInfo: dist.ModuleInfo{ID: "bp.nested", Version: "2.3.4"}, + Optional: false, }}, }}, } - tmpDir, err = ioutil.TempDir("", "package-buildpack") + tmpDir, err = os.MkdirTemp("", "package-buildpack") h.AssertNil(t, err) }) @@ -495,7 +1283,7 @@ func testPackageBuildpack(t 
*testing.T, when spec.G, it spec.S) { when("dependencies are packaged buildpack image", func() { it.Before(func() { nestedPackage = fakes.NewImage("nested/package-"+h.RandString(12), "", nil) - mockImageFactory.EXPECT().NewImage(nestedPackage.Name(), false, "linux").Return(nestedPackage, nil) + mockImageFactory.EXPECT().NewImage(nestedPackage.Name(), false, dist.Target{OS: "linux"}).Return(nestedPackage, nil) h.AssertNil(t, subject.PackageBuildpack(context.TODO(), client.PackageBuildpackOptions{ Name: nestedPackage.Name(), @@ -507,7 +1295,7 @@ func testPackageBuildpack(t *testing.T, when spec.G, it spec.S) { PullPolicy: image.PullAlways, })) - mockImageFetcher.EXPECT().Fetch(gomock.Any(), nestedPackage.Name(), image.FetchOptions{Daemon: true, PullPolicy: image.PullAlways}).Return(nestedPackage, nil) + mockImageFetcher.EXPECT().Fetch(gomock.Any(), nestedPackage.Name(), image.FetchOptions{Daemon: true, PullPolicy: image.PullAlways, Target: &dist.Target{OS: "linux"}}).Return(nestedPackage, nil) }) it("should pull and use local nested package image", func() { @@ -599,18 +1387,18 @@ func testPackageBuildpack(t *testing.T, when spec.G, it spec.S) { it.Before(func() { secondChildDescriptor = dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: dist.BuildpackInfo{ID: "bp.nested1", Version: "2.3.4"}, - Stacks: []dist.Stack{{ID: "some.stack.id"}}, + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ID: "bp.nested1", Version: "2.3.4"}, + WithStacks: []dist.Stack{{ID: "some.stack.id"}}, } - packageDescriptor.Order = append(packageDescriptor.Order, dist.OrderEntry{Group: []dist.BuildpackRef{{ - BuildpackInfo: dist.BuildpackInfo{ID: secondChildDescriptor.Info.ID, Version: secondChildDescriptor.Info.Version}, - Optional: false, + packageDescriptor.WithOrder = append(packageDescriptor.Order(), dist.OrderEntry{Group: []dist.ModuleRef{{ + ModuleInfo: dist.ModuleInfo{ID: secondChildDescriptor.Info().ID, Version: secondChildDescriptor.Info().Version}, + Optional: 
false, }}}) nestedPackage = fakes.NewImage("nested/package-"+h.RandString(12), "", nil) - mockImageFactory.EXPECT().NewImage(nestedPackage.Name(), false, "linux").Return(nestedPackage, nil) + mockImageFactory.EXPECT().NewImage(nestedPackage.Name(), false, dist.Target{OS: "linux"}).Return(nestedPackage, nil) h.AssertNil(t, subject.PackageBuildpack(context.TODO(), client.PackageBuildpackOptions{ Name: nestedPackage.Name(), @@ -622,7 +1410,7 @@ func testPackageBuildpack(t *testing.T, when spec.G, it spec.S) { PullPolicy: image.PullAlways, })) - mockImageFetcher.EXPECT().Fetch(gomock.Any(), nestedPackage.Name(), image.FetchOptions{Daemon: true, PullPolicy: image.PullAlways}).Return(nestedPackage, nil) + mockImageFetcher.EXPECT().Fetch(gomock.Any(), nestedPackage.Name(), image.FetchOptions{Daemon: true, PullPolicy: image.PullAlways, Target: &dist.Target{OS: "linux"}}).Return(nestedPackage, nil) }) it("should include both of them", func() { @@ -696,23 +1484,23 @@ func testPackageBuildpack(t *testing.T, when spec.G, it spec.S) { var err error childDescriptor = dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: dist.BuildpackInfo{ID: "example/foo", Version: "1.1.0"}, - Stacks: []dist.Stack{{ID: "some.stack.id"}}, + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ID: "example/foo", Version: "1.1.0"}, + WithStacks: []dist.Stack{{ID: "some.stack.id"}}, } packageDescriptor = dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: dist.BuildpackInfo{ID: "bp.1", Version: "1.2.3"}, - Order: dist.Order{{ - Group: []dist.BuildpackRef{{ - BuildpackInfo: dist.BuildpackInfo{ID: "example/foo", Version: "1.1.0"}, - Optional: false, + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ID: "bp.1", Version: "1.2.3"}, + WithOrder: dist.Order{{ + Group: []dist.ModuleRef{{ + ModuleInfo: dist.ModuleInfo{ID: "example/foo", Version: "1.1.0"}, + Optional: false, }}, }}, } - tmpDir, err = ioutil.TempDir("", "registry") + tmpDir, err = os.MkdirTemp("", 
"registry") h.AssertNil(t, err) packHome = filepath.Join(tmpDir, ".pack") @@ -730,7 +1518,7 @@ func testPackageBuildpack(t *testing.T, when spec.G, it spec.S) { h.AssertNil(t, err) err = packageImage.SetLabel("io.buildpacks.buildpack.layers", `{"example/foo":{"1.1.0":{"api": "0.2", "layerDiffID":"sha256:xxx", "stacks":[{"id":"some.stack.id"}]}}}`) h.AssertNil(t, err) - mockImageFetcher.EXPECT().Fetch(gomock.Any(), packageImage.Name(), image.FetchOptions{Daemon: true, PullPolicy: image.PullAlways}).Return(packageImage, nil) + mockImageFetcher.EXPECT().Fetch(gomock.Any(), packageImage.Name(), image.FetchOptions{Daemon: true, PullPolicy: image.PullAlways, Target: &dist.Target{OS: "linux"}}).Return(packageImage, nil) packHome := filepath.Join(tmpDir, "packHome") h.AssertNil(t, os.Setenv("PACK_HOME", packHome)) @@ -776,7 +1564,7 @@ func testPackageBuildpack(t *testing.T, when spec.G, it spec.S) { when("unknown format is provided", func() { it("should error", func() { - mockDockerClient.EXPECT().Info(context.TODO()).Return(types.Info{OSType: "linux"}, nil).AnyTimes() + mockDockerClient.EXPECT().Info(context.TODO(), gomock.Any()).Return(dockerclient.SystemInfoResult{Info: mobysystem.Info{OSType: "linux"}}, nil).AnyTimes() err := subject.PackageBuildpack(context.TODO(), client.PackageBuildpackOptions{ Name: "some-buildpack", @@ -784,9 +1572,9 @@ func testPackageBuildpack(t *testing.T, when spec.G, it spec.S) { Config: pubbldpkg.Config{ Platform: dist.Platform{OS: "linux"}, Buildpack: dist.BuildpackURI{URI: createBuildpack(dist.BuildpackDescriptor{ - API: api.MustParse("0.2"), - Info: dist.BuildpackInfo{ID: "bp.1", Version: "1.2.3"}, - Stacks: []dist.Stack{{ID: "some.stack.id"}}, + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ID: "bp.1", Version: "1.2.3"}, + WithStacks: []dist.Stack{{ID: "some.stack.id"}}, })}, }, Publish: false, @@ -801,5 +1589,64 @@ func assertPackageBPFileHasBuildpacks(t *testing.T, path string, descriptors []d packageBlob := 
blob.NewBlob(path) mainBP, depBPs, err := buildpack.BuildpacksFromOCILayoutBlob(packageBlob) h.AssertNil(t, err) - h.AssertBuildpacksHaveDescriptors(t, append([]buildpack.Buildpack{mainBP}, depBPs...), descriptors) + h.AssertBuildpacksHaveDescriptors(t, append([]buildpack.BuildModule{mainBP}, depBPs...), descriptors) +} + +func prepareDownloadedBuildpackBlobAtURI(t *testing.T, mockDownloader *testmocks.MockBlobDownloader, path string) { + blob := blob.NewBlob(path) + uri, err := paths.FilePathToURI(path, "") + h.AssertNil(t, err) + mockDownloader.EXPECT().Download(gomock.Any(), uri).Return(blob, nil).AnyTimes() +} + +// prepareExpectedMultiPlaformImages creates a fake CNBImage that will be fetched from a registry +func prepareExpectedMultiPlaformImages(t *testing.T, mockImageFactory *testmocks.MockImageFactory, mockImageFetcher *testmocks.MockImageFetcher, repoName string, target dist.Target, expected expectedMultiPlatformImage) { + fakeImage := h.NewFakeWithRandomUnderlyingV1Image(t, repoName, expected.digest) + mockImageFactory.EXPECT().NewImage(repoName, false, gomock.Eq(target)).Return(fakeImage, nil) + mockImageFetcher.EXPECT().Fetch(gomock.Any(), expected.digest.Name(), gomock.Any()).Return(fakeImage, nil) +} + +// prepareRemoteMultiPlatformBuildpackPackage creates remote buildpack packages required to create a composite buildpack +// repoName: image index reference name +// digest: manifest digest for the given target +// target: os/arch for the given manifest +func prepareRemoteMultiPlatformBuildpackPackage(t *testing.T, mockImageFactory *testmocks.MockImageFactory, mockImageFetcher *testmocks.MockImageFetcher, repoName string, digest name.Digest, target dist.Target, expected []expectedMultiPlatformImage) { + // creates each remote buildpack package for the given target + for _, v := range expected { + // it must already exist in a registry, pack will pull it from a registry and write its content on disk to create a .tar + fakeImage = 
h.NewFakeWithRandomUnderlyingV1Image(t, v.bpURI, v.digest) + // Each buildpack package is expected to have some labels + h.AssertNil(t, fakeImage.SetLabel("io.buildpacks.buildpackage.metadata", fmt.Sprintf(`{"id":"%s","version":"%s","stacks":[{"id":"*"}]}`, v.id, v.version))) + layers, err := fakeImage.UnderlyingImage().Layers() + h.AssertNil(t, err) + diffID, err := layers[0].DiffID() + h.AssertNil(t, err) + h.AssertNil(t, fakeImage.SetLabel("io.buildpacks.buildpack.layers", fmt.Sprintf(`{"%s":{"%s":{"api":"0.10","stacks":[{"id":"*"}],"layerDiffID":"%s"}}}`, v.id, v.version, diffID))) + + // pack will fetch the buildpack package from the registry by target + mockImageFetcher.EXPECT().Fetch(gomock.Any(), v.bpURI, gomock.Eq(image.FetchOptions{Daemon: false, Target: &target})).Return(fakeImage, nil) + } + + // Once all the buildpacks were written to disk as .tar files + // pack will create a new OCI image adding all the .tar files as layers + compositeBuildpackImage := h.NewFakeWithRandomUnderlyingV1Image(t, repoName, digest) + mockImageFactory.EXPECT().NewImage(repoName, false, gomock.Eq(target)).Return(compositeBuildpackImage, nil) + + // Once the composite buildpack image was pushed to the registry, pack will create an Image Index adding + // each manifest by digest + mockImageFetcher.EXPECT().Fetch(gomock.Any(), digest.Name(), gomock.Any()).Return(compositeBuildpackImage, nil) +} + +func newDigest(t *testing.T, repoName, sha string) name.Digest { + digest, err := name.NewDigest(fmt.Sprintf("%s@%s", repoName, sha)) + h.AssertNil(t, err) + return digest +} + +// expectedMultiPlatformImage is a helper struct with the data needed to prepare a mock remote buildpack package +type expectedMultiPlatformImage struct { + id string + version string + bpURI string + digest name.Digest } diff --git a/pkg/client/package_extension.go b/pkg/client/package_extension.go new file mode 100644 index 0000000000..584700eb9e --- /dev/null +++ b/pkg/client/package_extension.go @@ -0,0
+1,122 @@ +package client + +import ( + "context" + "fmt" + "path/filepath" + + "github.com/pkg/errors" + + "github.com/buildpacks/pack/internal/layer" + "github.com/buildpacks/pack/internal/style" + "github.com/buildpacks/pack/pkg/buildpack" + "github.com/buildpacks/pack/pkg/dist" +) + +// PackageExtension packages extension(s) into either an image or file. +func (c *Client) PackageExtension(ctx context.Context, opts PackageBuildpackOptions) error { + if opts.Format == "" { + opts.Format = FormatImage + } + + targets, err := c.processPackageBuildpackTargets(ctx, opts) + if err != nil { + return err + } + multiArch := len(targets) > 1 && (opts.Publish || opts.Format == FormatFile) + + var digests []string + targets = dist.ExpandTargetsDistributions(targets...) + for _, target := range targets { + digest, err := c.packageExtensionTarget(ctx, opts, target, multiArch) + if err != nil { + return err + } + digests = append(digests, digest) + } + + if opts.Publish && len(digests) > 1 { + // Image Index must be created only when we pushed to registry + return c.CreateManifest(ctx, CreateManifestOptions{ + IndexRepoName: opts.Name, + RepoNames: digests, + Publish: true, + }) + } + + return nil +} + +func (c *Client) packageExtensionTarget(ctx context.Context, opts PackageBuildpackOptions, target dist.Target, multiArch bool) (string, error) { + var digest string + if target.OS == "windows" && !c.experimental { + return "", NewExperimentError("Windows extensionpackage support is currently experimental.") + } + + err := c.validateOSPlatform(ctx, target.OS, opts.Publish, opts.Format) + if err != nil { + return digest, err + } + + writerFactory, err := layer.NewWriterFactory(target.OS) + if err != nil { + return digest, errors.Wrap(err, "creating layer writer factory") + } + + packageBuilder := buildpack.NewBuilder(c.imageFactory) + + exURI := opts.Config.Extension.URI + if exURI == "" { + return digest, errors.New("extension URI must be provided") + } + + if ok, 
platformRootFolder := buildpack.PlatformRootFolder(exURI, target); ok { + exURI = platformRootFolder + } + + mainBlob, err := c.downloadBuildpackFromURI(ctx, exURI, opts.RelativeBaseDir) + if err != nil { + return digest, err + } + + ex, err := buildpack.FromExtensionRootBlob(mainBlob, writerFactory, c.logger) + if err != nil { + return digest, errors.Wrapf(err, "creating extension from %s", style.Symbol(exURI)) + } + + packageBuilder.SetExtension(ex) + + switch opts.Format { + case FormatFile: + name := opts.Name + if multiArch { + fileExtension := filepath.Ext(name) + origFileName := name[:len(name)-len(filepath.Ext(name))] + if target.Arch != "" { + name = fmt.Sprintf("%s-%s-%s%s", origFileName, target.OS, target.Arch, fileExtension) + } else { + name = fmt.Sprintf("%s-%s%s", origFileName, target.OS, fileExtension) + } + } + err = packageBuilder.SaveAsFile(name, target, opts.Labels) + if err != nil { + return digest, err + } + case FormatImage: + img, err := packageBuilder.SaveAsImage(opts.Name, opts.Publish, target, opts.Labels, opts.AdditionalTags...) 
+ if err != nil { + return digest, errors.Wrapf(err, "saving image") + } + if multiArch { + // We need to keep the identifier to create the image index + id, err := img.Identifier() + if err != nil { + return digest, errors.Wrapf(err, "determining image manifest digest") + } + digest = id.String() + } + default: + return digest, errors.Errorf("unknown format: %s", style.Symbol(opts.Format)) + } + return digest, nil +} diff --git a/pkg/client/package_extension_test.go b/pkg/client/package_extension_test.go new file mode 100644 index 0000000000..61af2bdab3 --- /dev/null +++ b/pkg/client/package_extension_test.go @@ -0,0 +1,269 @@ +package client_test + +import ( + "bytes" + "context" + "fmt" + "os" + "path/filepath" + "testing" + + "github.com/buildpacks/imgutil/fakes" + "github.com/buildpacks/lifecycle/api" + "github.com/golang/mock/gomock" + "github.com/heroku/color" + mobysystem "github.com/moby/moby/api/types/system" + dockerclient "github.com/moby/moby/client" + "github.com/sclevine/spec" + "github.com/sclevine/spec/report" + + pubbldpkg "github.com/buildpacks/pack/buildpackage" + ifakes "github.com/buildpacks/pack/internal/fakes" + "github.com/buildpacks/pack/pkg/blob" + "github.com/buildpacks/pack/pkg/client" + "github.com/buildpacks/pack/pkg/dist" + "github.com/buildpacks/pack/pkg/image" + "github.com/buildpacks/pack/pkg/logging" + "github.com/buildpacks/pack/pkg/testmocks" + h "github.com/buildpacks/pack/testhelpers" +) + +func TestPackageExtension(t *testing.T) { + color.Disable(true) + defer color.Disable(false) + spec.Run(t, "PackageExtension", testPackageExtension, spec.Parallel(), spec.Report(report.Terminal{})) +} + +func testPackageExtension(t *testing.T, when spec.G, it spec.S) { + var ( + subject *client.Client + mockController *gomock.Controller + mockDownloader *testmocks.MockBlobDownloader + mockImageFactory *testmocks.MockImageFactory + mockImageFetcher *testmocks.MockImageFetcher + mockDockerClient *testmocks.MockAPIClient + out bytes.Buffer + 
) + + it.Before(func() { + mockController = gomock.NewController(t) + mockDownloader = testmocks.NewMockBlobDownloader(mockController) + mockImageFactory = testmocks.NewMockImageFactory(mockController) + mockImageFetcher = testmocks.NewMockImageFetcher(mockController) + mockDockerClient = testmocks.NewMockAPIClient(mockController) + + var err error + subject, err = client.NewClient( + client.WithLogger(logging.NewLogWithWriters(&out, &out)), + client.WithDownloader(mockDownloader), + client.WithImageFactory(mockImageFactory), + client.WithFetcher(mockImageFetcher), + client.WithDockerClient(mockDockerClient), + ) + h.AssertNil(t, err) + }) + + it.After(func() { + mockController.Finish() + }) + + createExtension := func(descriptor dist.ExtensionDescriptor) string { + ex, err := ifakes.NewFakeExtensionBlob(&descriptor, 0644) + h.AssertNil(t, err) + url := fmt.Sprintf("https://example.com/ex.%s.tgz", h.RandString(12)) + mockDownloader.EXPECT().Download(gomock.Any(), url).Return(ex, nil).AnyTimes() + return url + } + + when("extension has issues", func() { + when("extension has no URI", func() { + it("should fail", func() { + err := subject.PackageExtension(context.TODO(), client.PackageBuildpackOptions{ + Name: "Fake-Name", + Config: pubbldpkg.Config{ + Platform: dist.Platform{OS: "linux"}, + Extension: dist.BuildpackURI{URI: ""}, + }, + Publish: true, + }) + h.AssertError(t, err, "extension URI must be provided") + }) + }) + + when("can't download extension", func() { + it("should fail", func() { + exURL := fmt.Sprintf("https://example.com/ex.%s.tgz", h.RandString(12)) + mockDownloader.EXPECT().Download(gomock.Any(), exURL).Return(nil, image.ErrNotFound).AnyTimes() + + err := subject.PackageExtension(context.TODO(), client.PackageBuildpackOptions{ + Name: "Fake-Name", + Config: pubbldpkg.Config{ + Platform: dist.Platform{OS: "linux"}, + Extension: dist.BuildpackURI{URI: exURL}, + }, + Publish: true, + }) + h.AssertError(t, err, "downloading buildpack") + }) + }) + + 
when("extension isn't a valid extension", func() { + it("should fail", func() { + fakeBlob := blob.NewBlob(filepath.Join("testdata", "empty-file")) + exURL := fmt.Sprintf("https://example.com/ex.%s.tgz", h.RandString(12)) + mockDownloader.EXPECT().Download(gomock.Any(), exURL).Return(fakeBlob, nil).AnyTimes() + + err := subject.PackageExtension(context.TODO(), client.PackageBuildpackOptions{ + Name: "Fake-Name", + Config: pubbldpkg.Config{ + Platform: dist.Platform{OS: "linux"}, + Extension: dist.BuildpackURI{URI: exURL}, + }, + Publish: true, + }) + h.AssertError(t, err, "creating extension") + }) + }) + }) + + when("FormatImage", func() { + when("simple package for both OS formats (experimental only)", func() { + it("creates package image based on daemon OS", func() { + for _, daemonOS := range []string{"linux", "windows"} { + localMockDockerClient := testmocks.NewMockAPIClient(mockController) + localMockDockerClient.EXPECT().Info(context.TODO(), gomock.Any()).Return(dockerclient.SystemInfoResult{Info: mobysystem.Info{OSType: daemonOS}}, nil).AnyTimes() + + packClientWithExperimental, err := client.NewClient( + client.WithDockerClient(localMockDockerClient), + client.WithDownloader(mockDownloader), + client.WithImageFactory(mockImageFactory), + client.WithExperimental(true), + ) + h.AssertNil(t, err) + + fakeImage := fakes.NewImage("basic/package-"+h.RandString(12), "", nil) + mockImageFactory.EXPECT().NewImage(fakeImage.Name(), true, dist.Target{OS: daemonOS}).Return(fakeImage, nil) + + fakeBlob := blob.NewBlob(filepath.Join("testdata", "empty-file")) + exURL := fmt.Sprintf("https://example.com/ex.%s.tgz", h.RandString(12)) + mockDownloader.EXPECT().Download(gomock.Any(), exURL).Return(fakeBlob, nil).AnyTimes() + + h.AssertNil(t, packClientWithExperimental.PackageExtension(context.TODO(), client.PackageBuildpackOptions{ + Format: client.FormatImage, + Name: fakeImage.Name(), + Config: pubbldpkg.Config{ + Platform: dist.Platform{OS: daemonOS}, + Extension: 
dist.BuildpackURI{URI: createExtension(dist.ExtensionDescriptor{ + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ID: "ex.basic", Version: "2.3.4"}, + })}, + }, + PullPolicy: image.PullNever, + })) + } + }) + + it("fails without experimental on Windows daemons", func() { + windowsMockDockerClient := testmocks.NewMockAPIClient(mockController) + + packClientWithoutExperimental, err := client.NewClient( + client.WithDockerClient(windowsMockDockerClient), + client.WithExperimental(false), + ) + h.AssertNil(t, err) + + err = packClientWithoutExperimental.PackageExtension(context.TODO(), client.PackageBuildpackOptions{ + Config: pubbldpkg.Config{ + Platform: dist.Platform{ + OS: "windows", + }, + }, + }) + h.AssertError(t, err, "Windows extensionpackage support is currently experimental.") + }) + + it("fails for mismatched platform and daemon os", func() { + windowsMockDockerClient := testmocks.NewMockAPIClient(mockController) + windowsMockDockerClient.EXPECT().Info(context.TODO(), gomock.Any()).Return(dockerclient.SystemInfoResult{Info: mobysystem.Info{OSType: "windows"}}, nil).AnyTimes() + + packClientWithoutExperimental, err := client.NewClient( + client.WithDockerClient(windowsMockDockerClient), + client.WithExperimental(false), + ) + h.AssertNil(t, err) + + err = packClientWithoutExperimental.PackageExtension(context.TODO(), client.PackageBuildpackOptions{ + Config: pubbldpkg.Config{ + Platform: dist.Platform{ + OS: "linux", + }, + }, + }) + + h.AssertError(t, err, "invalid 'platform.os' specified: DOCKER_OS is 'windows'") + }) + }) + }) + + when("FormatFile", func() { + when("simple package for both OS formats (experimental only)", func() { + it("creates package image in either OS format", func() { + tmpDir, err := os.MkdirTemp("", "package-extension") + h.AssertNil(t, err) + defer os.Remove(tmpDir) + + for _, imageOS := range []string{"linux", "windows"} { + localMockDockerClient := testmocks.NewMockAPIClient(mockController) + 
localMockDockerClient.EXPECT().Info(context.TODO(), gomock.Any()).Return(dockerclient.SystemInfoResult{Info: mobysystem.Info{OSType: imageOS}}, nil).AnyTimes() + + packClientWithExperimental, err := client.NewClient( + client.WithDockerClient(localMockDockerClient), + client.WithLogger(logging.NewLogWithWriters(&out, &out)), + client.WithDownloader(mockDownloader), + client.WithExperimental(true), + ) + h.AssertNil(t, err) + + fakeBlob := blob.NewBlob(filepath.Join("testdata", "empty-file")) + exURL := fmt.Sprintf("https://example.com/ex.%s.tgz", h.RandString(12)) + mockDownloader.EXPECT().Download(gomock.Any(), exURL).Return(fakeBlob, nil).AnyTimes() + + packagePath := filepath.Join(tmpDir, h.RandString(12)+"-test.cnb") + h.AssertNil(t, packClientWithExperimental.PackageExtension(context.TODO(), client.PackageBuildpackOptions{ + Format: client.FormatFile, + Name: packagePath, + Config: pubbldpkg.Config{ + Platform: dist.Platform{OS: imageOS}, + Extension: dist.BuildpackURI{URI: createExtension(dist.ExtensionDescriptor{ + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ID: "ex.basic", Version: "2.3.4"}, + })}, + }, + PullPolicy: image.PullNever, + })) + } + }) + }) + }) + + when("unknown format is provided", func() { + it("should error", func() { + mockDockerClient.EXPECT().Info(context.TODO(), gomock.Any()).Return(dockerclient.SystemInfoResult{Info: mobysystem.Info{OSType: "linux"}}, nil).AnyTimes() + + err := subject.PackageExtension(context.TODO(), client.PackageBuildpackOptions{ + Name: "some-extension", + Format: "invalid-format", + Config: pubbldpkg.Config{ + Platform: dist.Platform{OS: "linux"}, + Extension: dist.BuildpackURI{URI: createExtension(dist.ExtensionDescriptor{ + WithAPI: api.MustParse("0.2"), + WithInfo: dist.ModuleInfo{ID: "ex.1", Version: "1.2.3"}, + })}, + }, + Publish: false, + PullPolicy: image.PullAlways, + }) + h.AssertError(t, err, "unknown format: 'invalid-format'") + }) + }) +} diff --git a/pkg/client/process_volumes.go 
b/pkg/client/process_volumes.go new file mode 100644 index 0000000000..6e36b8e2c9 --- /dev/null +++ b/pkg/client/process_volumes.go @@ -0,0 +1,53 @@ +//go:build linux || windows + +package client + +import ( + "fmt" + "runtime" + "strings" + + "github.com/docker/docker/volume/mounts" + "github.com/pkg/errors" + + "github.com/buildpacks/pack/internal/style" +) + +func processVolumes(imgOS string, volumes []string) (processed []string, warnings []string, err error) { + var parser mounts.Parser + switch "windows" { + case imgOS: + parser = mounts.NewWindowsParser() + case runtime.GOOS: + parser = mounts.NewLCOWParser() + default: + parser = mounts.NewLinuxParser() + } + for _, v := range volumes { + volume, err := parser.ParseMountRaw(v, "") + if err != nil { + return nil, nil, errors.Wrapf(err, "platform volume %q has invalid format", v) + } + + sensitiveDirs := []string{"/cnb", "/layers", "/workspace"} + if imgOS == "windows" { + sensitiveDirs = []string{`c:/cnb`, `c:\cnb`, `c:/layers`, `c:\layers`, `c:/workspace`, `c:\workspace`} + } + for _, p := range sensitiveDirs { + if strings.HasPrefix(strings.ToLower(volume.Spec.Target), p) { + warnings = append(warnings, fmt.Sprintf("Mounting to a sensitive directory %s", style.Symbol(volume.Spec.Target))) + } + } + + processed = append(processed, fmt.Sprintf("%s:%s:%s", volume.Spec.Source, volume.Spec.Target, processMode(volume.Mode))) + } + return processed, warnings, nil +} + +func processMode(mode string) string { + if mode == "" { + return "ro" + } + + return mode +} diff --git a/pkg/client/process_volumes_unix.go b/pkg/client/process_volumes_unix.go new file mode 100644 index 0000000000..7017de4261 --- /dev/null +++ b/pkg/client/process_volumes_unix.go @@ -0,0 +1,53 @@ +//go:build unix && !linux + +package client + +import ( + "fmt" + "strings" + + "github.com/docker/cli/cli/compose/loader" + "github.com/docker/cli/cli/compose/types" + "github.com/pkg/errors" + + "github.com/buildpacks/pack/internal/style" +) + +func 
processVolumes(imgOS string, volumes []string) (processed []string, warnings []string, err error) { + for _, v := range volumes { + volume, err := parseVolume(v) + if err != nil { + return nil, nil, err + } + sensitiveDirs := []string{"/cnb", "/layers", "/workspace"} + if imgOS == "windows" { + sensitiveDirs = []string{`c:/cnb`, `c:\cnb`, `c:/layers`, `c:\layers`} + } + for _, p := range sensitiveDirs { + if strings.HasPrefix(strings.ToLower(volume.Target), p) { + warnings = append(warnings, fmt.Sprintf("Mounting to a sensitive directory %s", style.Symbol(volume.Target))) + } + } + mode := "ro" + if strings.HasSuffix(v, ":rw") && !volume.ReadOnly { + mode = "rw" + } + processed = append(processed, fmt.Sprintf("%s:%s:%s", volume.Source, volume.Target, mode)) + } + return processed, warnings, nil +} + +func parseVolume(volume string) (types.ServiceVolumeConfig, error) { + // volume format: ':[:]' + split := strings.Split(volume, ":") + if len(split) == 3 { + if split[2] != "ro" && split[2] != "rw" && !strings.Contains(split[2], "volume-opt") { + return types.ServiceVolumeConfig{}, errors.New(fmt.Sprintf("platform volume %q has invalid format: invalid mode: %s", volume, split[2])) + } + } + config, err := loader.ParseVolume(volume) + if err != nil { + return config, errors.Wrapf(err, "platform volume %q has invalid format", volume) + } + return config, nil +} diff --git a/pkg/client/pull_buildpack.go b/pkg/client/pull_buildpack.go index d3942fbcfb..c4a3d5745e 100644 --- a/pkg/client/pull_buildpack.go +++ b/pkg/client/pull_buildpack.go @@ -24,7 +24,7 @@ type PullBuildpackOptions struct { // PullBuildpack pulls given buildpack to be stored locally func (c *Client) PullBuildpack(ctx context.Context, opts PullBuildpackOptions) error { - locatorType, err := buildpack.GetLocatorType(opts.URI, "", []dist.BuildpackInfo{}) + locatorType, err := buildpack.GetLocatorType(opts.URI, "", []dist.ModuleInfo{}) if err != nil { return err } diff --git 
a/pkg/client/pull_buildpack_test.go b/pkg/client/pull_buildpack_test.go index f1d1e023ff..fd78acf656 100644 --- a/pkg/client/pull_buildpack_test.go +++ b/pkg/client/pull_buildpack_test.go @@ -3,9 +3,10 @@ package client_test import ( "bytes" "context" - "io/ioutil" "os" "path/filepath" + "runtime" + "strings" "testing" "github.com/buildpacks/imgutil/fakes" @@ -36,7 +37,7 @@ func testPullBuildpack(t *testing.T, when spec.G, it spec.S) { mockDownloader *testmocks.MockBlobDownloader mockImageFactory *testmocks.MockImageFactory mockImageFetcher *testmocks.MockImageFetcher - mockDockerClient *testmocks.MockCommonAPIClient + mockDockerClient *testmocks.MockAPIClient out bytes.Buffer ) @@ -45,7 +46,7 @@ func testPullBuildpack(t *testing.T, when spec.G, it spec.S) { mockDownloader = testmocks.NewMockBlobDownloader(mockController) mockImageFactory = testmocks.NewMockImageFactory(mockController) mockImageFetcher = testmocks.NewMockImageFetcher(mockController) - mockDockerClient = testmocks.NewMockCommonAPIClient(mockController) + mockDockerClient = testmocks.NewMockAPIClient(mockController) var err error subject, err = client.NewClient( @@ -108,7 +109,7 @@ func testPullBuildpack(t *testing.T, when spec.G, it spec.S) { it.Before(func() { var err error - tmpDir, err = ioutil.TempDir("", "registry") + tmpDir, err = os.MkdirTemp("", "registry") h.AssertNil(t, err) packHome = filepath.Join(tmpDir, ".pack") @@ -140,7 +141,9 @@ func testPullBuildpack(t *testing.T, when spec.G, it spec.S) { it.After(func() { os.Unsetenv("PACK_HOME") err := os.RemoveAll(tmpDir) - h.AssertNil(t, err) + if runtime.GOOS != "windows" && err != nil && strings.Contains(err.Error(), "The process cannot access the file because it is being used by another process.") { + h.AssertNil(t, err) + } }) it("should fetch the image", func() { diff --git a/pkg/client/rebase.go b/pkg/client/rebase.go index a244c9fc67..a92ac38d00 100644 --- a/pkg/client/rebase.go +++ b/pkg/client/rebase.go @@ -2,9 +2,13 @@ package client 
import ( "context" + "os" + "path/filepath" - "github.com/buildpacks/lifecycle" + "github.com/BurntSushi/toml" + "github.com/buildpacks/lifecycle/phase" "github.com/buildpacks/lifecycle/platform" + "github.com/buildpacks/lifecycle/platform/files" "github.com/pkg/errors" "github.com/buildpacks/pack/internal/build" @@ -34,53 +38,103 @@ type RebaseOptions struct { // AdditionalMirrors gives us inputs to recalculate the 'best' run image // based on the registry we are publishing to. AdditionalMirrors map[string][]string + + // If provided, directory to which report.toml will be copied + ReportDestinationDir string + + // Pass-through force flag to lifecycle rebase command to skip target data + // validation (will not have any effect if API < 0.12). + Force bool + + InsecureRegistries []string + + // Image reference to use as the previous image for rebase. + PreviousImage string } // Rebase updates the run image layers in an app image. // This operation mutates the image specified in opts. func (c *Client) Rebase(ctx context.Context, opts RebaseOptions) error { + var flags = []string{"rebase"} imageRef, err := c.parseTagReference(opts.RepoName) if err != nil { return errors.Wrapf(err, "invalid image name '%s'", opts.RepoName) } - appImage, err := c.imageFetcher.Fetch(ctx, opts.RepoName, image.FetchOptions{Daemon: !opts.Publish, PullPolicy: opts.PullPolicy}) + repoName := opts.RepoName + + if opts.PreviousImage != "" { + repoName = opts.PreviousImage + } + + appImage, err := c.imageFetcher.Fetch(ctx, repoName, image.FetchOptions{Daemon: !opts.Publish, PullPolicy: opts.PullPolicy, InsecureRegistries: opts.InsecureRegistries}) if err != nil { return err } - var md platform.LayersMetadataCompat - if ok, err := dist.GetLabel(appImage, platform.LayerMetadataLabel, &md); err != nil { + appOS, err := appImage.OS() + if err != nil { + return errors.Wrapf(err, "getting app OS") + } + + appArch, err := appImage.Architecture() + if err != nil { + return errors.Wrapf(err, "getting 
app architecture") + } + + var md files.LayersMetadataCompat + if ok, err := dist.GetLabel(appImage, platform.LifecycleMetadataLabel, &md); err != nil { return err } else if !ok { - return errors.Errorf("could not find label %s on image", style.Symbol(platform.LayerMetadataLabel)) + return errors.Errorf("could not find label %s on image", style.Symbol(platform.LifecycleMetadataLabel)) + } + var runImageMD builder.RunImageMetadata + if md.RunImage.Image != "" { + runImageMD = builder.RunImageMetadata{ + Image: md.RunImage.Image, + Mirrors: md.RunImage.Mirrors, + } + } else if md.Stack != nil { + runImageMD = builder.RunImageMetadata{ + Image: md.Stack.RunImage.Image, + Mirrors: md.Stack.RunImage.Mirrors, + } + } + + target := &dist.Target{OS: appOS, Arch: appArch} + fetchOptions := image.FetchOptions{ + Daemon: !opts.Publish, + PullPolicy: opts.PullPolicy, + Target: target, + InsecureRegistries: opts.InsecureRegistries, } runImageName := c.resolveRunImage( opts.RunImage, imageRef.Context().RegistryStr(), "", - builder.StackMetadata{ - RunImage: builder.RunImageMetadata{ - Image: md.Stack.RunImage.Image, - Mirrors: md.Stack.RunImage.Mirrors, - }, - }, + runImageMD, opts.AdditionalMirrors, - opts.Publish) + opts.Publish, + fetchOptions, + ) if runImageName == "" { return errors.New("run image must be specified") } - baseImage, err := c.imageFetcher.Fetch(ctx, runImageName, image.FetchOptions{Daemon: !opts.Publish, PullPolicy: opts.PullPolicy}) + baseImage, err := c.imageFetcher.Fetch(ctx, runImageName, fetchOptions) if err != nil { return err } + for _, reg := range opts.InsecureRegistries { + flags = append(flags, "-insecure-registry", reg) + } + c.logger.Infof("Rebasing %s on run image %s", style.Symbol(appImage.Name()), style.Symbol(baseImage.Name())) - rebaser := &lifecycle.Rebaser{Logger: c.logger, PlatformAPI: build.SupportedPlatformAPIVersions.Latest()} - _, err = rebaser.Rebase(appImage, baseImage, nil) + rebaser := &phase.Rebaser{Logger: c.logger, 
PlatformAPI: build.SupportedPlatformAPIVersions.Latest(), Force: opts.Force} + report, err := rebaser.Rebase(appImage, baseImage, opts.RepoName, nil) if err != nil { return err } @@ -91,5 +145,21 @@ func (c *Client) Rebase(ctx context.Context, opts RebaseOptions) error { } c.logger.Infof("Rebased Image: %s", style.Symbol(appImageIdentifier.String())) + + if opts.ReportDestinationDir != "" { + reportPath := filepath.Join(opts.ReportDestinationDir, "report.toml") + reportFile, err := os.OpenFile(reportPath, os.O_RDWR|os.O_CREATE, 0644) + if err != nil { + c.logger.Warnf("unable to open %s for writing rebase report", reportPath) + return err + } + + defer reportFile.Close() + err = toml.NewEncoder(reportFile).Encode(report) + if err != nil { + c.logger.Warnf("unable to write rebase report to %s", reportPath) + return err + } + } return nil } diff --git a/pkg/client/rebase_test.go b/pkg/client/rebase_test.go index 286d6dc0e5..601999607d 100644 --- a/pkg/client/rebase_test.go +++ b/pkg/client/rebase_test.go @@ -3,9 +3,12 @@ package client import ( "bytes" "context" + "os" + "path/filepath" "testing" "github.com/buildpacks/imgutil/fakes" + "github.com/buildpacks/lifecycle/auth" "github.com/heroku/color" "github.com/sclevine/spec" "github.com/sclevine/spec/report" @@ -39,21 +42,25 @@ func testRebase(t *testing.T, when spec.G, it spec.S) { fakeAppImage = fakes.NewImage("some/app", "", &fakeIdentifier{name: "app-image"}) h.AssertNil(t, fakeAppImage.SetLabel("io.buildpacks.lifecycle.metadata", `{"stack":{"runImage":{"image":"some/run", "mirrors":["example.com/some/run"]}}}`)) - h.AssertNil(t, fakeAppImage.SetLabel("io.buildpacks.stack.id", "io.buildpacks.stacks.bionic")) + h.AssertNil(t, fakeAppImage.SetLabel("io.buildpacks.stack.id", "io.buildpacks.stacks.jammy")) fakeImageFetcher.LocalImages["some/app"] = fakeAppImage fakeRunImage = fakes.NewImage("some/run", "run-image-top-layer-sha", &fakeIdentifier{name: "run-image-digest"}) - h.AssertNil(t, 
fakeRunImage.SetLabel("io.buildpacks.stack.id", "io.buildpacks.stacks.bionic")) + h.AssertNil(t, fakeRunImage.SetLabel("io.buildpacks.stack.id", "io.buildpacks.stacks.jammy")) fakeImageFetcher.LocalImages["some/run"] = fakeRunImage fakeRunImageMirror = fakes.NewImage("example.com/some/run", "mirror-top-layer-sha", &fakeIdentifier{name: "mirror-digest"}) - h.AssertNil(t, fakeRunImageMirror.SetLabel("io.buildpacks.stack.id", "io.buildpacks.stacks.bionic")) + h.AssertNil(t, fakeRunImageMirror.SetLabel("io.buildpacks.stack.id", "io.buildpacks.stacks.jammy")) fakeImageFetcher.LocalImages["example.com/some/run"] = fakeRunImageMirror + keychain, err := auth.DefaultKeychain("pack-test/dummy") + h.AssertNil(t, err) + fakeLogger := logging.NewLogWithWriters(&out, &out) subject = &Client{ logger: fakeLogger, imageFetcher: fakeImageFetcher, + keychain: keychain, } }) @@ -70,7 +77,7 @@ func testRebase(t *testing.T, when spec.G, it spec.S) { it.Before(func() { fakeCustomRunImage = fakes.NewImage("custom/run", "custom-base-top-layer-sha", &fakeIdentifier{name: "custom-base-digest"}) - h.AssertNil(t, fakeCustomRunImage.SetLabel("io.buildpacks.stack.id", "io.buildpacks.stacks.bionic")) + h.AssertNil(t, fakeCustomRunImage.SetLabel("io.buildpacks.stack.id", "io.buildpacks.stacks.jammy")) fakeImageFetcher.LocalImages["custom/run"] = fakeCustomRunImage }) @@ -78,15 +85,26 @@ func testRebase(t *testing.T, when spec.G, it spec.S) { h.AssertNilE(t, fakeCustomRunImage.Cleanup()) }) - it("uses the run image provided by the user", func() { - h.AssertNil(t, subject.Rebase(context.TODO(), + when("--force", func() { + it("uses the run image provided by the user", func() { + h.AssertNil(t, subject.Rebase(context.TODO(), + RebaseOptions{ + RunImage: "custom/run", + RepoName: "some/app", + Force: true, + })) + h.AssertEq(t, fakeAppImage.Base(), "custom/run") + lbl, _ := fakeAppImage.Label("io.buildpacks.lifecycle.metadata") + h.AssertContains(t, lbl, 
`"runImage":{"topLayer":"custom-base-top-layer-sha","reference":"custom-base-digest"`) + }) + }) + + it("errors", func() { + h.AssertError(t, subject.Rebase(context.TODO(), RebaseOptions{ RunImage: "custom/run", RepoName: "some/app", - })) - h.AssertEq(t, fakeAppImage.Base(), "custom/run") - lbl, _ := fakeAppImage.Label("io.buildpacks.lifecycle.metadata") - h.AssertContains(t, lbl, `"runImage":{"topLayer":"custom-base-top-layer-sha","reference":"custom-base-digest"`) + }), "new base image 'custom/run' not found in existing run image metadata") }) }) }) @@ -126,24 +144,43 @@ func testRebase(t *testing.T, when spec.G, it spec.S) { it.Before(func() { fakeImageFetcher.LocalImages["example.com/some/app"] = fakeAppImage fakeLocalMirror = fakes.NewImage("example.com/some/local-run", "local-mirror-top-layer-sha", &fakeIdentifier{name: "local-mirror-digest"}) - h.AssertNil(t, fakeLocalMirror.SetLabel("io.buildpacks.stack.id", "io.buildpacks.stacks.bionic")) + h.AssertNil(t, fakeLocalMirror.SetLabel("io.buildpacks.stack.id", "io.buildpacks.stacks.jammy")) fakeImageFetcher.LocalImages["example.com/some/local-run"] = fakeLocalMirror }) it.After(func() { h.AssertNilE(t, fakeLocalMirror.Cleanup()) }) - - it("chooses a matching local mirror first", func() { + when("--force", func() { + it("chooses a matching local mirror first", func() { + h.AssertNil(t, subject.Rebase(context.TODO(), RebaseOptions{ + RepoName: "example.com/some/app", + AdditionalMirrors: map[string][]string{ + "some/run": {"example.com/some/local-run"}, + }, + Force: true, + })) + h.AssertEq(t, fakeAppImage.Base(), "example.com/some/local-run") + lbl, _ := fakeAppImage.Label("io.buildpacks.lifecycle.metadata") + h.AssertContains(t, lbl, `"runImage":{"topLayer":"local-mirror-top-layer-sha","reference":"local-mirror-digest"`) + }) + }) + }) + when("there is a label and it has a run image and no stack", func() { + it("reads the run image from the label", func() { + h.AssertNil(t, 
fakeAppImage.SetLabel("io.buildpacks.lifecycle.metadata", + `{"runImage":{"image":"some/run", "mirrors":["example.com/some/run"]}}`)) h.AssertNil(t, subject.Rebase(context.TODO(), RebaseOptions{ - RepoName: "example.com/some/app", - AdditionalMirrors: map[string][]string{ - "some/run": {"example.com/some/local-run"}, - }, + RepoName: "some/app", })) - h.AssertEq(t, fakeAppImage.Base(), "example.com/some/local-run") - lbl, _ := fakeAppImage.Label("io.buildpacks.lifecycle.metadata") - h.AssertContains(t, lbl, `"runImage":{"topLayer":"local-mirror-top-layer-sha","reference":"local-mirror-digest"`) + h.AssertEq(t, fakeAppImage.Base(), "some/run") + }) + }) + when("there is neither runImage nor stack", func() { + it("fails gracefully", func() { + h.AssertNil(t, fakeAppImage.SetLabel("io.buildpacks.lifecycle.metadata", `{}`)) + h.AssertError(t, subject.Rebase(context.TODO(), RebaseOptions{RepoName: "some/app"}), + "run image must be specified") }) }) }) @@ -166,7 +203,7 @@ func testRebase(t *testing.T, when spec.G, it spec.S) { it.Before(func() { fakeRemoteRunImage = fakes.NewImage("some/run", "remote-top-layer-sha", &fakeIdentifier{name: "remote-digest"}) - h.AssertNil(t, fakeRemoteRunImage.SetLabel("io.buildpacks.stack.id", "io.buildpacks.stacks.bionic")) + h.AssertNil(t, fakeRemoteRunImage.SetLabel("io.buildpacks.stack.id", "io.buildpacks.stacks.jammy")) fakeImageFetcher.RemoteImages["some/run"] = fakeRemoteRunImage }) @@ -200,6 +237,18 @@ func testRebase(t *testing.T, when spec.G, it spec.S) { }) }) + when("report directory is set", func() { + it("writes the report", func() { + tmpdir := t.TempDir() + h.AssertNil(t, subject.Rebase(context.TODO(), RebaseOptions{ + RepoName: "some/app", + ReportDestinationDir: tmpdir, + })) + _, err := os.Stat(filepath.Join(tmpdir, "report.toml")) + h.AssertNil(t, err) + }) + }) + when("is true", func() { it.Before(func() { fakeImageFetcher.RemoteImages["some/app"] = fakeAppImage @@ -214,10 +263,44 @@ func testRebase(t *testing.T, when 
spec.G, it spec.S) { h.AssertEq(t, fakeAppImage.Base(), "some/run") lbl, _ := fakeAppImage.Label("io.buildpacks.lifecycle.metadata") h.AssertContains(t, lbl, `"runImage":{"topLayer":"remote-top-layer-sha","reference":"remote-digest"`) + args := fakeImageFetcher.FetchCalls["some/run"] + h.AssertEq(t, args.Target.ValuesAsPlatform(), "linux/amd64") }) }) }) }) + when("previous image is provided", func() { + it("fetches the image using the previous image name", func() { + h.AssertNil(t, subject.Rebase(context.TODO(), RebaseOptions{ + RepoName: "new/app", + PreviousImage: "some/app", + })) + args := fakeImageFetcher.FetchCalls["some/app"] + h.AssertNotNil(t, args) + h.AssertEq(t, args.Daemon, true) + }) + }) + + when("previous image is set to new image name", func() { + it("returns error if Fetch function fails", func() { + err := subject.Rebase(context.TODO(), RebaseOptions{ + RepoName: "some/app", + PreviousImage: "new/app", + }) + h.AssertError(t, err, "image 'new/app' does not exist on the daemon: not found") + }) + }) + + when("previous image is not provided", func() { + it("fetches the image using the repo name", func() { + h.AssertNil(t, subject.Rebase(context.TODO(), RebaseOptions{ + RepoName: "some/app", + })) + args := fakeImageFetcher.FetchCalls["some/app"] + h.AssertNotNil(t, args) + h.AssertEq(t, args.Daemon, true) + }) + }) }) }) } diff --git a/pkg/client/register_buildpack.go b/pkg/client/register_buildpack.go index 1bae8e5d96..c3ac567ead 100644 --- a/pkg/client/register_buildpack.go +++ b/pkg/client/register_buildpack.go @@ -30,7 +30,7 @@ func (c *Client) RegisterBuildpack(ctx context.Context, opts RegisterBuildpackOp return err } - var buildpackInfo dist.BuildpackInfo + var buildpackInfo dist.ModuleInfo if _, err := dist.GetLabel(appImage, buildpack.MetadataLabel, &buildpackInfo); err != nil { return err } @@ -53,7 +53,8 @@ func (c *Client) RegisterBuildpack(ctx context.Context, opts RegisterBuildpackOp Yanked: false, } - if opts.Type == "github" { + 
switch opts.Type { + case "github": issueURL, err := registry.GetIssueURL(opts.URL) if err != nil { return err @@ -67,6 +68,7 @@ func (c *Client) RegisterBuildpack(ctx context.Context, opts RegisterBuildpackOp params := url.Values{} params.Add("title", issue.Title) params.Add("body", issue.Body) + params.Add("template", "add-buildpack.md") issueURL.RawQuery = params.Encode() c.logger.Debugf("Open URL in browser: %s", issueURL) @@ -76,7 +78,7 @@ func (c *Client) RegisterBuildpack(ctx context.Context, opts RegisterBuildpackOp } return cmd.Start() - } else if opts.Type == "git" { + case "git": registryCache, err := getRegistry(c.logger, opts.Name) if err != nil { return err diff --git a/pkg/client/register_buildpack_test.go b/pkg/client/register_buildpack_test.go index 1a2e883047..3886b04a8d 100644 --- a/pkg/client/register_buildpack_test.go +++ b/pkg/client/register_buildpack_test.go @@ -36,7 +36,7 @@ func testRegisterBuildpack(t *testing.T, when spec.G, it spec.S) { fakeAppImage = fakes.NewImage("buildpack/image", "", &fakeIdentifier{name: "buildpack-image"}) h.AssertNil(t, fakeAppImage.SetLabel("io.buildpacks.buildpackage.metadata", - `{"id":"heroku/java-function","version":"1.1.1","stacks":[{"id":"heroku-18"},{"id":"io.buildpacks.stacks.bionic"},{"id":"org.cloudfoundry.stacks.cflinuxfs3"}]}`)) + `{"id":"heroku/java-function","version":"1.1.1","stacks":[{"id":"heroku-18"},{"id":"io.buildpacks.stacks.jammy"},{"id":"org.cloudfoundry.stacks.cflinuxfs3"}]}`)) fakeImageFetcher.RemoteImages["buildpack/image"] = fakeAppImage fakeLogger := logging.NewLogWithWriters(&out, &out) diff --git a/pkg/client/testdata/buildpack-flatten/buildpack-1/buildpack.toml b/pkg/client/testdata/buildpack-flatten/buildpack-1/buildpack.toml new file mode 100644 index 0000000000..98774ad690 --- /dev/null +++ b/pkg/client/testdata/buildpack-flatten/buildpack-1/buildpack.toml @@ -0,0 +1,15 @@ +api = "0.3" + +[buildpack] +id = "flatten/bp-1" +version = "1" +homepage = "http://buildpack-1" + 
+[[order]] +[[order.group]] +id = "flatten/bp-2" +version = "2" + +[[order.group]] +id = "flatten/bp-3" +version = "3" diff --git a/pkg/client/testdata/buildpack-flatten/buildpack-2/bin/build b/pkg/client/testdata/buildpack-flatten/buildpack-2/bin/build new file mode 100644 index 0000000000..c76df1a291 --- /dev/null +++ b/pkg/client/testdata/buildpack-flatten/buildpack-2/bin/build @@ -0,0 +1 @@ +build-contents \ No newline at end of file diff --git a/pkg/client/testdata/buildpack-flatten/buildpack-2/bin/detect b/pkg/client/testdata/buildpack-flatten/buildpack-2/bin/detect new file mode 100644 index 0000000000..e69de29bb2 diff --git a/pkg/client/testdata/buildpack-flatten/buildpack-2/buildpack.toml b/pkg/client/testdata/buildpack-flatten/buildpack-2/buildpack.toml new file mode 100644 index 0000000000..6ccbc185c2 --- /dev/null +++ b/pkg/client/testdata/buildpack-flatten/buildpack-2/buildpack.toml @@ -0,0 +1,9 @@ +api = "0.3" + +[buildpack] +id = "flatten/bp-2" +version = "2" +homepage = "http://buildpack-2" + +[[stacks]] +id = "*" diff --git a/pkg/client/testdata/buildpack-flatten/buildpack-3/buildpack.toml b/pkg/client/testdata/buildpack-flatten/buildpack-3/buildpack.toml new file mode 100644 index 0000000000..b743a1a2b4 --- /dev/null +++ b/pkg/client/testdata/buildpack-flatten/buildpack-3/buildpack.toml @@ -0,0 +1,15 @@ +api = "0.3" + +[buildpack] +id = "flatten/bp-3" +version = "3" +homepage = "http://buildpack-3" + +[[order]] +[[order.group]] +id = "flatten/bp-4" +version = "4" + +[[order.group]] +id = "flatten/bp-5" +version = "5" diff --git a/pkg/client/testdata/buildpack-flatten/buildpack-4/bin/build b/pkg/client/testdata/buildpack-flatten/buildpack-4/bin/build new file mode 100644 index 0000000000..c76df1a291 --- /dev/null +++ b/pkg/client/testdata/buildpack-flatten/buildpack-4/bin/build @@ -0,0 +1 @@ +build-contents \ No newline at end of file diff --git a/pkg/client/testdata/buildpack-flatten/buildpack-4/bin/detect 
b/pkg/client/testdata/buildpack-flatten/buildpack-4/bin/detect new file mode 100644 index 0000000000..e69de29bb2 diff --git a/pkg/client/testdata/buildpack-flatten/buildpack-4/buildpack.toml b/pkg/client/testdata/buildpack-flatten/buildpack-4/buildpack.toml new file mode 100644 index 0000000000..52166824e9 --- /dev/null +++ b/pkg/client/testdata/buildpack-flatten/buildpack-4/buildpack.toml @@ -0,0 +1,9 @@ +api = "0.3" + +[buildpack] +id = "flatten/bp-4" +version = "4" +homepage = "http://buildpack-4" + +[[stacks]] +id = "*" diff --git a/pkg/client/testdata/buildpack-flatten/buildpack-5/buildpack.toml b/pkg/client/testdata/buildpack-flatten/buildpack-5/buildpack.toml new file mode 100644 index 0000000000..a7510147dc --- /dev/null +++ b/pkg/client/testdata/buildpack-flatten/buildpack-5/buildpack.toml @@ -0,0 +1,15 @@ +api = "0.3" + +[buildpack] +id = "flatten/bp-5" +version = "5" +homepage = "http://buildpack-5" + +[[order]] +[[order.group]] +id = "flatten/bp-6" +version = "6" + +[[order.group]] +id = "flatten/bp-7" +version = "7" diff --git a/pkg/client/testdata/buildpack-flatten/buildpack-6/bin/build b/pkg/client/testdata/buildpack-flatten/buildpack-6/bin/build new file mode 100644 index 0000000000..c76df1a291 --- /dev/null +++ b/pkg/client/testdata/buildpack-flatten/buildpack-6/bin/build @@ -0,0 +1 @@ +build-contents \ No newline at end of file diff --git a/pkg/client/testdata/buildpack-flatten/buildpack-6/bin/detect b/pkg/client/testdata/buildpack-flatten/buildpack-6/bin/detect new file mode 100644 index 0000000000..e69de29bb2 diff --git a/pkg/client/testdata/buildpack-flatten/buildpack-6/buildpack.toml b/pkg/client/testdata/buildpack-flatten/buildpack-6/buildpack.toml new file mode 100644 index 0000000000..3cc46521ee --- /dev/null +++ b/pkg/client/testdata/buildpack-flatten/buildpack-6/buildpack.toml @@ -0,0 +1,10 @@ +api = "0.3" + +[buildpack] +id = "flatten/bp-6" +version = "6" +homepage = "http://buildpack-6" + +[[stacks]] +id = "*" + diff --git 
a/pkg/client/testdata/buildpack-flatten/buildpack-7/bin/build b/pkg/client/testdata/buildpack-flatten/buildpack-7/bin/build new file mode 100644 index 0000000000..c76df1a291 --- /dev/null +++ b/pkg/client/testdata/buildpack-flatten/buildpack-7/bin/build @@ -0,0 +1 @@ +build-contents \ No newline at end of file diff --git a/pkg/client/testdata/buildpack-flatten/buildpack-7/bin/detect b/pkg/client/testdata/buildpack-flatten/buildpack-7/bin/detect new file mode 100644 index 0000000000..e69de29bb2 diff --git a/pkg/client/testdata/buildpack-flatten/buildpack-7/buildpack.toml b/pkg/client/testdata/buildpack-flatten/buildpack-7/buildpack.toml new file mode 100644 index 0000000000..3ce8c30375 --- /dev/null +++ b/pkg/client/testdata/buildpack-flatten/buildpack-7/buildpack.toml @@ -0,0 +1,9 @@ +api = "0.3" + +[buildpack] +id = "flatten/bp-7" +version = "7" +homepage = "http://buildpack-7" + +[[stacks]] +id = "*" diff --git a/pkg/client/testdata/buildpack-multi-platform/README.md b/pkg/client/testdata/buildpack-multi-platform/README.md new file mode 100644 index 0000000000..3f7f242b8f --- /dev/null +++ b/pkg/client/testdata/buildpack-multi-platform/README.md @@ -0,0 +1,6 @@ +When creating multi-platform buildpacks, the root buildpack.toml file must be copied into each +platform root folder; this operation must be done by the caller of the method: + +`PackageBuildpack(ctx context.Context, opts PackageBuildpackOptions) error` + +To simplify the tests, the buildpack.toml is already copied in each buildpack folder. 
diff --git a/pkg/client/testdata/buildpack-multi-platform/buildpack-composite-with-dependencies-on-disk/buildpack.toml b/pkg/client/testdata/buildpack-multi-platform/buildpack-composite-with-dependencies-on-disk/buildpack.toml new file mode 100644 index 0000000000..3dfd7a5909 --- /dev/null +++ b/pkg/client/testdata/buildpack-multi-platform/buildpack-composite-with-dependencies-on-disk/buildpack.toml @@ -0,0 +1,12 @@ +api = "0.10" + +[buildpack] +id = "samples/composite-buildpack" +version = "0.0.1" + +# Order used for detection +[[order]] +[[order.group]] +id = "samples/bp-1" +version = "0.0.1" + diff --git a/pkg/client/testdata/buildpack-multi-platform/buildpack-composite-with-dependencies-on-disk/package.toml b/pkg/client/testdata/buildpack-multi-platform/buildpack-composite-with-dependencies-on-disk/package.toml new file mode 100644 index 0000000000..d174b516fd --- /dev/null +++ b/pkg/client/testdata/buildpack-multi-platform/buildpack-composite-with-dependencies-on-disk/package.toml @@ -0,0 +1,14 @@ +[buildpack] +uri = "." 
+ +[[targets]] +os = "linux" +arch = "amd64" + +[[targets]] +os = "linux" +arch = "arm64" + +[[dependencies]] +uri = "../samples/bp-1" + diff --git a/pkg/client/testdata/buildpack-multi-platform/buildpack-composite/buildpack.toml b/pkg/client/testdata/buildpack-multi-platform/buildpack-composite/buildpack.toml new file mode 100644 index 0000000000..236a11aaf6 --- /dev/null +++ b/pkg/client/testdata/buildpack-multi-platform/buildpack-composite/buildpack.toml @@ -0,0 +1,15 @@ +api = "0.10" + +[buildpack] +id = "samples/composite-buildpack" +version = "0.0.1" + +# Order used for detection +[[order]] +[[order.group]] +id = "samples/bp-1" +version = "0.0.1" + +[[order.group]] +id = "samples/bp-2" +version = "0.0.1" diff --git a/pkg/client/testdata/buildpack-multi-platform/buildpack-composite/package.toml b/pkg/client/testdata/buildpack-multi-platform/buildpack-composite/package.toml new file mode 100644 index 0000000000..67bb4ee9a8 --- /dev/null +++ b/pkg/client/testdata/buildpack-multi-platform/buildpack-composite/package.toml @@ -0,0 +1,16 @@ +[buildpack] +uri = "." 
+ +[[targets]] +os = "linux" +arch = "amd64" + +[[targets]] +os = "linux" +arch = "arm64" + +[[dependencies]] +uri = "localhost:3333/bp-1" + +[[dependencies]] +uri = "localhost:3333/bp-2" diff --git a/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format-with-versions/linux/amd64/v5/ubuntu@18.01/bin/build b/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format-with-versions/linux/amd64/v5/ubuntu@18.01/bin/build new file mode 100644 index 0000000000..fb0d852a61 --- /dev/null +++ b/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format-with-versions/linux/amd64/v5/ubuntu@18.01/bin/build @@ -0,0 +1 @@ +build-amd64-contents diff --git a/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format-with-versions/linux/amd64/v5/ubuntu@18.01/bin/detect b/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format-with-versions/linux/amd64/v5/ubuntu@18.01/bin/detect new file mode 100644 index 0000000000..27f6569700 --- /dev/null +++ b/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format-with-versions/linux/amd64/v5/ubuntu@18.01/bin/detect @@ -0,0 +1 @@ +detect-amd64-contents diff --git a/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format-with-versions/linux/amd64/v5/ubuntu@18.01/buildpack.toml b/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format-with-versions/linux/amd64/v5/ubuntu@18.01/buildpack.toml new file mode 100644 index 0000000000..eda6e1e65e --- /dev/null +++ b/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format-with-versions/linux/amd64/v5/ubuntu@18.01/buildpack.toml @@ -0,0 +1,23 @@ +api = "0.10" + +[buildpack] +id = "samples/multi-platform" +version = "0.0.1" + +[[targets]] +os = "linux" +arch = "amd64" +[[targets.distributions]] +name = "ubuntu" +versions = ["18.01", "21.01"] + +[[targets]] +os = "linux" +arch = "arm64" +variant = "v6" +[[targets.distributions]] +name = "ubuntu" +versions = ["18.01", "21.01"] + +[[stacks]] +id = "*" diff --git 
a/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format-with-versions/linux/amd64/v5/ubuntu@21.01/bin/build b/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format-with-versions/linux/amd64/v5/ubuntu@21.01/bin/build new file mode 100644 index 0000000000..fb0d852a61 --- /dev/null +++ b/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format-with-versions/linux/amd64/v5/ubuntu@21.01/bin/build @@ -0,0 +1 @@ +build-amd64-contents diff --git a/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format-with-versions/linux/amd64/v5/ubuntu@21.01/bin/detect b/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format-with-versions/linux/amd64/v5/ubuntu@21.01/bin/detect new file mode 100644 index 0000000000..27f6569700 --- /dev/null +++ b/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format-with-versions/linux/amd64/v5/ubuntu@21.01/bin/detect @@ -0,0 +1 @@ +detect-amd64-contents diff --git a/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format-with-versions/linux/amd64/v5/ubuntu@21.01/buildpack.toml b/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format-with-versions/linux/amd64/v5/ubuntu@21.01/buildpack.toml new file mode 100644 index 0000000000..802d22f733 --- /dev/null +++ b/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format-with-versions/linux/amd64/v5/ubuntu@21.01/buildpack.toml @@ -0,0 +1,16 @@ +api = "0.10" + +[buildpack] +id = "samples/multi-platform" +version = "0.0.1" + +[[targets]] +os = "linux" +arch = "amd64" + +[[targets]] +os = "linux" +arch = "arm64" + +[[stacks]] +id = "*" diff --git a/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format-with-versions/linux/arm/v6/ubuntu@18.01/bin/build b/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format-with-versions/linux/arm/v6/ubuntu@18.01/bin/build new file mode 100644 index 0000000000..d8744bb41c --- /dev/null +++ 
b/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format-with-versions/linux/arm/v6/ubuntu@18.01/bin/build @@ -0,0 +1 @@ +build-arm-contents diff --git a/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format-with-versions/linux/arm/v6/ubuntu@18.01/bin/detect b/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format-with-versions/linux/arm/v6/ubuntu@18.01/bin/detect new file mode 100644 index 0000000000..3788406fd2 --- /dev/null +++ b/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format-with-versions/linux/arm/v6/ubuntu@18.01/bin/detect @@ -0,0 +1 @@ +detect-arm-contents diff --git a/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format-with-versions/linux/arm/v6/ubuntu@18.01/buildpack.toml b/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format-with-versions/linux/arm/v6/ubuntu@18.01/buildpack.toml new file mode 100644 index 0000000000..802d22f733 --- /dev/null +++ b/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format-with-versions/linux/arm/v6/ubuntu@18.01/buildpack.toml @@ -0,0 +1,16 @@ +api = "0.10" + +[buildpack] +id = "samples/multi-platform" +version = "0.0.1" + +[[targets]] +os = "linux" +arch = "amd64" + +[[targets]] +os = "linux" +arch = "arm64" + +[[stacks]] +id = "*" diff --git a/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format-with-versions/linux/arm/v6/ubuntu@21.01/bin/build b/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format-with-versions/linux/arm/v6/ubuntu@21.01/bin/build new file mode 100644 index 0000000000..d8744bb41c --- /dev/null +++ b/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format-with-versions/linux/arm/v6/ubuntu@21.01/bin/build @@ -0,0 +1 @@ +build-arm-contents diff --git a/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format-with-versions/linux/arm/v6/ubuntu@21.01/bin/detect b/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format-with-versions/linux/arm/v6/ubuntu@21.01/bin/detect 
new file mode 100644 index 0000000000..3788406fd2 --- /dev/null +++ b/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format-with-versions/linux/arm/v6/ubuntu@21.01/bin/detect @@ -0,0 +1 @@ +detect-arm-contents diff --git a/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format-with-versions/linux/arm/v6/ubuntu@21.01/buildpack.toml b/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format-with-versions/linux/arm/v6/ubuntu@21.01/buildpack.toml new file mode 100644 index 0000000000..802d22f733 --- /dev/null +++ b/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format-with-versions/linux/arm/v6/ubuntu@21.01/buildpack.toml @@ -0,0 +1,16 @@ +api = "0.10" + +[buildpack] +id = "samples/multi-platform" +version = "0.0.1" + +[[targets]] +os = "linux" +arch = "amd64" + +[[targets]] +os = "linux" +arch = "arm64" + +[[stacks]] +id = "*" diff --git a/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format/linux/amd64/bin/build b/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format/linux/amd64/bin/build new file mode 100644 index 0000000000..fb0d852a61 --- /dev/null +++ b/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format/linux/amd64/bin/build @@ -0,0 +1 @@ +build-amd64-contents diff --git a/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format/linux/amd64/bin/detect b/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format/linux/amd64/bin/detect new file mode 100644 index 0000000000..27f6569700 --- /dev/null +++ b/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format/linux/amd64/bin/detect @@ -0,0 +1 @@ +detect-amd64-contents diff --git a/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format/linux/amd64/buildpack.toml b/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format/linux/amd64/buildpack.toml new file mode 100644 index 0000000000..802d22f733 --- /dev/null +++ 
b/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format/linux/amd64/buildpack.toml @@ -0,0 +1,16 @@ +api = "0.10" + +[buildpack] +id = "samples/multi-platform" +version = "0.0.1" + +[[targets]] +os = "linux" +arch = "amd64" + +[[targets]] +os = "linux" +arch = "arm64" + +[[stacks]] +id = "*" diff --git a/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format/linux/arm/bin/build b/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format/linux/arm/bin/build new file mode 100644 index 0000000000..d8744bb41c --- /dev/null +++ b/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format/linux/arm/bin/build @@ -0,0 +1 @@ +build-arm-contents diff --git a/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format/linux/arm/bin/detect b/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format/linux/arm/bin/detect new file mode 100644 index 0000000000..3788406fd2 --- /dev/null +++ b/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format/linux/arm/bin/detect @@ -0,0 +1 @@ +detect-arm-contents diff --git a/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format/linux/arm/buildpack.toml b/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format/linux/arm/buildpack.toml new file mode 100644 index 0000000000..802d22f733 --- /dev/null +++ b/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format/linux/arm/buildpack.toml @@ -0,0 +1,16 @@ +api = "0.10" + +[buildpack] +id = "samples/multi-platform" +version = "0.0.1" + +[[targets]] +os = "linux" +arch = "amd64" + +[[targets]] +os = "linux" +arch = "arm64" + +[[stacks]] +id = "*" diff --git a/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format/linux/buildpack.toml b/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format/linux/buildpack.toml new file mode 100644 index 0000000000..52012a4f37 --- /dev/null +++ b/pkg/client/testdata/buildpack-multi-platform/buildpack-new-format/linux/buildpack.toml @@ -0,0 +1,17 @@ +api 
= "0.10" + +[buildpack] +id = "samples/multi-platform" +version = "0.0.1" + +[[targets]] +os = "linux" +arch = "amd64" + +[[targets]] +os = "linux" +arch = "arm64" + +[[stacks]] +id = "*" + diff --git a/pkg/client/testdata/buildpack-multi-platform/buildpack-old-format/bin/build b/pkg/client/testdata/buildpack-multi-platform/buildpack-old-format/bin/build new file mode 100644 index 0000000000..c76df1a291 --- /dev/null +++ b/pkg/client/testdata/buildpack-multi-platform/buildpack-old-format/bin/build @@ -0,0 +1 @@ +build-contents \ No newline at end of file diff --git a/pkg/client/testdata/buildpack-multi-platform/buildpack-old-format/bin/detect b/pkg/client/testdata/buildpack-multi-platform/buildpack-old-format/bin/detect new file mode 100644 index 0000000000..e69de29bb2 diff --git a/pkg/client/testdata/buildpack-multi-platform/buildpack-old-format/buildpack.toml b/pkg/client/testdata/buildpack-multi-platform/buildpack-old-format/buildpack.toml new file mode 100644 index 0000000000..131cb045f6 --- /dev/null +++ b/pkg/client/testdata/buildpack-multi-platform/buildpack-old-format/buildpack.toml @@ -0,0 +1,10 @@ +api = "0.3" + +[buildpack] +id = "bp.one" +version = "1.2.3" +homepage = "http://one.buildpack" + +[[stacks]] +id = "some.stack.id" +mixins = ["mixinX", "build:mixinY", "run:mixinZ"] diff --git a/pkg/client/testdata/buildpack-non-deterministic/buildpack-1-version-1/bin/build b/pkg/client/testdata/buildpack-non-deterministic/buildpack-1-version-1/bin/build new file mode 100644 index 0000000000..c76df1a291 --- /dev/null +++ b/pkg/client/testdata/buildpack-non-deterministic/buildpack-1-version-1/bin/build @@ -0,0 +1 @@ +build-contents \ No newline at end of file diff --git a/pkg/client/testdata/buildpack-non-deterministic/buildpack-1-version-1/bin/detect b/pkg/client/testdata/buildpack-non-deterministic/buildpack-1-version-1/bin/detect new file mode 100644 index 0000000000..e69de29bb2 diff --git 
a/pkg/client/testdata/buildpack-non-deterministic/buildpack-1-version-1/buildpack.toml b/pkg/client/testdata/buildpack-non-deterministic/buildpack-1-version-1/buildpack.toml new file mode 100644 index 0000000000..39273bdb1b --- /dev/null +++ b/pkg/client/testdata/buildpack-non-deterministic/buildpack-1-version-1/buildpack.toml @@ -0,0 +1,10 @@ +api = "0.4" + +[buildpack] +id = "buildpack-1-id" +version = "buildpack-1-version-1" +homepage = "http://non-deterministic.buildpack-1" + +[[stacks]] +id = "some.stack.id" +mixins = ["mixinX", "build:mixinY", "run:mixinZ"] diff --git a/pkg/client/testdata/buildpack-non-deterministic/buildpack-1-version-2/bin/build b/pkg/client/testdata/buildpack-non-deterministic/buildpack-1-version-2/bin/build new file mode 100644 index 0000000000..c76df1a291 --- /dev/null +++ b/pkg/client/testdata/buildpack-non-deterministic/buildpack-1-version-2/bin/build @@ -0,0 +1 @@ +build-contents \ No newline at end of file diff --git a/pkg/client/testdata/buildpack-non-deterministic/buildpack-1-version-2/bin/detect b/pkg/client/testdata/buildpack-non-deterministic/buildpack-1-version-2/bin/detect new file mode 100644 index 0000000000..e69de29bb2 diff --git a/pkg/client/testdata/buildpack-non-deterministic/buildpack-1-version-2/buildpack.toml b/pkg/client/testdata/buildpack-non-deterministic/buildpack-1-version-2/buildpack.toml new file mode 100644 index 0000000000..9effa8e44d --- /dev/null +++ b/pkg/client/testdata/buildpack-non-deterministic/buildpack-1-version-2/buildpack.toml @@ -0,0 +1,11 @@ +api = "0.4" + +[buildpack] +id = "buildpack-1-id" +version = "buildpack-1-version-2" +homepage = "http://non-deterministic.buildpack-1" + +[[stacks]] +id = "some.stack.id" +mixins = ["mixinX", "build:mixinY", "run:mixinZ"] + diff --git a/pkg/client/testdata/buildpack-non-deterministic/buildpack-2-version-1/bin/build b/pkg/client/testdata/buildpack-non-deterministic/buildpack-2-version-1/bin/build new file mode 100644 index 0000000000..c76df1a291 --- 
/dev/null +++ b/pkg/client/testdata/buildpack-non-deterministic/buildpack-2-version-1/bin/build @@ -0,0 +1 @@ +build-contents \ No newline at end of file diff --git a/pkg/client/testdata/buildpack-non-deterministic/buildpack-2-version-1/bin/detect b/pkg/client/testdata/buildpack-non-deterministic/buildpack-2-version-1/bin/detect new file mode 100644 index 0000000000..e69de29bb2 diff --git a/pkg/client/testdata/buildpack-non-deterministic/buildpack-2-version-1/buildpack.toml b/pkg/client/testdata/buildpack-non-deterministic/buildpack-2-version-1/buildpack.toml new file mode 100644 index 0000000000..0919a3bf68 --- /dev/null +++ b/pkg/client/testdata/buildpack-non-deterministic/buildpack-2-version-1/buildpack.toml @@ -0,0 +1,10 @@ +api = "0.4" + +[buildpack] +id = "buildpack-2-id" +version = "buildpack-2-version-1" +homepage = "http://non-deterministic.buildpack-2" + +[[stacks]] +id = "some.stack.id" +mixins = ["mixinX", "build:mixinY", "run:mixinZ"] diff --git a/pkg/client/testdata/buildpack-non-deterministic/buildpack-2-version-2/bin/build b/pkg/client/testdata/buildpack-non-deterministic/buildpack-2-version-2/bin/build new file mode 100644 index 0000000000..c76df1a291 --- /dev/null +++ b/pkg/client/testdata/buildpack-non-deterministic/buildpack-2-version-2/bin/build @@ -0,0 +1 @@ +build-contents \ No newline at end of file diff --git a/pkg/client/testdata/buildpack-non-deterministic/buildpack-2-version-2/bin/detect b/pkg/client/testdata/buildpack-non-deterministic/buildpack-2-version-2/bin/detect new file mode 100644 index 0000000000..e69de29bb2 diff --git a/pkg/client/testdata/buildpack-non-deterministic/buildpack-2-version-2/buildpack.toml b/pkg/client/testdata/buildpack-non-deterministic/buildpack-2-version-2/buildpack.toml new file mode 100644 index 0000000000..f7297908e2 --- /dev/null +++ b/pkg/client/testdata/buildpack-non-deterministic/buildpack-2-version-2/buildpack.toml @@ -0,0 +1,10 @@ +api = "0.4" + +[buildpack] +id = "buildpack-2-id" +version = 
"buildpack-2-version-2" +homepage = "http://non-deterministic.buildpack-2" + +[[stacks]] +id = "some.stack.id" +mixins = ["mixinX", "build:mixinY", "run:mixinZ"] diff --git a/pkg/client/testdata/docker-context/error-cases/config-does-not-exist/README b/pkg/client/testdata/docker-context/error-cases/config-does-not-exist/README new file mode 100644 index 0000000000..155653676f --- /dev/null +++ b/pkg/client/testdata/docker-context/error-cases/config-does-not-exist/README @@ -0,0 +1 @@ +This folder is intentionally empty to test the scenario when the docker config.json file doesn't exist diff --git a/pkg/client/testdata/docker-context/error-cases/current-context-does-not-match/config.json b/pkg/client/testdata/docker-context/error-cases/current-context-does-not-match/config.json new file mode 100644 index 0000000000..b2887794be --- /dev/null +++ b/pkg/client/testdata/docker-context/error-cases/current-context-does-not-match/config.json @@ -0,0 +1,3 @@ +{ + "currentContext": "desktop-linux" +} diff --git a/pkg/client/testdata/docker-context/error-cases/current-context-does-not-match/contexts/meta/fe9c6bd7a66301f49ca9b6a70b217107cd1284598bfc254700c989b916da791e/meta.json b/pkg/client/testdata/docker-context/error-cases/current-context-does-not-match/contexts/meta/fe9c6bd7a66301f49ca9b6a70b217107cd1284598bfc254700c989b916da791e/meta.json new file mode 100644 index 0000000000..e27eab7d7b --- /dev/null +++ b/pkg/client/testdata/docker-context/error-cases/current-context-does-not-match/contexts/meta/fe9c6bd7a66301f49ca9b6a70b217107cd1284598bfc254700c989b916da791e/meta.json @@ -0,0 +1,8 @@ +{ + "Name": "bad-name", + "Endpoints": { + "docker": { + "Host": "unix:///Users/user/.docker/run/docker.sock" + } + } +} diff --git a/pkg/client/testdata/docker-context/error-cases/docker-endpoint-does-not-exist/config.json b/pkg/client/testdata/docker-context/error-cases/docker-endpoint-does-not-exist/config.json new file mode 100644 index 0000000000..b2887794be --- /dev/null +++ 
b/pkg/client/testdata/docker-context/error-cases/docker-endpoint-does-not-exist/config.json @@ -0,0 +1,3 @@ +{ + "currentContext": "desktop-linux" +} diff --git a/pkg/client/testdata/docker-context/error-cases/docker-endpoint-does-not-exist/contexts/meta/fe9c6bd7a66301f49ca9b6a70b217107cd1284598bfc254700c989b916da791e/meta.json b/pkg/client/testdata/docker-context/error-cases/docker-endpoint-does-not-exist/contexts/meta/fe9c6bd7a66301f49ca9b6a70b217107cd1284598bfc254700c989b916da791e/meta.json new file mode 100644 index 0000000000..9421315ff0 --- /dev/null +++ b/pkg/client/testdata/docker-context/error-cases/docker-endpoint-does-not-exist/contexts/meta/fe9c6bd7a66301f49ca9b6a70b217107cd1284598bfc254700c989b916da791e/meta.json @@ -0,0 +1,8 @@ +{ + "Name": "desktop-linux", + "Endpoints": { + "foo": { + "Host": "unix:///Users/user/.docker/run/docker.sock" + } + } +} diff --git a/pkg/client/testdata/docker-context/error-cases/empty-context/config.json b/pkg/client/testdata/docker-context/error-cases/empty-context/config.json new file mode 100644 index 0000000000..002beb1198 --- /dev/null +++ b/pkg/client/testdata/docker-context/error-cases/empty-context/config.json @@ -0,0 +1,3 @@ +{ + "currentContext": "some-bad-context" +} diff --git a/pkg/client/testdata/docker-context/error-cases/invalid-config/config.json b/pkg/client/testdata/docker-context/error-cases/invalid-config/config.json new file mode 100644 index 0000000000..9925ab85e7 --- /dev/null +++ b/pkg/client/testdata/docker-context/error-cases/invalid-config/config.json @@ -0,0 +1,3 @@ +{ + "currentContext": "some-bad-context +} diff --git a/pkg/client/testdata/docker-context/error-cases/invalid-metadata/config.json b/pkg/client/testdata/docker-context/error-cases/invalid-metadata/config.json new file mode 100644 index 0000000000..b2887794be --- /dev/null +++ b/pkg/client/testdata/docker-context/error-cases/invalid-metadata/config.json @@ -0,0 +1,3 @@ +{ + "currentContext": "desktop-linux" +} diff --git 
a/pkg/client/testdata/docker-context/error-cases/invalid-metadata/contexts/meta/fe9c6bd7a66301f49ca9b6a70b217107cd1284598bfc254700c989b916da791e/meta.json b/pkg/client/testdata/docker-context/error-cases/invalid-metadata/contexts/meta/fe9c6bd7a66301f49ca9b6a70b217107cd1284598bfc254700c989b916da791e/meta.json new file mode 100644 index 0000000000..24b749b43e --- /dev/null +++ b/pkg/client/testdata/docker-context/error-cases/invalid-metadata/contexts/meta/fe9c6bd7a66301f49ca9b6a70b217107cd1284598bfc254700c989b916da791e/meta.json @@ -0,0 +1,8 @@ +{ + "Name": "desktop-linux", + "Endpoints": { + "docker": { + "Host": "unix:///Users/user/.docker/run/docker.sock + } + } +} diff --git a/pkg/client/testdata/docker-context/happy-cases/current-context-not-defined/config.json b/pkg/client/testdata/docker-context/happy-cases/current-context-not-defined/config.json new file mode 100644 index 0000000000..b7fba5df5a --- /dev/null +++ b/pkg/client/testdata/docker-context/happy-cases/current-context-not-defined/config.json @@ -0,0 +1,7 @@ +{ + "auths": { + "https://index.docker.io/v1/": {} + }, + "credsStore": "desktop", + "experimental": "disabled" +} diff --git a/pkg/client/testdata/docker-context/happy-cases/custom-context/config.json b/pkg/client/testdata/docker-context/happy-cases/custom-context/config.json new file mode 100644 index 0000000000..b2887794be --- /dev/null +++ b/pkg/client/testdata/docker-context/happy-cases/custom-context/config.json @@ -0,0 +1,3 @@ +{ + "currentContext": "desktop-linux" +} diff --git a/pkg/client/testdata/docker-context/happy-cases/custom-context/contexts/meta/fe9c6bd7a66301f49ca9b6a70b217107cd1284598bfc254700c989b916da791e/meta.json b/pkg/client/testdata/docker-context/happy-cases/custom-context/contexts/meta/fe9c6bd7a66301f49ca9b6a70b217107cd1284598bfc254700c989b916da791e/meta.json new file mode 100644 index 0000000000..7a8aa5ff45 --- /dev/null +++ 
b/pkg/client/testdata/docker-context/happy-cases/custom-context/contexts/meta/fe9c6bd7a66301f49ca9b6a70b217107cd1284598bfc254700c989b916da791e/meta.json @@ -0,0 +1,8 @@ +{ + "Name": "desktop-linux", + "Endpoints": { + "docker": { + "Host": "unix:///Users/user/.docker/run/docker.sock" + } + } +} diff --git a/pkg/client/testdata/docker-context/happy-cases/default-context/config.json b/pkg/client/testdata/docker-context/happy-cases/default-context/config.json new file mode 100644 index 0000000000..6eaf50253d --- /dev/null +++ b/pkg/client/testdata/docker-context/happy-cases/default-context/config.json @@ -0,0 +1,3 @@ +{ + "currentContext": "default" +} diff --git a/pkg/client/testdata/docker-context/happy-cases/two-endpoints-context/config.json b/pkg/client/testdata/docker-context/happy-cases/two-endpoints-context/config.json new file mode 100644 index 0000000000..b2887794be --- /dev/null +++ b/pkg/client/testdata/docker-context/happy-cases/two-endpoints-context/config.json @@ -0,0 +1,3 @@ +{ + "currentContext": "desktop-linux" +} diff --git a/pkg/client/testdata/docker-context/happy-cases/two-endpoints-context/contexts/meta/fe9c6bd7a66301f49ca9b6a70b217107cd1284598bfc254700c989b916da791e/meta.json b/pkg/client/testdata/docker-context/happy-cases/two-endpoints-context/contexts/meta/fe9c6bd7a66301f49ca9b6a70b217107cd1284598bfc254700c989b916da791e/meta.json new file mode 100644 index 0000000000..d2fb55da6e --- /dev/null +++ b/pkg/client/testdata/docker-context/happy-cases/two-endpoints-context/contexts/meta/fe9c6bd7a66301f49ca9b6a70b217107cd1284598bfc254700c989b916da791e/meta.json @@ -0,0 +1,11 @@ +{ + "Name": "desktop-linux", + "Endpoints": { + "docker": { + "Host": "unix:///Users/user/.docker/run/docker.sock" + }, + "foo": { + "Host": "something else" + } + } +} diff --git a/pkg/client/testdata/extension-api-0.9/bin/detect b/pkg/client/testdata/extension-api-0.9/bin/detect new file mode 100644 index 0000000000..e69de29bb2 diff --git 
a/pkg/client/testdata/extension-api-0.9/bin/generate b/pkg/client/testdata/extension-api-0.9/bin/generate new file mode 100644 index 0000000000..28f453ed08 --- /dev/null +++ b/pkg/client/testdata/extension-api-0.9/bin/generate @@ -0,0 +1 @@ +generate-contents \ No newline at end of file diff --git a/pkg/client/testdata/extension-api-0.9/extension.toml b/pkg/client/testdata/extension-api-0.9/extension.toml new file mode 100644 index 0000000000..3ca26a4206 --- /dev/null +++ b/pkg/client/testdata/extension-api-0.9/extension.toml @@ -0,0 +1,6 @@ +api = "0.9" + +[extension] +id = "ext.one" +version = "1.2.3" +homepage = "http://one.extension" diff --git a/pkg/client/testdata/extension/bin/detect b/pkg/client/testdata/extension/bin/detect new file mode 100644 index 0000000000..e69de29bb2 diff --git a/pkg/client/testdata/extension/bin/generate b/pkg/client/testdata/extension/bin/generate new file mode 100644 index 0000000000..28f453ed08 --- /dev/null +++ b/pkg/client/testdata/extension/bin/generate @@ -0,0 +1 @@ +generate-contents \ No newline at end of file diff --git a/pkg/client/testdata/extension/extension.toml b/pkg/client/testdata/extension/extension.toml new file mode 100644 index 0000000000..dd64e33d0b --- /dev/null +++ b/pkg/client/testdata/extension/extension.toml @@ -0,0 +1,6 @@ +api = "0.3" + +[extension] +id = "ext.one" +version = "1.2.3" +homepage = "http://one.extension" diff --git a/pkg/client/testdata/lifecycle/lifecycle.tar b/pkg/client/testdata/lifecycle/lifecycle.tar new file mode 100644 index 0000000000..3a7c9893d8 Binary files /dev/null and b/pkg/client/testdata/lifecycle/lifecycle.tar differ diff --git a/pkg/client/testdata/lifecycle/platform-0.13/lifecycle-v0.0.0-arch/analyzer b/pkg/client/testdata/lifecycle/platform-0.13/lifecycle-v0.0.0-arch/analyzer new file mode 100755 index 0000000000..2c7cce34c1 --- /dev/null +++ b/pkg/client/testdata/lifecycle/platform-0.13/lifecycle-v0.0.0-arch/analyzer @@ -0,0 +1 @@ +analyzer \ No newline at end of file 
diff --git a/pkg/client/testdata/lifecycle/platform-0.13/lifecycle-v0.0.0-arch/builder b/pkg/client/testdata/lifecycle/platform-0.13/lifecycle-v0.0.0-arch/builder new file mode 100755 index 0000000000..b05c21cd9d --- /dev/null +++ b/pkg/client/testdata/lifecycle/platform-0.13/lifecycle-v0.0.0-arch/builder @@ -0,0 +1 @@ +builder \ No newline at end of file diff --git a/pkg/client/testdata/lifecycle/platform-0.13/lifecycle-v0.0.0-arch/creator b/pkg/client/testdata/lifecycle/platform-0.13/lifecycle-v0.0.0-arch/creator new file mode 100755 index 0000000000..2ceb327f07 --- /dev/null +++ b/pkg/client/testdata/lifecycle/platform-0.13/lifecycle-v0.0.0-arch/creator @@ -0,0 +1 @@ +creator \ No newline at end of file diff --git a/pkg/client/testdata/lifecycle/platform-0.13/lifecycle-v0.0.0-arch/detector b/pkg/client/testdata/lifecycle/platform-0.13/lifecycle-v0.0.0-arch/detector new file mode 100755 index 0000000000..4ca7e105c9 --- /dev/null +++ b/pkg/client/testdata/lifecycle/platform-0.13/lifecycle-v0.0.0-arch/detector @@ -0,0 +1 @@ +detector \ No newline at end of file diff --git a/pkg/client/testdata/lifecycle/platform-0.13/lifecycle-v0.0.0-arch/exporter b/pkg/client/testdata/lifecycle/platform-0.13/lifecycle-v0.0.0-arch/exporter new file mode 100755 index 0000000000..76a0149ce4 --- /dev/null +++ b/pkg/client/testdata/lifecycle/platform-0.13/lifecycle-v0.0.0-arch/exporter @@ -0,0 +1 @@ +exporter \ No newline at end of file diff --git a/pkg/client/testdata/lifecycle/platform-0.13/lifecycle-v0.0.0-arch/launcher b/pkg/client/testdata/lifecycle/platform-0.13/lifecycle-v0.0.0-arch/launcher new file mode 100755 index 0000000000..89f76d0bc2 --- /dev/null +++ b/pkg/client/testdata/lifecycle/platform-0.13/lifecycle-v0.0.0-arch/launcher @@ -0,0 +1 @@ +launcher \ No newline at end of file diff --git a/pkg/client/testdata/lifecycle/platform-0.13/lifecycle-v0.0.0-arch/restorer b/pkg/client/testdata/lifecycle/platform-0.13/lifecycle-v0.0.0-arch/restorer new file mode 100755 index 
0000000000..f6d18366f2 --- /dev/null +++ b/pkg/client/testdata/lifecycle/platform-0.13/lifecycle-v0.0.0-arch/restorer @@ -0,0 +1 @@ +restorer \ No newline at end of file diff --git a/pkg/client/testdata/lifecycle/platform-0.13/lifecycle.toml b/pkg/client/testdata/lifecycle/platform-0.13/lifecycle.toml new file mode 100644 index 0000000000..d40ac40b6b --- /dev/null +++ b/pkg/client/testdata/lifecycle/platform-0.13/lifecycle.toml @@ -0,0 +1,11 @@ +[lifecycle] +version = "0.0.0" + +[apis] +[apis.buildpack] +deprecated = ["0.2", "0.3"] +supported = ["0.2", "0.3", "0.4", "0.9"] + +[apis.platform] +deprecated = ["0.2"] +supported = ["0.3", "0.4", "0.5", "0.6", "0.7", "0.8", "0.9", "0.10", "0.11", "0.12", "0.13"] \ No newline at end of file diff --git a/pkg/client/testdata/lifecycle/platform-0.4/lifecycle.toml b/pkg/client/testdata/lifecycle/platform-0.4/lifecycle.toml index 2803b8eb7d..5cc795d165 100644 --- a/pkg/client/testdata/lifecycle/platform-0.4/lifecycle.toml +++ b/pkg/client/testdata/lifecycle/platform-0.4/lifecycle.toml @@ -4,7 +4,7 @@ version = "0.0.0" [apis] [apis.buildpack] deprecated = ["0.2", "0.3"] -supported = ["0.2", "0.3", "0.4"] +supported = ["0.2", "0.3", "0.4", "0.9"] [apis.platform] deprecated = ["0.2"] diff --git a/pkg/client/yank_buildpack_test.go b/pkg/client/yank_buildpack_test.go index c080754694..206fd8b354 100644 --- a/pkg/client/yank_buildpack_test.go +++ b/pkg/client/yank_buildpack_test.go @@ -34,7 +34,7 @@ func testYankBuildpack(t *testing.T, when spec.G, it spec.S) { fakeAppImage = fakes.NewImage("buildpack/image", "", &fakeIdentifier{name: "buildpack-image"}) h.AssertNil(t, fakeAppImage.SetLabel("io.buildpacks.buildpackage.metadata", - `{"id":"heroku/java-function","version":"1.1.1","stacks":[{"id":"heroku-18"},{"id":"io.buildpacks.stacks.bionic"},{"id":"org.cloudfoundry.stacks.cflinuxfs3"}]}`)) + 
`{"id":"heroku/java-function","version":"1.1.1","stacks":[{"id":"heroku-18"},{"id":"io.buildpacks.stacks.jammy"},{"id":"org.cloudfoundry.stacks.cflinuxfs3"}]}`)) fakeImageFetcher.RemoteImages["buildpack/image"] = fakeAppImage fakeLogger := logging.NewLogWithWriters(&out, &out) diff --git a/pkg/dist/buildmodule.go b/pkg/dist/buildmodule.go new file mode 100644 index 0000000000..b5f665e7e9 --- /dev/null +++ b/pkg/dist/buildmodule.go @@ -0,0 +1,130 @@ +package dist + +import ( + "fmt" + "strings" + + "github.com/pkg/errors" + + "github.com/buildpacks/pack/internal/style" +) + +const AssumedBuildpackAPIVersion = "0.1" +const BuildpacksDir = "/cnb/buildpacks" +const ExtensionsDir = "/cnb/extensions" + +type ModuleInfo struct { + ID string `toml:"id,omitempty" json:"id,omitempty" yaml:"id,omitempty"` + Name string `toml:"name,omitempty" json:"name,omitempty" yaml:"name,omitempty"` + Version string `toml:"version,omitempty" json:"version,omitempty" yaml:"version,omitempty"` + Description string `toml:"description,omitempty" json:"description,omitempty" yaml:"description,omitempty"` + Homepage string `toml:"homepage,omitempty" json:"homepage,omitempty" yaml:"homepage,omitempty"` + Keywords []string `toml:"keywords,omitempty" json:"keywords,omitempty" yaml:"keywords,omitempty"` + ExecEnv []string `toml:"exec-env,omitempty" json:"exec-env,omitempty" yaml:"exec-env,omitempty"` + Licenses []License `toml:"licenses,omitempty" json:"licenses,omitempty" yaml:"licenses,omitempty"` + ClearEnv bool `toml:"clear-env,omitempty" json:"clear-env,omitempty" yaml:"clear-env,omitempty"` +} + +func (b ModuleInfo) FullName() string { + if b.Version != "" { + return b.ID + "@" + b.Version + } + return b.ID +} + +func (b ModuleInfo) FullNameWithVersion() (string, error) { + if b.Version == "" { + return b.ID, errors.Errorf("buildpack %s does not have a version defined", style.Symbol(b.ID)) + } + return b.ID + "@" + b.Version, nil +} + +// Satisfy stringer +func (b ModuleInfo) String() string { 
return b.FullName() } + +// Match compares two buildpacks by ID and Version +func (b ModuleInfo) Match(o ModuleInfo) bool { + return b.ID == o.ID && b.Version == o.Version +} + +type License struct { + Type string `toml:"type"` + URI string `toml:"uri"` +} + +type Stack struct { + ID string `json:"id" toml:"id"` + Mixins []string `json:"mixins,omitempty" toml:"mixins,omitempty"` +} + +type Target struct { + OS string `json:"os" toml:"os"` + Arch string `json:"arch" toml:"arch"` + ArchVariant string `json:"variant,omitempty" toml:"variant,omitempty"` + Distributions []Distribution `json:"distros,omitempty" toml:"distros,omitempty"` +} + +// ValuesAsSlice converts the internal representation of a target (os, arch, variant, etc.) into a string slice, +// where each value included in the final array must be not empty. +func (t *Target) ValuesAsSlice() []string { + var targets []string + if t.OS != "" { + targets = append(targets, t.OS) + } + if t.Arch != "" { + targets = append(targets, t.Arch) + } + if t.ArchVariant != "" { + targets = append(targets, t.ArchVariant) + } + + for _, d := range t.Distributions { + targets = append(targets, fmt.Sprintf("%s@%s", d.Name, d.Version)) + } + return targets +} + +func (t *Target) ValuesAsPlatform() string { + return strings.Join(t.ValuesAsSlice(), "/") +} + +// ExpandTargetsDistributions expands each provided target (with multiple distribution versions) to multiple targets (each with a single distribution version). 
+// For example, given an array with ONE target with the format: +// +// [ +// {OS:"linux", Distributions: []dist.Distribution{{Name: "ubuntu", Version: "18.01"},{Name: "ubuntu", Version: "21.01"}}} +// ] +// +// it returns an array with TWO targets each with the format: +// +// [ +// {OS:"linux",Distributions: []dist.Distribution{{Name: "ubuntu", Version: "18.01"}}}, +// {OS:"linux",Distributions: []dist.Distribution{{Name: "ubuntu", Version: "21.01"}}} +// ] +func ExpandTargetsDistributions(targets ...Target) []Target { + var expandedTargets []Target + for _, target := range targets { + expandedTargets = append(expandedTargets, expandTargetDistributions(target)...) + } + return expandedTargets +} + +func expandTargetDistributions(target Target) []Target { + var expandedTargets []Target + if (len(target.Distributions)) > 1 { + originalDistros := target.Distributions + for _, distro := range originalDistros { + copyTarget := target + copyTarget.Distributions = []Distribution{distro} + expandedTargets = append(expandedTargets, copyTarget) + } + } else { + expandedTargets = append(expandedTargets, target) + } + return expandedTargets +} + +type Distribution struct { + Name string `json:"name,omitempty" toml:"name,omitempty"` + Version string `json:"version,omitempty" toml:"version,omitempty"` +} diff --git a/pkg/dist/buildmodule_test.go b/pkg/dist/buildmodule_test.go new file mode 100644 index 0000000000..6a0f4dd93b --- /dev/null +++ b/pkg/dist/buildmodule_test.go @@ -0,0 +1,103 @@ +package dist_test + +import ( + "testing" + + "github.com/buildpacks/pack/pkg/dist" + h "github.com/buildpacks/pack/testhelpers" + + "github.com/heroku/color" + + "github.com/sclevine/spec" + "github.com/sclevine/spec/report" +) + +func TestBuildModule(t *testing.T) { + color.Disable(true) + defer color.Disable(false) + spec.Run(t, "testBuildModule", testBuildModule, spec.Parallel(), spec.Report(report.Terminal{})) +} + +func testBuildModule(t *testing.T, when spec.G, it spec.S) { + var 
info dist.ModuleInfo + + it.Before(func() { + info = dist.ModuleInfo{ + ID: "some-id", + Name: "some-name", + Version: "some-version", + } + }) + + when("#FullName", func() { + when("version", func() { + when("blank", func() { + it.Before(func() { + info.Version = "" + }) + + it("prints ID", func() { + h.AssertEq(t, info.FullName(), "some-id") + }) + }) + + when("not blank", func() { + it("prints ID and version", func() { + h.AssertEq(t, info.FullName(), "some-id@some-version") + }) + }) + }) + }) + + when("#FullNameWithVersion", func() { + when("version", func() { + when("blank", func() { + it.Before(func() { + info.Version = "" + }) + + it("errors", func() { + _, err := info.FullNameWithVersion() + h.AssertNotNil(t, err) + }) + }) + + when("not blank", func() { + it("prints ID and version", func() { + actual, err := info.FullNameWithVersion() + h.AssertNil(t, err) + h.AssertEq(t, actual, "some-id@some-version") + }) + }) + }) + }) + + when("#String", func() { + it("returns #FullName", func() { + info.Version = "" + h.AssertEq(t, info.String(), info.FullName()) + }) + }) + + when("#Match", func() { + when("IDs and versions match", func() { + it("returns true", func() { + other := dist.ModuleInfo{ + ID: "some-id", + Version: "some-version", + } + h.AssertEq(t, info.Match(other), true) + }) + }) + + when("only IDs match", func() { + it("returns false", func() { + other := dist.ModuleInfo{ + ID: "some-id", + Version: "some-other-version", + } + h.AssertEq(t, info.Match(other), false) + }) + }) + }) +} diff --git a/pkg/dist/buildpack.go b/pkg/dist/buildpack.go deleted file mode 100644 index 4042c717d7..0000000000 --- a/pkg/dist/buildpack.go +++ /dev/null @@ -1,39 +0,0 @@ -package dist - -const AssumedBuildpackAPIVersion = "0.1" -const BuildpacksDir = "/cnb/buildpacks" - -type BuildpackInfo struct { - ID string `toml:"id,omitempty" json:"id,omitempty" yaml:"id,omitempty"` - Name string `toml:"name,omitempty" json:"name,omitempty" yaml:"name,omitempty"` - Version string 
`toml:"version,omitempty" json:"version,omitempty" yaml:"version,omitempty"` - Description string `toml:"description,omitempty" json:"description,omitempty" yaml:"description,omitempty"` - Homepage string `toml:"homepage,omitempty" json:"homepage,omitempty" yaml:"homepage,omitempty"` - Keywords []string `toml:"keywords,omitempty" json:"keywords,omitempty" yaml:"keywords,omitempty"` - Licenses []License `toml:"licenses,omitempty" json:"licenses,omitempty" yaml:"licenses,omitempty"` -} - -func (b BuildpackInfo) FullName() string { - if b.Version != "" { - return b.ID + "@" + b.Version - } - return b.ID -} - -// Satisfy stringer -func (b BuildpackInfo) String() string { return b.FullName() } - -// Match compares two buildpacks by ID and Version -func (b BuildpackInfo) Match(o BuildpackInfo) bool { - return b.ID == o.ID && b.Version == o.Version -} - -type License struct { - Type string `toml:"type"` - URI string `toml:"uri"` -} - -type Stack struct { - ID string `json:"id" toml:"id"` - Mixins []string `json:"mixins,omitempty" toml:"mixins,omitempty"` -} diff --git a/pkg/dist/buildpack_descriptor.go b/pkg/dist/buildpack_descriptor.go index fea7f4816f..77ed9028d4 100644 --- a/pkg/dist/buildpack_descriptor.go +++ b/pkg/dist/buildpack_descriptor.go @@ -1,6 +1,7 @@ package dist import ( + "encoding/json" "fmt" "sort" "strings" @@ -12,19 +13,22 @@ import ( ) type BuildpackDescriptor struct { - API *api.Version `toml:"api"` - Info BuildpackInfo `toml:"buildpack"` - Stacks []Stack `toml:"stacks"` - Order Order `toml:"order"` + WithAPI *api.Version `toml:"api"` + WithInfo ModuleInfo `toml:"buildpack"` + WithStacks []Stack `toml:"stacks,omitempty"` + WithTargets []Target `toml:"targets,omitempty"` + WithOrder Order `toml:"order"` + WithWindowsBuild bool + WithLinuxBuild bool } func (b *BuildpackDescriptor) EscapedID() string { - return strings.ReplaceAll(b.Info.ID, "/", "_") + return strings.ReplaceAll(b.Info().ID, "/", "_") } func (b *BuildpackDescriptor) 
EnsureStackSupport(stackID string, providedMixins []string, validateRunStageMixins bool) error { - if len(b.Stacks) == 0 { - return nil // Order buildpack, no validation required + if len(b.Stacks()) == 0 { + return nil // Order buildpack or a buildpack using Targets, no validation required } bpMixins, err := b.findMixinsForStack(stackID) @@ -45,16 +49,93 @@ func (b *BuildpackDescriptor) EnsureStackSupport(stackID string, providedMixins _, missing, _ := stringset.Compare(providedMixins, bpMixins) if len(missing) > 0 { sort.Strings(missing) - return fmt.Errorf("buildpack %s requires missing mixin(s): %s", style.Symbol(b.Info.FullName()), strings.Join(missing, ", ")) + return fmt.Errorf("buildpack %s requires missing mixin(s): %s", style.Symbol(b.Info().FullName()), strings.Join(missing, ", ")) } return nil } +func (b *BuildpackDescriptor) EnsureTargetSupport(givenOS, givenArch, givenDistroName, givenDistroVersion string) error { + if len(b.Targets()) == 0 { + if (!b.WithLinuxBuild && !b.WithWindowsBuild) || len(b.Stacks()) > 0 { // nolint + return nil // Order buildpack or stack buildpack, no validation required + } else if b.WithLinuxBuild && givenOS == DefaultTargetOSLinux && givenArch == DefaultTargetArch { + return nil + } else if b.WithWindowsBuild && givenOS == DefaultTargetOSWindows && givenArch == DefaultTargetArch { + return nil + } + } + for _, bpTarget := range b.Targets() { + if bpTarget.OS == givenOS { + if bpTarget.Arch == "" || givenArch == "" || bpTarget.Arch == givenArch { + if len(bpTarget.Distributions) == 0 || givenDistroName == "" || givenDistroVersion == "" { + return nil + } + for _, bpDistro := range bpTarget.Distributions { + if bpDistro.Name == givenDistroName && bpDistro.Version == givenDistroVersion { + return nil + } + } + } + } + } + type osDistribution struct { + Name string `json:"name,omitempty"` + Version string `json:"version,omitempty"` + } + type target struct { + OS string `json:"os"` + Arch string `json:"arch"` + Distribution 
osDistribution `json:"distribution"` + } + return fmt.Errorf( + "unable to satisfy target os/arch constraints; build image: %s, buildpack %s: %s", + toJSONMaybe(target{ + OS: givenOS, + Arch: givenArch, + Distribution: osDistribution{Name: givenDistroName, Version: givenDistroVersion}, + }), + style.Symbol(b.Info().FullName()), + toJSONMaybe(b.Targets()), + ) +} + +func toJSONMaybe(v interface{}) string { + b, err := json.Marshal(v) + if err != nil { + return fmt.Sprintf("%s", v) // hopefully v is a Stringer + } + return string(b) +} + +func (b *BuildpackDescriptor) Kind() string { + return "buildpack" +} + +func (b *BuildpackDescriptor) API() *api.Version { + return b.WithAPI +} + +func (b *BuildpackDescriptor) Info() ModuleInfo { + return b.WithInfo +} + +func (b *BuildpackDescriptor) Order() Order { + return b.WithOrder +} + +func (b *BuildpackDescriptor) Stacks() []Stack { + return b.WithStacks +} + +func (b *BuildpackDescriptor) Targets() []Target { + return b.WithTargets +} + func (b *BuildpackDescriptor) findMixinsForStack(stackID string) ([]string, error) { - for _, s := range b.Stacks { + for _, s := range b.Stacks() { if s.ID == stackID || s.ID == "*" { return s.Mixins, nil } } - return nil, fmt.Errorf("buildpack %s does not support stack %s", style.Symbol(b.Info.FullName()), style.Symbol(stackID)) + return nil, fmt.Errorf("buildpack %s does not support stack %s", style.Symbol(b.Info().FullName()), style.Symbol(stackID)) } diff --git a/pkg/dist/buildpack_descriptor_test.go b/pkg/dist/buildpack_descriptor_test.go index c45898c2b6..99f2e91124 100644 --- a/pkg/dist/buildpack_descriptor_test.go +++ b/pkg/dist/buildpack_descriptor_test.go @@ -3,10 +3,12 @@ package dist_test import ( "testing" + "github.com/buildpacks/lifecycle/api" "github.com/heroku/color" "github.com/sclevine/spec" "github.com/sclevine/spec/report" + "github.com/buildpacks/pack/pkg/buildpack" "github.com/buildpacks/pack/pkg/dist" h "github.com/buildpacks/pack/testhelpers" ) @@ -18,15 +20,24 
@@ func TestBuildpackDescriptor(t *testing.T) { } func testBuildpackDescriptor(t *testing.T, when spec.G, it spec.S) { + when("#EscapedID", func() { + it("returns escaped ID", func() { + bpDesc := dist.BuildpackDescriptor{ + WithInfo: dist.ModuleInfo{ID: "some/id"}, + } + h.AssertEq(t, bpDesc.EscapedID(), "some_id") + }) + }) + when("#EnsureStackSupport", func() { when("not validating against run image mixins", func() { it("ignores run-only mixins", func() { bp := dist.BuildpackDescriptor{ - Info: dist.BuildpackInfo{ + WithInfo: dist.ModuleInfo{ ID: "some.buildpack.id", Version: "some.buildpack.version", }, - Stacks: []dist.Stack{{ + WithStacks: []dist.Stack{{ ID: "some.stack.id", Mixins: []string{"mixinA", "build:mixinB", "run:mixinD"}, }}, @@ -38,11 +49,11 @@ func testBuildpackDescriptor(t *testing.T, when spec.G, it spec.S) { it("works with wildcard stack", func() { bp := dist.BuildpackDescriptor{ - Info: dist.BuildpackInfo{ + WithInfo: dist.ModuleInfo{ ID: "some.buildpack.id", Version: "some.buildpack.version", }, - Stacks: []dist.Stack{{ + WithStacks: []dist.Stack{{ ID: "*", Mixins: []string{"mixinA", "build:mixinB", "run:mixinD"}, }}, @@ -54,11 +65,11 @@ func testBuildpackDescriptor(t *testing.T, when spec.G, it spec.S) { it("returns an error with any missing (and non-ignored) mixins", func() { bp := dist.BuildpackDescriptor{ - Info: dist.BuildpackInfo{ + WithInfo: dist.ModuleInfo{ ID: "some.buildpack.id", Version: "some.buildpack.version", }, - Stacks: []dist.Stack{{ + WithStacks: []dist.Stack{{ ID: "some.stack.id", Mixins: []string{"mixinX", "mixinY", "run:mixinZ"}, }}, @@ -74,11 +85,11 @@ func testBuildpackDescriptor(t *testing.T, when spec.G, it spec.S) { when("validating against run image mixins", func() { it("requires run-only mixins", func() { bp := dist.BuildpackDescriptor{ - Info: dist.BuildpackInfo{ + WithInfo: dist.ModuleInfo{ ID: "some.buildpack.id", Version: "some.buildpack.version", }, - Stacks: []dist.Stack{{ + WithStacks: []dist.Stack{{ ID: 
"some.stack.id", Mixins: []string{"mixinA", "build:mixinB", "run:mixinD"}, }}, @@ -91,11 +102,11 @@ func testBuildpackDescriptor(t *testing.T, when spec.G, it spec.S) { it("returns an error with any missing mixins", func() { bp := dist.BuildpackDescriptor{ - Info: dist.BuildpackInfo{ + WithInfo: dist.ModuleInfo{ ID: "some.buildpack.id", Version: "some.buildpack.version", }, - Stacks: []dist.Stack{{ + WithStacks: []dist.Stack{{ ID: "some.stack.id", Mixins: []string{"mixinX", "mixinY", "run:mixinZ"}, }}, @@ -111,11 +122,11 @@ func testBuildpackDescriptor(t *testing.T, when spec.G, it spec.S) { it("returns an error when buildpack does not support stack", func() { bp := dist.BuildpackDescriptor{ - Info: dist.BuildpackInfo{ + WithInfo: dist.ModuleInfo{ ID: "some.buildpack.id", Version: "some.buildpack.version", }, - Stacks: []dist.Stack{{ + WithStacks: []dist.Stack{{ ID: "some.stack.id", Mixins: []string{"mixinX", "mixinY"}, }}, @@ -128,14 +139,200 @@ func testBuildpackDescriptor(t *testing.T, when spec.G, it spec.S) { it("skips validating order buildpack", func() { bp := dist.BuildpackDescriptor{ - Info: dist.BuildpackInfo{ + WithInfo: dist.ModuleInfo{ ID: "some.buildpack.id", Version: "some.buildpack.version", }, - Stacks: []dist.Stack{}, + WithStacks: []dist.Stack{}, } h.AssertNil(t, bp.EnsureStackSupport("some.stack.id", []string{"mixinA"}, true)) }) }) + + when("validating against run image target", func() { + it("succeeds with no distribution", func() { + bp := dist.BuildpackDescriptor{ + WithInfo: dist.ModuleInfo{ + ID: "some.buildpack.id", + Version: "some.buildpack.version", + }, + WithTargets: []dist.Target{{ + OS: "fake-os", + Arch: "fake-arch", + }}, + } + + h.AssertNil(t, bp.EnsureStackSupport("some.stack.id", []string{}, true)) + h.AssertNil(t, bp.EnsureTargetSupport("fake-os", "fake-arch", "fake-distro", "0.0")) + }) + + it("succeeds with no target and bin/build.exe", func() { + bp := dist.BuildpackDescriptor{ + WithInfo: dist.ModuleInfo{ + ID: 
"some.buildpack.id", + Version: "some.buildpack.version", + }, + WithWindowsBuild: true, + } + + h.AssertNil(t, bp.EnsureStackSupport("some.stack.id", []string{}, true)) + h.AssertNil(t, bp.EnsureTargetSupport("windows", "amd64", "fake-distro", "0.0")) + }) + + it("succeeds with no target and bin/build", func() { + bp := dist.BuildpackDescriptor{ + WithInfo: dist.ModuleInfo{ + ID: "some.buildpack.id", + Version: "some.buildpack.version", + }, + WithLinuxBuild: true, + } + + h.AssertNil(t, bp.EnsureStackSupport("some.stack.id", []string{}, true)) + h.AssertNil(t, bp.EnsureTargetSupport("linux", "amd64", "fake-distro", "0.0")) + }) + + it("returns an error when no match", func() { + bp := dist.BuildpackDescriptor{ + WithInfo: dist.ModuleInfo{ + ID: "some.buildpack.id", + Version: "some.buildpack.version", + }, + WithTargets: []dist.Target{{ + OS: "fake-os", + Arch: "fake-arch", + }}, + } + + h.AssertNil(t, bp.EnsureStackSupport("some.stack.id", []string{}, true)) + h.AssertError(t, bp.EnsureTargetSupport("some-other-os", "fake-arch", "fake-distro", "0.0"), + `unable to satisfy target os/arch constraints; build image: {"os":"some-other-os","arch":"fake-arch","distribution":{"name":"fake-distro","version":"0.0"}}, buildpack 'some.buildpack.id@some.buildpack.version': [{"os":"fake-os","arch":"fake-arch"}]`) + }) + + it("succeeds with distribution", func() { + bp := dist.BuildpackDescriptor{ + WithInfo: dist.ModuleInfo{ + ID: "some.buildpack.id", + Version: "some.buildpack.version", + }, + WithTargets: []dist.Target{{ + OS: "fake-os", + Arch: "fake-arch", + Distributions: []dist.Distribution{ + { + Name: "fake-distro", + Version: "0.1", + }, + { + Name: "another-distro", + Version: "0.22", + }, + }, + }}, + } + + h.AssertNil(t, bp.EnsureStackSupport("some.stack.id", []string{}, true)) + h.AssertNil(t, bp.EnsureTargetSupport("fake-os", "fake-arch", "fake-distro", "0.1")) + }) + + it("returns an error when no distribution matches", func() { + bp := 
dist.BuildpackDescriptor{ + WithInfo: dist.ModuleInfo{ + ID: "some.buildpack.id", + Version: "some.buildpack.version", + }, + WithTargets: []dist.Target{{ + OS: "fake-os", + Arch: "fake-arch", + Distributions: []dist.Distribution{ + { + Name: "fake-distro", + Version: "0.1", + }, + { + Name: "another-distro", + Version: "0.22", + }, + }, + }}, + } + + h.AssertNil(t, bp.EnsureStackSupport("some.stack.id", []string{}, true)) + h.AssertError(t, bp.EnsureTargetSupport("some-other-os", "fake-arch", "fake-distro", "0.0"), + `unable to satisfy target os/arch constraints; build image: {"os":"some-other-os","arch":"fake-arch","distribution":{"name":"fake-distro","version":"0.0"}}, buildpack 'some.buildpack.id@some.buildpack.version': [{"os":"fake-os","arch":"fake-arch","distros":[{"name":"fake-distro","version":"0.1"},{"name":"another-distro","version":"0.22"}]}]`) + }) + + it("succeeds with missing arch", func() { + bp := dist.BuildpackDescriptor{ + WithInfo: dist.ModuleInfo{ + ID: "some.buildpack.id", + Version: "some.buildpack.version", + }, + WithTargets: []dist.Target{{ + OS: "fake-os", + }}, + } + + h.AssertNil(t, bp.EnsureTargetSupport("fake-os", "fake-arch", "fake-distro", "0.1")) + }) + }) + + when("#Kind", func() { + it("returns 'buildpack'", func() { + bpDesc := dist.BuildpackDescriptor{} + h.AssertEq(t, bpDesc.Kind(), buildpack.KindBuildpack) + }) + }) + + when("#API", func() { + it("returns the api", func() { + bpDesc := dist.BuildpackDescriptor{ + WithAPI: api.MustParse("0.99"), + } + h.AssertEq(t, bpDesc.API().String(), "0.99") + }) + }) + + when("#Info", func() { + it("returns the module info", func() { + info := dist.ModuleInfo{ + ID: "some-id", + Name: "some-name", + Version: "some-version", + } + bpDesc := dist.BuildpackDescriptor{ + WithInfo: info, + } + h.AssertEq(t, bpDesc.Info(), info) + }) + }) + + when("#Order", func() { + it("returns the order", func() { + order := dist.Order{ + dist.OrderEntry{Group: []dist.ModuleRef{ + {ModuleInfo: 
dist.ModuleInfo{ + ID: "some-id", Name: "some-name", Version: "some-version", + }}, + }}, + } + bpDesc := dist.BuildpackDescriptor{ + WithOrder: order, + } + h.AssertEq(t, bpDesc.Order(), order) + }) + }) + + when("#Stacks", func() { + it("returns the stacks", func() { + stacks := []dist.Stack{ + {ID: "some-id", Mixins: []string{"some-mixin"}}, + } + bpDesc := dist.BuildpackDescriptor{ + WithStacks: stacks, + } + h.AssertEq(t, bpDesc.Stacks(), stacks) + }) + }) } diff --git a/pkg/dist/dist.go b/pkg/dist/dist.go index 6ccee5bcc9..fa2ea2488b 100644 --- a/pkg/dist/dist.go +++ b/pkg/dist/dist.go @@ -4,7 +4,14 @@ import ( "github.com/buildpacks/lifecycle/api" ) -const BuildpackLayersLabel = "io.buildpacks.buildpack.layers" +const ( + BuildpackLayersLabel = "io.buildpacks.buildpack.layers" + ExtensionLayersLabel = "io.buildpacks.extension.layers" + ExtensionMetadataLabel = "io.buildpacks.extension.metadata" + DefaultTargetOSLinux = "linux" + DefaultTargetOSWindows = "windows" + DefaultTargetArch = "amd64" +) type BuildpackURI struct { URI string `toml:"uri"` @@ -20,11 +27,11 @@ type ImageOrURI struct { } func (c *ImageOrURI) DisplayString() string { - if c.BuildpackURI.URI != "" { - return c.BuildpackURI.URI + if c.URI != "" { + return c.URI } - return c.ImageRef.ImageName + return c.ImageName } type Platform struct { @@ -34,29 +41,39 @@ type Platform struct { type Order []OrderEntry type OrderEntry struct { - Group []BuildpackRef `toml:"group" json:"group"` + Group []ModuleRef `toml:"group" json:"group"` +} + +type System struct { + Pre SystemBuildpacks `toml:"pre,omitempty" json:"pre,omitempty"` + Post SystemBuildpacks `toml:"post,omitempty" json:"post,omitempty"` +} + +type SystemBuildpacks struct { + Buildpacks []ModuleRef `toml:"buildpacks,omitempty" json:"buildpacks,omitempty"` } -type BuildpackRef struct { - BuildpackInfo `yaml:"buildpackinfo,inline"` - Optional bool `toml:"optional,omitempty" json:"optional,omitempty" yaml:"optional,omitempty"` +type ModuleRef 
struct { + ModuleInfo `yaml:"buildpackinfo,inline"` + Optional bool `toml:"optional,omitempty" json:"optional,omitempty" yaml:"optional,omitempty"` } -type BuildpackLayers map[string]map[string]BuildpackLayerInfo +type ModuleLayers map[string]map[string]ModuleLayerInfo -type BuildpackLayerInfo struct { +type ModuleLayerInfo struct { API *api.Version `json:"api"` Stacks []Stack `json:"stacks,omitempty"` + Targets []Target `json:"targets,omitempty"` Order Order `json:"order,omitempty"` LayerDiffID string `json:"layerDiffID"` Homepage string `json:"homepage,omitempty"` Name string `json:"name,omitempty"` } -func (b BuildpackLayers) Get(id, version string) (BuildpackLayerInfo, bool) { +func (b ModuleLayers) Get(id, version string) (ModuleLayerInfo, bool) { buildpackLayerEntries, ok := b[id] if !ok { - return BuildpackLayerInfo{}, false + return ModuleLayerInfo{}, false } if len(buildpackLayerEntries) == 1 && version == "" { for key := range buildpackLayerEntries { diff --git a/pkg/dist/dist_test.go b/pkg/dist/dist_test.go index e8cb94fdf4..1dfc7be77a 100644 --- a/pkg/dist/dist_test.go +++ b/pkg/dist/dist_test.go @@ -19,10 +19,10 @@ func TestDist(t *testing.T) { } func testDist(t *testing.T, when spec.G, it spec.S) { - when("BuildpackLayers", func() { + when("ModuleLayers", func() { when("Get", func() { var ( - buildpackLayers dist.BuildpackLayers + buildpackLayers dist.ModuleLayers apiVersion *api.Version ) it.Before(func() { @@ -30,7 +30,7 @@ func testDist(t *testing.T, when spec.G, it spec.S) { apiVersion, err = api.NewVersion("0.0") h.AssertNil(t, err) - buildpackLayers = dist.BuildpackLayers{ + buildpackLayers = dist.ModuleLayers{ "buildpack": { "version1": { API: apiVersion, @@ -54,7 +54,7 @@ func testDist(t *testing.T, when spec.G, it spec.S) { it("succeeds", func() { out, ok := buildpackLayers.Get("buildpack", "version1") h.AssertEq(t, ok, true) - h.AssertEq(t, out, dist.BuildpackLayerInfo{ + h.AssertEq(t, out, dist.ModuleLayerInfo{ API: apiVersion, LayerDiffID: 
"buildpack-v1-diff", }) @@ -65,7 +65,7 @@ func testDist(t *testing.T, when spec.G, it spec.S) { it("succeeds", func() { out, ok := buildpackLayers.Get("buildpack", "") h.AssertEq(t, ok, true) - h.AssertEq(t, out, dist.BuildpackLayerInfo{ + h.AssertEq(t, out, dist.ModuleLayerInfo{ API: apiVersion, LayerDiffID: "buildpack-v1-diff", }) @@ -86,16 +86,48 @@ func testDist(t *testing.T, when spec.G, it spec.S) { }) }) }) + when("Add", func() { when("a new buildpack is added", func() { it("succeeds", func() { - layers := dist.BuildpackLayers{} + layers := dist.ModuleLayers{} apiVersion, _ := api.NewVersion("0.0") - descriptor := dist.BuildpackDescriptor{API: apiVersion, Info: dist.BuildpackInfo{ID: "test", Name: "test", Version: "1.0"}} - dist.AddBuildpackToLayersMD(layers, descriptor, "") - layerInfo, ok := layers.Get(descriptor.Info.ID, descriptor.Info.Version) + descriptor := dist.BuildpackDescriptor{WithAPI: apiVersion, WithInfo: dist.ModuleInfo{ID: "test", Name: "test", Version: "1.0"}} + dist.AddToLayersMD(layers, &descriptor, "") + layerInfo, ok := layers.Get(descriptor.Info().ID, descriptor.Info().Version) h.AssertEq(t, ok, true) - h.AssertEq(t, layerInfo.Name, descriptor.Info.Name) + h.AssertEq(t, layerInfo.Name, descriptor.Info().Name) + }) + }) + }) + }) + + when("ImageOrURI", func() { + when("DisplayString", func() { + when("uri", func() { + when("blank", func() { + it("returns image", func() { + toTest := dist.ImageOrURI{ + ImageRef: dist.ImageRef{ + ImageName: "some-image-name", + }, + } + h.AssertEq(t, toTest.DisplayString(), "some-image-name") + }) + }) + + when("not blank", func() { + it("returns uri", func() { + toTest := dist.ImageOrURI{ + BuildpackURI: dist.BuildpackURI{ + URI: "some-uri", + }, + ImageRef: dist.ImageRef{ + ImageName: "some-image-name", + }, + } + h.AssertEq(t, toTest.DisplayString(), "some-uri") + }) }) }) }) diff --git a/pkg/dist/extension_descriptor.go b/pkg/dist/extension_descriptor.go new file mode 100644 index 
0000000000..38b3e99a6a --- /dev/null +++ b/pkg/dist/extension_descriptor.go @@ -0,0 +1,49 @@ +package dist + +import ( + "strings" + + "github.com/buildpacks/lifecycle/api" +) + +type ExtensionDescriptor struct { + WithAPI *api.Version `toml:"api"` + WithInfo ModuleInfo `toml:"extension"` + WithTargets []Target `toml:"targets,omitempty"` +} + +func (e *ExtensionDescriptor) EnsureStackSupport(_ string, _ []string, _ bool) error { + return nil +} + +func (e *ExtensionDescriptor) EnsureTargetSupport(_, _, _, _ string) error { + return nil +} + +func (e *ExtensionDescriptor) EscapedID() string { + return strings.ReplaceAll(e.Info().ID, "/", "_") +} + +func (e *ExtensionDescriptor) Kind() string { + return "extension" +} + +func (e *ExtensionDescriptor) API() *api.Version { + return e.WithAPI +} + +func (e *ExtensionDescriptor) Info() ModuleInfo { + return e.WithInfo +} + +func (e *ExtensionDescriptor) Order() Order { + return nil +} + +func (e *ExtensionDescriptor) Stacks() []Stack { + return nil +} + +func (e *ExtensionDescriptor) Targets() []Target { + return e.WithTargets +} diff --git a/pkg/dist/extension_descriptor_test.go b/pkg/dist/extension_descriptor_test.go new file mode 100644 index 0000000000..b8817a3a99 --- /dev/null +++ b/pkg/dist/extension_descriptor_test.go @@ -0,0 +1,90 @@ +package dist_test + +import ( + "testing" + + "github.com/buildpacks/lifecycle/api" + "github.com/heroku/color" + "github.com/sclevine/spec" + "github.com/sclevine/spec/report" + + "github.com/buildpacks/pack/pkg/buildpack" + "github.com/buildpacks/pack/pkg/dist" + h "github.com/buildpacks/pack/testhelpers" +) + +func TestExtensionDescriptor(t *testing.T) { + color.Disable(true) + defer color.Disable(false) + spec.Run(t, "testExtensionDescriptor", testExtensionDescriptor, spec.Parallel(), spec.Report(report.Terminal{})) +} + +func testExtensionDescriptor(t *testing.T, when spec.G, it spec.S) { + when("#EscapedID", func() { + it("returns escaped ID", func() { + extDesc := 
dist.ExtensionDescriptor{ + WithInfo: dist.ModuleInfo{ID: "some/id"}, + } + h.AssertEq(t, extDesc.EscapedID(), "some_id") + }) + }) + + when("#Kind", func() { + it("returns 'extension'", func() { + extDesc := dist.ExtensionDescriptor{} + h.AssertEq(t, extDesc.Kind(), buildpack.KindExtension) + }) + }) + + when("#API", func() { + it("returns the api", func() { + extDesc := dist.ExtensionDescriptor{ + WithAPI: api.MustParse("0.99"), + } + h.AssertEq(t, extDesc.API().String(), "0.99") + }) + }) + + when("#Info", func() { + it("returns the module info", func() { + info := dist.ModuleInfo{ + ID: "some-id", + Name: "some-name", + Version: "some-version", + } + extDesc := dist.ExtensionDescriptor{ + WithInfo: info, + } + h.AssertEq(t, extDesc.Info(), info) + }) + }) + + when("#Order", func() { + it("returns empty", func() { + var empty dist.Order + extDesc := dist.ExtensionDescriptor{} + h.AssertEq(t, extDesc.Order(), empty) + }) + }) + + when("#Stacks", func() { + it("returns empty", func() { + var empty []dist.Stack + extDesc := dist.ExtensionDescriptor{} + h.AssertEq(t, extDesc.Stacks(), empty) + }) + }) + + when("#Targets", func() { + it("returns the api", func() { + targets := []dist.Target{{ + OS: "fake-os", + Arch: "fake-arch", + }} + extDesc := dist.ExtensionDescriptor{ + WithTargets: targets, + } + h.AssertEq(t, extDesc.Targets(), targets) + }) + }) +} diff --git a/pkg/dist/image_test.go b/pkg/dist/image_test.go new file mode 100644 index 0000000000..a971df7cca --- /dev/null +++ b/pkg/dist/image_test.go @@ -0,0 +1,59 @@ +package dist_test + +import ( + "testing" + + "github.com/heroku/color" + "github.com/pkg/errors" + "github.com/sclevine/spec" + "github.com/sclevine/spec/report" + + "github.com/buildpacks/pack/internal/builder/fakes" + "github.com/buildpacks/pack/pkg/dist" + h "github.com/buildpacks/pack/testhelpers" +) + +func TestImage(t *testing.T) { + color.Disable(true) + defer color.Disable(false) + spec.Run(t, "testImage", testImage, spec.Parallel(), 
spec.Report(report.Terminal{})) +} + +func testImage(t *testing.T, when spec.G, it spec.S) { + when("A label needs to be get", func() { + it("sets a label successfully", func() { + var outputLabel bool + mockInspectable := fakes.FakeInspectable{ReturnForLabel: "true", ErrorForLabel: nil} + + isPresent, err := dist.GetLabel(&mockInspectable, "random-label", &outputLabel) + + h.AssertNil(t, err) + h.AssertEq(t, isPresent, true) + h.AssertEq(t, outputLabel, true) + }) + + it("returns an error", func() { + var outputLabel bool + mockInspectable := fakes.FakeInspectable{ReturnForLabel: "", ErrorForLabel: errors.New("random-error")} + + isPresent, err := dist.GetLabel(&mockInspectable, "random-label", &outputLabel) + + h.AssertNotNil(t, err) + h.AssertEq(t, isPresent, false) + h.AssertEq(t, outputLabel, false) + }) + }) + + when("Try to get an empty label", func() { + it("returns isPresent but it doesn't set the label", func() { + var outputLabel bool + mockInspectable := fakes.FakeInspectable{ReturnForLabel: "", ErrorForLabel: nil} + + isPresent, err := dist.GetLabel(&mockInspectable, "random-label", &outputLabel) + + h.AssertNil(t, err) + h.AssertEq(t, isPresent, false) + h.AssertEq(t, outputLabel, false) + }) + }) +} diff --git a/pkg/dist/layers.go b/pkg/dist/layers.go index 873f86ca4f..590454980e 100644 --- a/pkg/dist/layers.go +++ b/pkg/dist/layers.go @@ -4,11 +4,20 @@ import ( "os" "path/filepath" + "github.com/buildpacks/lifecycle/api" v1 "github.com/google/go-containerregistry/pkg/v1" "github.com/google/go-containerregistry/pkg/v1/tarball" "github.com/pkg/errors" ) +type Descriptor interface { + API() *api.Version + Info() ModuleInfo + Order() Order + Stacks() []Stack + Targets() []Target +} + func LayerDiffID(layerTarPath string) (v1.Hash, error) { fh, err := os.Open(filepath.Clean(layerTarPath)) if err != nil { @@ -29,17 +38,18 @@ func LayerDiffID(layerTarPath string) (v1.Hash, error) { return hash, nil } -func AddBuildpackToLayersMD(layerMD BuildpackLayers, 
descriptor BuildpackDescriptor, diffID string) { - bpInfo := descriptor.Info - if _, ok := layerMD[bpInfo.ID]; !ok { - layerMD[bpInfo.ID] = map[string]BuildpackLayerInfo{} +func AddToLayersMD(layerMD ModuleLayers, descriptor Descriptor, diffID string) { + info := descriptor.Info() + if _, ok := layerMD[info.ID]; !ok { + layerMD[info.ID] = map[string]ModuleLayerInfo{} } - layerMD[bpInfo.ID][bpInfo.Version] = BuildpackLayerInfo{ - API: descriptor.API, - Stacks: descriptor.Stacks, - Order: descriptor.Order, + layerMD[info.ID][info.Version] = ModuleLayerInfo{ + API: descriptor.API(), + Stacks: descriptor.Stacks(), + Targets: descriptor.Targets(), + Order: descriptor.Order(), LayerDiffID: diffID, - Homepage: bpInfo.Homepage, - Name: bpInfo.Name, + Homepage: info.Homepage, + Name: info.Name, } } diff --git a/pkg/image/fetcher.go b/pkg/image/fetcher.go index 4ce9f2e81a..f325a30ca8 100644 --- a/pkg/image/fetcher.go +++ b/pkg/image/fetcher.go @@ -4,22 +4,28 @@ import ( "context" "encoding/base64" "encoding/json" + "fmt" "io" "strings" + "github.com/buildpacks/imgutil/layout" + "github.com/buildpacks/imgutil/layout/sparse" + cerrdefs "github.com/containerd/errdefs" + "github.com/buildpacks/imgutil" "github.com/buildpacks/imgutil/local" "github.com/buildpacks/imgutil/remote" "github.com/buildpacks/lifecycle/auth" - "github.com/docker/docker/api/types" - "github.com/docker/docker/client" "github.com/docker/docker/pkg/jsonmessage" "github.com/google/go-containerregistry/pkg/authn" + "github.com/moby/moby/client" + ocispec "github.com/opencontainers/image-spec/specs-go/v1" "github.com/pkg/errors" pname "github.com/buildpacks/pack/internal/name" "github.com/buildpacks/pack/internal/style" "github.com/buildpacks/pack/internal/term" + "github.com/buildpacks/pack/pkg/dist" "github.com/buildpacks/pack/pkg/logging" ) @@ -27,6 +33,11 @@ import ( // Values in these functions are set through currying. 
type FetcherOption func(c *Fetcher) +type LayoutOption struct { + Path string + Sparse bool +} + // WithRegistryMirrors supply your own mirrors for registry. func WithRegistryMirrors(registryMirrors map[string]string) FetcherOption { return func(c *Fetcher) { @@ -40,20 +51,27 @@ func WithKeychain(keychain authn.Keychain) FetcherOption { } } +type DockerClient interface { + local.DockerClient + ImagePull(ctx context.Context, ref string, options client.ImagePullOptions) (client.ImagePullResponse, error) +} + type Fetcher struct { - docker client.CommonAPIClient + docker DockerClient logger logging.Logger registryMirrors map[string]string keychain authn.Keychain } type FetchOptions struct { - Daemon bool - Platform string - PullPolicy PullPolicy + Daemon bool + Target *dist.Target + PullPolicy PullPolicy + LayoutOption LayoutOption + InsecureRegistries []string } -func NewFetcher(logger logging.Logger, docker client.CommonAPIClient, opts ...FetcherOption) *Fetcher { +func NewFetcher(logger logging.Logger, docker DockerClient, opts ...FetcherOption) *Fetcher { fetcher := &Fetcher{ logger: logger, docker: docker, @@ -75,8 +93,12 @@ func (f *Fetcher) Fetch(ctx context.Context, name string, options FetchOptions) return nil, err } + if (options.LayoutOption != LayoutOption{}) { + return f.fetchLayoutImage(name, options.LayoutOption) + } + if !options.Daemon { - return f.fetchRemoteImage(name) + return f.fetchRemoteImage(name, options.Target, options.InsecureRegistries) } switch options.PullPolicy { @@ -90,8 +112,22 @@ func (f *Fetcher) Fetch(ctx context.Context, name string, options FetchOptions) } } - f.logger.Debugf("Pulling image %s", style.Symbol(name)) - err = f.pullImage(ctx, name, options.Platform) + msg := fmt.Sprintf("Pulling image %s", style.Symbol(name)) + if options.Target != nil { + msg = fmt.Sprintf("Pulling image %s with platform %s", style.Symbol(name), style.Symbol(options.Target.ValuesAsPlatform())) + } + f.logger.Debug(msg) + if err = f.pullImage(ctx, 
name, options.Target); err != nil { + // FIXME: this matching is brittle and the fallback should be removed when https://github.com/buildpacks/pack/issues/2079 + // has been fixed for a sufficient amount of time. + // Sample error from docker engine: + // `image with reference was found but does not match the specified platform: wanted linux/amd64, actual: linux` or + // `image with reference was found but its platform (linux) does not match the specified platform (linux/amd64)` + if strings.Contains(err.Error(), "does not match the specified platform") { + f.logger.Debugf(fmt.Sprintf("Pulling image %s", style.Symbol(name))) + err = f.pullImage(ctx, name, nil) + } + } if err != nil && !errors.Is(err, ErrNotFound) { return nil, err } @@ -99,6 +135,41 @@ func (f *Fetcher) Fetch(ctx context.Context, name string, options FetchOptions) return f.fetchDaemonImage(name) } +func (f *Fetcher) CheckReadAccess(repo string, options FetchOptions) bool { + if !options.Daemon || options.PullPolicy == PullAlways { + return f.checkRemoteReadAccess(repo) + } + if _, err := f.fetchDaemonImage(repo); err != nil { + if errors.Is(err, ErrNotFound) { + // Image doesn't exist in the daemon + // Pull Never: should fail + // Pull If Not Present: need to check the registry + if options.PullPolicy == PullNever { + return false + } + return f.checkRemoteReadAccess(repo) + } + f.logger.Debugf("failed reading image '%s' from the daemon, error: %s", repo, err.Error()) + return false + } + return true +} + +func (f *Fetcher) checkRemoteReadAccess(repo string) bool { + img, err := remote.NewImage(repo, f.keychain) + if err != nil { + f.logger.Debugf("failed accessing remote image %s, error: %s", repo, err.Error()) + return false + } + if ok, err := img.CheckReadAccess(); ok { + f.logger.Debugf("CheckReadAccess succeeded for the run image %s", repo) + return true + } else { + f.logger.Debugf("CheckReadAccess failed for the run image %s, error: %s", repo, err.Error()) + return false + } +} + func (f 
*Fetcher) fetchDaemonImage(name string) (imgutil.Image, error) { image, err := local.NewImage(name, f.docker, local.FromBaseImage(name)) if err != nil { @@ -112,8 +183,26 @@ func (f *Fetcher) fetchDaemonImage(name string) (imgutil.Image, error) { return image, nil } -func (f *Fetcher) fetchRemoteImage(name string) (imgutil.Image, error) { - image, err := remote.NewImage(name, f.keychain, remote.FromBaseImage(name)) +func (f *Fetcher) fetchRemoteImage(name string, target *dist.Target, insecureRegistries []string) (imgutil.Image, error) { + var ( + image imgutil.Image + options []imgutil.ImageOption + err error + ) + + if len(insecureRegistries) > 0 { + for _, registry := range insecureRegistries { + options = append(options, remote.WithRegistrySetting(registry, true)) + } + } + + if target == nil { + image, err = remote.NewImage(name, f.keychain, append(options, remote.FromBaseImage(name))...) + } else { + platform := imgutil.Platform{OS: target.OS, Architecture: target.Arch, Variant: target.ArchVariant} + image, err = remote.NewImage(name, f.keychain, append(append(options, remote.FromBaseImage(name)), remote.WithDefaultPlatform(platform))...) 
+ } + if err != nil { return nil, err } @@ -125,15 +214,99 @@ func (f *Fetcher) fetchRemoteImage(name string) (imgutil.Image, error) { return image, nil } -func (f *Fetcher) pullImage(ctx context.Context, imageID string, platform string) error { +func (f *Fetcher) fetchLayoutImage(name string, options LayoutOption) (imgutil.Image, error) { + var ( + image imgutil.Image + err error + ) + + v1Image, err := remote.NewV1Image(name, f.keychain) + if err != nil { + return nil, err + } + + if options.Sparse { + image, err = sparse.NewImage(options.Path, v1Image) + } else { + image, err = layout.NewImage(options.Path, layout.FromBaseImageInstance(v1Image)) + } + + if err != nil { + return nil, err + } + + err = image.Save() + if err != nil { + return nil, err + } + + return image, nil +} + +// FetchForPlatform fetches an image and resolves it to a platform-specific digest before fetching. +// This ensures that multi-platform images are always resolved to the correct platform-specific manifest. +func (f *Fetcher) FetchForPlatform(ctx context.Context, name string, options FetchOptions) (imgutil.Image, error) { + // If no target is specified, fall back to regular fetch + if options.Target == nil { + return f.Fetch(ctx, name, options) + } + + name, err := pname.TranslateRegistry(name, f.registryMirrors, f.logger) + if err != nil { + return nil, err + } + + platformStr := options.Target.ValuesAsPlatform() + + // When PullPolicy is PullNever, skip platform-specific digest resolution as it requires + // network access to fetch the manifest list. Instead, use the image as-is from the daemon. + // Note: This may cause issues with containerd storage. Users should pre-pull the platform-specific + // digest if they encounter errors. 
+ if options.Daemon && options.PullPolicy == PullNever { + f.logger.Debugf("Using lifecycle %s with platform %s (skipping digest resolution due to --pull-policy never)", name, platformStr) + return f.Fetch(ctx, name, options) + } + + // Build platform and registry settings from options + platform := imgutil.Platform{ + OS: options.Target.OS, + Architecture: options.Target.Arch, + Variant: options.Target.ArchVariant, + } + registrySettings := make(map[string]imgutil.RegistrySetting) + for _, registry := range options.InsecureRegistries { + registrySettings[registry] = imgutil.RegistrySetting{Insecure: true} + } + + // Resolve to platform-specific digest + resolvedName, err := resolvePlatformSpecificDigest(name, &platform, f.keychain, registrySettings) + if err != nil { + return nil, errors.Wrapf(err, "resolving image %s to platform-specific digest", style.Symbol(name)) + } + + // Log the resolution for visibility + f.logger.Debugf("Using lifecycle %s; pulling digest %s for platform %s", name, resolvedName, platformStr) + + return f.Fetch(ctx, resolvedName, options) +} + +func (f *Fetcher) pullImage(ctx context.Context, imageID string, target *dist.Target) error { regAuth, err := f.registryAuth(imageID) if err != nil { return err } - rc, err := f.docker.ImagePull(ctx, imageID, types.ImagePullOptions{RegistryAuth: regAuth, Platform: platform}) + pullOpts := client.ImagePullOptions{RegistryAuth: regAuth} + if target != nil { + pullOpts.Platforms = []ocispec.Platform{{ + OS: target.OS, + Architecture: target.Arch, + Variant: target.ArchVariant, + }} + } + pullResult, err := f.docker.ImagePull(ctx, imageID, pullOpts) if err != nil { - if client.IsErrNotFound(err) { + if cerrdefs.IsNotFound(err) { return errors.Wrapf(ErrNotFound, "image %s does not exist on the daemon", style.Symbol(imageID)) } @@ -143,12 +316,12 @@ func (f *Fetcher) pullImage(ctx context.Context, imageID string, platform string writer := logging.GetWriterForLevel(f.logger, logging.InfoLevel) termFd, 
isTerm := term.IsTerminal(writer) - err = jsonmessage.DisplayJSONMessagesStream(rc, &colorizedWriter{writer}, termFd, isTerm, nil) + err = jsonmessage.DisplayJSONMessagesStream(pullResult, &colorizedWriter{writer}, termFd, isTerm, nil) if err != nil { return err } - return rc.Close() + return pullResult.Close() } func (f *Fetcher) registryAuth(ref string) (string, error) { @@ -193,3 +366,5 @@ func (w *colorizedWriter) Write(p []byte) (n int, err error) { } return w.writer.Write([]byte(msg)) } + +// WrapDockerClient wraps a moby docker client to match our DockerClient interface diff --git a/pkg/image/fetcher_test.go b/pkg/image/fetcher_test.go index 4d4d4c0a34..ebe5f02f7f 100644 --- a/pkg/image/fetcher_test.go +++ b/pkg/image/fetcher_test.go @@ -4,30 +4,33 @@ import ( "bytes" "context" "fmt" - "math/rand" "os" + "path/filepath" + "runtime" "testing" - "time" + "github.com/buildpacks/imgutil" "github.com/buildpacks/imgutil/local" "github.com/buildpacks/imgutil/remote" - "github.com/docker/docker/client" + "github.com/golang/mock/gomock" "github.com/google/go-containerregistry/pkg/authn" "github.com/heroku/color" + "github.com/moby/moby/client" + "github.com/pkg/errors" "github.com/sclevine/spec" "github.com/sclevine/spec/report" + "github.com/buildpacks/pack/pkg/dist" "github.com/buildpacks/pack/pkg/image" "github.com/buildpacks/pack/pkg/logging" + "github.com/buildpacks/pack/pkg/testmocks" h "github.com/buildpacks/pack/testhelpers" ) -var docker client.CommonAPIClient +var docker *client.Client var registryConfig *h.TestRegistryConfig func TestFetcher(t *testing.T) { - rand.Seed(time.Now().UTC().UnixNano()) - color.Disable(true) defer color.Disable(false) @@ -40,9 +43,9 @@ func TestFetcher(t *testing.T) { os.Setenv("DOCKER_CONFIG", registryConfig.DockerConfigDir) var err error - docker, err = client.NewClientWithOpts(client.FromEnv, client.WithVersion("1.38")) + docker, err = client.New(client.FromEnv) h.AssertNil(t, err) - spec.Run(t, "Fetcher", testFetcher, 
spec.Report(report.Terminal{})) + spec.Run(t, "Fetcher", testFetcher, spec.Parallel(), spec.Report(report.Terminal{})) } func testFetcher(t *testing.T, when spec.G, it spec.S) { @@ -51,28 +54,79 @@ func testFetcher(t *testing.T, when spec.G, it spec.S) { repoName string repo string outBuf bytes.Buffer + osType string ) it.Before(func() { repo = "some-org/" + h.RandString(10) repoName = registryConfig.RepoName(repo) - imageFetcher = image.NewFetcher(logging.NewLogWithWriters(&outBuf, &outBuf), docker) + imageFetcher = image.NewFetcher(logging.NewLogWithWriters(&outBuf, &outBuf, logging.WithVerbose()), docker) + + infoResult, err := docker.Info(context.TODO(), client.InfoOptions{}) + h.AssertNil(t, err) + osType = infoResult.Info.OSType }) when("#Fetch", func() { when("daemon is false", func() { when("PullAlways", func() { when("there is a remote image", func() { - it.Before(func() { - img, err := remote.NewImage(repoName, authn.DefaultKeychain) - h.AssertNil(t, err) + when("default platform", func() { + // default is linux/runtime.GOARCH + it.Before(func() { + img, err := remote.NewImage(repoName, authn.DefaultKeychain) + h.AssertNil(t, err) - h.AssertNil(t, img.Save()) + h.AssertNil(t, img.Save()) + }) + + it("returns the remote image", func() { + _, err := imageFetcher.Fetch(context.TODO(), repoName, image.FetchOptions{Daemon: false, PullPolicy: image.PullAlways}) + h.AssertNil(t, err) + }) + + it("returns the remote image when insecure registry", func() { + insecureRegistry := fmt.Sprintf("%s:%s", registryConfig.RunRegistryHost, registryConfig.RunRegistryPort) + _, err := imageFetcher.Fetch(context.TODO(), repoName, image.FetchOptions{Daemon: false, PullPolicy: image.PullAlways, InsecureRegistries: []string{insecureRegistry}}) + h.AssertNil(t, err) + }) }) - it("returns the remote image", func() { - _, err := imageFetcher.Fetch(context.TODO(), repoName, image.FetchOptions{Daemon: false, PullPolicy: image.PullAlways}) - h.AssertNil(t, err) + when("platform with 
variant and version", func() { + var target dist.Target + + // default is linux/runtime.GOARCH + it.Before(func() { + img, err := remote.NewImage(repoName, authn.DefaultKeychain, remote.WithDefaultPlatform(imgutil.Platform{ + OS: runtime.GOOS, + Architecture: runtime.GOARCH, + Variant: "v1", + OSVersion: "my-version", + })) + h.AssertNil(t, err) + h.AssertNil(t, img.Save()) + }) + + it("returns the remote image", func() { + target = dist.Target{ + OS: runtime.GOOS, + Arch: runtime.GOARCH, + ArchVariant: "v1", + Distributions: []dist.Distribution{ + {Name: "some-name", Version: "my-version"}, + }, + } + + img, err := imageFetcher.Fetch(context.TODO(), repoName, image.FetchOptions{Daemon: false, PullPolicy: image.PullAlways, Target: &target}) + h.AssertNil(t, err) + variant, err := img.Variant() + h.AssertNil(t, err) + h.AssertEq(t, variant, "v1") + + osVersion, err := img.OSVersion() + h.AssertNil(t, err) + h.AssertEq(t, osVersion, "my-version") + }) }) }) @@ -213,9 +267,22 @@ func testFetcher(t *testing.T, when spec.G, it spec.S) { when("image platform is specified", func() { it("passes the platform argument to the daemon", func() { - _, err := imageFetcher.Fetch(context.TODO(), repoName, image.FetchOptions{Daemon: true, PullPolicy: image.PullAlways, Platform: "some-unsupported-platform"}) + _, err := imageFetcher.Fetch(context.TODO(), repoName, image.FetchOptions{Daemon: true, PullPolicy: image.PullAlways, Target: &dist.Target{OS: "some-unsupported-platform"}}) h.AssertError(t, err, "unknown operating system or architecture") }) + + when("remote platform does not match", func() { + it.Before(func() { + img, err := remote.NewImage(repoName, authn.DefaultKeychain, remote.WithDefaultPlatform(imgutil.Platform{OS: osType, Architecture: ""})) + h.AssertNil(t, err) + h.AssertNil(t, img.Save()) + }) + + it("retries without setting platform", func() { + _, err := imageFetcher.Fetch(context.TODO(), repoName, image.FetchOptions{Daemon: true, PullPolicy: image.PullAlways, 
Target: &dist.Target{OS: osType, Arch: runtime.GOARCH}}) + h.AssertNil(t, err) + }) + }) }) }) @@ -319,11 +386,501 @@ func testFetcher(t *testing.T, when spec.G, it spec.S) { when("image platform is specified", func() { it("passes the platform argument to the daemon", func() { - _, err := imageFetcher.Fetch(context.TODO(), repoName, image.FetchOptions{Daemon: true, PullPolicy: image.PullIfNotPresent, Platform: "some-unsupported-platform"}) + _, err := imageFetcher.Fetch(context.TODO(), repoName, image.FetchOptions{Daemon: true, PullPolicy: image.PullIfNotPresent, Target: &dist.Target{OS: "some-unsupported-platform"}}) h.AssertError(t, err, "unknown operating system or architecture") }) }) }) }) + + when("layout option is provided", func() { + var ( + layoutOption image.LayoutOption + imagePath string + tmpDir string + err error + ) + + it.Before(func() { + // set up local layout repo + tmpDir, err = os.MkdirTemp("", "pack.fetcher.test") + h.AssertNil(t, err) + + // dummy layer to validate sparse behavior + tarDir := filepath.Join(tmpDir, "layer") + err = os.MkdirAll(tarDir, os.ModePerm) + h.AssertNil(t, err) + layerPath := h.CreateTAR(t, tarDir, ".", -1) + + // set up the remote image to be used + img, err := remote.NewImage(repoName, authn.DefaultKeychain) + img.AddLayer(layerPath) + h.AssertNil(t, err) + h.AssertNil(t, img.Save()) + + // set up layout options for the tests + imagePath = filepath.Join(tmpDir, repo) + layoutOption = image.LayoutOption{ + Path: imagePath, + Sparse: false, + } + }) + + it.After(func() { + err = os.RemoveAll(tmpDir) + h.AssertNil(t, err) + }) + + when("sparse is false", func() { + it("returns and layout image on disk", func() { + _, err := imageFetcher.Fetch(context.TODO(), repoName, image.FetchOptions{LayoutOption: layoutOption}) + h.AssertNil(t, err) + + // all layers were written + h.AssertBlobsLen(t, imagePath, 3) + }) + }) + + when("sparse is true", func() { + it("returns and layout image on disk", func() { + layoutOption.Sparse 
= true + _, err := imageFetcher.Fetch(context.TODO(), repoName, image.FetchOptions{LayoutOption: layoutOption}) + h.AssertNil(t, err) + + // only manifest and config was written + h.AssertBlobsLen(t, imagePath, 2) + }) + }) + }) + }) + + when("#FetchForPlatform", func() { + when("target is nil", func() { + it.Before(func() { + img, err := remote.NewImage(repoName, authn.DefaultKeychain) + h.AssertNil(t, err) + h.AssertNil(t, img.Save()) + }) + + it("delegates to regular Fetch method", func() { + fetchedImg, err := imageFetcher.FetchForPlatform(context.TODO(), repoName, image.FetchOptions{ + Daemon: false, + PullPolicy: image.PullAlways, + Target: nil, + }) + h.AssertNil(t, err) + h.AssertNotNil(t, fetchedImg) + }) + }) + + when("target is specified", func() { + when("multi-platform image", func() { + when("matching platform exists", func() { + it.Before(func() { + // Create a multi-platform image by creating an index + // For testing purposes, we'll create a single-platform image with the current architecture + img, err := remote.NewImage(repoName, authn.DefaultKeychain, remote.WithDefaultPlatform(imgutil.Platform{ + OS: runtime.GOOS, + Architecture: runtime.GOARCH, + })) + h.AssertNil(t, err) + h.AssertNil(t, img.Save()) + }) + + it("successfully fetches the platform-specific image", func() { + target := dist.Target{ + OS: runtime.GOOS, + Arch: runtime.GOARCH, + } + + fetchedImg, err := imageFetcher.FetchForPlatform(context.TODO(), repoName, image.FetchOptions{ + Daemon: false, + PullPolicy: image.PullAlways, + Target: &target, + }) + h.AssertNil(t, err) + h.AssertNotNil(t, fetchedImg) + + // Verify the platform matches + os, err := fetchedImg.OS() + h.AssertNil(t, err) + h.AssertEq(t, os, runtime.GOOS) + + arch, err := fetchedImg.Architecture() + h.AssertNil(t, err) + h.AssertEq(t, arch, runtime.GOARCH) + }) + }) + + when("true manifest list with multiple platforms", func() { + it.Before(func() { + // Create a random image index with platform annotations + 
h.SetUpRandomRemoteIndexWithPlatforms(t, repoName, []struct{ OS, Arch string }{ + {OS: "linux", Arch: "amd64"}, + {OS: "linux", Arch: "arm64"}, + }) + }) + + it("resolves to the correct platform-specific digest for amd64", func() { + target := dist.Target{ + OS: "linux", + Arch: "amd64", + } + + fetchedImg, err := imageFetcher.FetchForPlatform(context.TODO(), repoName, image.FetchOptions{ + Daemon: false, + PullPolicy: image.PullAlways, + Target: &target, + }) + h.AssertNil(t, err) + h.AssertNotNil(t, fetchedImg) + + // Verify the platform matches + arch, err := fetchedImg.Architecture() + h.AssertNil(t, err) + h.AssertEq(t, arch, "amd64") + + os, err := fetchedImg.OS() + h.AssertNil(t, err) + h.AssertEq(t, os, "linux") + }) + + it("resolves to the correct platform-specific digest for arm64", func() { + target := dist.Target{ + OS: "linux", + Arch: "arm64", + } + + fetchedImg, err := imageFetcher.FetchForPlatform(context.TODO(), repoName, image.FetchOptions{ + Daemon: false, + PullPolicy: image.PullAlways, + Target: &target, + }) + h.AssertNil(t, err) + h.AssertNotNil(t, fetchedImg) + + // Verify the platform matches + arch, err := fetchedImg.Architecture() + h.AssertNil(t, err) + h.AssertEq(t, arch, "arm64") + + os, err := fetchedImg.OS() + h.AssertNil(t, err) + h.AssertEq(t, os, "linux") + }) + }) + + when("matching platform does not exist", func() { + it.Before(func() { + // Create an image with a specific platform + img, err := remote.NewImage(repoName, authn.DefaultKeychain, remote.WithDefaultPlatform(imgutil.Platform{ + OS: runtime.GOOS, + Architecture: runtime.GOARCH, + })) + h.AssertNil(t, err) + h.AssertNil(t, img.Save()) + }) + + it("returns an error", func() { + // Request a different platform that doesn't exist + differentArch := "nonexistent-arch" + target := dist.Target{ + OS: runtime.GOOS, + Arch: differentArch, + } + + _, err := imageFetcher.FetchForPlatform(context.TODO(), repoName, image.FetchOptions{ + Daemon: false, + PullPolicy: 
image.PullAlways, + Target: &target, + }) + h.AssertError(t, err, "does not match requested platform") + }) + }) + }) + + when("single-platform image", func() { + when("platform matches", func() { + it.Before(func() { + img, err := remote.NewImage(repoName, authn.DefaultKeychain, remote.WithDefaultPlatform(imgutil.Platform{ + OS: runtime.GOOS, + Architecture: runtime.GOARCH, + })) + h.AssertNil(t, err) + h.AssertNil(t, img.Save()) + }) + + it("successfully fetches the image", func() { + target := dist.Target{ + OS: runtime.GOOS, + Arch: runtime.GOARCH, + } + + fetchedImg, err := imageFetcher.FetchForPlatform(context.TODO(), repoName, image.FetchOptions{ + Daemon: false, + PullPolicy: image.PullAlways, + Target: &target, + }) + h.AssertNil(t, err) + h.AssertNotNil(t, fetchedImg) + }) + }) + + when("platform does not match", func() { + it.Before(func() { + img, err := remote.NewImage(repoName, authn.DefaultKeychain, remote.WithDefaultPlatform(imgutil.Platform{ + OS: runtime.GOOS, + Architecture: runtime.GOARCH, + })) + h.AssertNil(t, err) + h.AssertNil(t, img.Save()) + }) + + it("returns a platform mismatch error", func() { + // Use a different OS to ensure mismatch + differentOS := "nonexistent-os" + target := dist.Target{ + OS: differentOS, + Arch: runtime.GOARCH, + } + + _, err := imageFetcher.FetchForPlatform(context.TODO(), repoName, image.FetchOptions{ + Daemon: false, + PullPolicy: image.PullAlways, + Target: &target, + }) + h.AssertError(t, err, "does not match requested platform") + }) + }) + }) + + when("with insecure registries", func() { + it.Before(func() { + img, err := remote.NewImage(repoName, authn.DefaultKeychain, remote.WithDefaultPlatform(imgutil.Platform{ + OS: runtime.GOOS, + Architecture: runtime.GOARCH, + })) + h.AssertNil(t, err) + h.AssertNil(t, img.Save()) + }) + + it("successfully fetches using insecure registry settings", func() { + target := dist.Target{ + OS: runtime.GOOS, + Arch: runtime.GOARCH, + } + insecureRegistry := 
fmt.Sprintf("%s:%s", registryConfig.RunRegistryHost, registryConfig.RunRegistryPort) + + fetchedImg, err := imageFetcher.FetchForPlatform(context.TODO(), repoName, image.FetchOptions{ + Daemon: false, + PullPolicy: image.PullAlways, + Target: &target, + InsecureRegistries: []string{insecureRegistry}, + }) + h.AssertNil(t, err) + h.AssertNotNil(t, fetchedImg) + }) + }) + + when("with platform variant", func() { + it.Before(func() { + img, err := remote.NewImage(repoName, authn.DefaultKeychain, remote.WithDefaultPlatform(imgutil.Platform{ + OS: runtime.GOOS, + Architecture: runtime.GOARCH, + Variant: "v7", + })) + h.AssertNil(t, err) + h.AssertNil(t, img.Save()) + }) + + it("successfully fetches the image with matching variant", func() { + target := dist.Target{ + OS: runtime.GOOS, + Arch: runtime.GOARCH, + ArchVariant: "v7", + } + + fetchedImg, err := imageFetcher.FetchForPlatform(context.TODO(), repoName, image.FetchOptions{ + Daemon: false, + PullPolicy: image.PullAlways, + Target: &target, + }) + h.AssertNil(t, err) + h.AssertNotNil(t, fetchedImg) + + variant, err := fetchedImg.Variant() + h.AssertNil(t, err) + h.AssertEq(t, variant, "v7") + }) + }) + }) + + when("image does not exist", func() { + it("returns an error", func() { + target := dist.Target{ + OS: runtime.GOOS, + Arch: runtime.GOARCH, + } + + nonExistentImage := registryConfig.RepoName("nonexistent/" + h.RandString(10)) + _, err := imageFetcher.FetchForPlatform(context.TODO(), nonExistentImage, image.FetchOptions{ + Daemon: false, + PullPolicy: image.PullAlways, + Target: &target, + }) + h.AssertError(t, err, "") + }) + }) + + when("pull policy is PullNever with daemon", func() { + var localImageName string + + it.Before(func() { + // Use a different name for the local image to avoid conflicts + localImageName = "pack.local/test-" + h.RandString(10) + + // Create a local daemon image with platform information + // Use osType (daemon OS) instead of runtime.GOOS to handle cases where + // Windows runner 
is running Linux containers + img, err := local.NewImage(localImageName, docker, local.WithDefaultPlatform(imgutil.Platform{ + OS: osType, + Architecture: runtime.GOARCH, + })) + h.AssertNil(t, err) + h.AssertNil(t, img.Save()) + }) + + it.After(func() { + h.DockerRmi(docker, localImageName) + }) + + it("skips platform-specific digest resolution and uses tag directly", func() { + target := dist.Target{ + OS: osType, + Arch: runtime.GOARCH, + } + + fetchedImg, err := imageFetcher.FetchForPlatform(context.TODO(), localImageName, image.FetchOptions{ + Daemon: true, + PullPolicy: image.PullNever, + Target: &target, + }) + + // Should succeed without network access (digest resolution skipped) + h.AssertNil(t, err) + h.AssertNotNil(t, fetchedImg) + + // Verify debug message about skipping digest resolution + h.AssertContains(t, outBuf.String(), "skipping digest resolution due to --pull-policy never") + }) + }) + }) + + when("#CheckReadAccess", func() { + var daemon bool + + when("Daemon is true", func() { + it.Before(func() { + daemon = true + }) + + when("an error is thrown by the daemon", func() { + it.Before(func() { + mockController := gomock.NewController(t) + mockDockerClient := testmocks.NewMockAPIClient(mockController) + mockDockerClient.EXPECT().ServerVersion(gomock.Any(), gomock.Any()).Return(client.ServerVersionResult{}, errors.New("something wrong happened")) + imageFetcher = image.NewFetcher(logging.NewLogWithWriters(&outBuf, &outBuf, logging.WithVerbose()), mockDockerClient) + }) + when("PullNever", func() { + it("read access must be false", func() { + h.AssertFalse(t, imageFetcher.CheckReadAccess("pack.test/dummy", image.FetchOptions{Daemon: daemon, PullPolicy: image.PullNever})) + h.AssertContains(t, outBuf.String(), "failed reading image 'pack.test/dummy' from the daemon") + }) + }) + + when("PullIfNotPresent", func() { + it("read access must be false", func() { + h.AssertFalse(t, imageFetcher.CheckReadAccess("pack.test/dummy", image.FetchOptions{Daemon: 
daemon, PullPolicy: image.PullIfNotPresent})) + h.AssertContains(t, outBuf.String(), "failed reading image 'pack.test/dummy' from the daemon") + }) + }) + }) + + when("image exists only in the daemon", func() { + it.Before(func() { + img, err := local.NewImage("pack.test/dummy", docker) + h.AssertNil(t, err) + h.AssertNil(t, img.Save()) + }) + when("PullAlways", func() { + it("read access must be false", func() { + h.AssertFalse(t, imageFetcher.CheckReadAccess("pack.test/dummy", image.FetchOptions{Daemon: daemon, PullPolicy: image.PullAlways})) + }) + }) + + when("PullNever", func() { + it("read access must be true", func() { + h.AssertTrue(t, imageFetcher.CheckReadAccess("pack.test/dummy", image.FetchOptions{Daemon: daemon, PullPolicy: image.PullNever})) + }) + }) + + when("PullIfNotPresent", func() { + it("read access must be true", func() { + h.AssertTrue(t, imageFetcher.CheckReadAccess("pack.test/dummy", image.FetchOptions{Daemon: daemon, PullPolicy: image.PullIfNotPresent})) + }) + }) + }) + + when("image doesn't exist in the daemon but in remote", func() { + it.Before(func() { + img, err := remote.NewImage(repoName, authn.DefaultKeychain) + h.AssertNil(t, err) + h.AssertNil(t, img.Save()) + }) + when("PullAlways", func() { + it("read access must be true", func() { + h.AssertTrue(t, imageFetcher.CheckReadAccess(repoName, image.FetchOptions{Daemon: daemon, PullPolicy: image.PullAlways})) + }) + }) + + when("PullNever", func() { + it("read access must be false", func() { + h.AssertFalse(t, imageFetcher.CheckReadAccess(repoName, image.FetchOptions{Daemon: daemon, PullPolicy: image.PullNever})) + }) + }) + + when("PullIfNotPresent", func() { + it("read access must be true", func() { + h.AssertTrue(t, imageFetcher.CheckReadAccess(repoName, image.FetchOptions{Daemon: daemon, PullPolicy: image.PullIfNotPresent})) + }) + }) + }) + }) + + when("Daemon is false", func() { + it.Before(func() { + daemon = false + }) + + when("remote image doesn't exists", func() { + 
it("fails when checking dummy image", func() { + h.AssertFalse(t, imageFetcher.CheckReadAccess("pack.test/dummy", image.FetchOptions{Daemon: daemon})) + h.AssertContains(t, outBuf.String(), "CheckReadAccess failed for the run image pack.test/dummy") + }) + }) + + when("remote image exists", func() { + it.Before(func() { + img, err := remote.NewImage(repoName, authn.DefaultKeychain) + h.AssertNil(t, err) + h.AssertNil(t, img.Save()) + }) + + it("read access is valid", func() { + h.AssertTrue(t, imageFetcher.CheckReadAccess(repoName, image.FetchOptions{Daemon: daemon})) + h.AssertContains(t, outBuf.String(), fmt.Sprintf("CheckReadAccess succeeded for the run image %s", repoName)) + }) + }) + }) }) } diff --git a/pkg/image/platform.go b/pkg/image/platform.go new file mode 100644 index 0000000000..6ffe7162cd --- /dev/null +++ b/pkg/image/platform.go @@ -0,0 +1,205 @@ +package image + +import ( + "fmt" + "strings" + + "github.com/buildpacks/imgutil" + "github.com/google/go-containerregistry/pkg/authn" + "github.com/google/go-containerregistry/pkg/name" + "github.com/google/go-containerregistry/pkg/v1/remote" + "github.com/google/go-containerregistry/pkg/v1/types" + "github.com/pkg/errors" +) + +// resolvePlatformSpecificDigest resolves a multi-platform image reference to a platform-specific digest. +// If the image is a manifest list, it finds the manifest for the specified platform and returns its digest. +// If the image is a single-platform image, it validates the platform matches and returns a digest reference. 
+func resolvePlatformSpecificDigest(imageRef string, platform *imgutil.Platform, keychain authn.Keychain, registrySettings map[string]imgutil.RegistrySetting) (string, error) { + // If platform is nil, return the reference unchanged + if platform == nil { + return imageRef, nil + } + + // Parse the reference (could be digest or tag) + ref, err := name.ParseReference(imageRef, name.WeakValidation) + if err != nil { + return "", errors.Wrapf(err, "parsing image reference %q", imageRef) + } + + // Get registry settings for the reference + reg := getRegistrySetting(imageRef, registrySettings) + + // Get authentication + auth, err := keychain.Resolve(ref.Context().Registry) + if err != nil { + return "", errors.Wrapf(err, "resolving authentication for registry %q", ref.Context().Registry) + } + + // Fetch the descriptor + desc, err := remote.Get(ref, remote.WithAuth(auth), remote.WithTransport(imgutil.GetTransport(reg.Insecure))) + if err != nil { + return "", errors.Wrapf(err, "fetching descriptor for %q", imageRef) + } + + // Check if it's a manifest list + if desc.MediaType == types.OCIImageIndex || desc.MediaType == types.DockerManifestList { + // Get the index + index, err := desc.ImageIndex() + if err != nil { + return "", errors.Wrapf(err, "getting image index for %q", imageRef) + } + + // Get the manifest list + manifestList, err := index.IndexManifest() + if err != nil { + return "", errors.Wrapf(err, "getting manifest list for %q", imageRef) + } + + // Find the platform-specific manifest + for _, manifest := range manifestList.Manifests { + if manifest.Platform != nil { + manifestPlatform := &imgutil.Platform{ + OS: manifest.Platform.OS, + Architecture: manifest.Platform.Architecture, + Variant: manifest.Platform.Variant, + OSVersion: manifest.Platform.OSVersion, + } + + if platformsMatch(platform, manifestPlatform) { + // Create a new digest reference for the platform-specific manifest + platformDigestRef, err := name.NewDigest( + fmt.Sprintf("%s@%s", 
ref.Context().Name(), manifest.Digest.String()), + name.WeakValidation, + ) + if err != nil { + return "", errors.Wrapf(err, "creating platform-specific digest reference") + } + return platformDigestRef.String(), nil + } + } + } + + return "", errors.Errorf("no manifest found for platform %s/%s%s in manifest list %q", + platform.OS, + platform.Architecture, + platformString(platform), + imageRef) + } + + // If it's a single manifest, validate that the platform matches + img, err := desc.Image() + if err != nil { + return "", errors.Wrapf(err, "getting image for %q", imageRef) + } + + configFile, err := img.ConfigFile() + if err != nil { + return "", errors.Wrapf(err, "getting config file for %q", imageRef) + } + + // Create platform from image config + imagePlatform := &imgutil.Platform{ + OS: configFile.OS, + Architecture: configFile.Architecture, + Variant: configFile.Variant, + OSVersion: configFile.OSVersion, + } + + // Check if the image's platform matches the requested platform + if !platformsMatch(platform, imagePlatform) { + return "", errors.Errorf("image platform %s/%s%s does not match requested platform %s/%s%s for %q", + configFile.OS, + configFile.Architecture, + platformString(imagePlatform), + platform.OS, + platform.Architecture, + platformString(platform), + imageRef) + } + + // Platform matches - if input was a digest reference, return it unchanged + // If input was a tag reference, return the digest reference for consistency + if _, ok := ref.(name.Digest); ok { + return imageRef, nil + } + + // Convert tag reference to digest reference + digest, err := img.Digest() + if err != nil { + return "", errors.Wrapf(err, "getting digest for image %q", imageRef) + } + + digestRef, err := name.NewDigest( + fmt.Sprintf("%s@%s", ref.Context().Name(), digest.String()), + name.WeakValidation, + ) + if err != nil { + return "", errors.Wrapf(err, "creating digest reference for %q", imageRef) + } + + return digestRef.String(), nil +} + +// platformsMatch checks 
if two platforms match. +// OS and Architecture must match exactly. +// For Variant and OSVersion, if either is blank, it's considered a match. +func platformsMatch(p1, p2 *imgutil.Platform) bool { + if p1 == nil || p2 == nil { + return false + } + + // OS and Architecture must match exactly + if p1.OS != p2.OS || p1.Architecture != p2.Architecture { + return false + } + + // For Variant and OSVersion, if either is blank, consider it a match + variantMatch := p1.Variant == "" || p2.Variant == "" || p1.Variant == p2.Variant + osVersionMatch := p1.OSVersion == "" || p2.OSVersion == "" || p1.OSVersion == p2.OSVersion + + return variantMatch && osVersionMatch +} + +// platformString returns a pretty-printed string representation of a platform's variant and OS version. +// Returns empty string if both are blank, otherwise returns "/variant:osversion" format. +func platformString(platform *imgutil.Platform) string { + if platform == nil { + return "" + } + + var parts []string + + if platform.Variant != "" { + parts = append(parts, platform.Variant) + } + + if platform.OSVersion != "" { + parts = append(parts, platform.OSVersion) + } + + if len(parts) == 0 { + return "" + } + + result := "/" + parts[0] + if len(parts) > 1 { + result += ":" + parts[1] + } + + return result +} + +// getRegistrySetting returns the registry setting for a given repository name. +// It checks if any prefix in the settings map matches the repository name. 
+func getRegistrySetting(forRepoName string, givenSettings map[string]imgutil.RegistrySetting) imgutil.RegistrySetting { + if givenSettings == nil { + return imgutil.RegistrySetting{} + } + for prefix, r := range givenSettings { + if strings.HasPrefix(forRepoName, prefix) { + return r + } + } + return imgutil.RegistrySetting{} +} diff --git a/pkg/index/index_factory.go b/pkg/index/index_factory.go new file mode 100644 index 0000000000..aaa5d70e1e --- /dev/null +++ b/pkg/index/index_factory.go @@ -0,0 +1,84 @@ +package index + +import ( + "fmt" + "os" + "path/filepath" + + "github.com/buildpacks/imgutil" + "github.com/buildpacks/imgutil/layout" + "github.com/buildpacks/imgutil/remote" + "github.com/google/go-containerregistry/pkg/authn" + "github.com/pkg/errors" +) + +type IndexFactory struct { + keychain authn.Keychain + path string +} + +func NewIndexFactory(keychain authn.Keychain, path string) *IndexFactory { + return &IndexFactory{ + keychain: keychain, + path: path, + } +} + +func (f *IndexFactory) Exists(repoName string) bool { + return layoutImageExists(f.localPath(repoName)) +} + +func (f *IndexFactory) LoadIndex(repoName string, opts ...imgutil.IndexOption) (index imgutil.ImageIndex, err error) { + if !f.Exists(repoName) { + return nil, errors.New(fmt.Sprintf("Image: '%s' not found", repoName)) + } + opts = appendOption(opts, imgutil.FromBaseIndex(f.localPath(repoName))) + return layout.NewIndex(repoName, appendDefaultOptions(opts, f.keychain, f.path)...) +} + +func (f *IndexFactory) FetchIndex(name string, opts ...imgutil.IndexOption) (idx imgutil.ImageIndex, err error) { + return remote.NewIndex(name, appendDefaultOptions(opts, f.keychain, f.path)...) +} + +func (f *IndexFactory) FindIndex(repoName string, opts ...imgutil.IndexOption) (idx imgutil.ImageIndex, err error) { + if f.Exists(repoName) { + return f.LoadIndex(repoName, opts...) + } + return f.FetchIndex(repoName, opts...) 
+} + +func (f *IndexFactory) CreateIndex(repoName string, opts ...imgutil.IndexOption) (idx imgutil.ImageIndex, err error) { + return layout.NewIndex(repoName, appendDefaultOptions(opts, f.keychain, f.path)...) +} + +func (f *IndexFactory) localPath(repoName string) string { + return filepath.Join(f.path, imgutil.MakeFileSafeName(repoName)) +} + +func layoutImageExists(path string) bool { + if !pathExists(path) { + return false + } + index := filepath.Join(path, "index.json") + if _, err := os.Stat(index); os.IsNotExist(err) { + return false + } + return true +} + +func pathExists(path string) bool { + if path != "" { + if _, err := os.Stat(path); !os.IsNotExist(err) { + return true + } + } + return false +} + +func appendOption(ops []imgutil.IndexOption, op imgutil.IndexOption) []imgutil.IndexOption { + return append(ops, op) +} + +func appendDefaultOptions(ops []imgutil.IndexOption, keychain authn.Keychain, path string) []imgutil.IndexOption { + return append(ops, imgutil.WithKeychain(keychain), imgutil.WithXDGRuntimePath(path)) +} diff --git a/pkg/index/index_factory_test.go b/pkg/index/index_factory_test.go new file mode 100644 index 0000000000..da3c791e46 --- /dev/null +++ b/pkg/index/index_factory_test.go @@ -0,0 +1,199 @@ +package index_test + +import ( + "fmt" + "os" + "testing" + + "github.com/buildpacks/imgutil" + "github.com/google/go-containerregistry/pkg/authn" + "github.com/google/go-containerregistry/pkg/name" + v1 "github.com/google/go-containerregistry/pkg/v1" + "github.com/google/go-containerregistry/pkg/v1/random" + "github.com/google/go-containerregistry/pkg/v1/remote" + "github.com/heroku/color" + "github.com/sclevine/spec" + "github.com/sclevine/spec/report" + + "github.com/buildpacks/pack/pkg/index" + h "github.com/buildpacks/pack/testhelpers" +) + +var dockerRegistry *h.TestRegistryConfig + +func TestIndexFactory(t *testing.T) { + color.Disable(true) + defer color.Disable(false) + + h.RequireDocker(t) + + dockerRegistry = h.RunRegistry(t) + 
defer dockerRegistry.StopRegistry(t) + + os.Setenv("DOCKER_CONFIG", dockerRegistry.DockerConfigDir) + spec.Run(t, "Fetcher", testIndexFactory, spec.Parallel(), spec.Report(report.Terminal{})) +} + +func testIndexFactory(t *testing.T, when spec.G, it spec.S) { + var ( + indexFactory *index.IndexFactory + imageIndex imgutil.ImageIndex + indexRepoName string + err error + tmpDir string + ) + + it.Before(func() { + tmpDir, err = os.MkdirTemp("", "index-factory-test") + h.AssertNil(t, err) + indexFactory = index.NewIndexFactory(authn.DefaultKeychain, tmpDir) + }) + + it.After(func() { + os.RemoveAll(tmpDir) + }) + + when("#CreateIndex", func() { + it.Before(func() { + indexRepoName = h.NewRandomIndexRepoName() + }) + + when("no options are provided", func() { + it("creates an image index", func() { + imageIndex, err = indexFactory.CreateIndex(indexRepoName) + h.AssertNil(t, err) + h.AssertNotNil(t, imageIndex) + }) + }) + }) + + when("#Exists", func() { + when("index exists on disk", func() { + it.Before(func() { + indexRepoName = h.NewRandomIndexRepoName() + setUpLocalIndex(t, indexFactory, indexRepoName) + }) + + it("returns true", func() { + h.AssertTrue(t, indexFactory.Exists(indexRepoName)) + }) + }) + + when("index does not exist on disk", func() { + it.Before(func() { + indexRepoName = h.NewRandomIndexRepoName() + }) + + it("returns false", func() { + h.AssertFalse(t, indexFactory.Exists(indexRepoName)) + }) + }) + }) + + when("#LoadIndex", func() { + when("index exists on disk", func() { + it.Before(func() { + indexRepoName = h.NewRandomIndexRepoName() + setUpLocalIndex(t, indexFactory, indexRepoName) + }) + + it("loads the index from disk", func() { + imageIndex, err = indexFactory.LoadIndex(indexRepoName) + h.AssertNil(t, err) + h.AssertNotNil(t, imageIndex) + }) + }) + + when("index does not exist on disk", func() { + it.Before(func() { + indexRepoName = h.NewRandomIndexRepoName() + }) + + it("errors with a message", func() { + _, err = 
indexFactory.LoadIndex(indexRepoName) + h.AssertError(t, err, fmt.Sprintf("Image: '%s' not found", indexRepoName)) + }) + }) + }) + + when("#FetchIndex", func() { + when("index exists in a remote registry", func() { + var remoteIndexRepoName string + + it.Before(func() { + indexRepoName = h.NewRandomIndexRepoName() + remoteIndexRepoName = newTestImageIndexName("fetch-remote") + setUpRandomRemoteIndex(t, remoteIndexRepoName, 1, 1) + }) + + it("creates an index with the underlying remote index", func() { + _, err = indexFactory.FetchIndex(indexRepoName, imgutil.FromBaseIndex(remoteIndexRepoName)) + h.AssertNil(t, err) + }) + }) + + when("index does not exist in a remote registry", func() { + it.Before(func() { + indexRepoName = h.NewRandomIndexRepoName() + }) + + it("errors with a message", func() { + _, err = indexFactory.FetchIndex(indexRepoName, imgutil.FromBaseIndex(indexRepoName)) + h.AssertNotNil(t, err) + }) + }) + }) + + when("#FindIndex", func() { + when("index exists on disk", func() { + it.Before(func() { + indexRepoName = h.NewRandomIndexRepoName() + setUpLocalIndex(t, indexFactory, indexRepoName) + }) + + it("finds the index on disk", func() { + imageIndex, err = indexFactory.FindIndex(indexRepoName) + h.AssertNil(t, err) + h.AssertNotNil(t, imageIndex) + }) + }) + + when("index exists in a remote registry", func() { + it.Before(func() { + indexRepoName = newTestImageIndexName("find-remote") + setUpRandomRemoteIndex(t, indexRepoName, 1, 1) + }) + + it("finds the index in the remote registry", func() { + imageIndex, err = indexFactory.FindIndex(indexRepoName) + h.AssertNil(t, err) + h.AssertNotNil(t, imageIndex) + }) + }) + }) +} + +func setUpLocalIndex(t *testing.T, indexFactory *index.IndexFactory, indexRepoName string) { + imageIndex, err := indexFactory.CreateIndex(indexRepoName) + h.AssertNil(t, err) + h.AssertNil(t, imageIndex.SaveDir()) +} + +func newTestImageIndexName(name string) string { + return dockerRegistry.RepoName(name + "-" + 
h.RandString(10)) +} + +// setUpRandomRemoteIndex creates a random image index with the provided (count) number of manifest +// each manifest will have the provided number of layers +func setUpRandomRemoteIndex(t *testing.T, repoName string, layers, count int64) v1.ImageIndex { + ref, err := name.ParseReference(repoName, name.WeakValidation) + h.AssertNil(t, err) + + randomIndex, err := random.Index(1024, layers, count) + h.AssertNil(t, err) + + err = remote.WriteIndex(ref, randomIndex, remote.WithAuthFromKeychain(authn.DefaultKeychain)) + h.AssertNil(t, err) + + return randomIndex +} diff --git a/pkg/logging/logger_writers.go b/pkg/logging/logger_writers.go index dceb66c462..8d32a853c6 100644 --- a/pkg/logging/logger_writers.go +++ b/pkg/logging/logger_writers.go @@ -4,7 +4,6 @@ package logging import ( "fmt" "io" - "io/ioutil" "regexp" "sync" "time" @@ -54,7 +53,7 @@ func NewLogWithWriters(stdout, stderr io.Writer, opts ...func(*LogWithWriters)) out: stdout, errOut: stderr, } - lw.Logger.Handler = lw + lw.Handler = lw for _, opt := range opts { opt(lw) @@ -91,7 +90,7 @@ func (lw *LogWithWriters) HandleLog(e *log.Entry) error { // WriterForLevel returns a Writer for the given Level func (lw *LogWithWriters) WriterForLevel(level Level) io.Writer { if lw.Level > log.Level(level) { - return ioutil.Discard + return io.Discard } if level == ErrorLevel { diff --git a/pkg/logging/logger_writers_test.go b/pkg/logging/logger_writers_test.go index 42a6a63b5d..12f40bc6be 100644 --- a/pkg/logging/logger_writers_test.go +++ b/pkg/logging/logger_writers_test.go @@ -3,7 +3,6 @@ package logging_test import ( "fmt" "io" - "io/ioutil" "testing" "time" @@ -78,7 +77,7 @@ func testLogWithWriters(t *testing.T, when spec.G, it spec.S) { it("will return correct writers", func() { h.AssertSameInstance(t, logger.Writer(), outCons) - h.AssertSameInstance(t, logger.WriterForLevel(logging.DebugLevel), ioutil.Discard) + h.AssertSameInstance(t, logger.WriterForLevel(logging.DebugLevel), 
io.Discard) }) it("is only verbose for debug level", func() { @@ -166,8 +165,8 @@ func testLogWithWriters(t *testing.T, when spec.G, it spec.S) { it("will return correct writers", func() { h.AssertSameInstance(t, logger.Writer(), outCons) - h.AssertSameInstance(t, logger.WriterForLevel(logging.DebugLevel), ioutil.Discard) - h.AssertSameInstance(t, logger.WriterForLevel(logging.InfoLevel), ioutil.Discard) + h.AssertSameInstance(t, logger.WriterForLevel(logging.DebugLevel), io.Discard) + h.AssertSameInstance(t, logger.WriterForLevel(logging.InfoLevel), io.Discard) }) }) diff --git a/pkg/logging/logging.go b/pkg/logging/logging.go index 6d9a026950..c03fe777e1 100644 --- a/pkg/logging/logging.go +++ b/pkg/logging/logging.go @@ -3,7 +3,6 @@ package logging import ( "io" - "io/ioutil" "github.com/buildpacks/pack/internal/style" ) @@ -53,7 +52,7 @@ func GetWriterForLevel(logger Logger, level Level) io.Writer { // IsQuiet defines whether a pack logger is set to quiet mode func IsQuiet(logger Logger) bool { - if writer := GetWriterForLevel(logger, InfoLevel); writer == ioutil.Discard { + if writer := GetWriterForLevel(logger, InfoLevel); writer == io.Discard { return true } diff --git a/pkg/project/project.go b/pkg/project/project.go index d50a0258d3..a531e06b5a 100644 --- a/pkg/project/project.go +++ b/pkg/project/project.go @@ -2,12 +2,16 @@ package project import ( "fmt" - "io/ioutil" + "os" "path/filepath" + "strings" + + v03 "github.com/buildpacks/pack/pkg/project/v03" "github.com/BurntSushi/toml" "github.com/pkg/errors" + "github.com/buildpacks/pack/pkg/logging" "github.com/buildpacks/pack/pkg/project/types" v01 "github.com/buildpacks/pack/pkg/project/v01" v02 "github.com/buildpacks/pack/pkg/project/v02" @@ -21,13 +25,14 @@ type VersionDescriptor struct { Project Project `toml:"_"` } -var parsers = map[string]func(string) (types.Descriptor, error){ +var parsers = map[string]func(string) (types.Descriptor, toml.MetaData, error){ "0.1": v01.NewDescriptor, "0.2": 
v02.NewDescriptor, + "0.3": v03.NewDescriptor, } -func ReadProjectDescriptor(pathToFile string) (types.Descriptor, error) { - projectTomlContents, err := ioutil.ReadFile(filepath.Clean(pathToFile)) +func ReadProjectDescriptor(pathToFile string, logger logging.Logger) (types.Descriptor, error) { + projectTomlContents, err := os.ReadFile(filepath.Clean(pathToFile)) if err != nil { return types.Descriptor{}, err } @@ -45,6 +50,7 @@ func ReadProjectDescriptor(pathToFile string) (types.Descriptor, error) { version := versionDescriptor.Project.Version if version == "" { + logger.Warn("No schema version declared in project.toml, defaulting to schema version 0.1") version = "0.1" } @@ -52,14 +58,47 @@ func ReadProjectDescriptor(pathToFile string) (types.Descriptor, error) { return types.Descriptor{}, fmt.Errorf("unknown project descriptor schema version %s", version) } - descriptor, err := parsers[version](string(projectTomlContents)) + descriptor, tomlMetaData, err := parsers[version](string(projectTomlContents)) if err != nil { return types.Descriptor{}, err } + warnIfTomlContainsKeysNotSupportedBySchema(version, tomlMetaData, logger) + return descriptor, validate(descriptor) } +func warnIfTomlContainsKeysNotSupportedBySchema(schemaVersion string, tomlMetaData toml.MetaData, logger logging.Logger) { + unsupportedKeys := []string{} + + for _, undecoded := range tomlMetaData.Undecoded() { + keyName := undecoded.String() + if unsupportedKey(keyName, schemaVersion) { + unsupportedKeys = append(unsupportedKeys, keyName) + } + } + + if len(unsupportedKeys) != 0 { + logger.Warnf("The following keys declared in project.toml are not supported in schema version %s:\n", schemaVersion) + for _, unsupported := range unsupportedKeys { + logger.Warnf("- %s\n", unsupported) + } + logger.Warn("The above keys will be ignored. 
If this is not intentional, try updating your schema version.\n") + } +} + +func unsupportedKey(keyName, schemaVersion string) bool { + switch schemaVersion { + case "0.1": + // filter out any keys from [metadata] and any other custom table defined by end-users + return strings.HasPrefix(keyName, "project.") || strings.HasPrefix(keyName, "build.") || strings.Contains(keyName, "io.buildpacks") + case "0.2": + // filter out any keys from [_.metadata] and any other custom table defined by end-users + return strings.Contains(keyName, "io.buildpacks") || (strings.HasPrefix(keyName, "_.") && !strings.HasPrefix(keyName, "_.metadata")) + } + return true +} + func validate(p types.Descriptor) error { if p.Build.Exclude != nil && p.Build.Include != nil { return errors.New("project.toml: cannot have both include and exclude defined") diff --git a/pkg/project/project_test.go b/pkg/project/project_test.go index 4440da5ca5..1f9541b2c9 100644 --- a/pkg/project/project_test.go +++ b/pkg/project/project_test.go @@ -1,19 +1,19 @@ package project import ( - "io/ioutil" "log" - "math/rand" "os" "reflect" "testing" - "time" + + "github.com/buildpacks/pack/pkg/project/types" "github.com/buildpacks/lifecycle/api" "github.com/heroku/color" "github.com/sclevine/spec" "github.com/sclevine/spec/report" + "github.com/buildpacks/pack/pkg/logging" h "github.com/buildpacks/pack/testhelpers" ) @@ -21,13 +21,146 @@ func TestProject(t *testing.T) { h.RequireDocker(t) color.Disable(true) defer color.Disable(false) - rand.Seed(time.Now().UTC().UnixNano()) spec.Run(t, "Provider", testProject, spec.Parallel(), spec.Report(report.Terminal{})) } func testProject(t *testing.T, when spec.G, it spec.S) { + var ( + logger *logging.LogWithWriters + readStdout func() string + ) + + it.Before(func() { + var stdout *color.Console + stdout, readStdout = h.MockWriterAndOutput() + stderr, _ := h.MockWriterAndOutput() + logger = logging.NewLogWithWriters(stdout, stderr) + }) + when("#ReadProjectDescriptor", func() { 
+ when("valid 0.3 project.toml file is provided", func() { + it("should exec-env on [[io.buildpacks.group]]", func() { + projectToml := ` +[_] +name = "gallant 0.3" +schema-version = "0.3" + +[[io.buildpacks.group]] +id = "buildpacks/metrics-agent" +version = "latest" +exec-env = ["production"] +` + tmpProjectToml, err := createTmpProjectTomlFile(projectToml) + if err != nil { + t.Fatal(err) + } + + projectDescriptor, err := ReadProjectDescriptor(tmpProjectToml.Name(), logger) + if err != nil { + t.Fatal(err) + } + + assertProjectName(t, "gallant 0.3", projectDescriptor) + assertSchemaVersion(t, api.MustParse("0.3"), projectDescriptor) + + expectedNumberOfBuildPacks := 1 + expectedNumberOfExecEnvs := 1 + atIndex := 0 + assertBuildPackGroupExecEnv(t, "production", expectedNumberOfBuildPacks, expectedNumberOfExecEnvs, atIndex, projectDescriptor) + }) + + it("should exec-env on [[io.buildpacks.pre.group]]", func() { + projectToml := ` +[_] +name = "gallant 0.3" +schema-version = "0.3" + +[[io.buildpacks.pre.group]] +id = "buildpacks/procfile" +version = "latest" +exec-env = ["test"] +` + tmpProjectToml, err := createTmpProjectTomlFile(projectToml) + if err != nil { + t.Fatal(err) + } + + projectDescriptor, err := ReadProjectDescriptor(tmpProjectToml.Name(), logger) + if err != nil { + t.Fatal(err) + } + + assertProjectName(t, "gallant 0.3", projectDescriptor) + assertSchemaVersion(t, api.MustParse("0.3"), projectDescriptor) + + expectedNumberOfBuildPacks := 1 + expectedNumberOfExecEnvs := 1 + atIndex := 0 + assertBuildPackPreGroupExecEnv(t, "test", expectedNumberOfBuildPacks, expectedNumberOfExecEnvs, atIndex, projectDescriptor) + }) + }) + + it("should exec-env on [[io.buildpacks.post.group]]", func() { + projectToml := ` +[_] +name = "gallant 0.3" +schema-version = "0.3" + +[[io.buildpacks.post.group]] +id = "buildpacks/headless-chrome" +version = "latest" +exec-env = ["test-1"] +` + tmpProjectToml, err := createTmpProjectTomlFile(projectToml) + if err != nil { + 
t.Fatal(err) + } + + projectDescriptor, err := ReadProjectDescriptor(tmpProjectToml.Name(), logger) + if err != nil { + t.Fatal(err) + } + + assertProjectName(t, "gallant 0.3", projectDescriptor) + assertSchemaVersion(t, api.MustParse("0.3"), projectDescriptor) + + expectedNumberOfBuildPacks := 1 + expectedNumberOfExecEnvs := 1 + atIndex := 0 + assertBuildPackPostGroupExecEnv(t, "test-1", expectedNumberOfBuildPacks, expectedNumberOfExecEnvs, atIndex, projectDescriptor) + }) + + it("should exec-env on [[io.buildpacks.build.env]]", func() { + projectToml := ` +[_] +name = "gallant 0.3" +schema-version = "0.3" + +[[io.buildpacks.build.env]] +name = "RAILS_ENV" +value = "test" +exec-env = ["test-1.1"] +` + tmpProjectToml, err := createTmpProjectTomlFile(projectToml) + if err != nil { + t.Fatal(err) + } + + projectDescriptor, err := ReadProjectDescriptor(tmpProjectToml.Name(), logger) + if err != nil { + t.Fatal(err) + } + + assertProjectName(t, "gallant 0.3", projectDescriptor) + assertSchemaVersion(t, api.MustParse("0.3"), projectDescriptor) + + expectedNumberOfBuildPacks := 1 + expectedNumberOfExecEnvs := 1 + atIndex := 0 + assertBuildPackBuildExecEnv(t, "test-1.1", expectedNumberOfBuildPacks, expectedNumberOfExecEnvs, atIndex, projectDescriptor) + }) + it("should parse a valid v0.2 project.toml file", func() { projectToml := ` [_] @@ -39,21 +172,28 @@ type = "MIT" pipeline = "Lucerne" [io.buildpacks] exclude = [ "*.jar" ] +[[io.buildpacks.pre.group]] +uri = "https://example.com/buildpack/pre" +[[io.buildpacks.post.group]] +uri = "https://example.com/buildpack/post" [[io.buildpacks.group]] id = "example/lua" version = "1.0" [[io.buildpacks.group]] uri = "https://example.com/buildpack" -[[io.buildpacks.env.build]] +[[io.buildpacks.build.env]] name = "JAVA_OPTS" value = "-Xmx300m" +[[io.buildpacks.env.build]] +name = "JAVA_OPTS" +value = "this-should-get-overridden-because-its-deprecated" ` tmpProjectToml, err := createTmpProjectTomlFile(projectToml) if err != nil { 
t.Fatal(err) } - projectDescriptor, err := ReadProjectDescriptor(tmpProjectToml.Name()) + projectDescriptor, err := ReadProjectDescriptor(tmpProjectToml.Name(), logger) if err != nil { t.Fatal(err) } @@ -90,6 +230,18 @@ value = "-Xmx300m" expected, projectDescriptor.Build.Buildpacks[1].URI) } + expected = "https://example.com/buildpack/pre" + if projectDescriptor.Build.Pre.Buildpacks[0].URI != expected { + t.Fatalf("Expected\n-----\n%#v\n-----\nbut got\n-----\n%#v\n", + expected, projectDescriptor.Build.Pre.Buildpacks[0].URI) + } + + expected = "https://example.com/buildpack/post" + if projectDescriptor.Build.Post.Buildpacks[0].URI != expected { + t.Fatalf("Expected\n-----\n%#v\n-----\nbut got\n-----\n%#v\n", + expected, projectDescriptor.Build.Post.Buildpacks[0].URI) + } + expected = "JAVA_OPTS" if projectDescriptor.Build.Env[0].Name != expected { t.Fatalf("Expected\n-----\n%#v\n-----\nbut got\n-----\n%#v\n", @@ -114,6 +266,41 @@ value = "-Xmx300m" expected, projectDescriptor.Metadata["pipeline"]) } }) + + it("should be backwards compatible with older v0.2 project.toml file", func() { + projectToml := ` +[_] +name = "gallant 0.2" +schema-version="0.2" +[[io.buildpacks.env.build]] +name = "JAVA_OPTS" +value = "-Xmx300m" +` + tmpProjectToml, err := createTmpProjectTomlFile(projectToml) + if err != nil { + t.Fatal(err) + } + + projectDescriptor, err := ReadProjectDescriptor(tmpProjectToml.Name(), logger) + if err != nil { + t.Fatal(err) + } + + var expected string + + expected = "JAVA_OPTS" + if projectDescriptor.Build.Env[0].Name != expected { + t.Fatalf("Expected\n-----\n%#v\n-----\nbut got\n-----\n%#v\n", + expected, projectDescriptor.Build.Env[0].Name) + } + + expected = "-Xmx300m" + if projectDescriptor.Build.Env[0].Value != expected { + t.Fatalf("Expected\n-----\n%#v\n-----\nbut got\n-----\n%#v\n", + expected, projectDescriptor.Build.Env[0].Value) + } + }) + it("should parse a valid v0.1 project.toml file", func() { projectToml := ` [project] @@ -140,7 +327,7 
@@ pipeline = "Lucerne" t.Fatal(err) } - projectDescriptor, err := ReadProjectDescriptor(tmpProjectToml.Name()) + projectDescriptor, err := ReadProjectDescriptor(tmpProjectToml.Name(), logger) if err != nil { t.Fatal(err) } @@ -224,7 +411,7 @@ name = "gallant" t.Fatal(err) } - projectDescriptor, err := ReadProjectDescriptor(tmpProjectToml.Name()) + projectDescriptor, err := ReadProjectDescriptor(tmpProjectToml.Name(), logger) if err != nil { t.Fatal(err) } @@ -242,7 +429,7 @@ name = "gallant" }) it("should fail for an invalid project.toml path", func() { - _, err := ReadProjectDescriptor("/path/that/does/not/exist/project.toml") + _, err := ReadProjectDescriptor("/path/that/does/not/exist/project.toml", logger) if !os.IsNotExist(err) { t.Fatalf("Expected\n-----\n%#v\n-----\nbut got\n-----\n%#v\n", @@ -263,7 +450,7 @@ include = [ "*.jpg" ] if err != nil { t.Fatal(err) } - _, err = ReadProjectDescriptor(tmpProjectToml.Name()) + _, err = ReadProjectDescriptor(tmpProjectToml.Name(), logger) if err == nil { t.Fatalf( "Expected error for having both exclude and include defined") @@ -283,7 +470,7 @@ version = "1.2.3" t.Fatal(err) } - _, err = ReadProjectDescriptor(tmpProjectToml.Name()) + _, err = ReadProjectDescriptor(tmpProjectToml.Name(), logger) if err == nil { t.Fatalf("Expected error for NOT having id or uri defined for buildpacks") } @@ -303,7 +490,7 @@ version = "1.2.3" t.Fatal(err) } - _, err = ReadProjectDescriptor(tmpProjectToml.Name()) + _, err = ReadProjectDescriptor(tmpProjectToml.Name(), logger) if err == nil { t.Fatal("Expected error for having both uri and version defined for a buildpack(s)") } @@ -321,16 +508,166 @@ name = "licenses should have either a type or uri defined" t.Fatal(err) } - _, err = ReadProjectDescriptor(tmpProjectToml.Name()) + _, err = ReadProjectDescriptor(tmpProjectToml.Name(), logger) if err == nil { t.Fatal("Expected error for having neither type or uri defined for licenses") } }) + + it("should warn when no schema version is 
declared", func() { + projectToml := `` + tmpProjectToml, err := createTmpProjectTomlFile(projectToml) + if err != nil { + t.Fatal(err) + } + + _, err = ReadProjectDescriptor(tmpProjectToml.Name(), logger) + h.AssertNil(t, err) + + h.AssertContains(t, readStdout(), "Warning: No schema version declared in project.toml, defaulting to schema version 0.1\n") + }) + + it("should warn when unsupported keys, on tables the project owns, are declared with schema v0.1", func() { + projectToml := ` +[project] +authors = ["foo", "bar"] + +# try to use buildpack.io table with version 0.1 - warning message expected +[[io.buildpacks.build.env]] +name = "JAVA_OPTS" +value = "-Xmx1g" + +# something else defined by end-users - no warning message expected +[io.docker] +file = "./Dockerfile" + +# some metadata - no warning message expected +[metadata] +foo = "bar" +` + tmpProjectToml, err := createTmpProjectTomlFile(projectToml) + if err != nil { + t.Fatal(err) + } + + _, err = ReadProjectDescriptor(tmpProjectToml.Name(), logger) + h.AssertNil(t, err) + h.AssertContains( + t, + readStdout(), + "Warning: The following keys declared in project.toml are not supported in schema version 0.1:\n"+ + "Warning: - io.buildpacks.build.env\n"+ + "Warning: - io.buildpacks.build.env.name\n"+ + "Warning: - io.buildpacks.build.env.value\n"+ + "Warning: The above keys will be ignored. 
If this is not intentional, try updating your schema version.\n", + ) + }) + + it("should warn when unsupported keys, on tables the project owns, are declared with schema v0.2", func() { + projectToml := ` +[_] +schema-version = "0.2" +id = "foo" +version = "bar" +# typo in a key under valid table - warning message expected +versions = "0.1" + +[[_.licenses]] +type = "foo" +# invalid key under a valid table - warning message expected +foo = "bar" + +# try to use an invalid key under io.buildpacks - warning message expected +[[io.buildpacks.build.foo]] +name = "something" + +# something else defined by end-users - no warning message expected +[io.docker] +file = "./Dockerfile" + +# some metadata defined the end-user - no warning message expected +[_.metadata] +foo = "bar" + +# more metadata defined the end-user - no warning message expected +[_.metadata.fizz] +buzz = ["a", "b", "c"] +` + tmpProjectToml, err := createTmpProjectTomlFile(projectToml) + if err != nil { + t.Fatal(err) + } + + _, err = ReadProjectDescriptor(tmpProjectToml.Name(), logger) + h.AssertNil(t, err) + + // Assert we only warn + h.AssertContains( + t, + readStdout(), + "Warning: The following keys declared in project.toml are not supported in schema version 0.2:\n"+ + "Warning: - _.versions\n"+ + "Warning: - _.licenses.foo\n"+ + "Warning: - io.buildpacks.build.foo\n"+ + "Warning: - io.buildpacks.build.foo.name\n"+ + "Warning: The above keys will be ignored. 
If this is not intentional, try updating your schema version.\n", + ) + }) }) } +func assertProjectName(t *testing.T, expected string, projectDescriptor types.Descriptor) { + if projectDescriptor.Project.Name != expected { + t.Fatalf("Expected\n-----\n%#v\n-----\nbut got\n-----\n%#v\n", + expected, projectDescriptor.Project.Name) + } +} + +func assertSchemaVersion(t *testing.T, expected *api.Version, projectDescriptor types.Descriptor) { + if !reflect.DeepEqual(expected, projectDescriptor.SchemaVersion) { + t.Fatalf("Expected\n-----\n%#v\n-----\nbut got\n-----\n%#v\n", + expected, projectDescriptor.SchemaVersion) + } +} + +func assertBuildPackGroupExecEnv(t *testing.T, expected string, bpLength int, execEnvLength int, atIndex int, projectDescriptor types.Descriptor) { + h.AssertTrue(t, len(projectDescriptor.Build.Buildpacks) == bpLength) + h.AssertTrue(t, len(projectDescriptor.Build.Buildpacks[atIndex].ExecEnv) == execEnvLength) + if !reflect.DeepEqual(expected, projectDescriptor.Build.Buildpacks[atIndex].ExecEnv[atIndex]) { + t.Fatalf("Expected\n-----\n%#v\n-----\nbut got\n-----\n%#v\n", + expected, projectDescriptor.Build.Buildpacks[atIndex].ExecEnv[atIndex]) + } +} + +func assertBuildPackPreGroupExecEnv(t *testing.T, expected string, bpLength int, execEnvLength int, atIndex int, projectDescriptor types.Descriptor) { + h.AssertTrue(t, len(projectDescriptor.Build.Pre.Buildpacks) == bpLength) + h.AssertTrue(t, len(projectDescriptor.Build.Pre.Buildpacks[atIndex].ExecEnv) == execEnvLength) + if !reflect.DeepEqual(expected, projectDescriptor.Build.Pre.Buildpacks[atIndex].ExecEnv[atIndex]) { + t.Fatalf("Expected\n-----\n%#v\n-----\nbut got\n-----\n%#v\n", + expected, projectDescriptor.Build.Pre.Buildpacks[atIndex].ExecEnv[atIndex]) + } +} + +func assertBuildPackPostGroupExecEnv(t *testing.T, expected string, bpLength int, execEnvLength int, atIndex int, projectDescriptor types.Descriptor) { + h.AssertTrue(t, len(projectDescriptor.Build.Post.Buildpacks) == bpLength) + 
h.AssertTrue(t, len(projectDescriptor.Build.Post.Buildpacks[atIndex].ExecEnv) == execEnvLength) + if !reflect.DeepEqual(expected, projectDescriptor.Build.Post.Buildpacks[atIndex].ExecEnv[atIndex]) { + t.Fatalf("Expected\n-----\n%#v\n-----\nbut got\n-----\n%#v\n", + expected, projectDescriptor.Build.Post.Buildpacks[atIndex].ExecEnv[atIndex]) + } +} + +func assertBuildPackBuildExecEnv(t *testing.T, expected string, bpLength int, execEnvLength int, atIndex int, projectDescriptor types.Descriptor) { + h.AssertTrue(t, len(projectDescriptor.Build.Env) == bpLength) + h.AssertTrue(t, len(projectDescriptor.Build.Env[atIndex].ExecEnv) == execEnvLength) + if !reflect.DeepEqual(expected, projectDescriptor.Build.Env[atIndex].ExecEnv[atIndex]) { + t.Fatalf("Expected\n-----\n%#v\n-----\nbut got\n-----\n%#v\n", + expected, projectDescriptor.Build.Env[atIndex].ExecEnv[atIndex]) + } +} + func createTmpProjectTomlFile(projectToml string) (*os.File, error) { - tmpProjectToml, err := ioutil.TempFile(os.TempDir(), "project-") + tmpProjectToml, err := os.CreateTemp(os.TempDir(), "project-") if err != nil { log.Fatal("Failed to create temporary project toml file", err) } diff --git a/pkg/project/types/types.go b/pkg/project/types/types.go index 575adcc83f..a238e58c9c 100644 --- a/pkg/project/types/types.go +++ b/pkg/project/types/types.go @@ -11,15 +11,17 @@ type Script struct { } type Buildpack struct { - ID string `toml:"id"` - Version string `toml:"version"` - URI string `toml:"uri"` - Script Script `toml:"script"` + ID string `toml:"id"` + Version string `toml:"version"` + URI string `toml:"uri"` + Script Script `toml:"script"` + ExecEnv []string `toml:"exec-env"` } type EnvVar struct { - Name string `toml:"name"` - Value string `toml:"value"` + Name string `toml:"name"` + Value string `toml:"value"` + ExecEnv []string `toml:"exec-env"` } type Build struct { @@ -28,13 +30,18 @@ type Build struct { Buildpacks []Buildpack `toml:"buildpacks"` Env []EnvVar `toml:"env"` Builder string 
`toml:"builder"` + Pre GroupAddition + Post GroupAddition } type Project struct { - Name string `toml:"name"` - Version string `toml:"version"` - SourceURL string `toml:"source-url"` - Licenses []License `toml:"licenses"` + ID string `toml:"id"` + Name string `toml:"name"` + Version string `toml:"version"` + Authors []string `toml:"authors"` + DocumentationURL string `toml:"documentation-url"` + SourceURL string `toml:"source-url"` + Licenses []License `toml:"licenses"` } type License struct { @@ -48,3 +55,7 @@ type Descriptor struct { Metadata map[string]interface{} `toml:"metadata"` SchemaVersion *api.Version } + +type GroupAddition struct { + Buildpacks []Buildpack `toml:"group"` +} diff --git a/pkg/project/v01/project.go b/pkg/project/v01/project.go index 6d7919b291..f6f2052e3d 100644 --- a/pkg/project/v01/project.go +++ b/pkg/project/v01/project.go @@ -13,12 +13,12 @@ type Descriptor struct { Metadata map[string]interface{} `toml:"metadata"` } -func NewDescriptor(projectTomlContents string) (types.Descriptor, error) { +func NewDescriptor(projectTomlContents string) (types.Descriptor, toml.MetaData, error) { versionedDescriptor := &Descriptor{} - _, err := toml.Decode(projectTomlContents, versionedDescriptor) + tomlMetaData, err := toml.Decode(projectTomlContents, versionedDescriptor) if err != nil { - return types.Descriptor{}, err + return types.Descriptor{}, tomlMetaData, err } return types.Descriptor{ @@ -26,5 +26,5 @@ func NewDescriptor(projectTomlContents string) (types.Descriptor, error) { Build: versionedDescriptor.Build, Metadata: versionedDescriptor.Metadata, SchemaVersion: api.MustParse("0.1"), - }, nil + }, tomlMetaData, nil } diff --git a/pkg/project/v02/metadata.go b/pkg/project/v02/metadata.go new file mode 100644 index 0000000000..792ffb1a74 --- /dev/null +++ b/pkg/project/v02/metadata.go @@ -0,0 +1,177 @@ +package v02 + +import ( + "fmt" + "sort" + "strings" + "time" + + "github.com/buildpacks/lifecycle/platform/files" + 
"github.com/go-git/go-git/v5" + "github.com/go-git/go-git/v5/plumbing" +) + +type TagInfo struct { + Name string + Message string + Type string + TagHash string + TagTime time.Time +} + +func GitMetadata(appPath string) *files.ProjectSource { + repo, err := git.PlainOpen(appPath) + if err != nil { + return nil + } + headRef, err := repo.Head() + if err != nil { + return nil + } + commitTagMap := generateTagsMap(repo) + + describe := parseGitDescribe(repo, headRef, commitTagMap) + refs := parseGitRefs(repo, headRef, commitTagMap) + remote := parseGitRemote(repo) + + projectSource := &files.ProjectSource{ + Type: "git", + Version: map[string]interface{}{ + "commit": headRef.Hash().String(), + "describe": describe, + }, + Metadata: map[string]interface{}{ + "refs": refs, + "url": remote, + }, + } + return projectSource +} + +func generateTagsMap(repo *git.Repository) map[string][]TagInfo { + commitTagMap := make(map[string][]TagInfo) + tags, err := repo.Tags() + if err != nil { + return commitTagMap + } + + tags.ForEach(func(ref *plumbing.Reference) error { + tagObj, err := repo.TagObject(ref.Hash()) + switch err { + case nil: + commitTagMap[tagObj.Target.String()] = append( + commitTagMap[tagObj.Target.String()], + TagInfo{Name: tagObj.Name, Message: tagObj.Message, Type: "annotated", TagHash: ref.Hash().String(), TagTime: tagObj.Tagger.When}, + ) + case plumbing.ErrObjectNotFound: + commitTagMap[ref.Hash().String()] = append( + commitTagMap[ref.Hash().String()], + TagInfo{Name: getRefName(ref.Name().String()), Message: "", Type: "unannotated", TagHash: ref.Hash().String(), TagTime: time.Now()}, + ) + default: + return err + } + return nil + }) + + for _, tagRefs := range commitTagMap { + sort.Slice(tagRefs, func(i, j int) bool { + if tagRefs[i].Type == "annotated" && tagRefs[j].Type == "annotated" { + return tagRefs[i].TagTime.After(tagRefs[j].TagTime) + } + if tagRefs[i].Type == "unannotated" && tagRefs[j].Type == "unannotated" { + return tagRefs[i].Name < 
tagRefs[j].Name + } + if tagRefs[i].Type == "annotated" && tagRefs[j].Type == "unannotated" { + return true + } + return false + }) + } + return commitTagMap +} + +func generateBranchMap(repo *git.Repository) map[string][]string { + commitBranchMap := make(map[string][]string) + branches, err := repo.Branches() + if err != nil { + return commitBranchMap + } + branches.ForEach(func(ref *plumbing.Reference) error { + commitBranchMap[ref.Hash().String()] = append(commitBranchMap[ref.Hash().String()], getRefName(ref.Name().String())) + return nil + }) + return commitBranchMap +} + +// `git describe --tags --always` +func parseGitDescribe(repo *git.Repository, headRef *plumbing.Reference, commitTagMap map[string][]TagInfo) string { + logOpts := &git.LogOptions{ + From: headRef.Hash(), + Order: git.LogOrderCommitterTime, + } + commits, err := repo.Log(logOpts) + if err != nil { + return "" + } + + latestTag := headRef.Hash().String() + commitsFromHEAD := 0 + commitBranchMap := generateBranchMap(repo) + branchAtHEAD := getRefName(headRef.String()) + currentBranch := branchAtHEAD + for { + commitInfo, err := commits.Next() + if err != nil { + break + } + + if branchesAtCommit, exists := commitBranchMap[commitInfo.Hash.String()]; exists { + currentBranch = branchesAtCommit[0] + } + if refs, exists := commitTagMap[commitInfo.Hash.String()]; exists { + if branchAtHEAD != currentBranch && commitsFromHEAD != 0 { + // https://git-scm.com/docs/git-describe#_examples + latestTag = fmt.Sprintf("%s-%d-g%s", refs[0].Name, commitsFromHEAD, headRef.Hash().String()) + } else { + latestTag = refs[0].Name + } + break + } + commitsFromHEAD += 1 + } + return latestTag +} + +func parseGitRefs(repo *git.Repository, headRef *plumbing.Reference, commitTagMap map[string][]TagInfo) []string { + var parsedRefs []string + parsedRefs = append(parsedRefs, getRefName(headRef.Name().String())) + if refs, exists := commitTagMap[headRef.Hash().String()]; exists { + for _, ref := range refs { + parsedRefs 
= append(parsedRefs, ref.Name) + } + } + return parsedRefs +} + +func parseGitRemote(repo *git.Repository) string { + remotes, err := repo.Remotes() + if err != nil || len(remotes) == 0 { + return "" + } + + for _, remote := range remotes { + if remote.Config().Name == "origin" { + return remote.Config().URLs[0] + } + } + return remotes[0].Config().URLs[0] +} + +// Parse ref name from refs/tags/ +func getRefName(ref string) string { + if refSplit := strings.SplitN(ref, "/", 3); len(refSplit) == 3 { + return refSplit[2] + } + return "" +} diff --git a/pkg/project/v02/metadata_test.go b/pkg/project/v02/metadata_test.go new file mode 100644 index 0000000000..edfbdb5cac --- /dev/null +++ b/pkg/project/v02/metadata_test.go @@ -0,0 +1,659 @@ +package v02 + +import ( + "fmt" + "math/rand" + "os" + "path/filepath" + "sort" + "testing" + "time" + + "github.com/buildpacks/lifecycle/platform/files" + "github.com/go-git/go-git/v5" + "github.com/go-git/go-git/v5/config" + "github.com/go-git/go-git/v5/plumbing" + "github.com/go-git/go-git/v5/plumbing/object" + "github.com/heroku/color" + "github.com/sclevine/spec" + "github.com/sclevine/spec/report" + + h "github.com/buildpacks/pack/testhelpers" +) + +func TestMetadata(t *testing.T) { + color.Disable(true) + defer color.Disable(false) + spec.Run(t, "Metadata", testMetadata, spec.Sequential(), spec.Report(report.Terminal{})) +} + +func testMetadata(t *testing.T, when spec.G, it spec.S) { + var ( + repoPath string + repo *git.Repository + commits []plumbing.Hash + ) + + it.Before(func() { + var err error + + repoPath, err = os.MkdirTemp("", "test-repo") + h.AssertNil(t, err) + + repo, err = git.PlainInit(repoPath, false) + h.AssertNil(t, err) + + commits = createCommits(t, repo, repoPath, 5) + }) + + it.After(func() { + h.AssertNil(t, os.RemoveAll(repoPath)) + }) + + when("#GitMetadata", func() { + it("returns proper metadata format", func() { + assert := h.NewAssertionManager(t) + remoteOpts := &config.RemoteConfig{ + Name: 
"origin", + URLs: []string{"git@github.com:testorg/testproj.git", "git@github.com:testorg/testproj.git"}, + } + repo.CreateRemote(remoteOpts) + createUnannotatedTag(t, repo, commits[len(commits)-1], "testTag") + + output := GitMetadata(repoPath) + expectedOutput := &files.ProjectSource{ + Type: "git", + Version: map[string]interface{}{ + "commit": commits[len(commits)-1].String(), + "describe": "testTag", + }, + Metadata: map[string]interface{}{ + "refs": []string{"master", "testTag"}, + "url": "git@github.com:testorg/testproj.git", + }, + } + assert.Equal(output, expectedOutput) + }) + + it("returns nil if error occurs while fetching metadata", func() { + output := GitMetadata("/git-path-not-found-ok") + h.AssertNil(t, output) + }) + }) + + when("#generateTagsMap", func() { + when("repository has no tags", func() { + it("returns empty map", func() { + commitTagsMap := generateTagsMap(repo) + h.AssertEq(t, len(commitTagsMap), 0) + }) + }) + + when("repository has only unannotated tags", func() { + it("returns correct map if commits only have one tag", func() { + for i := 0; i < 4; i++ { + createUnannotatedTag(t, repo, commits[i], "") + } + + commitTagsMap := generateTagsMap(repo) + h.AssertEq(t, len(commitTagsMap), 4) + for i := 0; i < 4; i++ { + tagsInfo, shouldExist := commitTagsMap[commits[i].String()] + h.AssertEq(t, shouldExist, true) + h.AssertNotEq(t, tagsInfo[0].Name, "") + h.AssertEq(t, tagsInfo[0].Type, "unannotated") + h.AssertEq(t, tagsInfo[0].Message, "") + } + _, shouldNotExist := commitTagsMap[commits[3].String()] + h.AssertEq(t, shouldNotExist, true) + }) + + it("returns map sorted by ascending tag name if commits have multiple tags", func() { + for i := 0; i < 4; i++ { + for j := 0; j <= rand.Intn(10); j++ { + createUnannotatedTag(t, repo, commits[i], "") + } + } + + commitTagsMap := generateTagsMap(repo) + h.AssertEq(t, len(commitTagsMap), 4) + for i := 0; i < 4; i++ { + tagsInfo, shouldExist := commitTagsMap[commits[i].String()] + h.AssertEq(t, 
shouldExist, true) + + tagsSortedByName := sort.SliceIsSorted(tagsInfo, func(i, j int) bool { + return tagsInfo[i].Name < tagsInfo[j].Name + }) + h.AssertEq(t, tagsSortedByName, true) + } + }) + }) + + when("repository has only annotated tags", func() { + it("returns correct map if commits only have one tag", func() { + for i := 0; i < 4; i++ { + createAnnotatedTag(t, repo, commits[i], "") + } + + commitTagsMap := generateTagsMap(repo) + h.AssertEq(t, len(commitTagsMap), 4) + for i := 0; i < 4; i++ { + tagsInfo, shouldExist := commitTagsMap[commits[i].String()] + h.AssertEq(t, shouldExist, true) + h.AssertNotEq(t, tagsInfo[0].Name, "") + h.AssertEq(t, tagsInfo[0].Type, "annotated") + h.AssertNotEq(t, tagsInfo[0].Message, "") + } + _, shouldNotExist := commitTagsMap[commits[3].String()] + h.AssertEq(t, shouldNotExist, true) + }) + + it("returns map sorted by descending tag creation time if commits have multiple tags", func() { + for i := 0; i < 4; i++ { + for j := 0; j <= rand.Intn(10); j++ { + createAnnotatedTag(t, repo, commits[i], "") + } + } + + commitTagsMap := generateTagsMap(repo) + h.AssertEq(t, len(commitTagsMap), 4) + for i := 0; i < 4; i++ { + tagsInfo, shouldExist := commitTagsMap[commits[i].String()] + h.AssertEq(t, shouldExist, true) + + tagsSortedByTime := sort.SliceIsSorted(tagsInfo, func(i, j int) bool { + return tagsInfo[i].TagTime.After(tagsInfo[j].TagTime) + }) + h.AssertEq(t, tagsSortedByTime, true) + } + _, shouldNotExist := commitTagsMap[commits[3].String()] + h.AssertEq(t, shouldNotExist, true) + }) + }) + + when("repository has both annotated and unannotated tags", func() { + it("returns map where annotated tags exist prior to unnanotated if commits have multiple tags", func() { + for i := 0; i < 4; i++ { + for j := 0; j <= rand.Intn(10); j++ { + createAnnotatedTag(t, repo, commits[i], "") + } + for j := 0; j <= rand.Intn(10); j++ { + createUnannotatedTag(t, repo, commits[i], "") + } + } + + commitTagsMap := generateTagsMap(repo) + 
h.AssertEq(t, len(commitTagsMap), 4) + for i := 0; i < 4; i++ { + tagsInfo, shouldExist := commitTagsMap[commits[i].String()] + h.AssertEq(t, shouldExist, true) + + tagsSortedByType := sort.SliceIsSorted(tagsInfo, func(i, j int) bool { + if tagsInfo[i].Type == "annotated" && tagsInfo[j].Type == "unannotated" { + return true + } + return false + }) + h.AssertEq(t, tagsSortedByType, true) + } + }) + }) + }) + + when("#generateBranchMap", func() { + it("returns map with latest commit of the `master` branch", func() { + branchMap := generateBranchMap(repo) + h.AssertEq(t, branchMap[commits[len(commits)-1].String()][0], "master") + }) + + it("returns map with latest commit all the branches", func() { + checkoutBranch(t, repo, "newbranch-1", true) + newBranchCommits := createCommits(t, repo, repoPath, 3) + checkoutBranch(t, repo, "master", false) + checkoutBranch(t, repo, "newbranch-2", true) + + branchMap := generateBranchMap(repo) + h.AssertEq(t, branchMap[commits[len(commits)-1].String()][0], "master") + h.AssertEq(t, branchMap[commits[len(commits)-1].String()][1], "newbranch-2") + h.AssertEq(t, branchMap[newBranchCommits[len(newBranchCommits)-1].String()][0], "newbranch-1") + }) + }) + + when("#parseGitDescribe", func() { + when("all tags are defined in a single branch", func() { + when("repository has no tags", func() { + it("returns latest commit hash", func() { + commitTagsMap := generateTagsMap(repo) + headRef, err := repo.Head() + h.AssertNil(t, err) + + output := parseGitDescribe(repo, headRef, commitTagsMap) + h.AssertEq(t, output, commits[len(commits)-1].String()) + }) + }) + + when("repository has only unannotated tags", func() { + it("returns first tag encountered from HEAD", func() { + for i := 0; i < 3; i++ { + tagName := fmt.Sprintf("v0.%d-lw", i+1) + createUnannotatedTag(t, repo, commits[i], tagName) + } + + commitTagsMap := generateTagsMap(repo) + headRef, err := repo.Head() + h.AssertNil(t, err) + output := parseGitDescribe(repo, headRef, 
commitTagsMap) + h.AssertEq(t, output, "v0.3-lw") + }) + + it("returns proper tag name for tags containing `/`", func() { + tagName := "v0.1/testing" + t.Logf("Checking output for tag name: %s", tagName) + createUnannotatedTag(t, repo, commits[0], tagName) + + commitTagsMap := generateTagsMap(repo) + headRef, err := repo.Head() + h.AssertNil(t, err) + output := parseGitDescribe(repo, headRef, commitTagsMap) + h.AssertContains(t, output, "v0.1/testing") + }) + }) + + when("repository has only annotated tags", func() { + it("returns first tag encountered from HEAD", func() { + for i := 0; i < 3; i++ { + tagName := fmt.Sprintf("v0.%d", i+1) + createAnnotatedTag(t, repo, commits[i], tagName) + } + + commitTagsMap := generateTagsMap(repo) + headRef, err := repo.Head() + h.AssertNil(t, err) + output := parseGitDescribe(repo, headRef, commitTagsMap) + h.AssertEq(t, output, "v0.3") + }) + }) + + when("repository has both annotated and unannotated tags", func() { + when("each commit has only one tag", func() { + it("returns the first tag encountered from HEAD if unannotated tag comes first", func() { + createAnnotatedTag(t, repo, commits[0], "ann-tag-at-commit-0") + createUnannotatedTag(t, repo, commits[1], "unann-tag-at-commit-1") + createAnnotatedTag(t, repo, commits[2], "ann-tag-at-commit-2") + createUnannotatedTag(t, repo, commits[3], "unann-tag-at-commit-3") + createUnannotatedTag(t, repo, commits[4], "unann-tag-at-commit-4") + + commitTagsMap := generateTagsMap(repo) + headRef, err := repo.Head() + h.AssertNil(t, err) + output := parseGitDescribe(repo, headRef, commitTagsMap) + h.AssertEq(t, output, "unann-tag-at-commit-4") + }) + + it("returns the first tag encountered from HEAD if annotated tag comes first", func() { + createAnnotatedTag(t, repo, commits[0], "ann-tag-at-commit-0") + createUnannotatedTag(t, repo, commits[1], "unann-tag-at-commit-1") + createAnnotatedTag(t, repo, commits[2], "ann-tag-at-commit-2") + createAnnotatedTag(t, repo, commits[3], 
"ann-tag-at-commit-3") + + commitTagsMap := generateTagsMap(repo) + headRef, err := repo.Head() + h.AssertNil(t, err) + output := parseGitDescribe(repo, headRef, commitTagsMap) + h.AssertEq(t, output, "ann-tag-at-commit-3") + }) + + it("returns the tag at HEAD if annotated tag exists at HEAD", func() { + createAnnotatedTag(t, repo, commits[4], "ann-tag-at-HEAD") + + commitTagsMap := generateTagsMap(repo) + headRef, err := repo.Head() + h.AssertNil(t, err) + output := parseGitDescribe(repo, headRef, commitTagsMap) + h.AssertEq(t, output, "ann-tag-at-HEAD") + }) + + it("returns the tag at HEAD if unannotated tag exists at HEAD", func() { + createUnannotatedTag(t, repo, commits[4], "unann-tag-at-HEAD") + + commitTagsMap := generateTagsMap(repo) + headRef, err := repo.Head() + h.AssertNil(t, err) + output := parseGitDescribe(repo, headRef, commitTagsMap) + h.AssertEq(t, output, "unann-tag-at-HEAD") + }) + }) + + when("commits have multiple tags", func() { + it("returns most recently created tag if a commit has multiple annotated tags", func() { + createAnnotatedTag(t, repo, commits[1], "ann-tag-1-at-commit-1") + createAnnotatedTag(t, repo, commits[2], "ann-tag-1-at-commit-2") + createAnnotatedTag(t, repo, commits[2], "ann-tag-2-at-commit-2") + createAnnotatedTag(t, repo, commits[2], "ann-tag-3-at-commit-2") + + commitTagsMap := generateTagsMap(repo) + headRef, err := repo.Head() + h.AssertNil(t, err) + + output := parseGitDescribe(repo, headRef, commitTagsMap) + tagsAtCommit := commitTagsMap[commits[2].String()] + h.AssertEq(t, output, tagsAtCommit[0].Name) + for i := 1; i < len(tagsAtCommit); i++ { + h.AssertEq(t, tagsAtCommit[i].TagTime.Before(tagsAtCommit[0].TagTime), true) + } + }) + + it("returns the tag name that comes first when sorted alphabetically if a commit has multiple unannotated tags", func() { + createUnannotatedTag(t, repo, commits[1], "ann-tag-1-at-commit-1") + createUnannotatedTag(t, repo, commits[2], "v0.000002-lw") + createUnannotatedTag(t, repo, 
commits[2], "v0.0002-lw") + createUnannotatedTag(t, repo, commits[2], "v1.0002-lw") + + commitTagsMap := generateTagsMap(repo) + headRef, err := repo.Head() + h.AssertNil(t, err) + + output := parseGitDescribe(repo, headRef, commitTagsMap) + h.AssertEq(t, output, "v0.000002-lw") + }) + + it("returns annotated tag is a commit has both annotated and unannotated tags", func() { + createAnnotatedTag(t, repo, commits[1], "ann-tag-1-at-commit-1") + createAnnotatedTag(t, repo, commits[2], "ann-tag-1-at-commit-2") + createUnannotatedTag(t, repo, commits[2], "unann-tag-1-at-commit-2") + + commitTagsMap := generateTagsMap(repo) + headRef, err := repo.Head() + h.AssertNil(t, err) + + output := parseGitDescribe(repo, headRef, commitTagsMap) + h.AssertEq(t, output, "ann-tag-1-at-commit-2") + }) + }) + }) + }) + + when("tags are defined in multiple branches", func() { + when("tag is defined in the latest commit of `master` branch and HEAD is at a different branch", func() { + it("returns the tag if HEAD, master and different branch is at tags", func() { + checkoutBranch(t, repo, "new-branch", true) + createAnnotatedTag(t, repo, commits[len(commits)-1], "ann-tag-at-HEAD") + + headRef, err := repo.Head() + h.AssertNil(t, err) + commitTagsMap := generateTagsMap(repo) + output := parseGitDescribe(repo, headRef, commitTagsMap) + h.AssertEq(t, output, "ann-tag-at-HEAD") + }) + + when("branch is multiple commits ahead of master", func() { + it("returns git generated version of annotated tag if branch is 2 commits ahead of `master`", func() { + createAnnotatedTag(t, repo, commits[len(commits)-1], "testTag") + checkoutBranch(t, repo, "new-branch", true) + newCommits := createCommits(t, repo, repoPath, 2) + + headRef, err := repo.Head() + h.AssertNil(t, err) + commitTagsMap := generateTagsMap(repo) + output := parseGitDescribe(repo, headRef, commitTagsMap) + expectedOutput := fmt.Sprintf("testTag-2-g%s", newCommits[len(newCommits)-1].String()) + h.AssertEq(t, output, expectedOutput) + }) 
+ + it("returns git generated version of unannotated tag if branch is 5 commits ahead of `master`", func() { + createUnannotatedTag(t, repo, commits[len(commits)-1], "testTag") + checkoutBranch(t, repo, "new-branch", true) + newCommits := createCommits(t, repo, repoPath, 5) + + headRef, err := repo.Head() + h.AssertNil(t, err) + commitTagsMap := generateTagsMap(repo) + output := parseGitDescribe(repo, headRef, commitTagsMap) + expectedOutput := fmt.Sprintf("testTag-5-g%s", newCommits[len(newCommits)-1].String()) + h.AssertEq(t, output, expectedOutput) + }) + + it("returns the commit hash if only the diverged tree of `master` branch has a tag", func() { + checkoutBranch(t, repo, "new-branch", true) + checkoutBranch(t, repo, "master", false) + newCommits := createCommits(t, repo, repoPath, 3) + createUnannotatedTag(t, repo, newCommits[len(newCommits)-1], "testTagAtMaster") + checkoutBranch(t, repo, "new-branch", false) + + headRef, err := repo.Head() + h.AssertNil(t, err) + commitTagsMap := generateTagsMap(repo) + output := parseGitDescribe(repo, headRef, commitTagsMap) + expectedOutput := commits[len(commits)-1].String() + h.AssertEq(t, output, expectedOutput) + }) + }) + }) + }) + }) + + when("#parseGitRefs", func() { + when("HEAD is not at a tag", func() { + it("returns branch name if checked out branch is `master`", func() { + commitTagsMap := generateTagsMap(repo) + headRef, err := repo.Head() + h.AssertNil(t, err) + output := parseGitRefs(repo, headRef, commitTagsMap) + expectedOutput := []string{"master"} + h.AssertEq(t, output, expectedOutput) + }) + + it("returns branch name if checked out branch is not `master`", func() { + checkoutBranch(t, repo, "tests/05-05/test-branch", true) + createCommits(t, repo, repoPath, 1) + + commitTagsMap := generateTagsMap(repo) + headRef, err := repo.Head() + h.AssertNil(t, err) + output := parseGitRefs(repo, headRef, commitTagsMap) + expectedOutput := []string{"tests/05-05/test-branch"} + h.AssertEq(t, output, 
expectedOutput) + }) + }) + + when("HEAD is at a commit with single tag", func() { + it("returns annotated tag and branch name", func() { + createAnnotatedTag(t, repo, commits[len(commits)-1], "test-tag") + commitTagsMap := generateTagsMap(repo) + headRef, err := repo.Head() + h.AssertNil(t, err) + output := parseGitRefs(repo, headRef, commitTagsMap) + expectedOutput := []string{"master", "test-tag"} + h.AssertEq(t, output, expectedOutput) + }) + + it("returns unannotated tag and branch name", func() { + createUnannotatedTag(t, repo, commits[len(commits)-1], "test-tag") + commitTagsMap := generateTagsMap(repo) + headRef, err := repo.Head() + h.AssertNil(t, err) + output := parseGitRefs(repo, headRef, commitTagsMap) + expectedOutput := []string{"master", "test-tag"} + h.AssertEq(t, output, expectedOutput) + }) + }) + + when("HEAD is at a commit with multiple tags", func() { + it("returns correct tag names if all tags are unannotated", func() { + createUnannotatedTag(t, repo, commits[len(commits)-2], "v0.01-testtag-lw") + createUnannotatedTag(t, repo, commits[len(commits)-1], "v0.02-testtag-lw-1") + createUnannotatedTag(t, repo, commits[len(commits)-1], "v0.02-testtag-lw-2") + commitTagsMap := generateTagsMap(repo) + headRef, err := repo.Head() + h.AssertNil(t, err) + output := parseGitRefs(repo, headRef, commitTagsMap) + expectedOutput := []string{"master", "v0.02-testtag-lw-1", "v0.02-testtag-lw-2"} + h.AssertEq(t, output, expectedOutput) + }) + + it("returns correct tag names if all tags are annotated", func() { + createAnnotatedTag(t, repo, commits[len(commits)-2], "v0.01-testtag") + createAnnotatedTag(t, repo, commits[len(commits)-1], "v0.02-testtag") + createAnnotatedTag(t, repo, commits[len(commits)-1], "v0.03-testtag") + commitTagsMap := generateTagsMap(repo) + headRef, err := repo.Head() + h.AssertNil(t, err) + output := parseGitRefs(repo, headRef, commitTagsMap) + expectedOutput := []string{"master", "v0.02-testtag", "v0.03-testtag"} + sort.Strings(output) 
+ sort.Strings(expectedOutput) + h.AssertEq(t, output, expectedOutput) + }) + + it("returns correct tag names for both tag types", func() { + createUnannotatedTag(t, repo, commits[len(commits)-3], "v0.001-testtag-lw") + createAnnotatedTag(t, repo, commits[len(commits)-2], "v0.01-testtag") + createUnannotatedTag(t, repo, commits[len(commits)-1], "v0.02-testtag-lw-1") + createUnannotatedTag(t, repo, commits[len(commits)-1], "v0.02-testtag-lw-2") + createAnnotatedTag(t, repo, commits[len(commits)-1], "v0.02-testtag-1") + + commitTagsMap := generateTagsMap(repo) + headRef, err := repo.Head() + h.AssertNil(t, err) + output := parseGitRefs(repo, headRef, commitTagsMap) + expectedOutput := []string{"master", "v0.02-testtag-1", "v0.02-testtag-lw-1", "v0.02-testtag-lw-2"} + h.AssertEq(t, output, expectedOutput) + }) + + it("returns correct tag names for both tag types when branch is not `master`", func() { + checkoutBranch(t, repo, "test-branch", true) + createUnannotatedTag(t, repo, commits[len(commits)-3], "v0.001-testtag-lw") + createAnnotatedTag(t, repo, commits[len(commits)-2], "v0.01-testtag") + createUnannotatedTag(t, repo, commits[len(commits)-1], "v0.02-testtag-lw-1") + createUnannotatedTag(t, repo, commits[len(commits)-1], "v0.02-testtag-lw-2") + createAnnotatedTag(t, repo, commits[len(commits)-1], "v0.02-testtag-1") + createAnnotatedTag(t, repo, commits[len(commits)-1], "v0.02-testtag-2") + + commitTagsMap := generateTagsMap(repo) + headRef, err := repo.Head() + h.AssertNil(t, err) + output := parseGitRefs(repo, headRef, commitTagsMap) + expectedOutput := []string{"test-branch", "v0.02-testtag-1", "v0.02-testtag-2", "v0.02-testtag-lw-1", "v0.02-testtag-lw-2"} + sort.Strings(output) + sort.Strings(expectedOutput) + h.AssertEq(t, output, expectedOutput) + }) + }) + }) + + when("#parseGitRemote", func() { + it("returns fetch url if remote `origin` exists", func() { + remoteOpts := &config.RemoteConfig{ + Name: "origin", + URLs: 
[]string{"git@github.com:testorg/testproj.git", "git@github.com:testorg/testproj.git"}, + } + repo.CreateRemote(remoteOpts) + + output := parseGitRemote(repo) + h.AssertEq(t, output, "git@github.com:testorg/testproj.git") + }) + + it("returns empty string if no remote exists", func() { + output := parseGitRemote(repo) + h.AssertEq(t, output, "") + }) + + it("returns fetch url if fetch and push URLs are different", func() { + remoteOpts := &config.RemoteConfig{ + Name: "origin", + URLs: []string{"git@fetch.com:testorg/testproj.git", "git@pushing-p-github.com:testorg/testproj.git"}, + } + repo.CreateRemote(remoteOpts) + + output := parseGitRemote(repo) + h.AssertEq(t, output, "git@fetch.com:testorg/testproj.git") + }) + }) + + when("#getRefName", func() { + it("return proper ref for refs with `/`", func() { + output := getRefName("refs/tags/this/is/a/tag/with/slashes") + h.AssertEq(t, output, "this/is/a/tag/with/slashes") + }) + }) +} + +func createCommits(t *testing.T, repo *git.Repository, repoPath string, numberOfCommits int) []plumbing.Hash { + worktree, err := repo.Worktree() + h.AssertNil(t, err) + + var commitHashes []plumbing.Hash + for i := 0; i < numberOfCommits; i++ { + file, err := os.CreateTemp(repoPath, h.RandString(10)) + h.AssertNil(t, err) + defer file.Close() + + _, err = worktree.Add(filepath.Base(file.Name())) + h.AssertNil(t, err) + + commitMsg := fmt.Sprintf("%s %d", "test commit number", i) + commitOpts := git.CommitOptions{ + All: true, + Author: &object.Signature{ + Name: "Test Author", + Email: "testauthor@test.com", + When: time.Now(), + }, + Committer: &object.Signature{ + Name: "Test Committer", + Email: "testcommitter@test.com", + When: time.Now(), + }, + } + commitHash, err := worktree.Commit(commitMsg, &commitOpts) + h.AssertNil(t, err) + commitHashes = append(commitHashes, commitHash) + } + return commitHashes +} + +func createUnannotatedTag(t *testing.T, repo *git.Repository, commitHash plumbing.Hash, tagName string) { + if tagName 
== "" { + version := rand.Float32()*10 + float32(rand.Intn(20)) + tagName = fmt.Sprintf("v%f-lw", version) + } + _, err := repo.CreateTag(tagName, commitHash, nil) + h.AssertNil(t, err) +} + +func createAnnotatedTag(t *testing.T, repo *git.Repository, commitHash plumbing.Hash, tagName string) { + if tagName == "" { + version := rand.Float32()*10 + float32(rand.Intn(20)) + tagName = fmt.Sprintf("v%f-%s", version, h.RandString(5)) + } + tagMessage := fmt.Sprintf("This is an annotated tag for version - %s", tagName) + tagOpts := &git.CreateTagOptions{ + Message: tagMessage, + Tagger: &object.Signature{ + Name: "Test Tagger", + Email: "testtagger@test.com", + When: time.Now().Add(time.Hour*time.Duration(rand.Intn(100)) + time.Minute*time.Duration(rand.Intn(100))), + }, + } + _, err := repo.CreateTag(tagName, commitHash, tagOpts) + h.AssertNil(t, err) +} + +func checkoutBranch(t *testing.T, repo *git.Repository, branchName string, newBranch bool) { + worktree, err := repo.Worktree() + h.AssertNil(t, err) + + var fullBranchName string + if branchName == "" { + fullBranchName = "refs/heads/" + h.RandString(10) + } else { + fullBranchName = "refs/heads/" + branchName + } + + checkoutOpts := &git.CheckoutOptions{ + Branch: plumbing.ReferenceName(fullBranchName), + Create: newBranch, + } + err = worktree.Checkout(checkoutOpts) + h.AssertNil(t, err) +} diff --git a/pkg/project/v02/project.go b/pkg/project/v02/project.go index d4969001f1..c965087e84 100644 --- a/pkg/project/v02/project.go +++ b/pkg/project/v02/project.go @@ -8,22 +8,35 @@ import ( ) type Buildpacks struct { - Include []string `toml:"include"` - Exclude []string `toml:"exclude"` - Group []types.Buildpack `toml:"group"` - Env Env `toml:"env"` - Builder string `toml:"builder"` + Include []string `toml:"include"` + Exclude []string `toml:"exclude"` + Group []Buildpack `toml:"group"` + Env Env `toml:"env"` + Build Build `toml:"build"` + Builder string `toml:"builder"` + Pre GroupAddition `toml:"pre"` + Post 
GroupAddition `toml:"post"` } +type Build struct { + Env []EnvVar `toml:"env"` +} + +// Deprecated: use `[[io.buildpacks.build.env]]` instead. see https://github.com/buildpacks/pack/pull/1479 type Env struct { - Build []types.EnvVar `toml:"build"` + Build []EnvVar `toml:"build"` } type Project struct { - Name string `toml:"name"` - Licenses []types.License `toml:"licenses"` - Metadata map[string]interface{} `toml:"metadata"` - SchemaVersion string `toml:"schema-version"` + SchemaVersion string `toml:"schema-version"` + ID string `toml:"id"` + Name string `toml:"name"` + Version string `toml:"version"` + Authors []string `toml:"authors"` + Licenses []types.License `toml:"licenses"` + DocumentationURL string `toml:"documentation-url"` + SourceURL string `toml:"source-url"` + Metadata map[string]interface{} `toml:"metadata"` } type IO struct { @@ -35,11 +48,33 @@ type Descriptor struct { IO IO `toml:"io"` } -func NewDescriptor(projectTomlContents string) (types.Descriptor, error) { +type Buildpack struct { + ID string `toml:"id"` + Version string `toml:"version"` + URI string `toml:"uri"` + Script types.Script `toml:"script"` +} + +type EnvVar struct { + Name string `toml:"name"` + Value string `toml:"value"` +} + +type GroupAddition struct { + Buildpacks []Buildpack `toml:"group"` +} + +func NewDescriptor(projectTomlContents string) (types.Descriptor, toml.MetaData, error) { versionedDescriptor := &Descriptor{} - _, err := toml.Decode(projectTomlContents, &versionedDescriptor) + tomlMetaData, err := toml.Decode(projectTomlContents, &versionedDescriptor) if err != nil { - return types.Descriptor{}, err + return types.Descriptor{}, tomlMetaData, err + } + + // backward compatibility for incorrect key + env := versionedDescriptor.IO.Buildpacks.Build.Env + if env == nil { + env = versionedDescriptor.IO.Buildpacks.Env.Build } return types.Descriptor{ @@ -50,11 +85,51 @@ func NewDescriptor(projectTomlContents string) (types.Descriptor, error) { Build: types.Build{ Include: 
versionedDescriptor.IO.Buildpacks.Include, Exclude: versionedDescriptor.IO.Buildpacks.Exclude, - Buildpacks: versionedDescriptor.IO.Buildpacks.Group, - Env: versionedDescriptor.IO.Buildpacks.Env.Build, + Buildpacks: mapToBuildPacksDescriptor(versionedDescriptor.IO.Buildpacks.Group), + Env: mapToEnvVarsDescriptor(env), Builder: versionedDescriptor.IO.Buildpacks.Builder, + Pre: types.GroupAddition{ + Buildpacks: mapToBuildPacksDescriptor(versionedDescriptor.IO.Buildpacks.Pre.Buildpacks), + }, + Post: types.GroupAddition{ + Buildpacks: mapToBuildPacksDescriptor(versionedDescriptor.IO.Buildpacks.Post.Buildpacks), + }, }, Metadata: versionedDescriptor.Project.Metadata, SchemaVersion: api.MustParse("0.2"), - }, nil + }, tomlMetaData, nil +} + +func mapToBuildPacksDescriptor(v2BuildPacks []Buildpack) []types.Buildpack { + var buildPacks []types.Buildpack + for _, v2BuildPack := range v2BuildPacks { + buildPacks = append(buildPacks, mapToBuildPackDescriptor(v2BuildPack)) + } + return buildPacks +} + +func mapToBuildPackDescriptor(v2BuildPack Buildpack) types.Buildpack { + return types.Buildpack{ + ID: v2BuildPack.ID, + Version: v2BuildPack.Version, + URI: v2BuildPack.URI, + Script: v2BuildPack.Script, + ExecEnv: []string{}, // schema v2 doesn't handle execution environments variables + } +} + +func mapToEnvVarsDescriptor(v2EnvVars []EnvVar) []types.EnvVar { + var envVars []types.EnvVar + for _, v2EnvVar := range v2EnvVars { + envVars = append(envVars, mapToEnVarDescriptor(v2EnvVar)) + } + return envVars +} + +func mapToEnVarDescriptor(v2EnVar EnvVar) types.EnvVar { + return types.EnvVar{ + Name: v2EnVar.Name, + Value: v2EnVar.Value, + ExecEnv: []string{}, // schema v2 doesn't handle execution environments variables + } } diff --git a/pkg/project/v03/project.go b/pkg/project/v03/project.go new file mode 100644 index 0000000000..d01e6c354a --- /dev/null +++ b/pkg/project/v03/project.go @@ -0,0 +1,65 @@ +package v03 + +import ( + "github.com/BurntSushi/toml" + 
"github.com/buildpacks/lifecycle/api" + + "github.com/buildpacks/pack/pkg/project/types" +) + +type Buildpacks struct { + Include []string `toml:"include"` + Exclude []string `toml:"exclude"` + Group []types.Buildpack `toml:"group"` + Build types.Build `toml:"build"` + Builder string `toml:"builder"` + Pre types.GroupAddition `toml:"pre"` + Post types.GroupAddition `toml:"post"` +} + +type Project struct { + SchemaVersion string `toml:"schema-version"` + ID string `toml:"id"` + Name string `toml:"name"` + Version string `toml:"version"` + Authors []string `toml:"authors"` + Licenses []types.License `toml:"licenses"` + DocumentationURL string `toml:"documentation-url"` + SourceURL string `toml:"source-url"` + Metadata map[string]interface{} `toml:"metadata"` +} + +type IO struct { + Buildpacks Buildpacks `toml:"buildpacks"` +} + +type Descriptor struct { + Project Project `toml:"_"` + IO IO `toml:"io"` +} + +func NewDescriptor(projectTomlContents string) (types.Descriptor, toml.MetaData, error) { + versionedDescriptor := &Descriptor{} + tomlMetaData, err := toml.Decode(projectTomlContents, &versionedDescriptor) + if err != nil { + return types.Descriptor{}, tomlMetaData, err + } + + return types.Descriptor{ + Project: types.Project{ + Name: versionedDescriptor.Project.Name, + Licenses: versionedDescriptor.Project.Licenses, + }, + Build: types.Build{ + Include: versionedDescriptor.IO.Buildpacks.Include, + Exclude: versionedDescriptor.IO.Buildpacks.Exclude, + Buildpacks: versionedDescriptor.IO.Buildpacks.Group, + Env: versionedDescriptor.IO.Buildpacks.Build.Env, + Builder: versionedDescriptor.IO.Buildpacks.Builder, + Pre: versionedDescriptor.IO.Buildpacks.Pre, + Post: versionedDescriptor.IO.Buildpacks.Post, + }, + Metadata: versionedDescriptor.Project.Metadata, + SchemaVersion: api.MustParse("0.3"), + }, tomlMetaData, nil +} diff --git a/pkg/testmocks/mock_access_checker.go b/pkg/testmocks/mock_access_checker.go new file mode 100644 index 0000000000..558b85a580 --- 
/dev/null +++ b/pkg/testmocks/mock_access_checker.go @@ -0,0 +1,48 @@ +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/buildpacks/pack/pkg/client (interfaces: AccessChecker) + +// Package testmocks is a generated GoMock package. +package testmocks + +import ( + reflect "reflect" + + gomock "github.com/golang/mock/gomock" +) + +// MockAccessChecker is a mock of AccessChecker interface. +type MockAccessChecker struct { + ctrl *gomock.Controller + recorder *MockAccessCheckerMockRecorder +} + +// MockAccessCheckerMockRecorder is the mock recorder for MockAccessChecker. +type MockAccessCheckerMockRecorder struct { + mock *MockAccessChecker +} + +// NewMockAccessChecker creates a new mock instance. +func NewMockAccessChecker(ctrl *gomock.Controller) *MockAccessChecker { + mock := &MockAccessChecker{ctrl: ctrl} + mock.recorder = &MockAccessCheckerMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockAccessChecker) EXPECT() *MockAccessCheckerMockRecorder { + return m.recorder +} + +// Check mocks base method. +func (m *MockAccessChecker) Check(arg0 string) bool { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Check", arg0) + ret0, _ := ret[0].(bool) + return ret0 +} + +// Check indicates an expected call of Check. +func (mr *MockAccessCheckerMockRecorder) Check(arg0 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Check", reflect.TypeOf((*MockAccessChecker)(nil).Check), arg0) +} diff --git a/pkg/testmocks/mock_blob_downloader.go b/pkg/testmocks/mock_blob_downloader.go index 9596b0f0d8..80a04d9878 100644 --- a/pkg/testmocks/mock_blob_downloader.go +++ b/pkg/testmocks/mock_blob_downloader.go @@ -13,30 +13,30 @@ import ( blob "github.com/buildpacks/pack/pkg/blob" ) -// MockBlobDownloader is a mock of BlobDownloader interface +// MockBlobDownloader is a mock of BlobDownloader interface. 
type MockBlobDownloader struct { ctrl *gomock.Controller recorder *MockBlobDownloaderMockRecorder } -// MockBlobDownloaderMockRecorder is the mock recorder for MockBlobDownloader +// MockBlobDownloaderMockRecorder is the mock recorder for MockBlobDownloader. type MockBlobDownloaderMockRecorder struct { mock *MockBlobDownloader } -// NewMockBlobDownloader creates a new mock instance +// NewMockBlobDownloader creates a new mock instance. func NewMockBlobDownloader(ctrl *gomock.Controller) *MockBlobDownloader { mock := &MockBlobDownloader{ctrl: ctrl} mock.recorder = &MockBlobDownloaderMockRecorder{mock} return mock } -// EXPECT returns an object that allows the caller to indicate expected use +// EXPECT returns an object that allows the caller to indicate expected use. func (m *MockBlobDownloader) EXPECT() *MockBlobDownloaderMockRecorder { return m.recorder } -// Download mocks base method +// Download mocks base method. func (m *MockBlobDownloader) Download(arg0 context.Context, arg1 string) (blob.Blob, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "Download", arg0, arg1) @@ -45,7 +45,7 @@ func (m *MockBlobDownloader) Download(arg0 context.Context, arg1 string) (blob.B return ret0, ret1 } -// Download indicates an expected call of Download +// Download indicates an expected call of Download. func (mr *MockBlobDownloaderMockRecorder) Download(arg0, arg1 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Download", reflect.TypeOf((*MockBlobDownloader)(nil).Download), arg0, arg1) diff --git a/pkg/testmocks/mock_build_module.go b/pkg/testmocks/mock_build_module.go new file mode 100644 index 0000000000..5548ad6f6f --- /dev/null +++ b/pkg/testmocks/mock_build_module.go @@ -0,0 +1,66 @@ +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/buildpacks/pack/pkg/buildpack (interfaces: BuildModule) + +// Package testmocks is a generated GoMock package. 
+package testmocks + +import ( + io "io" + reflect "reflect" + + gomock "github.com/golang/mock/gomock" + + buildpack "github.com/buildpacks/pack/pkg/buildpack" +) + +// MockBuildModule is a mock of BuildModule interface. +type MockBuildModule struct { + ctrl *gomock.Controller + recorder *MockBuildModuleMockRecorder +} + +// MockBuildModuleMockRecorder is the mock recorder for MockBuildModule. +type MockBuildModuleMockRecorder struct { + mock *MockBuildModule +} + +// NewMockBuildModule creates a new mock instance. +func NewMockBuildModule(ctrl *gomock.Controller) *MockBuildModule { + mock := &MockBuildModule{ctrl: ctrl} + mock.recorder = &MockBuildModuleMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockBuildModule) EXPECT() *MockBuildModuleMockRecorder { + return m.recorder +} + +// Descriptor mocks base method. +func (m *MockBuildModule) Descriptor() buildpack.Descriptor { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Descriptor") + ret0, _ := ret[0].(buildpack.Descriptor) + return ret0 +} + +// Descriptor indicates an expected call of Descriptor. +func (mr *MockBuildModuleMockRecorder) Descriptor() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Descriptor", reflect.TypeOf((*MockBuildModule)(nil).Descriptor)) +} + +// Open mocks base method. +func (m *MockBuildModule) Open() (io.ReadCloser, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Open") + ret0, _ := ret[0].(io.ReadCloser) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// Open indicates an expected call of Open. 
+func (mr *MockBuildModuleMockRecorder) Open() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Open", reflect.TypeOf((*MockBuildModule)(nil).Open)) +} diff --git a/pkg/testmocks/mock_buildpack.go b/pkg/testmocks/mock_buildpack.go deleted file mode 100644 index a96441d925..0000000000 --- a/pkg/testmocks/mock_buildpack.go +++ /dev/null @@ -1,66 +0,0 @@ -// Code generated by MockGen. DO NOT EDIT. -// Source: github.com/buildpacks/pack/pkg/buildpack (interfaces: Buildpack) - -// Package testmocks is a generated GoMock package. -package testmocks - -import ( - io "io" - reflect "reflect" - - gomock "github.com/golang/mock/gomock" - - dist "github.com/buildpacks/pack/pkg/dist" -) - -// MockBuildpack is a mock of Buildpack interface -type MockBuildpack struct { - ctrl *gomock.Controller - recorder *MockBuildpackMockRecorder -} - -// MockBuildpackMockRecorder is the mock recorder for MockBuildpack -type MockBuildpackMockRecorder struct { - mock *MockBuildpack -} - -// NewMockBuildpack creates a new mock instance -func NewMockBuildpack(ctrl *gomock.Controller) *MockBuildpack { - mock := &MockBuildpack{ctrl: ctrl} - mock.recorder = &MockBuildpackMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use -func (m *MockBuildpack) EXPECT() *MockBuildpackMockRecorder { - return m.recorder -} - -// Descriptor mocks base method -func (m *MockBuildpack) Descriptor() dist.BuildpackDescriptor { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Descriptor") - ret0, _ := ret[0].(dist.BuildpackDescriptor) - return ret0 -} - -// Descriptor indicates an expected call of Descriptor -func (mr *MockBuildpackMockRecorder) Descriptor() *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Descriptor", reflect.TypeOf((*MockBuildpack)(nil).Descriptor)) -} - -// Open mocks base method -func (m *MockBuildpack) Open() (io.ReadCloser, error) { - 
m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Open") - ret0, _ := ret[0].(io.ReadCloser) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// Open indicates an expected call of Open -func (mr *MockBuildpackMockRecorder) Open() *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Open", reflect.TypeOf((*MockBuildpack)(nil).Open)) -} diff --git a/pkg/testmocks/mock_buildpack_downloader.go b/pkg/testmocks/mock_buildpack_downloader.go index 9d46b10518..d67ff548d2 100644 --- a/pkg/testmocks/mock_buildpack_downloader.go +++ b/pkg/testmocks/mock_buildpack_downloader.go @@ -13,40 +13,40 @@ import ( buildpack "github.com/buildpacks/pack/pkg/buildpack" ) -// MockBuildpackDownloader is a mock of BuildpackDownloader interface +// MockBuildpackDownloader is a mock of BuildpackDownloader interface. type MockBuildpackDownloader struct { ctrl *gomock.Controller recorder *MockBuildpackDownloaderMockRecorder } -// MockBuildpackDownloaderMockRecorder is the mock recorder for MockBuildpackDownloader +// MockBuildpackDownloaderMockRecorder is the mock recorder for MockBuildpackDownloader. type MockBuildpackDownloaderMockRecorder struct { mock *MockBuildpackDownloader } -// NewMockBuildpackDownloader creates a new mock instance +// NewMockBuildpackDownloader creates a new mock instance. func NewMockBuildpackDownloader(ctrl *gomock.Controller) *MockBuildpackDownloader { mock := &MockBuildpackDownloader{ctrl: ctrl} mock.recorder = &MockBuildpackDownloaderMockRecorder{mock} return mock } -// EXPECT returns an object that allows the caller to indicate expected use +// EXPECT returns an object that allows the caller to indicate expected use. 
func (m *MockBuildpackDownloader) EXPECT() *MockBuildpackDownloaderMockRecorder { return m.recorder } -// Download mocks base method -func (m *MockBuildpackDownloader) Download(arg0 context.Context, arg1 string, arg2 buildpack.DownloadOptions) (buildpack.Buildpack, []buildpack.Buildpack, error) { +// Download mocks base method. +func (m *MockBuildpackDownloader) Download(arg0 context.Context, arg1 string, arg2 buildpack.DownloadOptions) (buildpack.BuildModule, []buildpack.BuildModule, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "Download", arg0, arg1, arg2) - ret0, _ := ret[0].(buildpack.Buildpack) - ret1, _ := ret[1].([]buildpack.Buildpack) + ret0, _ := ret[0].(buildpack.BuildModule) + ret1, _ := ret[1].([]buildpack.BuildModule) ret2, _ := ret[2].(error) return ret0, ret1, ret2 } -// Download indicates an expected call of Download +// Download indicates an expected call of Download. func (mr *MockBuildpackDownloaderMockRecorder) Download(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Download", reflect.TypeOf((*MockBuildpackDownloader)(nil).Download), arg0, arg1, arg2) diff --git a/pkg/testmocks/mock_docker_client.go b/pkg/testmocks/mock_docker_client.go index 72b57b0887..7b92fd1b95 100644 --- a/pkg/testmocks/mock_docker_client.go +++ b/pkg/testmocks/mock_docker_client.go @@ -1,5 +1,5 @@ // Code generated by MockGen. DO NOT EDIT. -// Source: github.com/docker/docker/client (interfaces: CommonAPIClient) +// Source: github.com/moby/moby/client (interfaces: APIClient) // Package testmocks is a generated GoMock package. 
package testmocks @@ -8,663 +8,603 @@ import ( context "context" io "io" net "net" - http "net/http" reflect "reflect" - time "time" - - types "github.com/docker/docker/api/types" - container "github.com/docker/docker/api/types/container" - events "github.com/docker/docker/api/types/events" - filters "github.com/docker/docker/api/types/filters" - image "github.com/docker/docker/api/types/image" - network "github.com/docker/docker/api/types/network" - registry "github.com/docker/docker/api/types/registry" - swarm "github.com/docker/docker/api/types/swarm" - volume "github.com/docker/docker/api/types/volume" + gomock "github.com/golang/mock/gomock" - v1 "github.com/opencontainers/image-spec/specs-go/v1" + client "github.com/moby/moby/client" ) -// MockCommonAPIClient is a mock of CommonAPIClient interface -type MockCommonAPIClient struct { +// MockAPIClient is a mock of APIClient interface. +type MockAPIClient struct { ctrl *gomock.Controller - recorder *MockCommonAPIClientMockRecorder + recorder *MockAPIClientMockRecorder } -// MockCommonAPIClientMockRecorder is the mock recorder for MockCommonAPIClient -type MockCommonAPIClientMockRecorder struct { - mock *MockCommonAPIClient +// MockAPIClientMockRecorder is the mock recorder for MockAPIClient. +type MockAPIClientMockRecorder struct { + mock *MockAPIClient } -// NewMockCommonAPIClient creates a new mock instance -func NewMockCommonAPIClient(ctrl *gomock.Controller) *MockCommonAPIClient { - mock := &MockCommonAPIClient{ctrl: ctrl} - mock.recorder = &MockCommonAPIClientMockRecorder{mock} +// NewMockAPIClient creates a new mock instance. 
+func NewMockAPIClient(ctrl *gomock.Controller) *MockAPIClient { + mock := &MockAPIClient{ctrl: ctrl} + mock.recorder = &MockAPIClientMockRecorder{mock} return mock } -// EXPECT returns an object that allows the caller to indicate expected use -func (m *MockCommonAPIClient) EXPECT() *MockCommonAPIClientMockRecorder { +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockAPIClient) EXPECT() *MockAPIClientMockRecorder { return m.recorder } -// BuildCachePrune mocks base method -func (m *MockCommonAPIClient) BuildCachePrune(arg0 context.Context, arg1 types.BuildCachePruneOptions) (*types.BuildCachePruneReport, error) { +// BuildCachePrune mocks base method. +func (m *MockAPIClient) BuildCachePrune(arg0 context.Context, arg1 client.BuildCachePruneOptions) (client.BuildCachePruneResult, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "BuildCachePrune", arg0, arg1) - ret0, _ := ret[0].(*types.BuildCachePruneReport) + ret0, _ := ret[0].(client.BuildCachePruneResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// BuildCachePrune indicates an expected call of BuildCachePrune -func (mr *MockCommonAPIClientMockRecorder) BuildCachePrune(arg0, arg1 interface{}) *gomock.Call { +// BuildCachePrune indicates an expected call of BuildCachePrune. +func (mr *MockAPIClientMockRecorder) BuildCachePrune(arg0, arg1 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "BuildCachePrune", reflect.TypeOf((*MockCommonAPIClient)(nil).BuildCachePrune), arg0, arg1) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "BuildCachePrune", reflect.TypeOf((*MockAPIClient)(nil).BuildCachePrune), arg0, arg1) } -// BuildCancel mocks base method -func (m *MockCommonAPIClient) BuildCancel(arg0 context.Context, arg1 string) error { +// BuildCancel mocks base method. 
+func (m *MockAPIClient) BuildCancel(arg0 context.Context, arg1 string, arg2 client.BuildCancelOptions) (client.BuildCancelResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "BuildCancel", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 -} - -// BuildCancel indicates an expected call of BuildCancel -func (mr *MockCommonAPIClientMockRecorder) BuildCancel(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "BuildCancel", reflect.TypeOf((*MockCommonAPIClient)(nil).BuildCancel), arg0, arg1) -} - -// ClientVersion mocks base method -func (m *MockCommonAPIClient) ClientVersion() string { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ClientVersion") - ret0, _ := ret[0].(string) - return ret0 -} - -// ClientVersion indicates an expected call of ClientVersion -func (mr *MockCommonAPIClientMockRecorder) ClientVersion() *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ClientVersion", reflect.TypeOf((*MockCommonAPIClient)(nil).ClientVersion)) -} - -// Close mocks base method -func (m *MockCommonAPIClient) Close() error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Close") - ret0, _ := ret[0].(error) - return ret0 + ret := m.ctrl.Call(m, "BuildCancel", arg0, arg1, arg2) + ret0, _ := ret[0].(client.BuildCancelResult) + ret1, _ := ret[1].(error) + return ret0, ret1 } -// Close indicates an expected call of Close -func (mr *MockCommonAPIClientMockRecorder) Close() *gomock.Call { +// BuildCancel indicates an expected call of BuildCancel. 
+func (mr *MockAPIClientMockRecorder) BuildCancel(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Close", reflect.TypeOf((*MockCommonAPIClient)(nil).Close)) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "BuildCancel", reflect.TypeOf((*MockAPIClient)(nil).BuildCancel), arg0, arg1, arg2) } -// ConfigCreate mocks base method -func (m *MockCommonAPIClient) ConfigCreate(arg0 context.Context, arg1 swarm.ConfigSpec) (types.ConfigCreateResponse, error) { +// CheckpointCreate mocks base method. +func (m *MockAPIClient) CheckpointCreate(arg0 context.Context, arg1 string, arg2 client.CheckpointCreateOptions) (client.CheckpointCreateResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ConfigCreate", arg0, arg1) - ret0, _ := ret[0].(types.ConfigCreateResponse) + ret := m.ctrl.Call(m, "CheckpointCreate", arg0, arg1, arg2) + ret0, _ := ret[0].(client.CheckpointCreateResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// ConfigCreate indicates an expected call of ConfigCreate -func (mr *MockCommonAPIClientMockRecorder) ConfigCreate(arg0, arg1 interface{}) *gomock.Call { +// CheckpointCreate indicates an expected call of CheckpointCreate. +func (mr *MockAPIClientMockRecorder) CheckpointCreate(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ConfigCreate", reflect.TypeOf((*MockCommonAPIClient)(nil).ConfigCreate), arg0, arg1) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CheckpointCreate", reflect.TypeOf((*MockAPIClient)(nil).CheckpointCreate), arg0, arg1, arg2) } -// ConfigInspectWithRaw mocks base method -func (m *MockCommonAPIClient) ConfigInspectWithRaw(arg0 context.Context, arg1 string) (swarm.Config, []byte, error) { +// CheckpointList mocks base method. 
+func (m *MockAPIClient) CheckpointList(arg0 context.Context, arg1 string, arg2 client.CheckpointListOptions) (client.CheckpointListResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ConfigInspectWithRaw", arg0, arg1) - ret0, _ := ret[0].(swarm.Config) - ret1, _ := ret[1].([]byte) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 + ret := m.ctrl.Call(m, "CheckpointList", arg0, arg1, arg2) + ret0, _ := ret[0].(client.CheckpointListResult) + ret1, _ := ret[1].(error) + return ret0, ret1 } -// ConfigInspectWithRaw indicates an expected call of ConfigInspectWithRaw -func (mr *MockCommonAPIClientMockRecorder) ConfigInspectWithRaw(arg0, arg1 interface{}) *gomock.Call { +// CheckpointList indicates an expected call of CheckpointList. +func (mr *MockAPIClientMockRecorder) CheckpointList(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ConfigInspectWithRaw", reflect.TypeOf((*MockCommonAPIClient)(nil).ConfigInspectWithRaw), arg0, arg1) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CheckpointList", reflect.TypeOf((*MockAPIClient)(nil).CheckpointList), arg0, arg1, arg2) } -// ConfigList mocks base method -func (m *MockCommonAPIClient) ConfigList(arg0 context.Context, arg1 types.ConfigListOptions) ([]swarm.Config, error) { +// CheckpointRemove mocks base method. 
+func (m *MockAPIClient) CheckpointRemove(arg0 context.Context, arg1 string, arg2 client.CheckpointRemoveOptions) (client.CheckpointRemoveResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ConfigList", arg0, arg1) - ret0, _ := ret[0].([]swarm.Config) + ret := m.ctrl.Call(m, "CheckpointRemove", arg0, arg1, arg2) + ret0, _ := ret[0].(client.CheckpointRemoveResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// ConfigList indicates an expected call of ConfigList -func (mr *MockCommonAPIClientMockRecorder) ConfigList(arg0, arg1 interface{}) *gomock.Call { +// CheckpointRemove indicates an expected call of CheckpointRemove. +func (mr *MockAPIClientMockRecorder) CheckpointRemove(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ConfigList", reflect.TypeOf((*MockCommonAPIClient)(nil).ConfigList), arg0, arg1) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CheckpointRemove", reflect.TypeOf((*MockAPIClient)(nil).CheckpointRemove), arg0, arg1, arg2) } -// ConfigRemove mocks base method -func (m *MockCommonAPIClient) ConfigRemove(arg0 context.Context, arg1 string) error { +// ClientVersion mocks base method. +func (m *MockAPIClient) ClientVersion() string { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ConfigRemove", arg0, arg1) - ret0, _ := ret[0].(error) + ret := m.ctrl.Call(m, "ClientVersion") + ret0, _ := ret[0].(string) return ret0 } -// ConfigRemove indicates an expected call of ConfigRemove -func (mr *MockCommonAPIClientMockRecorder) ConfigRemove(arg0, arg1 interface{}) *gomock.Call { +// ClientVersion indicates an expected call of ClientVersion. 
+func (mr *MockAPIClientMockRecorder) ClientVersion() *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ConfigRemove", reflect.TypeOf((*MockCommonAPIClient)(nil).ConfigRemove), arg0, arg1) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ClientVersion", reflect.TypeOf((*MockAPIClient)(nil).ClientVersion)) } -// ConfigUpdate mocks base method -func (m *MockCommonAPIClient) ConfigUpdate(arg0 context.Context, arg1 string, arg2 swarm.Version, arg3 swarm.ConfigSpec) error { +// Close mocks base method. +func (m *MockAPIClient) Close() error { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ConfigUpdate", arg0, arg1, arg2, arg3) + ret := m.ctrl.Call(m, "Close") ret0, _ := ret[0].(error) return ret0 } -// ConfigUpdate indicates an expected call of ConfigUpdate -func (mr *MockCommonAPIClientMockRecorder) ConfigUpdate(arg0, arg1, arg2, arg3 interface{}) *gomock.Call { +// Close indicates an expected call of Close. +func (mr *MockAPIClientMockRecorder) Close() *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ConfigUpdate", reflect.TypeOf((*MockCommonAPIClient)(nil).ConfigUpdate), arg0, arg1, arg2, arg3) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Close", reflect.TypeOf((*MockAPIClient)(nil).Close)) } -// ContainerAttach mocks base method -func (m *MockCommonAPIClient) ContainerAttach(arg0 context.Context, arg1 string, arg2 types.ContainerAttachOptions) (types.HijackedResponse, error) { +// ConfigCreate mocks base method. 
+func (m *MockAPIClient) ConfigCreate(arg0 context.Context, arg1 client.ConfigCreateOptions) (client.ConfigCreateResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ContainerAttach", arg0, arg1, arg2) - ret0, _ := ret[0].(types.HijackedResponse) + ret := m.ctrl.Call(m, "ConfigCreate", arg0, arg1) + ret0, _ := ret[0].(client.ConfigCreateResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// ContainerAttach indicates an expected call of ContainerAttach -func (mr *MockCommonAPIClientMockRecorder) ContainerAttach(arg0, arg1, arg2 interface{}) *gomock.Call { +// ConfigCreate indicates an expected call of ConfigCreate. +func (mr *MockAPIClientMockRecorder) ConfigCreate(arg0, arg1 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerAttach", reflect.TypeOf((*MockCommonAPIClient)(nil).ContainerAttach), arg0, arg1, arg2) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ConfigCreate", reflect.TypeOf((*MockAPIClient)(nil).ConfigCreate), arg0, arg1) } -// ContainerCommit mocks base method -func (m *MockCommonAPIClient) ContainerCommit(arg0 context.Context, arg1 string, arg2 types.ContainerCommitOptions) (types.IDResponse, error) { +// ConfigInspect mocks base method. +func (m *MockAPIClient) ConfigInspect(arg0 context.Context, arg1 string, arg2 client.ConfigInspectOptions) (client.ConfigInspectResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ContainerCommit", arg0, arg1, arg2) - ret0, _ := ret[0].(types.IDResponse) + ret := m.ctrl.Call(m, "ConfigInspect", arg0, arg1, arg2) + ret0, _ := ret[0].(client.ConfigInspectResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// ContainerCommit indicates an expected call of ContainerCommit -func (mr *MockCommonAPIClientMockRecorder) ContainerCommit(arg0, arg1, arg2 interface{}) *gomock.Call { +// ConfigInspect indicates an expected call of ConfigInspect. 
+func (mr *MockAPIClientMockRecorder) ConfigInspect(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerCommit", reflect.TypeOf((*MockCommonAPIClient)(nil).ContainerCommit), arg0, arg1, arg2) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ConfigInspect", reflect.TypeOf((*MockAPIClient)(nil).ConfigInspect), arg0, arg1, arg2) } -// ContainerCreate mocks base method -func (m *MockCommonAPIClient) ContainerCreate(arg0 context.Context, arg1 *container.Config, arg2 *container.HostConfig, arg3 *network.NetworkingConfig, arg4 *v1.Platform, arg5 string) (container.ContainerCreateCreatedBody, error) { +// ConfigList mocks base method. +func (m *MockAPIClient) ConfigList(arg0 context.Context, arg1 client.ConfigListOptions) (client.ConfigListResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ContainerCreate", arg0, arg1, arg2, arg3, arg4, arg5) - ret0, _ := ret[0].(container.ContainerCreateCreatedBody) + ret := m.ctrl.Call(m, "ConfigList", arg0, arg1) + ret0, _ := ret[0].(client.ConfigListResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// ContainerCreate indicates an expected call of ContainerCreate -func (mr *MockCommonAPIClientMockRecorder) ContainerCreate(arg0, arg1, arg2, arg3, arg4, arg5 interface{}) *gomock.Call { +// ConfigList indicates an expected call of ConfigList. 
+func (mr *MockAPIClientMockRecorder) ConfigList(arg0, arg1 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerCreate", reflect.TypeOf((*MockCommonAPIClient)(nil).ContainerCreate), arg0, arg1, arg2, arg3, arg4, arg5) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ConfigList", reflect.TypeOf((*MockAPIClient)(nil).ConfigList), arg0, arg1) } -// ContainerDiff mocks base method -func (m *MockCommonAPIClient) ContainerDiff(arg0 context.Context, arg1 string) ([]container.ContainerChangeResponseItem, error) { +// ConfigRemove mocks base method. +func (m *MockAPIClient) ConfigRemove(arg0 context.Context, arg1 string, arg2 client.ConfigRemoveOptions) (client.ConfigRemoveResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ContainerDiff", arg0, arg1) - ret0, _ := ret[0].([]container.ContainerChangeResponseItem) + ret := m.ctrl.Call(m, "ConfigRemove", arg0, arg1, arg2) + ret0, _ := ret[0].(client.ConfigRemoveResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// ContainerDiff indicates an expected call of ContainerDiff -func (mr *MockCommonAPIClientMockRecorder) ContainerDiff(arg0, arg1 interface{}) *gomock.Call { +// ConfigRemove indicates an expected call of ConfigRemove. +func (mr *MockAPIClientMockRecorder) ConfigRemove(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerDiff", reflect.TypeOf((*MockCommonAPIClient)(nil).ContainerDiff), arg0, arg1) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ConfigRemove", reflect.TypeOf((*MockAPIClient)(nil).ConfigRemove), arg0, arg1, arg2) } -// ContainerExecAttach mocks base method -func (m *MockCommonAPIClient) ContainerExecAttach(arg0 context.Context, arg1 string, arg2 types.ExecStartCheck) (types.HijackedResponse, error) { +// ConfigUpdate mocks base method. 
+func (m *MockAPIClient) ConfigUpdate(arg0 context.Context, arg1 string, arg2 client.ConfigUpdateOptions) (client.ConfigUpdateResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ContainerExecAttach", arg0, arg1, arg2) - ret0, _ := ret[0].(types.HijackedResponse) + ret := m.ctrl.Call(m, "ConfigUpdate", arg0, arg1, arg2) + ret0, _ := ret[0].(client.ConfigUpdateResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// ContainerExecAttach indicates an expected call of ContainerExecAttach -func (mr *MockCommonAPIClientMockRecorder) ContainerExecAttach(arg0, arg1, arg2 interface{}) *gomock.Call { +// ConfigUpdate indicates an expected call of ConfigUpdate. +func (mr *MockAPIClientMockRecorder) ConfigUpdate(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerExecAttach", reflect.TypeOf((*MockCommonAPIClient)(nil).ContainerExecAttach), arg0, arg1, arg2) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ConfigUpdate", reflect.TypeOf((*MockAPIClient)(nil).ConfigUpdate), arg0, arg1, arg2) } -// ContainerExecCreate mocks base method -func (m *MockCommonAPIClient) ContainerExecCreate(arg0 context.Context, arg1 string, arg2 types.ExecConfig) (types.IDResponse, error) { +// ContainerAttach mocks base method. +func (m *MockAPIClient) ContainerAttach(arg0 context.Context, arg1 string, arg2 client.ContainerAttachOptions) (client.ContainerAttachResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ContainerExecCreate", arg0, arg1, arg2) - ret0, _ := ret[0].(types.IDResponse) + ret := m.ctrl.Call(m, "ContainerAttach", arg0, arg1, arg2) + ret0, _ := ret[0].(client.ContainerAttachResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// ContainerExecCreate indicates an expected call of ContainerExecCreate -func (mr *MockCommonAPIClientMockRecorder) ContainerExecCreate(arg0, arg1, arg2 interface{}) *gomock.Call { +// ContainerAttach indicates an expected call of ContainerAttach. 
+func (mr *MockAPIClientMockRecorder) ContainerAttach(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerExecCreate", reflect.TypeOf((*MockCommonAPIClient)(nil).ContainerExecCreate), arg0, arg1, arg2) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerAttach", reflect.TypeOf((*MockAPIClient)(nil).ContainerAttach), arg0, arg1, arg2) } -// ContainerExecInspect mocks base method -func (m *MockCommonAPIClient) ContainerExecInspect(arg0 context.Context, arg1 string) (types.ContainerExecInspect, error) { +// ContainerCommit mocks base method. +func (m *MockAPIClient) ContainerCommit(arg0 context.Context, arg1 string, arg2 client.ContainerCommitOptions) (client.ContainerCommitResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ContainerExecInspect", arg0, arg1) - ret0, _ := ret[0].(types.ContainerExecInspect) + ret := m.ctrl.Call(m, "ContainerCommit", arg0, arg1, arg2) + ret0, _ := ret[0].(client.ContainerCommitResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// ContainerExecInspect indicates an expected call of ContainerExecInspect -func (mr *MockCommonAPIClientMockRecorder) ContainerExecInspect(arg0, arg1 interface{}) *gomock.Call { +// ContainerCommit indicates an expected call of ContainerCommit. +func (mr *MockAPIClientMockRecorder) ContainerCommit(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerExecInspect", reflect.TypeOf((*MockCommonAPIClient)(nil).ContainerExecInspect), arg0, arg1) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerCommit", reflect.TypeOf((*MockAPIClient)(nil).ContainerCommit), arg0, arg1, arg2) } -// ContainerExecResize mocks base method -func (m *MockCommonAPIClient) ContainerExecResize(arg0 context.Context, arg1 string, arg2 types.ResizeOptions) error { +// ContainerCreate mocks base method. 
+func (m *MockAPIClient) ContainerCreate(arg0 context.Context, arg1 client.ContainerCreateOptions) (client.ContainerCreateResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ContainerExecResize", arg0, arg1, arg2) - ret0, _ := ret[0].(error) - return ret0 + ret := m.ctrl.Call(m, "ContainerCreate", arg0, arg1) + ret0, _ := ret[0].(client.ContainerCreateResult) + ret1, _ := ret[1].(error) + return ret0, ret1 } -// ContainerExecResize indicates an expected call of ContainerExecResize -func (mr *MockCommonAPIClientMockRecorder) ContainerExecResize(arg0, arg1, arg2 interface{}) *gomock.Call { +// ContainerCreate indicates an expected call of ContainerCreate. +func (mr *MockAPIClientMockRecorder) ContainerCreate(arg0, arg1 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerExecResize", reflect.TypeOf((*MockCommonAPIClient)(nil).ContainerExecResize), arg0, arg1, arg2) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerCreate", reflect.TypeOf((*MockAPIClient)(nil).ContainerCreate), arg0, arg1) } -// ContainerExecStart mocks base method -func (m *MockCommonAPIClient) ContainerExecStart(arg0 context.Context, arg1 string, arg2 types.ExecStartCheck) error { +// ContainerDiff mocks base method. +func (m *MockAPIClient) ContainerDiff(arg0 context.Context, arg1 string, arg2 client.ContainerDiffOptions) (client.ContainerDiffResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ContainerExecStart", arg0, arg1, arg2) - ret0, _ := ret[0].(error) - return ret0 + ret := m.ctrl.Call(m, "ContainerDiff", arg0, arg1, arg2) + ret0, _ := ret[0].(client.ContainerDiffResult) + ret1, _ := ret[1].(error) + return ret0, ret1 } -// ContainerExecStart indicates an expected call of ContainerExecStart -func (mr *MockCommonAPIClientMockRecorder) ContainerExecStart(arg0, arg1, arg2 interface{}) *gomock.Call { +// ContainerDiff indicates an expected call of ContainerDiff. 
+func (mr *MockAPIClientMockRecorder) ContainerDiff(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerExecStart", reflect.TypeOf((*MockCommonAPIClient)(nil).ContainerExecStart), arg0, arg1, arg2) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerDiff", reflect.TypeOf((*MockAPIClient)(nil).ContainerDiff), arg0, arg1, arg2) } -// ContainerExport mocks base method -func (m *MockCommonAPIClient) ContainerExport(arg0 context.Context, arg1 string) (io.ReadCloser, error) { +// ContainerExport mocks base method. +func (m *MockAPIClient) ContainerExport(arg0 context.Context, arg1 string, arg2 client.ContainerExportOptions) (client.ContainerExportResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ContainerExport", arg0, arg1) - ret0, _ := ret[0].(io.ReadCloser) + ret := m.ctrl.Call(m, "ContainerExport", arg0, arg1, arg2) + ret0, _ := ret[0].(client.ContainerExportResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// ContainerExport indicates an expected call of ContainerExport -func (mr *MockCommonAPIClientMockRecorder) ContainerExport(arg0, arg1 interface{}) *gomock.Call { +// ContainerExport indicates an expected call of ContainerExport. +func (mr *MockAPIClientMockRecorder) ContainerExport(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerExport", reflect.TypeOf((*MockCommonAPIClient)(nil).ContainerExport), arg0, arg1) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerExport", reflect.TypeOf((*MockAPIClient)(nil).ContainerExport), arg0, arg1, arg2) } -// ContainerInspect mocks base method -func (m *MockCommonAPIClient) ContainerInspect(arg0 context.Context, arg1 string) (types.ContainerJSON, error) { +// ContainerInspect mocks base method. 
+func (m *MockAPIClient) ContainerInspect(arg0 context.Context, arg1 string, arg2 client.ContainerInspectOptions) (client.ContainerInspectResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ContainerInspect", arg0, arg1) - ret0, _ := ret[0].(types.ContainerJSON) + ret := m.ctrl.Call(m, "ContainerInspect", arg0, arg1, arg2) + ret0, _ := ret[0].(client.ContainerInspectResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// ContainerInspect indicates an expected call of ContainerInspect -func (mr *MockCommonAPIClientMockRecorder) ContainerInspect(arg0, arg1 interface{}) *gomock.Call { +// ContainerInspect indicates an expected call of ContainerInspect. +func (mr *MockAPIClientMockRecorder) ContainerInspect(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerInspect", reflect.TypeOf((*MockCommonAPIClient)(nil).ContainerInspect), arg0, arg1) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerInspect", reflect.TypeOf((*MockAPIClient)(nil).ContainerInspect), arg0, arg1, arg2) } -// ContainerInspectWithRaw mocks base method -func (m *MockCommonAPIClient) ContainerInspectWithRaw(arg0 context.Context, arg1 string, arg2 bool) (types.ContainerJSON, []byte, error) { +// ContainerKill mocks base method. 
+func (m *MockAPIClient) ContainerKill(arg0 context.Context, arg1 string, arg2 client.ContainerKillOptions) (client.ContainerKillResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ContainerInspectWithRaw", arg0, arg1, arg2) - ret0, _ := ret[0].(types.ContainerJSON) - ret1, _ := ret[1].([]byte) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 + ret := m.ctrl.Call(m, "ContainerKill", arg0, arg1, arg2) + ret0, _ := ret[0].(client.ContainerKillResult) + ret1, _ := ret[1].(error) + return ret0, ret1 } -// ContainerInspectWithRaw indicates an expected call of ContainerInspectWithRaw -func (mr *MockCommonAPIClientMockRecorder) ContainerInspectWithRaw(arg0, arg1, arg2 interface{}) *gomock.Call { +// ContainerKill indicates an expected call of ContainerKill. +func (mr *MockAPIClientMockRecorder) ContainerKill(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerInspectWithRaw", reflect.TypeOf((*MockCommonAPIClient)(nil).ContainerInspectWithRaw), arg0, arg1, arg2) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerKill", reflect.TypeOf((*MockAPIClient)(nil).ContainerKill), arg0, arg1, arg2) } -// ContainerKill mocks base method -func (m *MockCommonAPIClient) ContainerKill(arg0 context.Context, arg1, arg2 string) error { +// ContainerList mocks base method. 
+func (m *MockAPIClient) ContainerList(arg0 context.Context, arg1 client.ContainerListOptions) (client.ContainerListResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ContainerKill", arg0, arg1, arg2) - ret0, _ := ret[0].(error) - return ret0 + ret := m.ctrl.Call(m, "ContainerList", arg0, arg1) + ret0, _ := ret[0].(client.ContainerListResult) + ret1, _ := ret[1].(error) + return ret0, ret1 } -// ContainerKill indicates an expected call of ContainerKill -func (mr *MockCommonAPIClientMockRecorder) ContainerKill(arg0, arg1, arg2 interface{}) *gomock.Call { +// ContainerList indicates an expected call of ContainerList. +func (mr *MockAPIClientMockRecorder) ContainerList(arg0, arg1 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerKill", reflect.TypeOf((*MockCommonAPIClient)(nil).ContainerKill), arg0, arg1, arg2) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerList", reflect.TypeOf((*MockAPIClient)(nil).ContainerList), arg0, arg1) } -// ContainerList mocks base method -func (m *MockCommonAPIClient) ContainerList(arg0 context.Context, arg1 types.ContainerListOptions) ([]types.Container, error) { +// ContainerLogs mocks base method. +func (m *MockAPIClient) ContainerLogs(arg0 context.Context, arg1 string, arg2 client.ContainerLogsOptions) (client.ContainerLogsResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ContainerList", arg0, arg1) - ret0, _ := ret[0].([]types.Container) + ret := m.ctrl.Call(m, "ContainerLogs", arg0, arg1, arg2) + ret0, _ := ret[0].(client.ContainerLogsResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// ContainerList indicates an expected call of ContainerList -func (mr *MockCommonAPIClientMockRecorder) ContainerList(arg0, arg1 interface{}) *gomock.Call { +// ContainerLogs indicates an expected call of ContainerLogs. 
+func (mr *MockAPIClientMockRecorder) ContainerLogs(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerList", reflect.TypeOf((*MockCommonAPIClient)(nil).ContainerList), arg0, arg1) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerLogs", reflect.TypeOf((*MockAPIClient)(nil).ContainerLogs), arg0, arg1, arg2) } -// ContainerLogs mocks base method -func (m *MockCommonAPIClient) ContainerLogs(arg0 context.Context, arg1 string, arg2 types.ContainerLogsOptions) (io.ReadCloser, error) { +// ContainerPause mocks base method. +func (m *MockAPIClient) ContainerPause(arg0 context.Context, arg1 string, arg2 client.ContainerPauseOptions) (client.ContainerPauseResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ContainerLogs", arg0, arg1, arg2) - ret0, _ := ret[0].(io.ReadCloser) + ret := m.ctrl.Call(m, "ContainerPause", arg0, arg1, arg2) + ret0, _ := ret[0].(client.ContainerPauseResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// ContainerLogs indicates an expected call of ContainerLogs -func (mr *MockCommonAPIClientMockRecorder) ContainerLogs(arg0, arg1, arg2 interface{}) *gomock.Call { +// ContainerPause indicates an expected call of ContainerPause. +func (mr *MockAPIClientMockRecorder) ContainerPause(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerLogs", reflect.TypeOf((*MockCommonAPIClient)(nil).ContainerLogs), arg0, arg1, arg2) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerPause", reflect.TypeOf((*MockAPIClient)(nil).ContainerPause), arg0, arg1, arg2) } -// ContainerPause mocks base method -func (m *MockCommonAPIClient) ContainerPause(arg0 context.Context, arg1 string) error { +// ContainerPrune mocks base method. 
+func (m *MockAPIClient) ContainerPrune(arg0 context.Context, arg1 client.ContainerPruneOptions) (client.ContainerPruneResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ContainerPause", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 + ret := m.ctrl.Call(m, "ContainerPrune", arg0, arg1) + ret0, _ := ret[0].(client.ContainerPruneResult) + ret1, _ := ret[1].(error) + return ret0, ret1 } -// ContainerPause indicates an expected call of ContainerPause -func (mr *MockCommonAPIClientMockRecorder) ContainerPause(arg0, arg1 interface{}) *gomock.Call { +// ContainerPrune indicates an expected call of ContainerPrune. +func (mr *MockAPIClientMockRecorder) ContainerPrune(arg0, arg1 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerPause", reflect.TypeOf((*MockCommonAPIClient)(nil).ContainerPause), arg0, arg1) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerPrune", reflect.TypeOf((*MockAPIClient)(nil).ContainerPrune), arg0, arg1) } -// ContainerRemove mocks base method -func (m *MockCommonAPIClient) ContainerRemove(arg0 context.Context, arg1 string, arg2 types.ContainerRemoveOptions) error { +// ContainerRemove mocks base method. +func (m *MockAPIClient) ContainerRemove(arg0 context.Context, arg1 string, arg2 client.ContainerRemoveOptions) (client.ContainerRemoveResult, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "ContainerRemove", arg0, arg1, arg2) - ret0, _ := ret[0].(error) - return ret0 + ret0, _ := ret[0].(client.ContainerRemoveResult) + ret1, _ := ret[1].(error) + return ret0, ret1 } -// ContainerRemove indicates an expected call of ContainerRemove -func (mr *MockCommonAPIClientMockRecorder) ContainerRemove(arg0, arg1, arg2 interface{}) *gomock.Call { +// ContainerRemove indicates an expected call of ContainerRemove. 
+func (mr *MockAPIClientMockRecorder) ContainerRemove(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerRemove", reflect.TypeOf((*MockCommonAPIClient)(nil).ContainerRemove), arg0, arg1, arg2) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerRemove", reflect.TypeOf((*MockAPIClient)(nil).ContainerRemove), arg0, arg1, arg2) } -// ContainerRename mocks base method -func (m *MockCommonAPIClient) ContainerRename(arg0 context.Context, arg1, arg2 string) error { +// ContainerRename mocks base method. +func (m *MockAPIClient) ContainerRename(arg0 context.Context, arg1 string, arg2 client.ContainerRenameOptions) (client.ContainerRenameResult, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "ContainerRename", arg0, arg1, arg2) - ret0, _ := ret[0].(error) - return ret0 + ret0, _ := ret[0].(client.ContainerRenameResult) + ret1, _ := ret[1].(error) + return ret0, ret1 } -// ContainerRename indicates an expected call of ContainerRename -func (mr *MockCommonAPIClientMockRecorder) ContainerRename(arg0, arg1, arg2 interface{}) *gomock.Call { +// ContainerRename indicates an expected call of ContainerRename. +func (mr *MockAPIClientMockRecorder) ContainerRename(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerRename", reflect.TypeOf((*MockCommonAPIClient)(nil).ContainerRename), arg0, arg1, arg2) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerRename", reflect.TypeOf((*MockAPIClient)(nil).ContainerRename), arg0, arg1, arg2) } -// ContainerResize mocks base method -func (m *MockCommonAPIClient) ContainerResize(arg0 context.Context, arg1 string, arg2 types.ResizeOptions) error { +// ContainerResize mocks base method. 
+func (m *MockAPIClient) ContainerResize(arg0 context.Context, arg1 string, arg2 client.ContainerResizeOptions) (client.ContainerResizeResult, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "ContainerResize", arg0, arg1, arg2) - ret0, _ := ret[0].(error) - return ret0 + ret0, _ := ret[0].(client.ContainerResizeResult) + ret1, _ := ret[1].(error) + return ret0, ret1 } -// ContainerResize indicates an expected call of ContainerResize -func (mr *MockCommonAPIClientMockRecorder) ContainerResize(arg0, arg1, arg2 interface{}) *gomock.Call { +// ContainerResize indicates an expected call of ContainerResize. +func (mr *MockAPIClientMockRecorder) ContainerResize(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerResize", reflect.TypeOf((*MockCommonAPIClient)(nil).ContainerResize), arg0, arg1, arg2) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerResize", reflect.TypeOf((*MockAPIClient)(nil).ContainerResize), arg0, arg1, arg2) } -// ContainerRestart mocks base method -func (m *MockCommonAPIClient) ContainerRestart(arg0 context.Context, arg1 string, arg2 *time.Duration) error { +// ContainerRestart mocks base method. +func (m *MockAPIClient) ContainerRestart(arg0 context.Context, arg1 string, arg2 client.ContainerRestartOptions) (client.ContainerRestartResult, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "ContainerRestart", arg0, arg1, arg2) - ret0, _ := ret[0].(error) - return ret0 + ret0, _ := ret[0].(client.ContainerRestartResult) + ret1, _ := ret[1].(error) + return ret0, ret1 } -// ContainerRestart indicates an expected call of ContainerRestart -func (mr *MockCommonAPIClientMockRecorder) ContainerRestart(arg0, arg1, arg2 interface{}) *gomock.Call { +// ContainerRestart indicates an expected call of ContainerRestart. 
+func (mr *MockAPIClientMockRecorder) ContainerRestart(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerRestart", reflect.TypeOf((*MockCommonAPIClient)(nil).ContainerRestart), arg0, arg1, arg2) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerRestart", reflect.TypeOf((*MockAPIClient)(nil).ContainerRestart), arg0, arg1, arg2) } -// ContainerStart mocks base method -func (m *MockCommonAPIClient) ContainerStart(arg0 context.Context, arg1 string, arg2 types.ContainerStartOptions) error { +// ContainerStart mocks base method. +func (m *MockAPIClient) ContainerStart(arg0 context.Context, arg1 string, arg2 client.ContainerStartOptions) (client.ContainerStartResult, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "ContainerStart", arg0, arg1, arg2) - ret0, _ := ret[0].(error) - return ret0 + ret0, _ := ret[0].(client.ContainerStartResult) + ret1, _ := ret[1].(error) + return ret0, ret1 } -// ContainerStart indicates an expected call of ContainerStart -func (mr *MockCommonAPIClientMockRecorder) ContainerStart(arg0, arg1, arg2 interface{}) *gomock.Call { +// ContainerStart indicates an expected call of ContainerStart. +func (mr *MockAPIClientMockRecorder) ContainerStart(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerStart", reflect.TypeOf((*MockCommonAPIClient)(nil).ContainerStart), arg0, arg1, arg2) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerStart", reflect.TypeOf((*MockAPIClient)(nil).ContainerStart), arg0, arg1, arg2) } -// ContainerStatPath mocks base method -func (m *MockCommonAPIClient) ContainerStatPath(arg0 context.Context, arg1, arg2 string) (types.ContainerPathStat, error) { +// ContainerStatPath mocks base method. 
+func (m *MockAPIClient) ContainerStatPath(arg0 context.Context, arg1 string, arg2 client.ContainerStatPathOptions) (client.ContainerStatPathResult, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "ContainerStatPath", arg0, arg1, arg2) - ret0, _ := ret[0].(types.ContainerPathStat) + ret0, _ := ret[0].(client.ContainerStatPathResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// ContainerStatPath indicates an expected call of ContainerStatPath -func (mr *MockCommonAPIClientMockRecorder) ContainerStatPath(arg0, arg1, arg2 interface{}) *gomock.Call { +// ContainerStatPath indicates an expected call of ContainerStatPath. +func (mr *MockAPIClientMockRecorder) ContainerStatPath(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerStatPath", reflect.TypeOf((*MockCommonAPIClient)(nil).ContainerStatPath), arg0, arg1, arg2) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerStatPath", reflect.TypeOf((*MockAPIClient)(nil).ContainerStatPath), arg0, arg1, arg2) } -// ContainerStats mocks base method -func (m *MockCommonAPIClient) ContainerStats(arg0 context.Context, arg1 string, arg2 bool) (types.ContainerStats, error) { +// ContainerStats mocks base method. +func (m *MockAPIClient) ContainerStats(arg0 context.Context, arg1 string, arg2 client.ContainerStatsOptions) (client.ContainerStatsResult, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "ContainerStats", arg0, arg1, arg2) - ret0, _ := ret[0].(types.ContainerStats) + ret0, _ := ret[0].(client.ContainerStatsResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// ContainerStats indicates an expected call of ContainerStats -func (mr *MockCommonAPIClientMockRecorder) ContainerStats(arg0, arg1, arg2 interface{}) *gomock.Call { +// ContainerStats indicates an expected call of ContainerStats. 
+func (mr *MockAPIClientMockRecorder) ContainerStats(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerStats", reflect.TypeOf((*MockCommonAPIClient)(nil).ContainerStats), arg0, arg1, arg2) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerStats", reflect.TypeOf((*MockAPIClient)(nil).ContainerStats), arg0, arg1, arg2) } -// ContainerStatsOneShot mocks base method -func (m *MockCommonAPIClient) ContainerStatsOneShot(arg0 context.Context, arg1 string) (types.ContainerStats, error) { +// ContainerStop mocks base method. +func (m *MockAPIClient) ContainerStop(arg0 context.Context, arg1 string, arg2 client.ContainerStopOptions) (client.ContainerStopResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ContainerStatsOneShot", arg0, arg1) - ret0, _ := ret[0].(types.ContainerStats) + ret := m.ctrl.Call(m, "ContainerStop", arg0, arg1, arg2) + ret0, _ := ret[0].(client.ContainerStopResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// ContainerStatsOneShot indicates an expected call of ContainerStatsOneShot -func (mr *MockCommonAPIClientMockRecorder) ContainerStatsOneShot(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerStatsOneShot", reflect.TypeOf((*MockCommonAPIClient)(nil).ContainerStatsOneShot), arg0, arg1) -} - -// ContainerStop mocks base method -func (m *MockCommonAPIClient) ContainerStop(arg0 context.Context, arg1 string, arg2 *time.Duration) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ContainerStop", arg0, arg1, arg2) - ret0, _ := ret[0].(error) - return ret0 -} - -// ContainerStop indicates an expected call of ContainerStop -func (mr *MockCommonAPIClientMockRecorder) ContainerStop(arg0, arg1, arg2 interface{}) *gomock.Call { +// ContainerStop indicates an expected call of ContainerStop. 
+func (mr *MockAPIClientMockRecorder) ContainerStop(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerStop", reflect.TypeOf((*MockCommonAPIClient)(nil).ContainerStop), arg0, arg1, arg2) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerStop", reflect.TypeOf((*MockAPIClient)(nil).ContainerStop), arg0, arg1, arg2) } -// ContainerTop mocks base method -func (m *MockCommonAPIClient) ContainerTop(arg0 context.Context, arg1 string, arg2 []string) (container.ContainerTopOKBody, error) { +// ContainerTop mocks base method. +func (m *MockAPIClient) ContainerTop(arg0 context.Context, arg1 string, arg2 client.ContainerTopOptions) (client.ContainerTopResult, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "ContainerTop", arg0, arg1, arg2) - ret0, _ := ret[0].(container.ContainerTopOKBody) + ret0, _ := ret[0].(client.ContainerTopResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// ContainerTop indicates an expected call of ContainerTop -func (mr *MockCommonAPIClientMockRecorder) ContainerTop(arg0, arg1, arg2 interface{}) *gomock.Call { +// ContainerTop indicates an expected call of ContainerTop. +func (mr *MockAPIClientMockRecorder) ContainerTop(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerTop", reflect.TypeOf((*MockCommonAPIClient)(nil).ContainerTop), arg0, arg1, arg2) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerTop", reflect.TypeOf((*MockAPIClient)(nil).ContainerTop), arg0, arg1, arg2) } -// ContainerUnpause mocks base method -func (m *MockCommonAPIClient) ContainerUnpause(arg0 context.Context, arg1 string) error { +// ContainerUnpause mocks base method. 
+func (m *MockAPIClient) ContainerUnpause(arg0 context.Context, arg1 string, arg2 client.ContainerUnpauseOptions) (client.ContainerUnpauseResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ContainerUnpause", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 + ret := m.ctrl.Call(m, "ContainerUnpause", arg0, arg1, arg2) + ret0, _ := ret[0].(client.ContainerUnpauseResult) + ret1, _ := ret[1].(error) + return ret0, ret1 } -// ContainerUnpause indicates an expected call of ContainerUnpause -func (mr *MockCommonAPIClientMockRecorder) ContainerUnpause(arg0, arg1 interface{}) *gomock.Call { +// ContainerUnpause indicates an expected call of ContainerUnpause. +func (mr *MockAPIClientMockRecorder) ContainerUnpause(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerUnpause", reflect.TypeOf((*MockCommonAPIClient)(nil).ContainerUnpause), arg0, arg1) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerUnpause", reflect.TypeOf((*MockAPIClient)(nil).ContainerUnpause), arg0, arg1, arg2) } -// ContainerUpdate mocks base method -func (m *MockCommonAPIClient) ContainerUpdate(arg0 context.Context, arg1 string, arg2 container.UpdateConfig) (container.ContainerUpdateOKBody, error) { +// ContainerUpdate mocks base method. +func (m *MockAPIClient) ContainerUpdate(arg0 context.Context, arg1 string, arg2 client.ContainerUpdateOptions) (client.ContainerUpdateResult, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "ContainerUpdate", arg0, arg1, arg2) - ret0, _ := ret[0].(container.ContainerUpdateOKBody) + ret0, _ := ret[0].(client.ContainerUpdateResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// ContainerUpdate indicates an expected call of ContainerUpdate -func (mr *MockCommonAPIClientMockRecorder) ContainerUpdate(arg0, arg1, arg2 interface{}) *gomock.Call { +// ContainerUpdate indicates an expected call of ContainerUpdate. 
+func (mr *MockAPIClientMockRecorder) ContainerUpdate(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerUpdate", reflect.TypeOf((*MockCommonAPIClient)(nil).ContainerUpdate), arg0, arg1, arg2) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerUpdate", reflect.TypeOf((*MockAPIClient)(nil).ContainerUpdate), arg0, arg1, arg2) } -// ContainerWait mocks base method -func (m *MockCommonAPIClient) ContainerWait(arg0 context.Context, arg1 string, arg2 container.WaitCondition) (<-chan container.ContainerWaitOKBody, <-chan error) { +// ContainerWait mocks base method. +func (m *MockAPIClient) ContainerWait(arg0 context.Context, arg1 string, arg2 client.ContainerWaitOptions) client.ContainerWaitResult { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "ContainerWait", arg0, arg1, arg2) - ret0, _ := ret[0].(<-chan container.ContainerWaitOKBody) - ret1, _ := ret[1].(<-chan error) - return ret0, ret1 + ret0, _ := ret[0].(client.ContainerWaitResult) + return ret0 } -// ContainerWait indicates an expected call of ContainerWait -func (mr *MockCommonAPIClientMockRecorder) ContainerWait(arg0, arg1, arg2 interface{}) *gomock.Call { +// ContainerWait indicates an expected call of ContainerWait. +func (mr *MockAPIClientMockRecorder) ContainerWait(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerWait", reflect.TypeOf((*MockCommonAPIClient)(nil).ContainerWait), arg0, arg1, arg2) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainerWait", reflect.TypeOf((*MockAPIClient)(nil).ContainerWait), arg0, arg1, arg2) } -// ContainersPrune mocks base method -func (m *MockCommonAPIClient) ContainersPrune(arg0 context.Context, arg1 filters.Args) (types.ContainersPruneReport, error) { +// CopyFromContainer mocks base method. 
+func (m *MockAPIClient) CopyFromContainer(arg0 context.Context, arg1 string, arg2 client.CopyFromContainerOptions) (client.CopyFromContainerResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ContainersPrune", arg0, arg1) - ret0, _ := ret[0].(types.ContainersPruneReport) + ret := m.ctrl.Call(m, "CopyFromContainer", arg0, arg1, arg2) + ret0, _ := ret[0].(client.CopyFromContainerResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// ContainersPrune indicates an expected call of ContainersPrune -func (mr *MockCommonAPIClientMockRecorder) ContainersPrune(arg0, arg1 interface{}) *gomock.Call { +// CopyFromContainer indicates an expected call of CopyFromContainer. +func (mr *MockAPIClientMockRecorder) CopyFromContainer(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ContainersPrune", reflect.TypeOf((*MockCommonAPIClient)(nil).ContainersPrune), arg0, arg1) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CopyFromContainer", reflect.TypeOf((*MockAPIClient)(nil).CopyFromContainer), arg0, arg1, arg2) } -// CopyFromContainer mocks base method -func (m *MockCommonAPIClient) CopyFromContainer(arg0 context.Context, arg1, arg2 string) (io.ReadCloser, types.ContainerPathStat, error) { +// CopyToContainer mocks base method. 
+func (m *MockAPIClient) CopyToContainer(arg0 context.Context, arg1 string, arg2 client.CopyToContainerOptions) (client.CopyToContainerResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "CopyFromContainer", arg0, arg1, arg2) - ret0, _ := ret[0].(io.ReadCloser) - ret1, _ := ret[1].(types.ContainerPathStat) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 -} - -// CopyFromContainer indicates an expected call of CopyFromContainer -func (mr *MockCommonAPIClientMockRecorder) CopyFromContainer(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CopyFromContainer", reflect.TypeOf((*MockCommonAPIClient)(nil).CopyFromContainer), arg0, arg1, arg2) -} - -// CopyToContainer mocks base method -func (m *MockCommonAPIClient) CopyToContainer(arg0 context.Context, arg1, arg2 string, arg3 io.Reader, arg4 types.CopyToContainerOptions) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "CopyToContainer", arg0, arg1, arg2, arg3, arg4) - ret0, _ := ret[0].(error) - return ret0 + ret := m.ctrl.Call(m, "CopyToContainer", arg0, arg1, arg2) + ret0, _ := ret[0].(client.CopyToContainerResult) + ret1, _ := ret[1].(error) + return ret0, ret1 } -// CopyToContainer indicates an expected call of CopyToContainer -func (mr *MockCommonAPIClientMockRecorder) CopyToContainer(arg0, arg1, arg2, arg3, arg4 interface{}) *gomock.Call { +// CopyToContainer indicates an expected call of CopyToContainer. 
+func (mr *MockAPIClientMockRecorder) CopyToContainer(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CopyToContainer", reflect.TypeOf((*MockCommonAPIClient)(nil).CopyToContainer), arg0, arg1, arg2, arg3, arg4) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CopyToContainer", reflect.TypeOf((*MockAPIClient)(nil).CopyToContainer), arg0, arg1, arg2) } -// DaemonHost mocks base method -func (m *MockCommonAPIClient) DaemonHost() string { +// DaemonHost mocks base method. +func (m *MockAPIClient) DaemonHost() string { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "DaemonHost") ret0, _ := ret[0].(string) return ret0 } -// DaemonHost indicates an expected call of DaemonHost -func (mr *MockCommonAPIClientMockRecorder) DaemonHost() *gomock.Call { +// DaemonHost indicates an expected call of DaemonHost. +func (mr *MockAPIClientMockRecorder) DaemonHost() *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DaemonHost", reflect.TypeOf((*MockCommonAPIClient)(nil).DaemonHost)) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DaemonHost", reflect.TypeOf((*MockAPIClient)(nil).DaemonHost)) } -// DialHijack mocks base method -func (m *MockCommonAPIClient) DialHijack(arg0 context.Context, arg1, arg2 string, arg3 map[string][]string) (net.Conn, error) { +// DialHijack mocks base method. +func (m *MockAPIClient) DialHijack(arg0 context.Context, arg1, arg2 string, arg3 map[string][]string) (net.Conn, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "DialHijack", arg0, arg1, arg2, arg3) ret0, _ := ret[0].(net.Conn) @@ -672,1099 +612,1136 @@ func (m *MockCommonAPIClient) DialHijack(arg0 context.Context, arg1, arg2 string return ret0, ret1 } -// DialHijack indicates an expected call of DialHijack -func (mr *MockCommonAPIClientMockRecorder) DialHijack(arg0, arg1, arg2, arg3 interface{}) *gomock.Call { +// DialHijack indicates an expected call of DialHijack. 
+func (mr *MockAPIClientMockRecorder) DialHijack(arg0, arg1, arg2, arg3 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DialHijack", reflect.TypeOf((*MockCommonAPIClient)(nil).DialHijack), arg0, arg1, arg2, arg3) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DialHijack", reflect.TypeOf((*MockAPIClient)(nil).DialHijack), arg0, arg1, arg2, arg3) } -// Dialer mocks base method -func (m *MockCommonAPIClient) Dialer() func(context.Context) (net.Conn, error) { +// Dialer mocks base method. +func (m *MockAPIClient) Dialer() func(context.Context) (net.Conn, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "Dialer") ret0, _ := ret[0].(func(context.Context) (net.Conn, error)) return ret0 } -// Dialer indicates an expected call of Dialer -func (mr *MockCommonAPIClientMockRecorder) Dialer() *gomock.Call { +// Dialer indicates an expected call of Dialer. +func (mr *MockAPIClientMockRecorder) Dialer() *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Dialer", reflect.TypeOf((*MockCommonAPIClient)(nil).Dialer)) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Dialer", reflect.TypeOf((*MockAPIClient)(nil).Dialer)) } -// DiskUsage mocks base method -func (m *MockCommonAPIClient) DiskUsage(arg0 context.Context) (types.DiskUsage, error) { +// DiskUsage mocks base method. +func (m *MockAPIClient) DiskUsage(arg0 context.Context, arg1 client.DiskUsageOptions) (client.DiskUsageResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "DiskUsage", arg0) - ret0, _ := ret[0].(types.DiskUsage) + ret := m.ctrl.Call(m, "DiskUsage", arg0, arg1) + ret0, _ := ret[0].(client.DiskUsageResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// DiskUsage indicates an expected call of DiskUsage -func (mr *MockCommonAPIClientMockRecorder) DiskUsage(arg0 interface{}) *gomock.Call { +// DiskUsage indicates an expected call of DiskUsage. 
+func (mr *MockAPIClientMockRecorder) DiskUsage(arg0, arg1 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DiskUsage", reflect.TypeOf((*MockCommonAPIClient)(nil).DiskUsage), arg0) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DiskUsage", reflect.TypeOf((*MockAPIClient)(nil).DiskUsage), arg0, arg1) } -// DistributionInspect mocks base method -func (m *MockCommonAPIClient) DistributionInspect(arg0 context.Context, arg1, arg2 string) (registry.DistributionInspect, error) { +// DistributionInspect mocks base method. +func (m *MockAPIClient) DistributionInspect(arg0 context.Context, arg1 string, arg2 client.DistributionInspectOptions) (client.DistributionInspectResult, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "DistributionInspect", arg0, arg1, arg2) - ret0, _ := ret[0].(registry.DistributionInspect) + ret0, _ := ret[0].(client.DistributionInspectResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// DistributionInspect indicates an expected call of DistributionInspect -func (mr *MockCommonAPIClientMockRecorder) DistributionInspect(arg0, arg1, arg2 interface{}) *gomock.Call { +// DistributionInspect indicates an expected call of DistributionInspect. +func (mr *MockAPIClientMockRecorder) DistributionInspect(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DistributionInspect", reflect.TypeOf((*MockCommonAPIClient)(nil).DistributionInspect), arg0, arg1, arg2) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DistributionInspect", reflect.TypeOf((*MockAPIClient)(nil).DistributionInspect), arg0, arg1, arg2) } -// Events mocks base method -func (m *MockCommonAPIClient) Events(arg0 context.Context, arg1 types.EventsOptions) (<-chan events.Message, <-chan error) { +// Events mocks base method. 
+func (m *MockAPIClient) Events(arg0 context.Context, arg1 client.EventsListOptions) client.EventsResult { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "Events", arg0, arg1) - ret0, _ := ret[0].(<-chan events.Message) - ret1, _ := ret[1].(<-chan error) - return ret0, ret1 + ret0, _ := ret[0].(client.EventsResult) + return ret0 } -// Events indicates an expected call of Events -func (mr *MockCommonAPIClientMockRecorder) Events(arg0, arg1 interface{}) *gomock.Call { +// Events indicates an expected call of Events. +func (mr *MockAPIClientMockRecorder) Events(arg0, arg1 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Events", reflect.TypeOf((*MockCommonAPIClient)(nil).Events), arg0, arg1) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Events", reflect.TypeOf((*MockAPIClient)(nil).Events), arg0, arg1) } -// HTTPClient mocks base method -func (m *MockCommonAPIClient) HTTPClient() *http.Client { +// ExecAttach mocks base method. +func (m *MockAPIClient) ExecAttach(arg0 context.Context, arg1 string, arg2 client.ExecAttachOptions) (client.ExecAttachResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "HTTPClient") - ret0, _ := ret[0].(*http.Client) - return ret0 + ret := m.ctrl.Call(m, "ExecAttach", arg0, arg1, arg2) + ret0, _ := ret[0].(client.ExecAttachResult) + ret1, _ := ret[1].(error) + return ret0, ret1 } -// HTTPClient indicates an expected call of HTTPClient -func (mr *MockCommonAPIClientMockRecorder) HTTPClient() *gomock.Call { +// ExecAttach indicates an expected call of ExecAttach. 
+func (mr *MockAPIClientMockRecorder) ExecAttach(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "HTTPClient", reflect.TypeOf((*MockCommonAPIClient)(nil).HTTPClient)) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ExecAttach", reflect.TypeOf((*MockAPIClient)(nil).ExecAttach), arg0, arg1, arg2) } -// ImageBuild mocks base method -func (m *MockCommonAPIClient) ImageBuild(arg0 context.Context, arg1 io.Reader, arg2 types.ImageBuildOptions) (types.ImageBuildResponse, error) { +// ExecCreate mocks base method. +func (m *MockAPIClient) ExecCreate(arg0 context.Context, arg1 string, arg2 client.ExecCreateOptions) (client.ExecCreateResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ImageBuild", arg0, arg1, arg2) - ret0, _ := ret[0].(types.ImageBuildResponse) + ret := m.ctrl.Call(m, "ExecCreate", arg0, arg1, arg2) + ret0, _ := ret[0].(client.ExecCreateResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// ImageBuild indicates an expected call of ImageBuild -func (mr *MockCommonAPIClientMockRecorder) ImageBuild(arg0, arg1, arg2 interface{}) *gomock.Call { +// ExecCreate indicates an expected call of ExecCreate. +func (mr *MockAPIClientMockRecorder) ExecCreate(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ImageBuild", reflect.TypeOf((*MockCommonAPIClient)(nil).ImageBuild), arg0, arg1, arg2) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ExecCreate", reflect.TypeOf((*MockAPIClient)(nil).ExecCreate), arg0, arg1, arg2) } -// ImageCreate mocks base method -func (m *MockCommonAPIClient) ImageCreate(arg0 context.Context, arg1 string, arg2 types.ImageCreateOptions) (io.ReadCloser, error) { +// ExecInspect mocks base method. 
+func (m *MockAPIClient) ExecInspect(arg0 context.Context, arg1 string, arg2 client.ExecInspectOptions) (client.ExecInspectResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ImageCreate", arg0, arg1, arg2) - ret0, _ := ret[0].(io.ReadCloser) + ret := m.ctrl.Call(m, "ExecInspect", arg0, arg1, arg2) + ret0, _ := ret[0].(client.ExecInspectResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// ImageCreate indicates an expected call of ImageCreate -func (mr *MockCommonAPIClientMockRecorder) ImageCreate(arg0, arg1, arg2 interface{}) *gomock.Call { +// ExecInspect indicates an expected call of ExecInspect. +func (mr *MockAPIClientMockRecorder) ExecInspect(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ImageCreate", reflect.TypeOf((*MockCommonAPIClient)(nil).ImageCreate), arg0, arg1, arg2) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ExecInspect", reflect.TypeOf((*MockAPIClient)(nil).ExecInspect), arg0, arg1, arg2) } -// ImageHistory mocks base method -func (m *MockCommonAPIClient) ImageHistory(arg0 context.Context, arg1 string) ([]image.HistoryResponseItem, error) { +// ExecResize mocks base method. +func (m *MockAPIClient) ExecResize(arg0 context.Context, arg1 string, arg2 client.ExecResizeOptions) (client.ExecResizeResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ImageHistory", arg0, arg1) - ret0, _ := ret[0].([]image.HistoryResponseItem) + ret := m.ctrl.Call(m, "ExecResize", arg0, arg1, arg2) + ret0, _ := ret[0].(client.ExecResizeResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// ImageHistory indicates an expected call of ImageHistory -func (mr *MockCommonAPIClientMockRecorder) ImageHistory(arg0, arg1 interface{}) *gomock.Call { +// ExecResize indicates an expected call of ExecResize. 
+func (mr *MockAPIClientMockRecorder) ExecResize(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ImageHistory", reflect.TypeOf((*MockCommonAPIClient)(nil).ImageHistory), arg0, arg1) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ExecResize", reflect.TypeOf((*MockAPIClient)(nil).ExecResize), arg0, arg1, arg2) } -// ImageImport mocks base method -func (m *MockCommonAPIClient) ImageImport(arg0 context.Context, arg1 types.ImageImportSource, arg2 string, arg3 types.ImageImportOptions) (io.ReadCloser, error) { +// ExecStart mocks base method. +func (m *MockAPIClient) ExecStart(arg0 context.Context, arg1 string, arg2 client.ExecStartOptions) (client.ExecStartResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ImageImport", arg0, arg1, arg2, arg3) - ret0, _ := ret[0].(io.ReadCloser) + ret := m.ctrl.Call(m, "ExecStart", arg0, arg1, arg2) + ret0, _ := ret[0].(client.ExecStartResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// ImageImport indicates an expected call of ImageImport -func (mr *MockCommonAPIClientMockRecorder) ImageImport(arg0, arg1, arg2, arg3 interface{}) *gomock.Call { +// ExecStart indicates an expected call of ExecStart. +func (mr *MockAPIClientMockRecorder) ExecStart(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ImageImport", reflect.TypeOf((*MockCommonAPIClient)(nil).ImageImport), arg0, arg1, arg2, arg3) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ExecStart", reflect.TypeOf((*MockAPIClient)(nil).ExecStart), arg0, arg1, arg2) } -// ImageInspectWithRaw mocks base method -func (m *MockCommonAPIClient) ImageInspectWithRaw(arg0 context.Context, arg1 string) (types.ImageInspect, []byte, error) { +// ImageBuild mocks base method. 
+func (m *MockAPIClient) ImageBuild(arg0 context.Context, arg1 io.Reader, arg2 client.ImageBuildOptions) (client.ImageBuildResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ImageInspectWithRaw", arg0, arg1) - ret0, _ := ret[0].(types.ImageInspect) - ret1, _ := ret[1].([]byte) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 + ret := m.ctrl.Call(m, "ImageBuild", arg0, arg1, arg2) + ret0, _ := ret[0].(client.ImageBuildResult) + ret1, _ := ret[1].(error) + return ret0, ret1 } -// ImageInspectWithRaw indicates an expected call of ImageInspectWithRaw -func (mr *MockCommonAPIClientMockRecorder) ImageInspectWithRaw(arg0, arg1 interface{}) *gomock.Call { +// ImageBuild indicates an expected call of ImageBuild. +func (mr *MockAPIClientMockRecorder) ImageBuild(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ImageInspectWithRaw", reflect.TypeOf((*MockCommonAPIClient)(nil).ImageInspectWithRaw), arg0, arg1) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ImageBuild", reflect.TypeOf((*MockAPIClient)(nil).ImageBuild), arg0, arg1, arg2) } -// ImageList mocks base method -func (m *MockCommonAPIClient) ImageList(arg0 context.Context, arg1 types.ImageListOptions) ([]types.ImageSummary, error) { +// ImageHistory mocks base method. +func (m *MockAPIClient) ImageHistory(arg0 context.Context, arg1 string, arg2 ...client.ImageHistoryOption) (client.ImageHistoryResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ImageList", arg0, arg1) - ret0, _ := ret[0].([]types.ImageSummary) + varargs := []interface{}{arg0, arg1} + for _, a := range arg2 { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "ImageHistory", varargs...) 
+ ret0, _ := ret[0].(client.ImageHistoryResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// ImageList indicates an expected call of ImageList -func (mr *MockCommonAPIClientMockRecorder) ImageList(arg0, arg1 interface{}) *gomock.Call { +// ImageHistory indicates an expected call of ImageHistory. +func (mr *MockAPIClientMockRecorder) ImageHistory(arg0, arg1 interface{}, arg2 ...interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ImageList", reflect.TypeOf((*MockCommonAPIClient)(nil).ImageList), arg0, arg1) + varargs := append([]interface{}{arg0, arg1}, arg2...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ImageHistory", reflect.TypeOf((*MockAPIClient)(nil).ImageHistory), varargs...) } -// ImageLoad mocks base method -func (m *MockCommonAPIClient) ImageLoad(arg0 context.Context, arg1 io.Reader, arg2 bool) (types.ImageLoadResponse, error) { +// ImageImport mocks base method. +func (m *MockAPIClient) ImageImport(arg0 context.Context, arg1 client.ImageImportSource, arg2 string, arg3 client.ImageImportOptions) (client.ImageImportResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ImageLoad", arg0, arg1, arg2) - ret0, _ := ret[0].(types.ImageLoadResponse) + ret := m.ctrl.Call(m, "ImageImport", arg0, arg1, arg2, arg3) + ret0, _ := ret[0].(client.ImageImportResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// ImageLoad indicates an expected call of ImageLoad -func (mr *MockCommonAPIClientMockRecorder) ImageLoad(arg0, arg1, arg2 interface{}) *gomock.Call { +// ImageImport indicates an expected call of ImageImport. 
+func (mr *MockAPIClientMockRecorder) ImageImport(arg0, arg1, arg2, arg3 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ImageLoad", reflect.TypeOf((*MockCommonAPIClient)(nil).ImageLoad), arg0, arg1, arg2) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ImageImport", reflect.TypeOf((*MockAPIClient)(nil).ImageImport), arg0, arg1, arg2, arg3) } -// ImagePull mocks base method -func (m *MockCommonAPIClient) ImagePull(arg0 context.Context, arg1 string, arg2 types.ImagePullOptions) (io.ReadCloser, error) { +// ImageInspect mocks base method. +func (m *MockAPIClient) ImageInspect(arg0 context.Context, arg1 string, arg2 ...client.ImageInspectOption) (client.ImageInspectResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ImagePull", arg0, arg1, arg2) - ret0, _ := ret[0].(io.ReadCloser) + varargs := []interface{}{arg0, arg1} + for _, a := range arg2 { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "ImageInspect", varargs...) + ret0, _ := ret[0].(client.ImageInspectResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// ImagePull indicates an expected call of ImagePull -func (mr *MockCommonAPIClientMockRecorder) ImagePull(arg0, arg1, arg2 interface{}) *gomock.Call { +// ImageInspect indicates an expected call of ImageInspect. +func (mr *MockAPIClientMockRecorder) ImageInspect(arg0, arg1 interface{}, arg2 ...interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ImagePull", reflect.TypeOf((*MockCommonAPIClient)(nil).ImagePull), arg0, arg1, arg2) + varargs := append([]interface{}{arg0, arg1}, arg2...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ImageInspect", reflect.TypeOf((*MockAPIClient)(nil).ImageInspect), varargs...) } -// ImagePush mocks base method -func (m *MockCommonAPIClient) ImagePush(arg0 context.Context, arg1 string, arg2 types.ImagePushOptions) (io.ReadCloser, error) { +// ImageList mocks base method. 
+func (m *MockAPIClient) ImageList(arg0 context.Context, arg1 client.ImageListOptions) (client.ImageListResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ImagePush", arg0, arg1, arg2) - ret0, _ := ret[0].(io.ReadCloser) + ret := m.ctrl.Call(m, "ImageList", arg0, arg1) + ret0, _ := ret[0].(client.ImageListResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// ImagePush indicates an expected call of ImagePush -func (mr *MockCommonAPIClientMockRecorder) ImagePush(arg0, arg1, arg2 interface{}) *gomock.Call { +// ImageList indicates an expected call of ImageList. +func (mr *MockAPIClientMockRecorder) ImageList(arg0, arg1 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ImagePush", reflect.TypeOf((*MockCommonAPIClient)(nil).ImagePush), arg0, arg1, arg2) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ImageList", reflect.TypeOf((*MockAPIClient)(nil).ImageList), arg0, arg1) } -// ImageRemove mocks base method -func (m *MockCommonAPIClient) ImageRemove(arg0 context.Context, arg1 string, arg2 types.ImageRemoveOptions) ([]types.ImageDeleteResponseItem, error) { +// ImageLoad mocks base method. +func (m *MockAPIClient) ImageLoad(arg0 context.Context, arg1 io.Reader, arg2 ...client.ImageLoadOption) (client.ImageLoadResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ImageRemove", arg0, arg1, arg2) - ret0, _ := ret[0].([]types.ImageDeleteResponseItem) + varargs := []interface{}{arg0, arg1} + for _, a := range arg2 { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "ImageLoad", varargs...) + ret0, _ := ret[0].(client.ImageLoadResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// ImageRemove indicates an expected call of ImageRemove -func (mr *MockCommonAPIClientMockRecorder) ImageRemove(arg0, arg1, arg2 interface{}) *gomock.Call { +// ImageLoad indicates an expected call of ImageLoad. 
+func (mr *MockAPIClientMockRecorder) ImageLoad(arg0, arg1 interface{}, arg2 ...interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ImageRemove", reflect.TypeOf((*MockCommonAPIClient)(nil).ImageRemove), arg0, arg1, arg2) + varargs := append([]interface{}{arg0, arg1}, arg2...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ImageLoad", reflect.TypeOf((*MockAPIClient)(nil).ImageLoad), varargs...) } -// ImageSave mocks base method -func (m *MockCommonAPIClient) ImageSave(arg0 context.Context, arg1 []string) (io.ReadCloser, error) { +// ImagePrune mocks base method. +func (m *MockAPIClient) ImagePrune(arg0 context.Context, arg1 client.ImagePruneOptions) (client.ImagePruneResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ImageSave", arg0, arg1) - ret0, _ := ret[0].(io.ReadCloser) + ret := m.ctrl.Call(m, "ImagePrune", arg0, arg1) + ret0, _ := ret[0].(client.ImagePruneResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// ImageSave indicates an expected call of ImageSave -func (mr *MockCommonAPIClientMockRecorder) ImageSave(arg0, arg1 interface{}) *gomock.Call { +// ImagePrune indicates an expected call of ImagePrune. +func (mr *MockAPIClientMockRecorder) ImagePrune(arg0, arg1 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ImageSave", reflect.TypeOf((*MockCommonAPIClient)(nil).ImageSave), arg0, arg1) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ImagePrune", reflect.TypeOf((*MockAPIClient)(nil).ImagePrune), arg0, arg1) } -// ImageSearch mocks base method -func (m *MockCommonAPIClient) ImageSearch(arg0 context.Context, arg1 string, arg2 types.ImageSearchOptions) ([]registry.SearchResult, error) { +// ImagePull mocks base method. 
+func (m *MockAPIClient) ImagePull(arg0 context.Context, arg1 string, arg2 client.ImagePullOptions) (client.ImagePullResponse, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ImageSearch", arg0, arg1, arg2) - ret0, _ := ret[0].([]registry.SearchResult) + ret := m.ctrl.Call(m, "ImagePull", arg0, arg1, arg2) + ret0, _ := ret[0].(client.ImagePullResponse) ret1, _ := ret[1].(error) return ret0, ret1 } -// ImageSearch indicates an expected call of ImageSearch -func (mr *MockCommonAPIClientMockRecorder) ImageSearch(arg0, arg1, arg2 interface{}) *gomock.Call { +// ImagePull indicates an expected call of ImagePull. +func (mr *MockAPIClientMockRecorder) ImagePull(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ImageSearch", reflect.TypeOf((*MockCommonAPIClient)(nil).ImageSearch), arg0, arg1, arg2) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ImagePull", reflect.TypeOf((*MockAPIClient)(nil).ImagePull), arg0, arg1, arg2) } -// ImageTag mocks base method -func (m *MockCommonAPIClient) ImageTag(arg0 context.Context, arg1, arg2 string) error { +// ImagePush mocks base method. +func (m *MockAPIClient) ImagePush(arg0 context.Context, arg1 string, arg2 client.ImagePushOptions) (client.ImagePushResponse, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ImageTag", arg0, arg1, arg2) - ret0, _ := ret[0].(error) - return ret0 + ret := m.ctrl.Call(m, "ImagePush", arg0, arg1, arg2) + ret0, _ := ret[0].(client.ImagePushResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 } -// ImageTag indicates an expected call of ImageTag -func (mr *MockCommonAPIClientMockRecorder) ImageTag(arg0, arg1, arg2 interface{}) *gomock.Call { +// ImagePush indicates an expected call of ImagePush. 
+func (mr *MockAPIClientMockRecorder) ImagePush(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ImageTag", reflect.TypeOf((*MockCommonAPIClient)(nil).ImageTag), arg0, arg1, arg2) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ImagePush", reflect.TypeOf((*MockAPIClient)(nil).ImagePush), arg0, arg1, arg2) } -// ImagesPrune mocks base method -func (m *MockCommonAPIClient) ImagesPrune(arg0 context.Context, arg1 filters.Args) (types.ImagesPruneReport, error) { +// ImageRemove mocks base method. +func (m *MockAPIClient) ImageRemove(arg0 context.Context, arg1 string, arg2 client.ImageRemoveOptions) (client.ImageRemoveResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ImagesPrune", arg0, arg1) - ret0, _ := ret[0].(types.ImagesPruneReport) + ret := m.ctrl.Call(m, "ImageRemove", arg0, arg1, arg2) + ret0, _ := ret[0].(client.ImageRemoveResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// ImagesPrune indicates an expected call of ImagesPrune -func (mr *MockCommonAPIClientMockRecorder) ImagesPrune(arg0, arg1 interface{}) *gomock.Call { +// ImageRemove indicates an expected call of ImageRemove. +func (mr *MockAPIClientMockRecorder) ImageRemove(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ImagesPrune", reflect.TypeOf((*MockCommonAPIClient)(nil).ImagesPrune), arg0, arg1) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ImageRemove", reflect.TypeOf((*MockAPIClient)(nil).ImageRemove), arg0, arg1, arg2) } -// Info mocks base method -func (m *MockCommonAPIClient) Info(arg0 context.Context) (types.Info, error) { +// ImageSave mocks base method. 
+func (m *MockAPIClient) ImageSave(arg0 context.Context, arg1 []string, arg2 ...client.ImageSaveOption) (client.ImageSaveResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Info", arg0) - ret0, _ := ret[0].(types.Info) + varargs := []interface{}{arg0, arg1} + for _, a := range arg2 { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "ImageSave", varargs...) + ret0, _ := ret[0].(client.ImageSaveResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// Info indicates an expected call of Info -func (mr *MockCommonAPIClientMockRecorder) Info(arg0 interface{}) *gomock.Call { +// ImageSave indicates an expected call of ImageSave. +func (mr *MockAPIClientMockRecorder) ImageSave(arg0, arg1 interface{}, arg2 ...interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Info", reflect.TypeOf((*MockCommonAPIClient)(nil).Info), arg0) + varargs := append([]interface{}{arg0, arg1}, arg2...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ImageSave", reflect.TypeOf((*MockAPIClient)(nil).ImageSave), varargs...) } -// NegotiateAPIVersion mocks base method -func (m *MockCommonAPIClient) NegotiateAPIVersion(arg0 context.Context) { +// ImageSearch mocks base method. +func (m *MockAPIClient) ImageSearch(arg0 context.Context, arg1 string, arg2 client.ImageSearchOptions) (client.ImageSearchResult, error) { m.ctrl.T.Helper() - m.ctrl.Call(m, "NegotiateAPIVersion", arg0) + ret := m.ctrl.Call(m, "ImageSearch", arg0, arg1, arg2) + ret0, _ := ret[0].(client.ImageSearchResult) + ret1, _ := ret[1].(error) + return ret0, ret1 } -// NegotiateAPIVersion indicates an expected call of NegotiateAPIVersion -func (mr *MockCommonAPIClientMockRecorder) NegotiateAPIVersion(arg0 interface{}) *gomock.Call { +// ImageSearch indicates an expected call of ImageSearch. 
+func (mr *MockAPIClientMockRecorder) ImageSearch(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "NegotiateAPIVersion", reflect.TypeOf((*MockCommonAPIClient)(nil).NegotiateAPIVersion), arg0) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ImageSearch", reflect.TypeOf((*MockAPIClient)(nil).ImageSearch), arg0, arg1, arg2) } -// NegotiateAPIVersionPing mocks base method -func (m *MockCommonAPIClient) NegotiateAPIVersionPing(arg0 types.Ping) { +// ImageTag mocks base method. +func (m *MockAPIClient) ImageTag(arg0 context.Context, arg1 client.ImageTagOptions) (client.ImageTagResult, error) { m.ctrl.T.Helper() - m.ctrl.Call(m, "NegotiateAPIVersionPing", arg0) + ret := m.ctrl.Call(m, "ImageTag", arg0, arg1) + ret0, _ := ret[0].(client.ImageTagResult) + ret1, _ := ret[1].(error) + return ret0, ret1 } -// NegotiateAPIVersionPing indicates an expected call of NegotiateAPIVersionPing -func (mr *MockCommonAPIClientMockRecorder) NegotiateAPIVersionPing(arg0 interface{}) *gomock.Call { +// ImageTag indicates an expected call of ImageTag. +func (mr *MockAPIClientMockRecorder) ImageTag(arg0, arg1 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "NegotiateAPIVersionPing", reflect.TypeOf((*MockCommonAPIClient)(nil).NegotiateAPIVersionPing), arg0) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ImageTag", reflect.TypeOf((*MockAPIClient)(nil).ImageTag), arg0, arg1) } -// NetworkConnect mocks base method -func (m *MockCommonAPIClient) NetworkConnect(arg0 context.Context, arg1, arg2 string, arg3 *network.EndpointSettings) error { +// Info mocks base method. 
+func (m *MockAPIClient) Info(arg0 context.Context, arg1 client.InfoOptions) (client.SystemInfoResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "NetworkConnect", arg0, arg1, arg2, arg3) - ret0, _ := ret[0].(error) - return ret0 + ret := m.ctrl.Call(m, "Info", arg0, arg1) + ret0, _ := ret[0].(client.SystemInfoResult) + ret1, _ := ret[1].(error) + return ret0, ret1 } -// NetworkConnect indicates an expected call of NetworkConnect -func (mr *MockCommonAPIClientMockRecorder) NetworkConnect(arg0, arg1, arg2, arg3 interface{}) *gomock.Call { +// Info indicates an expected call of Info. +func (mr *MockAPIClientMockRecorder) Info(arg0, arg1 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "NetworkConnect", reflect.TypeOf((*MockCommonAPIClient)(nil).NetworkConnect), arg0, arg1, arg2, arg3) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Info", reflect.TypeOf((*MockAPIClient)(nil).Info), arg0, arg1) } -// NetworkCreate mocks base method -func (m *MockCommonAPIClient) NetworkCreate(arg0 context.Context, arg1 string, arg2 types.NetworkCreate) (types.NetworkCreateResponse, error) { +// NetworkConnect mocks base method. +func (m *MockAPIClient) NetworkConnect(arg0 context.Context, arg1 string, arg2 client.NetworkConnectOptions) (client.NetworkConnectResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "NetworkCreate", arg0, arg1, arg2) - ret0, _ := ret[0].(types.NetworkCreateResponse) + ret := m.ctrl.Call(m, "NetworkConnect", arg0, arg1, arg2) + ret0, _ := ret[0].(client.NetworkConnectResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// NetworkCreate indicates an expected call of NetworkCreate -func (mr *MockCommonAPIClientMockRecorder) NetworkCreate(arg0, arg1, arg2 interface{}) *gomock.Call { +// NetworkConnect indicates an expected call of NetworkConnect. 
+func (mr *MockAPIClientMockRecorder) NetworkConnect(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "NetworkCreate", reflect.TypeOf((*MockCommonAPIClient)(nil).NetworkCreate), arg0, arg1, arg2) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "NetworkConnect", reflect.TypeOf((*MockAPIClient)(nil).NetworkConnect), arg0, arg1, arg2) } -// NetworkDisconnect mocks base method -func (m *MockCommonAPIClient) NetworkDisconnect(arg0 context.Context, arg1, arg2 string, arg3 bool) error { +// NetworkCreate mocks base method. +func (m *MockAPIClient) NetworkCreate(arg0 context.Context, arg1 string, arg2 client.NetworkCreateOptions) (client.NetworkCreateResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "NetworkDisconnect", arg0, arg1, arg2, arg3) - ret0, _ := ret[0].(error) - return ret0 + ret := m.ctrl.Call(m, "NetworkCreate", arg0, arg1, arg2) + ret0, _ := ret[0].(client.NetworkCreateResult) + ret1, _ := ret[1].(error) + return ret0, ret1 } -// NetworkDisconnect indicates an expected call of NetworkDisconnect -func (mr *MockCommonAPIClientMockRecorder) NetworkDisconnect(arg0, arg1, arg2, arg3 interface{}) *gomock.Call { +// NetworkCreate indicates an expected call of NetworkCreate. +func (mr *MockAPIClientMockRecorder) NetworkCreate(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "NetworkDisconnect", reflect.TypeOf((*MockCommonAPIClient)(nil).NetworkDisconnect), arg0, arg1, arg2, arg3) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "NetworkCreate", reflect.TypeOf((*MockAPIClient)(nil).NetworkCreate), arg0, arg1, arg2) } -// NetworkInspect mocks base method -func (m *MockCommonAPIClient) NetworkInspect(arg0 context.Context, arg1 string, arg2 types.NetworkInspectOptions) (types.NetworkResource, error) { +// NetworkDisconnect mocks base method. 
+func (m *MockAPIClient) NetworkDisconnect(arg0 context.Context, arg1 string, arg2 client.NetworkDisconnectOptions) (client.NetworkDisconnectResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "NetworkInspect", arg0, arg1, arg2) - ret0, _ := ret[0].(types.NetworkResource) + ret := m.ctrl.Call(m, "NetworkDisconnect", arg0, arg1, arg2) + ret0, _ := ret[0].(client.NetworkDisconnectResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// NetworkInspect indicates an expected call of NetworkInspect -func (mr *MockCommonAPIClientMockRecorder) NetworkInspect(arg0, arg1, arg2 interface{}) *gomock.Call { +// NetworkDisconnect indicates an expected call of NetworkDisconnect. +func (mr *MockAPIClientMockRecorder) NetworkDisconnect(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "NetworkInspect", reflect.TypeOf((*MockCommonAPIClient)(nil).NetworkInspect), arg0, arg1, arg2) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "NetworkDisconnect", reflect.TypeOf((*MockAPIClient)(nil).NetworkDisconnect), arg0, arg1, arg2) } -// NetworkInspectWithRaw mocks base method -func (m *MockCommonAPIClient) NetworkInspectWithRaw(arg0 context.Context, arg1 string, arg2 types.NetworkInspectOptions) (types.NetworkResource, []byte, error) { +// NetworkInspect mocks base method. 
+func (m *MockAPIClient) NetworkInspect(arg0 context.Context, arg1 string, arg2 client.NetworkInspectOptions) (client.NetworkInspectResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "NetworkInspectWithRaw", arg0, arg1, arg2) - ret0, _ := ret[0].(types.NetworkResource) - ret1, _ := ret[1].([]byte) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 + ret := m.ctrl.Call(m, "NetworkInspect", arg0, arg1, arg2) + ret0, _ := ret[0].(client.NetworkInspectResult) + ret1, _ := ret[1].(error) + return ret0, ret1 } -// NetworkInspectWithRaw indicates an expected call of NetworkInspectWithRaw -func (mr *MockCommonAPIClientMockRecorder) NetworkInspectWithRaw(arg0, arg1, arg2 interface{}) *gomock.Call { +// NetworkInspect indicates an expected call of NetworkInspect. +func (mr *MockAPIClientMockRecorder) NetworkInspect(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "NetworkInspectWithRaw", reflect.TypeOf((*MockCommonAPIClient)(nil).NetworkInspectWithRaw), arg0, arg1, arg2) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "NetworkInspect", reflect.TypeOf((*MockAPIClient)(nil).NetworkInspect), arg0, arg1, arg2) } -// NetworkList mocks base method -func (m *MockCommonAPIClient) NetworkList(arg0 context.Context, arg1 types.NetworkListOptions) ([]types.NetworkResource, error) { +// NetworkList mocks base method. +func (m *MockAPIClient) NetworkList(arg0 context.Context, arg1 client.NetworkListOptions) (client.NetworkListResult, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "NetworkList", arg0, arg1) - ret0, _ := ret[0].([]types.NetworkResource) + ret0, _ := ret[0].(client.NetworkListResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// NetworkList indicates an expected call of NetworkList -func (mr *MockCommonAPIClientMockRecorder) NetworkList(arg0, arg1 interface{}) *gomock.Call { +// NetworkList indicates an expected call of NetworkList. 
+func (mr *MockAPIClientMockRecorder) NetworkList(arg0, arg1 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "NetworkList", reflect.TypeOf((*MockCommonAPIClient)(nil).NetworkList), arg0, arg1) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "NetworkList", reflect.TypeOf((*MockAPIClient)(nil).NetworkList), arg0, arg1) } -// NetworkRemove mocks base method -func (m *MockCommonAPIClient) NetworkRemove(arg0 context.Context, arg1 string) error { +// NetworkPrune mocks base method. +func (m *MockAPIClient) NetworkPrune(arg0 context.Context, arg1 client.NetworkPruneOptions) (client.NetworkPruneResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "NetworkRemove", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 + ret := m.ctrl.Call(m, "NetworkPrune", arg0, arg1) + ret0, _ := ret[0].(client.NetworkPruneResult) + ret1, _ := ret[1].(error) + return ret0, ret1 } -// NetworkRemove indicates an expected call of NetworkRemove -func (mr *MockCommonAPIClientMockRecorder) NetworkRemove(arg0, arg1 interface{}) *gomock.Call { +// NetworkPrune indicates an expected call of NetworkPrune. +func (mr *MockAPIClientMockRecorder) NetworkPrune(arg0, arg1 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "NetworkRemove", reflect.TypeOf((*MockCommonAPIClient)(nil).NetworkRemove), arg0, arg1) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "NetworkPrune", reflect.TypeOf((*MockAPIClient)(nil).NetworkPrune), arg0, arg1) } -// NetworksPrune mocks base method -func (m *MockCommonAPIClient) NetworksPrune(arg0 context.Context, arg1 filters.Args) (types.NetworksPruneReport, error) { +// NetworkRemove mocks base method. 
+func (m *MockAPIClient) NetworkRemove(arg0 context.Context, arg1 string, arg2 client.NetworkRemoveOptions) (client.NetworkRemoveResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "NetworksPrune", arg0, arg1) - ret0, _ := ret[0].(types.NetworksPruneReport) + ret := m.ctrl.Call(m, "NetworkRemove", arg0, arg1, arg2) + ret0, _ := ret[0].(client.NetworkRemoveResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// NetworksPrune indicates an expected call of NetworksPrune -func (mr *MockCommonAPIClientMockRecorder) NetworksPrune(arg0, arg1 interface{}) *gomock.Call { +// NetworkRemove indicates an expected call of NetworkRemove. +func (mr *MockAPIClientMockRecorder) NetworkRemove(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "NetworksPrune", reflect.TypeOf((*MockCommonAPIClient)(nil).NetworksPrune), arg0, arg1) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "NetworkRemove", reflect.TypeOf((*MockAPIClient)(nil).NetworkRemove), arg0, arg1, arg2) } -// NodeInspectWithRaw mocks base method -func (m *MockCommonAPIClient) NodeInspectWithRaw(arg0 context.Context, arg1 string) (swarm.Node, []byte, error) { +// NodeInspect mocks base method. +func (m *MockAPIClient) NodeInspect(arg0 context.Context, arg1 string, arg2 client.NodeInspectOptions) (client.NodeInspectResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "NodeInspectWithRaw", arg0, arg1) - ret0, _ := ret[0].(swarm.Node) - ret1, _ := ret[1].([]byte) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 + ret := m.ctrl.Call(m, "NodeInspect", arg0, arg1, arg2) + ret0, _ := ret[0].(client.NodeInspectResult) + ret1, _ := ret[1].(error) + return ret0, ret1 } -// NodeInspectWithRaw indicates an expected call of NodeInspectWithRaw -func (mr *MockCommonAPIClientMockRecorder) NodeInspectWithRaw(arg0, arg1 interface{}) *gomock.Call { +// NodeInspect indicates an expected call of NodeInspect. 
+func (mr *MockAPIClientMockRecorder) NodeInspect(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "NodeInspectWithRaw", reflect.TypeOf((*MockCommonAPIClient)(nil).NodeInspectWithRaw), arg0, arg1) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "NodeInspect", reflect.TypeOf((*MockAPIClient)(nil).NodeInspect), arg0, arg1, arg2) } -// NodeList mocks base method -func (m *MockCommonAPIClient) NodeList(arg0 context.Context, arg1 types.NodeListOptions) ([]swarm.Node, error) { +// NodeList mocks base method. +func (m *MockAPIClient) NodeList(arg0 context.Context, arg1 client.NodeListOptions) (client.NodeListResult, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "NodeList", arg0, arg1) - ret0, _ := ret[0].([]swarm.Node) + ret0, _ := ret[0].(client.NodeListResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// NodeList indicates an expected call of NodeList -func (mr *MockCommonAPIClientMockRecorder) NodeList(arg0, arg1 interface{}) *gomock.Call { +// NodeList indicates an expected call of NodeList. +func (mr *MockAPIClientMockRecorder) NodeList(arg0, arg1 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "NodeList", reflect.TypeOf((*MockCommonAPIClient)(nil).NodeList), arg0, arg1) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "NodeList", reflect.TypeOf((*MockAPIClient)(nil).NodeList), arg0, arg1) } -// NodeRemove mocks base method -func (m *MockCommonAPIClient) NodeRemove(arg0 context.Context, arg1 string, arg2 types.NodeRemoveOptions) error { +// NodeRemove mocks base method. 
+func (m *MockAPIClient) NodeRemove(arg0 context.Context, arg1 string, arg2 client.NodeRemoveOptions) (client.NodeRemoveResult, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "NodeRemove", arg0, arg1, arg2) - ret0, _ := ret[0].(error) - return ret0 + ret0, _ := ret[0].(client.NodeRemoveResult) + ret1, _ := ret[1].(error) + return ret0, ret1 } -// NodeRemove indicates an expected call of NodeRemove -func (mr *MockCommonAPIClientMockRecorder) NodeRemove(arg0, arg1, arg2 interface{}) *gomock.Call { +// NodeRemove indicates an expected call of NodeRemove. +func (mr *MockAPIClientMockRecorder) NodeRemove(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "NodeRemove", reflect.TypeOf((*MockCommonAPIClient)(nil).NodeRemove), arg0, arg1, arg2) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "NodeRemove", reflect.TypeOf((*MockAPIClient)(nil).NodeRemove), arg0, arg1, arg2) } -// NodeUpdate mocks base method -func (m *MockCommonAPIClient) NodeUpdate(arg0 context.Context, arg1 string, arg2 swarm.Version, arg3 swarm.NodeSpec) error { +// NodeUpdate mocks base method. +func (m *MockAPIClient) NodeUpdate(arg0 context.Context, arg1 string, arg2 client.NodeUpdateOptions) (client.NodeUpdateResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "NodeUpdate", arg0, arg1, arg2, arg3) - ret0, _ := ret[0].(error) - return ret0 + ret := m.ctrl.Call(m, "NodeUpdate", arg0, arg1, arg2) + ret0, _ := ret[0].(client.NodeUpdateResult) + ret1, _ := ret[1].(error) + return ret0, ret1 } -// NodeUpdate indicates an expected call of NodeUpdate -func (mr *MockCommonAPIClientMockRecorder) NodeUpdate(arg0, arg1, arg2, arg3 interface{}) *gomock.Call { +// NodeUpdate indicates an expected call of NodeUpdate. 
+func (mr *MockAPIClientMockRecorder) NodeUpdate(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "NodeUpdate", reflect.TypeOf((*MockCommonAPIClient)(nil).NodeUpdate), arg0, arg1, arg2, arg3) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "NodeUpdate", reflect.TypeOf((*MockAPIClient)(nil).NodeUpdate), arg0, arg1, arg2) } -// Ping mocks base method -func (m *MockCommonAPIClient) Ping(arg0 context.Context) (types.Ping, error) { +// Ping mocks base method. +func (m *MockAPIClient) Ping(arg0 context.Context, arg1 client.PingOptions) (client.PingResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Ping", arg0) - ret0, _ := ret[0].(types.Ping) + ret := m.ctrl.Call(m, "Ping", arg0, arg1) + ret0, _ := ret[0].(client.PingResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// Ping indicates an expected call of Ping -func (mr *MockCommonAPIClientMockRecorder) Ping(arg0 interface{}) *gomock.Call { +// Ping indicates an expected call of Ping. +func (mr *MockAPIClientMockRecorder) Ping(arg0, arg1 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Ping", reflect.TypeOf((*MockCommonAPIClient)(nil).Ping), arg0) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Ping", reflect.TypeOf((*MockAPIClient)(nil).Ping), arg0, arg1) } -// PluginCreate mocks base method -func (m *MockCommonAPIClient) PluginCreate(arg0 context.Context, arg1 io.Reader, arg2 types.PluginCreateOptions) error { +// PluginCreate mocks base method. 
+func (m *MockAPIClient) PluginCreate(arg0 context.Context, arg1 io.Reader, arg2 client.PluginCreateOptions) (client.PluginCreateResult, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "PluginCreate", arg0, arg1, arg2) - ret0, _ := ret[0].(error) - return ret0 + ret0, _ := ret[0].(client.PluginCreateResult) + ret1, _ := ret[1].(error) + return ret0, ret1 } -// PluginCreate indicates an expected call of PluginCreate -func (mr *MockCommonAPIClientMockRecorder) PluginCreate(arg0, arg1, arg2 interface{}) *gomock.Call { +// PluginCreate indicates an expected call of PluginCreate. +func (mr *MockAPIClientMockRecorder) PluginCreate(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "PluginCreate", reflect.TypeOf((*MockCommonAPIClient)(nil).PluginCreate), arg0, arg1, arg2) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "PluginCreate", reflect.TypeOf((*MockAPIClient)(nil).PluginCreate), arg0, arg1, arg2) } -// PluginDisable mocks base method -func (m *MockCommonAPIClient) PluginDisable(arg0 context.Context, arg1 string, arg2 types.PluginDisableOptions) error { +// PluginDisable mocks base method. +func (m *MockAPIClient) PluginDisable(arg0 context.Context, arg1 string, arg2 client.PluginDisableOptions) (client.PluginDisableResult, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "PluginDisable", arg0, arg1, arg2) - ret0, _ := ret[0].(error) - return ret0 + ret0, _ := ret[0].(client.PluginDisableResult) + ret1, _ := ret[1].(error) + return ret0, ret1 } -// PluginDisable indicates an expected call of PluginDisable -func (mr *MockCommonAPIClientMockRecorder) PluginDisable(arg0, arg1, arg2 interface{}) *gomock.Call { +// PluginDisable indicates an expected call of PluginDisable. 
+func (mr *MockAPIClientMockRecorder) PluginDisable(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "PluginDisable", reflect.TypeOf((*MockCommonAPIClient)(nil).PluginDisable), arg0, arg1, arg2) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "PluginDisable", reflect.TypeOf((*MockAPIClient)(nil).PluginDisable), arg0, arg1, arg2) } -// PluginEnable mocks base method -func (m *MockCommonAPIClient) PluginEnable(arg0 context.Context, arg1 string, arg2 types.PluginEnableOptions) error { +// PluginEnable mocks base method. +func (m *MockAPIClient) PluginEnable(arg0 context.Context, arg1 string, arg2 client.PluginEnableOptions) (client.PluginEnableResult, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "PluginEnable", arg0, arg1, arg2) - ret0, _ := ret[0].(error) - return ret0 + ret0, _ := ret[0].(client.PluginEnableResult) + ret1, _ := ret[1].(error) + return ret0, ret1 } -// PluginEnable indicates an expected call of PluginEnable -func (mr *MockCommonAPIClientMockRecorder) PluginEnable(arg0, arg1, arg2 interface{}) *gomock.Call { +// PluginEnable indicates an expected call of PluginEnable. +func (mr *MockAPIClientMockRecorder) PluginEnable(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "PluginEnable", reflect.TypeOf((*MockCommonAPIClient)(nil).PluginEnable), arg0, arg1, arg2) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "PluginEnable", reflect.TypeOf((*MockAPIClient)(nil).PluginEnable), arg0, arg1, arg2) } -// PluginInspectWithRaw mocks base method -func (m *MockCommonAPIClient) PluginInspectWithRaw(arg0 context.Context, arg1 string) (*types.Plugin, []byte, error) { +// PluginInspect mocks base method. 
+func (m *MockAPIClient) PluginInspect(arg0 context.Context, arg1 string, arg2 client.PluginInspectOptions) (client.PluginInspectResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "PluginInspectWithRaw", arg0, arg1) - ret0, _ := ret[0].(*types.Plugin) - ret1, _ := ret[1].([]byte) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 + ret := m.ctrl.Call(m, "PluginInspect", arg0, arg1, arg2) + ret0, _ := ret[0].(client.PluginInspectResult) + ret1, _ := ret[1].(error) + return ret0, ret1 } -// PluginInspectWithRaw indicates an expected call of PluginInspectWithRaw -func (mr *MockCommonAPIClientMockRecorder) PluginInspectWithRaw(arg0, arg1 interface{}) *gomock.Call { +// PluginInspect indicates an expected call of PluginInspect. +func (mr *MockAPIClientMockRecorder) PluginInspect(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "PluginInspectWithRaw", reflect.TypeOf((*MockCommonAPIClient)(nil).PluginInspectWithRaw), arg0, arg1) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "PluginInspect", reflect.TypeOf((*MockAPIClient)(nil).PluginInspect), arg0, arg1, arg2) } -// PluginInstall mocks base method -func (m *MockCommonAPIClient) PluginInstall(arg0 context.Context, arg1 string, arg2 types.PluginInstallOptions) (io.ReadCloser, error) { +// PluginInstall mocks base method. +func (m *MockAPIClient) PluginInstall(arg0 context.Context, arg1 string, arg2 client.PluginInstallOptions) (client.PluginInstallResult, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "PluginInstall", arg0, arg1, arg2) - ret0, _ := ret[0].(io.ReadCloser) + ret0, _ := ret[0].(client.PluginInstallResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// PluginInstall indicates an expected call of PluginInstall -func (mr *MockCommonAPIClientMockRecorder) PluginInstall(arg0, arg1, arg2 interface{}) *gomock.Call { +// PluginInstall indicates an expected call of PluginInstall. 
+func (mr *MockAPIClientMockRecorder) PluginInstall(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "PluginInstall", reflect.TypeOf((*MockCommonAPIClient)(nil).PluginInstall), arg0, arg1, arg2) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "PluginInstall", reflect.TypeOf((*MockAPIClient)(nil).PluginInstall), arg0, arg1, arg2) } -// PluginList mocks base method -func (m *MockCommonAPIClient) PluginList(arg0 context.Context, arg1 filters.Args) (types.PluginsListResponse, error) { +// PluginList mocks base method. +func (m *MockAPIClient) PluginList(arg0 context.Context, arg1 client.PluginListOptions) (client.PluginListResult, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "PluginList", arg0, arg1) - ret0, _ := ret[0].(types.PluginsListResponse) + ret0, _ := ret[0].(client.PluginListResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// PluginList indicates an expected call of PluginList -func (mr *MockCommonAPIClientMockRecorder) PluginList(arg0, arg1 interface{}) *gomock.Call { +// PluginList indicates an expected call of PluginList. +func (mr *MockAPIClientMockRecorder) PluginList(arg0, arg1 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "PluginList", reflect.TypeOf((*MockCommonAPIClient)(nil).PluginList), arg0, arg1) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "PluginList", reflect.TypeOf((*MockAPIClient)(nil).PluginList), arg0, arg1) } -// PluginPush mocks base method -func (m *MockCommonAPIClient) PluginPush(arg0 context.Context, arg1, arg2 string) (io.ReadCloser, error) { +// PluginPush mocks base method. 
+func (m *MockAPIClient) PluginPush(arg0 context.Context, arg1 string, arg2 client.PluginPushOptions) (client.PluginPushResult, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "PluginPush", arg0, arg1, arg2) - ret0, _ := ret[0].(io.ReadCloser) + ret0, _ := ret[0].(client.PluginPushResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// PluginPush indicates an expected call of PluginPush -func (mr *MockCommonAPIClientMockRecorder) PluginPush(arg0, arg1, arg2 interface{}) *gomock.Call { +// PluginPush indicates an expected call of PluginPush. +func (mr *MockAPIClientMockRecorder) PluginPush(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "PluginPush", reflect.TypeOf((*MockCommonAPIClient)(nil).PluginPush), arg0, arg1, arg2) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "PluginPush", reflect.TypeOf((*MockAPIClient)(nil).PluginPush), arg0, arg1, arg2) } -// PluginRemove mocks base method -func (m *MockCommonAPIClient) PluginRemove(arg0 context.Context, arg1 string, arg2 types.PluginRemoveOptions) error { +// PluginRemove mocks base method. +func (m *MockAPIClient) PluginRemove(arg0 context.Context, arg1 string, arg2 client.PluginRemoveOptions) (client.PluginRemoveResult, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "PluginRemove", arg0, arg1, arg2) - ret0, _ := ret[0].(error) - return ret0 + ret0, _ := ret[0].(client.PluginRemoveResult) + ret1, _ := ret[1].(error) + return ret0, ret1 } -// PluginRemove indicates an expected call of PluginRemove -func (mr *MockCommonAPIClientMockRecorder) PluginRemove(arg0, arg1, arg2 interface{}) *gomock.Call { +// PluginRemove indicates an expected call of PluginRemove. 
+func (mr *MockAPIClientMockRecorder) PluginRemove(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "PluginRemove", reflect.TypeOf((*MockCommonAPIClient)(nil).PluginRemove), arg0, arg1, arg2) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "PluginRemove", reflect.TypeOf((*MockAPIClient)(nil).PluginRemove), arg0, arg1, arg2) } -// PluginSet mocks base method -func (m *MockCommonAPIClient) PluginSet(arg0 context.Context, arg1 string, arg2 []string) error { +// PluginSet mocks base method. +func (m *MockAPIClient) PluginSet(arg0 context.Context, arg1 string, arg2 client.PluginSetOptions) (client.PluginSetResult, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "PluginSet", arg0, arg1, arg2) - ret0, _ := ret[0].(error) - return ret0 + ret0, _ := ret[0].(client.PluginSetResult) + ret1, _ := ret[1].(error) + return ret0, ret1 } -// PluginSet indicates an expected call of PluginSet -func (mr *MockCommonAPIClientMockRecorder) PluginSet(arg0, arg1, arg2 interface{}) *gomock.Call { +// PluginSet indicates an expected call of PluginSet. +func (mr *MockAPIClientMockRecorder) PluginSet(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "PluginSet", reflect.TypeOf((*MockCommonAPIClient)(nil).PluginSet), arg0, arg1, arg2) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "PluginSet", reflect.TypeOf((*MockAPIClient)(nil).PluginSet), arg0, arg1, arg2) } -// PluginUpgrade mocks base method -func (m *MockCommonAPIClient) PluginUpgrade(arg0 context.Context, arg1 string, arg2 types.PluginInstallOptions) (io.ReadCloser, error) { +// PluginUpgrade mocks base method. 
+func (m *MockAPIClient) PluginUpgrade(arg0 context.Context, arg1 string, arg2 client.PluginUpgradeOptions) (client.PluginUpgradeResult, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "PluginUpgrade", arg0, arg1, arg2) - ret0, _ := ret[0].(io.ReadCloser) + ret0, _ := ret[0].(client.PluginUpgradeResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// PluginUpgrade indicates an expected call of PluginUpgrade -func (mr *MockCommonAPIClientMockRecorder) PluginUpgrade(arg0, arg1, arg2 interface{}) *gomock.Call { +// PluginUpgrade indicates an expected call of PluginUpgrade. +func (mr *MockAPIClientMockRecorder) PluginUpgrade(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "PluginUpgrade", reflect.TypeOf((*MockCommonAPIClient)(nil).PluginUpgrade), arg0, arg1, arg2) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "PluginUpgrade", reflect.TypeOf((*MockAPIClient)(nil).PluginUpgrade), arg0, arg1, arg2) } -// RegistryLogin mocks base method -func (m *MockCommonAPIClient) RegistryLogin(arg0 context.Context, arg1 types.AuthConfig) (registry.AuthenticateOKBody, error) { +// RegistryLogin mocks base method. +func (m *MockAPIClient) RegistryLogin(arg0 context.Context, arg1 client.RegistryLoginOptions) (client.RegistryLoginResult, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "RegistryLogin", arg0, arg1) - ret0, _ := ret[0].(registry.AuthenticateOKBody) + ret0, _ := ret[0].(client.RegistryLoginResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// RegistryLogin indicates an expected call of RegistryLogin -func (mr *MockCommonAPIClientMockRecorder) RegistryLogin(arg0, arg1 interface{}) *gomock.Call { +// RegistryLogin indicates an expected call of RegistryLogin. 
+func (mr *MockAPIClientMockRecorder) RegistryLogin(arg0, arg1 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RegistryLogin", reflect.TypeOf((*MockCommonAPIClient)(nil).RegistryLogin), arg0, arg1) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RegistryLogin", reflect.TypeOf((*MockAPIClient)(nil).RegistryLogin), arg0, arg1) } -// SecretCreate mocks base method -func (m *MockCommonAPIClient) SecretCreate(arg0 context.Context, arg1 swarm.SecretSpec) (types.SecretCreateResponse, error) { +// SecretCreate mocks base method. +func (m *MockAPIClient) SecretCreate(arg0 context.Context, arg1 client.SecretCreateOptions) (client.SecretCreateResult, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "SecretCreate", arg0, arg1) - ret0, _ := ret[0].(types.SecretCreateResponse) + ret0, _ := ret[0].(client.SecretCreateResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// SecretCreate indicates an expected call of SecretCreate -func (mr *MockCommonAPIClientMockRecorder) SecretCreate(arg0, arg1 interface{}) *gomock.Call { +// SecretCreate indicates an expected call of SecretCreate. +func (mr *MockAPIClientMockRecorder) SecretCreate(arg0, arg1 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SecretCreate", reflect.TypeOf((*MockCommonAPIClient)(nil).SecretCreate), arg0, arg1) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SecretCreate", reflect.TypeOf((*MockAPIClient)(nil).SecretCreate), arg0, arg1) } -// SecretInspectWithRaw mocks base method -func (m *MockCommonAPIClient) SecretInspectWithRaw(arg0 context.Context, arg1 string) (swarm.Secret, []byte, error) { +// SecretInspect mocks base method. 
+func (m *MockAPIClient) SecretInspect(arg0 context.Context, arg1 string, arg2 client.SecretInspectOptions) (client.SecretInspectResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "SecretInspectWithRaw", arg0, arg1) - ret0, _ := ret[0].(swarm.Secret) - ret1, _ := ret[1].([]byte) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 + ret := m.ctrl.Call(m, "SecretInspect", arg0, arg1, arg2) + ret0, _ := ret[0].(client.SecretInspectResult) + ret1, _ := ret[1].(error) + return ret0, ret1 } -// SecretInspectWithRaw indicates an expected call of SecretInspectWithRaw -func (mr *MockCommonAPIClientMockRecorder) SecretInspectWithRaw(arg0, arg1 interface{}) *gomock.Call { +// SecretInspect indicates an expected call of SecretInspect. +func (mr *MockAPIClientMockRecorder) SecretInspect(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SecretInspectWithRaw", reflect.TypeOf((*MockCommonAPIClient)(nil).SecretInspectWithRaw), arg0, arg1) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SecretInspect", reflect.TypeOf((*MockAPIClient)(nil).SecretInspect), arg0, arg1, arg2) } -// SecretList mocks base method -func (m *MockCommonAPIClient) SecretList(arg0 context.Context, arg1 types.SecretListOptions) ([]swarm.Secret, error) { +// SecretList mocks base method. +func (m *MockAPIClient) SecretList(arg0 context.Context, arg1 client.SecretListOptions) (client.SecretListResult, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "SecretList", arg0, arg1) - ret0, _ := ret[0].([]swarm.Secret) + ret0, _ := ret[0].(client.SecretListResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// SecretList indicates an expected call of SecretList -func (mr *MockCommonAPIClientMockRecorder) SecretList(arg0, arg1 interface{}) *gomock.Call { +// SecretList indicates an expected call of SecretList. 
+func (mr *MockAPIClientMockRecorder) SecretList(arg0, arg1 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SecretList", reflect.TypeOf((*MockCommonAPIClient)(nil).SecretList), arg0, arg1) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SecretList", reflect.TypeOf((*MockAPIClient)(nil).SecretList), arg0, arg1) } -// SecretRemove mocks base method -func (m *MockCommonAPIClient) SecretRemove(arg0 context.Context, arg1 string) error { +// SecretRemove mocks base method. +func (m *MockAPIClient) SecretRemove(arg0 context.Context, arg1 string, arg2 client.SecretRemoveOptions) (client.SecretRemoveResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "SecretRemove", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 + ret := m.ctrl.Call(m, "SecretRemove", arg0, arg1, arg2) + ret0, _ := ret[0].(client.SecretRemoveResult) + ret1, _ := ret[1].(error) + return ret0, ret1 } -// SecretRemove indicates an expected call of SecretRemove -func (mr *MockCommonAPIClientMockRecorder) SecretRemove(arg0, arg1 interface{}) *gomock.Call { +// SecretRemove indicates an expected call of SecretRemove. +func (mr *MockAPIClientMockRecorder) SecretRemove(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SecretRemove", reflect.TypeOf((*MockCommonAPIClient)(nil).SecretRemove), arg0, arg1) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SecretRemove", reflect.TypeOf((*MockAPIClient)(nil).SecretRemove), arg0, arg1, arg2) } -// SecretUpdate mocks base method -func (m *MockCommonAPIClient) SecretUpdate(arg0 context.Context, arg1 string, arg2 swarm.Version, arg3 swarm.SecretSpec) error { +// SecretUpdate mocks base method. 
+func (m *MockAPIClient) SecretUpdate(arg0 context.Context, arg1 string, arg2 client.SecretUpdateOptions) (client.SecretUpdateResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "SecretUpdate", arg0, arg1, arg2, arg3) - ret0, _ := ret[0].(error) - return ret0 + ret := m.ctrl.Call(m, "SecretUpdate", arg0, arg1, arg2) + ret0, _ := ret[0].(client.SecretUpdateResult) + ret1, _ := ret[1].(error) + return ret0, ret1 } -// SecretUpdate indicates an expected call of SecretUpdate -func (mr *MockCommonAPIClientMockRecorder) SecretUpdate(arg0, arg1, arg2, arg3 interface{}) *gomock.Call { +// SecretUpdate indicates an expected call of SecretUpdate. +func (mr *MockAPIClientMockRecorder) SecretUpdate(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SecretUpdate", reflect.TypeOf((*MockCommonAPIClient)(nil).SecretUpdate), arg0, arg1, arg2, arg3) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SecretUpdate", reflect.TypeOf((*MockAPIClient)(nil).SecretUpdate), arg0, arg1, arg2) } -// ServerVersion mocks base method -func (m *MockCommonAPIClient) ServerVersion(arg0 context.Context) (types.Version, error) { +// ServerVersion mocks base method. +func (m *MockAPIClient) ServerVersion(arg0 context.Context, arg1 client.ServerVersionOptions) (client.ServerVersionResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ServerVersion", arg0) - ret0, _ := ret[0].(types.Version) + ret := m.ctrl.Call(m, "ServerVersion", arg0, arg1) + ret0, _ := ret[0].(client.ServerVersionResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// ServerVersion indicates an expected call of ServerVersion -func (mr *MockCommonAPIClientMockRecorder) ServerVersion(arg0 interface{}) *gomock.Call { +// ServerVersion indicates an expected call of ServerVersion. 
+func (mr *MockAPIClientMockRecorder) ServerVersion(arg0, arg1 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ServerVersion", reflect.TypeOf((*MockCommonAPIClient)(nil).ServerVersion), arg0) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ServerVersion", reflect.TypeOf((*MockAPIClient)(nil).ServerVersion), arg0, arg1) } -// ServiceCreate mocks base method -func (m *MockCommonAPIClient) ServiceCreate(arg0 context.Context, arg1 swarm.ServiceSpec, arg2 types.ServiceCreateOptions) (types.ServiceCreateResponse, error) { +// ServiceCreate mocks base method. +func (m *MockAPIClient) ServiceCreate(arg0 context.Context, arg1 client.ServiceCreateOptions) (client.ServiceCreateResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ServiceCreate", arg0, arg1, arg2) - ret0, _ := ret[0].(types.ServiceCreateResponse) + ret := m.ctrl.Call(m, "ServiceCreate", arg0, arg1) + ret0, _ := ret[0].(client.ServiceCreateResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// ServiceCreate indicates an expected call of ServiceCreate -func (mr *MockCommonAPIClientMockRecorder) ServiceCreate(arg0, arg1, arg2 interface{}) *gomock.Call { +// ServiceCreate indicates an expected call of ServiceCreate. +func (mr *MockAPIClientMockRecorder) ServiceCreate(arg0, arg1 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ServiceCreate", reflect.TypeOf((*MockCommonAPIClient)(nil).ServiceCreate), arg0, arg1, arg2) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ServiceCreate", reflect.TypeOf((*MockAPIClient)(nil).ServiceCreate), arg0, arg1) } -// ServiceInspectWithRaw mocks base method -func (m *MockCommonAPIClient) ServiceInspectWithRaw(arg0 context.Context, arg1 string, arg2 types.ServiceInspectOptions) (swarm.Service, []byte, error) { +// ServiceInspect mocks base method. 
+func (m *MockAPIClient) ServiceInspect(arg0 context.Context, arg1 string, arg2 client.ServiceInspectOptions) (client.ServiceInspectResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ServiceInspectWithRaw", arg0, arg1, arg2) - ret0, _ := ret[0].(swarm.Service) - ret1, _ := ret[1].([]byte) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 + ret := m.ctrl.Call(m, "ServiceInspect", arg0, arg1, arg2) + ret0, _ := ret[0].(client.ServiceInspectResult) + ret1, _ := ret[1].(error) + return ret0, ret1 } -// ServiceInspectWithRaw indicates an expected call of ServiceInspectWithRaw -func (mr *MockCommonAPIClientMockRecorder) ServiceInspectWithRaw(arg0, arg1, arg2 interface{}) *gomock.Call { +// ServiceInspect indicates an expected call of ServiceInspect. +func (mr *MockAPIClientMockRecorder) ServiceInspect(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ServiceInspectWithRaw", reflect.TypeOf((*MockCommonAPIClient)(nil).ServiceInspectWithRaw), arg0, arg1, arg2) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ServiceInspect", reflect.TypeOf((*MockAPIClient)(nil).ServiceInspect), arg0, arg1, arg2) } -// ServiceList mocks base method -func (m *MockCommonAPIClient) ServiceList(arg0 context.Context, arg1 types.ServiceListOptions) ([]swarm.Service, error) { +// ServiceList mocks base method. +func (m *MockAPIClient) ServiceList(arg0 context.Context, arg1 client.ServiceListOptions) (client.ServiceListResult, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "ServiceList", arg0, arg1) - ret0, _ := ret[0].([]swarm.Service) + ret0, _ := ret[0].(client.ServiceListResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// ServiceList indicates an expected call of ServiceList -func (mr *MockCommonAPIClientMockRecorder) ServiceList(arg0, arg1 interface{}) *gomock.Call { +// ServiceList indicates an expected call of ServiceList. 
+func (mr *MockAPIClientMockRecorder) ServiceList(arg0, arg1 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ServiceList", reflect.TypeOf((*MockCommonAPIClient)(nil).ServiceList), arg0, arg1) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ServiceList", reflect.TypeOf((*MockAPIClient)(nil).ServiceList), arg0, arg1) } -// ServiceLogs mocks base method -func (m *MockCommonAPIClient) ServiceLogs(arg0 context.Context, arg1 string, arg2 types.ContainerLogsOptions) (io.ReadCloser, error) { +// ServiceLogs mocks base method. +func (m *MockAPIClient) ServiceLogs(arg0 context.Context, arg1 string, arg2 client.ServiceLogsOptions) (client.ServiceLogsResult, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "ServiceLogs", arg0, arg1, arg2) - ret0, _ := ret[0].(io.ReadCloser) + ret0, _ := ret[0].(client.ServiceLogsResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// ServiceLogs indicates an expected call of ServiceLogs -func (mr *MockCommonAPIClientMockRecorder) ServiceLogs(arg0, arg1, arg2 interface{}) *gomock.Call { +// ServiceLogs indicates an expected call of ServiceLogs. +func (mr *MockAPIClientMockRecorder) ServiceLogs(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ServiceLogs", reflect.TypeOf((*MockCommonAPIClient)(nil).ServiceLogs), arg0, arg1, arg2) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ServiceLogs", reflect.TypeOf((*MockAPIClient)(nil).ServiceLogs), arg0, arg1, arg2) } -// ServiceRemove mocks base method -func (m *MockCommonAPIClient) ServiceRemove(arg0 context.Context, arg1 string) error { +// ServiceRemove mocks base method. 
+func (m *MockAPIClient) ServiceRemove(arg0 context.Context, arg1 string, arg2 client.ServiceRemoveOptions) (client.ServiceRemoveResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ServiceRemove", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 + ret := m.ctrl.Call(m, "ServiceRemove", arg0, arg1, arg2) + ret0, _ := ret[0].(client.ServiceRemoveResult) + ret1, _ := ret[1].(error) + return ret0, ret1 } -// ServiceRemove indicates an expected call of ServiceRemove -func (mr *MockCommonAPIClientMockRecorder) ServiceRemove(arg0, arg1 interface{}) *gomock.Call { +// ServiceRemove indicates an expected call of ServiceRemove. +func (mr *MockAPIClientMockRecorder) ServiceRemove(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ServiceRemove", reflect.TypeOf((*MockCommonAPIClient)(nil).ServiceRemove), arg0, arg1) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ServiceRemove", reflect.TypeOf((*MockAPIClient)(nil).ServiceRemove), arg0, arg1, arg2) } -// ServiceUpdate mocks base method -func (m *MockCommonAPIClient) ServiceUpdate(arg0 context.Context, arg1 string, arg2 swarm.Version, arg3 swarm.ServiceSpec, arg4 types.ServiceUpdateOptions) (types.ServiceUpdateResponse, error) { +// ServiceUpdate mocks base method. +func (m *MockAPIClient) ServiceUpdate(arg0 context.Context, arg1 string, arg2 client.ServiceUpdateOptions) (client.ServiceUpdateResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ServiceUpdate", arg0, arg1, arg2, arg3, arg4) - ret0, _ := ret[0].(types.ServiceUpdateResponse) + ret := m.ctrl.Call(m, "ServiceUpdate", arg0, arg1, arg2) + ret0, _ := ret[0].(client.ServiceUpdateResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// ServiceUpdate indicates an expected call of ServiceUpdate -func (mr *MockCommonAPIClientMockRecorder) ServiceUpdate(arg0, arg1, arg2, arg3, arg4 interface{}) *gomock.Call { +// ServiceUpdate indicates an expected call of ServiceUpdate. 
+func (mr *MockAPIClientMockRecorder) ServiceUpdate(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ServiceUpdate", reflect.TypeOf((*MockCommonAPIClient)(nil).ServiceUpdate), arg0, arg1, arg2, arg3, arg4) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ServiceUpdate", reflect.TypeOf((*MockAPIClient)(nil).ServiceUpdate), arg0, arg1, arg2) } -// SwarmGetUnlockKey mocks base method -func (m *MockCommonAPIClient) SwarmGetUnlockKey(arg0 context.Context) (types.SwarmUnlockKeyResponse, error) { +// SwarmGetUnlockKey mocks base method. +func (m *MockAPIClient) SwarmGetUnlockKey(arg0 context.Context) (client.SwarmGetUnlockKeyResult, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "SwarmGetUnlockKey", arg0) - ret0, _ := ret[0].(types.SwarmUnlockKeyResponse) + ret0, _ := ret[0].(client.SwarmGetUnlockKeyResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// SwarmGetUnlockKey indicates an expected call of SwarmGetUnlockKey -func (mr *MockCommonAPIClientMockRecorder) SwarmGetUnlockKey(arg0 interface{}) *gomock.Call { +// SwarmGetUnlockKey indicates an expected call of SwarmGetUnlockKey. +func (mr *MockAPIClientMockRecorder) SwarmGetUnlockKey(arg0 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SwarmGetUnlockKey", reflect.TypeOf((*MockCommonAPIClient)(nil).SwarmGetUnlockKey), arg0) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SwarmGetUnlockKey", reflect.TypeOf((*MockAPIClient)(nil).SwarmGetUnlockKey), arg0) } -// SwarmInit mocks base method -func (m *MockCommonAPIClient) SwarmInit(arg0 context.Context, arg1 swarm.InitRequest) (string, error) { +// SwarmInit mocks base method. 
+func (m *MockAPIClient) SwarmInit(arg0 context.Context, arg1 client.SwarmInitOptions) (client.SwarmInitResult, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "SwarmInit", arg0, arg1) - ret0, _ := ret[0].(string) + ret0, _ := ret[0].(client.SwarmInitResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// SwarmInit indicates an expected call of SwarmInit -func (mr *MockCommonAPIClientMockRecorder) SwarmInit(arg0, arg1 interface{}) *gomock.Call { +// SwarmInit indicates an expected call of SwarmInit. +func (mr *MockAPIClientMockRecorder) SwarmInit(arg0, arg1 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SwarmInit", reflect.TypeOf((*MockCommonAPIClient)(nil).SwarmInit), arg0, arg1) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SwarmInit", reflect.TypeOf((*MockAPIClient)(nil).SwarmInit), arg0, arg1) } -// SwarmInspect mocks base method -func (m *MockCommonAPIClient) SwarmInspect(arg0 context.Context) (swarm.Swarm, error) { +// SwarmInspect mocks base method. +func (m *MockAPIClient) SwarmInspect(arg0 context.Context, arg1 client.SwarmInspectOptions) (client.SwarmInspectResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "SwarmInspect", arg0) - ret0, _ := ret[0].(swarm.Swarm) + ret := m.ctrl.Call(m, "SwarmInspect", arg0, arg1) + ret0, _ := ret[0].(client.SwarmInspectResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// SwarmInspect indicates an expected call of SwarmInspect -func (mr *MockCommonAPIClientMockRecorder) SwarmInspect(arg0 interface{}) *gomock.Call { +// SwarmInspect indicates an expected call of SwarmInspect. 
+func (mr *MockAPIClientMockRecorder) SwarmInspect(arg0, arg1 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SwarmInspect", reflect.TypeOf((*MockCommonAPIClient)(nil).SwarmInspect), arg0) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SwarmInspect", reflect.TypeOf((*MockAPIClient)(nil).SwarmInspect), arg0, arg1) } -// SwarmJoin mocks base method -func (m *MockCommonAPIClient) SwarmJoin(arg0 context.Context, arg1 swarm.JoinRequest) error { +// SwarmJoin mocks base method. +func (m *MockAPIClient) SwarmJoin(arg0 context.Context, arg1 client.SwarmJoinOptions) (client.SwarmJoinResult, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "SwarmJoin", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 + ret0, _ := ret[0].(client.SwarmJoinResult) + ret1, _ := ret[1].(error) + return ret0, ret1 } -// SwarmJoin indicates an expected call of SwarmJoin -func (mr *MockCommonAPIClientMockRecorder) SwarmJoin(arg0, arg1 interface{}) *gomock.Call { +// SwarmJoin indicates an expected call of SwarmJoin. +func (mr *MockAPIClientMockRecorder) SwarmJoin(arg0, arg1 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SwarmJoin", reflect.TypeOf((*MockCommonAPIClient)(nil).SwarmJoin), arg0, arg1) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SwarmJoin", reflect.TypeOf((*MockAPIClient)(nil).SwarmJoin), arg0, arg1) } -// SwarmLeave mocks base method -func (m *MockCommonAPIClient) SwarmLeave(arg0 context.Context, arg1 bool) error { +// SwarmLeave mocks base method. 
+func (m *MockAPIClient) SwarmLeave(arg0 context.Context, arg1 client.SwarmLeaveOptions) (client.SwarmLeaveResult, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "SwarmLeave", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 + ret0, _ := ret[0].(client.SwarmLeaveResult) + ret1, _ := ret[1].(error) + return ret0, ret1 } -// SwarmLeave indicates an expected call of SwarmLeave -func (mr *MockCommonAPIClientMockRecorder) SwarmLeave(arg0, arg1 interface{}) *gomock.Call { +// SwarmLeave indicates an expected call of SwarmLeave. +func (mr *MockAPIClientMockRecorder) SwarmLeave(arg0, arg1 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SwarmLeave", reflect.TypeOf((*MockCommonAPIClient)(nil).SwarmLeave), arg0, arg1) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SwarmLeave", reflect.TypeOf((*MockAPIClient)(nil).SwarmLeave), arg0, arg1) } -// SwarmUnlock mocks base method -func (m *MockCommonAPIClient) SwarmUnlock(arg0 context.Context, arg1 swarm.UnlockRequest) error { +// SwarmUnlock mocks base method. +func (m *MockAPIClient) SwarmUnlock(arg0 context.Context, arg1 client.SwarmUnlockOptions) (client.SwarmUnlockResult, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "SwarmUnlock", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 + ret0, _ := ret[0].(client.SwarmUnlockResult) + ret1, _ := ret[1].(error) + return ret0, ret1 } -// SwarmUnlock indicates an expected call of SwarmUnlock -func (mr *MockCommonAPIClientMockRecorder) SwarmUnlock(arg0, arg1 interface{}) *gomock.Call { +// SwarmUnlock indicates an expected call of SwarmUnlock. 
+func (mr *MockAPIClientMockRecorder) SwarmUnlock(arg0, arg1 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SwarmUnlock", reflect.TypeOf((*MockCommonAPIClient)(nil).SwarmUnlock), arg0, arg1) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SwarmUnlock", reflect.TypeOf((*MockAPIClient)(nil).SwarmUnlock), arg0, arg1) } -// SwarmUpdate mocks base method -func (m *MockCommonAPIClient) SwarmUpdate(arg0 context.Context, arg1 swarm.Version, arg2 swarm.Spec, arg3 swarm.UpdateFlags) error { +// SwarmUpdate mocks base method. +func (m *MockAPIClient) SwarmUpdate(arg0 context.Context, arg1 client.SwarmUpdateOptions) (client.SwarmUpdateResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "SwarmUpdate", arg0, arg1, arg2, arg3) - ret0, _ := ret[0].(error) - return ret0 + ret := m.ctrl.Call(m, "SwarmUpdate", arg0, arg1) + ret0, _ := ret[0].(client.SwarmUpdateResult) + ret1, _ := ret[1].(error) + return ret0, ret1 } -// SwarmUpdate indicates an expected call of SwarmUpdate -func (mr *MockCommonAPIClientMockRecorder) SwarmUpdate(arg0, arg1, arg2, arg3 interface{}) *gomock.Call { +// SwarmUpdate indicates an expected call of SwarmUpdate. +func (mr *MockAPIClientMockRecorder) SwarmUpdate(arg0, arg1 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SwarmUpdate", reflect.TypeOf((*MockCommonAPIClient)(nil).SwarmUpdate), arg0, arg1, arg2, arg3) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SwarmUpdate", reflect.TypeOf((*MockAPIClient)(nil).SwarmUpdate), arg0, arg1) } -// TaskInspectWithRaw mocks base method -func (m *MockCommonAPIClient) TaskInspectWithRaw(arg0 context.Context, arg1 string) (swarm.Task, []byte, error) { +// TaskInspect mocks base method. 
+func (m *MockAPIClient) TaskInspect(arg0 context.Context, arg1 string, arg2 client.TaskInspectOptions) (client.TaskInspectResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "TaskInspectWithRaw", arg0, arg1) - ret0, _ := ret[0].(swarm.Task) - ret1, _ := ret[1].([]byte) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 + ret := m.ctrl.Call(m, "TaskInspect", arg0, arg1, arg2) + ret0, _ := ret[0].(client.TaskInspectResult) + ret1, _ := ret[1].(error) + return ret0, ret1 } -// TaskInspectWithRaw indicates an expected call of TaskInspectWithRaw -func (mr *MockCommonAPIClientMockRecorder) TaskInspectWithRaw(arg0, arg1 interface{}) *gomock.Call { +// TaskInspect indicates an expected call of TaskInspect. +func (mr *MockAPIClientMockRecorder) TaskInspect(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "TaskInspectWithRaw", reflect.TypeOf((*MockCommonAPIClient)(nil).TaskInspectWithRaw), arg0, arg1) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "TaskInspect", reflect.TypeOf((*MockAPIClient)(nil).TaskInspect), arg0, arg1, arg2) } -// TaskList mocks base method -func (m *MockCommonAPIClient) TaskList(arg0 context.Context, arg1 types.TaskListOptions) ([]swarm.Task, error) { +// TaskList mocks base method. +func (m *MockAPIClient) TaskList(arg0 context.Context, arg1 client.TaskListOptions) (client.TaskListResult, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "TaskList", arg0, arg1) - ret0, _ := ret[0].([]swarm.Task) + ret0, _ := ret[0].(client.TaskListResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// TaskList indicates an expected call of TaskList -func (mr *MockCommonAPIClientMockRecorder) TaskList(arg0, arg1 interface{}) *gomock.Call { +// TaskList indicates an expected call of TaskList. 
+func (mr *MockAPIClientMockRecorder) TaskList(arg0, arg1 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "TaskList", reflect.TypeOf((*MockCommonAPIClient)(nil).TaskList), arg0, arg1) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "TaskList", reflect.TypeOf((*MockAPIClient)(nil).TaskList), arg0, arg1) } -// TaskLogs mocks base method -func (m *MockCommonAPIClient) TaskLogs(arg0 context.Context, arg1 string, arg2 types.ContainerLogsOptions) (io.ReadCloser, error) { +// TaskLogs mocks base method. +func (m *MockAPIClient) TaskLogs(arg0 context.Context, arg1 string, arg2 client.TaskLogsOptions) (client.TaskLogsResult, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "TaskLogs", arg0, arg1, arg2) - ret0, _ := ret[0].(io.ReadCloser) + ret0, _ := ret[0].(client.TaskLogsResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// TaskLogs indicates an expected call of TaskLogs -func (mr *MockCommonAPIClientMockRecorder) TaskLogs(arg0, arg1, arg2 interface{}) *gomock.Call { +// TaskLogs indicates an expected call of TaskLogs. +func (mr *MockAPIClientMockRecorder) TaskLogs(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "TaskLogs", reflect.TypeOf((*MockCommonAPIClient)(nil).TaskLogs), arg0, arg1, arg2) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "TaskLogs", reflect.TypeOf((*MockAPIClient)(nil).TaskLogs), arg0, arg1, arg2) } -// VolumeCreate mocks base method -func (m *MockCommonAPIClient) VolumeCreate(arg0 context.Context, arg1 volume.VolumeCreateBody) (types.Volume, error) { +// VolumeCreate mocks base method. 
+func (m *MockAPIClient) VolumeCreate(arg0 context.Context, arg1 client.VolumeCreateOptions) (client.VolumeCreateResult, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "VolumeCreate", arg0, arg1) - ret0, _ := ret[0].(types.Volume) + ret0, _ := ret[0].(client.VolumeCreateResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// VolumeCreate indicates an expected call of VolumeCreate -func (mr *MockCommonAPIClientMockRecorder) VolumeCreate(arg0, arg1 interface{}) *gomock.Call { +// VolumeCreate indicates an expected call of VolumeCreate. +func (mr *MockAPIClientMockRecorder) VolumeCreate(arg0, arg1 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "VolumeCreate", reflect.TypeOf((*MockCommonAPIClient)(nil).VolumeCreate), arg0, arg1) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "VolumeCreate", reflect.TypeOf((*MockAPIClient)(nil).VolumeCreate), arg0, arg1) } -// VolumeInspect mocks base method -func (m *MockCommonAPIClient) VolumeInspect(arg0 context.Context, arg1 string) (types.Volume, error) { +// VolumeInspect mocks base method. +func (m *MockAPIClient) VolumeInspect(arg0 context.Context, arg1 string, arg2 client.VolumeInspectOptions) (client.VolumeInspectResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "VolumeInspect", arg0, arg1) - ret0, _ := ret[0].(types.Volume) + ret := m.ctrl.Call(m, "VolumeInspect", arg0, arg1, arg2) + ret0, _ := ret[0].(client.VolumeInspectResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// VolumeInspect indicates an expected call of VolumeInspect -func (mr *MockCommonAPIClientMockRecorder) VolumeInspect(arg0, arg1 interface{}) *gomock.Call { +// VolumeInspect indicates an expected call of VolumeInspect. 
+func (mr *MockAPIClientMockRecorder) VolumeInspect(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "VolumeInspect", reflect.TypeOf((*MockCommonAPIClient)(nil).VolumeInspect), arg0, arg1) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "VolumeInspect", reflect.TypeOf((*MockAPIClient)(nil).VolumeInspect), arg0, arg1, arg2) } -// VolumeInspectWithRaw mocks base method -func (m *MockCommonAPIClient) VolumeInspectWithRaw(arg0 context.Context, arg1 string) (types.Volume, []byte, error) { +// VolumeList mocks base method. +func (m *MockAPIClient) VolumeList(arg0 context.Context, arg1 client.VolumeListOptions) (client.VolumeListResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "VolumeInspectWithRaw", arg0, arg1) - ret0, _ := ret[0].(types.Volume) - ret1, _ := ret[1].([]byte) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 + ret := m.ctrl.Call(m, "VolumeList", arg0, arg1) + ret0, _ := ret[0].(client.VolumeListResult) + ret1, _ := ret[1].(error) + return ret0, ret1 } -// VolumeInspectWithRaw indicates an expected call of VolumeInspectWithRaw -func (mr *MockCommonAPIClientMockRecorder) VolumeInspectWithRaw(arg0, arg1 interface{}) *gomock.Call { +// VolumeList indicates an expected call of VolumeList. +func (mr *MockAPIClientMockRecorder) VolumeList(arg0, arg1 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "VolumeInspectWithRaw", reflect.TypeOf((*MockCommonAPIClient)(nil).VolumeInspectWithRaw), arg0, arg1) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "VolumeList", reflect.TypeOf((*MockAPIClient)(nil).VolumeList), arg0, arg1) } -// VolumeList mocks base method -func (m *MockCommonAPIClient) VolumeList(arg0 context.Context, arg1 filters.Args) (volume.VolumeListOKBody, error) { +// VolumePrune mocks base method. 
+func (m *MockAPIClient) VolumePrune(arg0 context.Context, arg1 client.VolumePruneOptions) (client.VolumePruneResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "VolumeList", arg0, arg1) - ret0, _ := ret[0].(volume.VolumeListOKBody) + ret := m.ctrl.Call(m, "VolumePrune", arg0, arg1) + ret0, _ := ret[0].(client.VolumePruneResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// VolumeList indicates an expected call of VolumeList -func (mr *MockCommonAPIClientMockRecorder) VolumeList(arg0, arg1 interface{}) *gomock.Call { +// VolumePrune indicates an expected call of VolumePrune. +func (mr *MockAPIClientMockRecorder) VolumePrune(arg0, arg1 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "VolumeList", reflect.TypeOf((*MockCommonAPIClient)(nil).VolumeList), arg0, arg1) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "VolumePrune", reflect.TypeOf((*MockAPIClient)(nil).VolumePrune), arg0, arg1) } -// VolumeRemove mocks base method -func (m *MockCommonAPIClient) VolumeRemove(arg0 context.Context, arg1 string, arg2 bool) error { +// VolumeRemove mocks base method. +func (m *MockAPIClient) VolumeRemove(arg0 context.Context, arg1 string, arg2 client.VolumeRemoveOptions) (client.VolumeRemoveResult, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "VolumeRemove", arg0, arg1, arg2) - ret0, _ := ret[0].(error) - return ret0 + ret0, _ := ret[0].(client.VolumeRemoveResult) + ret1, _ := ret[1].(error) + return ret0, ret1 } -// VolumeRemove indicates an expected call of VolumeRemove -func (mr *MockCommonAPIClientMockRecorder) VolumeRemove(arg0, arg1, arg2 interface{}) *gomock.Call { +// VolumeRemove indicates an expected call of VolumeRemove. 
+func (mr *MockAPIClientMockRecorder) VolumeRemove(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "VolumeRemove", reflect.TypeOf((*MockCommonAPIClient)(nil).VolumeRemove), arg0, arg1, arg2) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "VolumeRemove", reflect.TypeOf((*MockAPIClient)(nil).VolumeRemove), arg0, arg1, arg2) } -// VolumesPrune mocks base method -func (m *MockCommonAPIClient) VolumesPrune(arg0 context.Context, arg1 filters.Args) (types.VolumesPruneReport, error) { +// VolumeUpdate mocks base method. +func (m *MockAPIClient) VolumeUpdate(arg0 context.Context, arg1 string, arg2 client.VolumeUpdateOptions) (client.VolumeUpdateResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "VolumesPrune", arg0, arg1) - ret0, _ := ret[0].(types.VolumesPruneReport) + ret := m.ctrl.Call(m, "VolumeUpdate", arg0, arg1, arg2) + ret0, _ := ret[0].(client.VolumeUpdateResult) ret1, _ := ret[1].(error) return ret0, ret1 } -// VolumesPrune indicates an expected call of VolumesPrune -func (mr *MockCommonAPIClientMockRecorder) VolumesPrune(arg0, arg1 interface{}) *gomock.Call { +// VolumeUpdate indicates an expected call of VolumeUpdate. 
+func (mr *MockAPIClientMockRecorder) VolumeUpdate(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "VolumesPrune", reflect.TypeOf((*MockCommonAPIClient)(nil).VolumesPrune), arg0, arg1) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "VolumeUpdate", reflect.TypeOf((*MockAPIClient)(nil).VolumeUpdate), arg0, arg1, arg2) } diff --git a/pkg/testmocks/mock_image_factory.go b/pkg/testmocks/mock_image_factory.go index c8fa2a9ae6..3c25c82b8a 100644 --- a/pkg/testmocks/mock_image_factory.go +++ b/pkg/testmocks/mock_image_factory.go @@ -9,33 +9,35 @@ import ( imgutil "github.com/buildpacks/imgutil" gomock "github.com/golang/mock/gomock" + + dist "github.com/buildpacks/pack/pkg/dist" ) -// MockImageFactory is a mock of ImageFactory interface +// MockImageFactory is a mock of ImageFactory interface. type MockImageFactory struct { ctrl *gomock.Controller recorder *MockImageFactoryMockRecorder } -// MockImageFactoryMockRecorder is the mock recorder for MockImageFactory +// MockImageFactoryMockRecorder is the mock recorder for MockImageFactory. type MockImageFactoryMockRecorder struct { mock *MockImageFactory } -// NewMockImageFactory creates a new mock instance +// NewMockImageFactory creates a new mock instance. func NewMockImageFactory(ctrl *gomock.Controller) *MockImageFactory { mock := &MockImageFactory{ctrl: ctrl} mock.recorder = &MockImageFactoryMockRecorder{mock} return mock } -// EXPECT returns an object that allows the caller to indicate expected use +// EXPECT returns an object that allows the caller to indicate expected use. func (m *MockImageFactory) EXPECT() *MockImageFactoryMockRecorder { return m.recorder } -// NewImage mocks base method -func (m *MockImageFactory) NewImage(arg0 string, arg1 bool, arg2 string) (imgutil.Image, error) { +// NewImage mocks base method. 
+func (m *MockImageFactory) NewImage(arg0 string, arg1 bool, arg2 dist.Target) (imgutil.Image, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "NewImage", arg0, arg1, arg2) ret0, _ := ret[0].(imgutil.Image) @@ -43,7 +45,7 @@ func (m *MockImageFactory) NewImage(arg0 string, arg1 bool, arg2 string) (imguti return ret0, ret1 } -// NewImage indicates an expected call of NewImage +// NewImage indicates an expected call of NewImage. func (mr *MockImageFactoryMockRecorder) NewImage(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "NewImage", reflect.TypeOf((*MockImageFactory)(nil).NewImage), arg0, arg1, arg2) diff --git a/pkg/testmocks/mock_image_fetcher.go b/pkg/testmocks/mock_image_fetcher.go index 5d1c3ce338..60b0563986 100644 --- a/pkg/testmocks/mock_image_fetcher.go +++ b/pkg/testmocks/mock_image_fetcher.go @@ -14,30 +14,44 @@ import ( image "github.com/buildpacks/pack/pkg/image" ) -// MockImageFetcher is a mock of ImageFetcher interface +// MockImageFetcher is a mock of ImageFetcher interface. type MockImageFetcher struct { ctrl *gomock.Controller recorder *MockImageFetcherMockRecorder } -// MockImageFetcherMockRecorder is the mock recorder for MockImageFetcher +// MockImageFetcherMockRecorder is the mock recorder for MockImageFetcher. type MockImageFetcherMockRecorder struct { mock *MockImageFetcher } -// NewMockImageFetcher creates a new mock instance +// NewMockImageFetcher creates a new mock instance. func NewMockImageFetcher(ctrl *gomock.Controller) *MockImageFetcher { mock := &MockImageFetcher{ctrl: ctrl} mock.recorder = &MockImageFetcherMockRecorder{mock} return mock } -// EXPECT returns an object that allows the caller to indicate expected use +// EXPECT returns an object that allows the caller to indicate expected use. func (m *MockImageFetcher) EXPECT() *MockImageFetcherMockRecorder { return m.recorder } -// Fetch mocks base method +// CheckReadAccess mocks base method. 
+func (m *MockImageFetcher) CheckReadAccess(arg0 string, arg1 image.FetchOptions) bool { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CheckReadAccess", arg0, arg1) + ret0, _ := ret[0].(bool) + return ret0 +} + +// CheckReadAccess indicates an expected call of CheckReadAccess. +func (mr *MockImageFetcherMockRecorder) CheckReadAccess(arg0, arg1 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CheckReadAccess", reflect.TypeOf((*MockImageFetcher)(nil).CheckReadAccess), arg0, arg1) +} + +// Fetch mocks base method. func (m *MockImageFetcher) Fetch(arg0 context.Context, arg1 string, arg2 image.FetchOptions) (imgutil.Image, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "Fetch", arg0, arg1, arg2) @@ -46,8 +60,23 @@ func (m *MockImageFetcher) Fetch(arg0 context.Context, arg1 string, arg2 image.F return ret0, ret1 } -// Fetch indicates an expected call of Fetch +// Fetch indicates an expected call of Fetch. func (mr *MockImageFetcherMockRecorder) Fetch(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Fetch", reflect.TypeOf((*MockImageFetcher)(nil).Fetch), arg0, arg1, arg2) } + +// FetchForPlatform mocks base method. +func (m *MockImageFetcher) FetchForPlatform(arg0 context.Context, arg1 string, arg2 image.FetchOptions) (imgutil.Image, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "FetchForPlatform", arg0, arg1, arg2) + ret0, _ := ret[0].(imgutil.Image) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// FetchForPlatform indicates an expected call of FetchForPlatform. 
+func (mr *MockImageFetcherMockRecorder) FetchForPlatform(arg0, arg1, arg2 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "FetchForPlatform", reflect.TypeOf((*MockImageFetcher)(nil).FetchForPlatform), arg0, arg1, arg2) +} diff --git a/pkg/testmocks/mock_index_factory.go b/pkg/testmocks/mock_index_factory.go new file mode 100644 index 0000000000..8607c12df7 --- /dev/null +++ b/pkg/testmocks/mock_index_factory.go @@ -0,0 +1,129 @@ +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/buildpacks/pack/pkg/client (interfaces: IndexFactory) + +// Package testmocks is a generated GoMock package. +package testmocks + +import ( + reflect "reflect" + + imgutil "github.com/buildpacks/imgutil" + gomock "github.com/golang/mock/gomock" +) + +// MockIndexFactory is a mock of IndexFactory interface. +type MockIndexFactory struct { + ctrl *gomock.Controller + recorder *MockIndexFactoryMockRecorder +} + +// MockIndexFactoryMockRecorder is the mock recorder for MockIndexFactory. +type MockIndexFactoryMockRecorder struct { + mock *MockIndexFactory +} + +// NewMockIndexFactory creates a new mock instance. +func NewMockIndexFactory(ctrl *gomock.Controller) *MockIndexFactory { + mock := &MockIndexFactory{ctrl: ctrl} + mock.recorder = &MockIndexFactoryMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockIndexFactory) EXPECT() *MockIndexFactoryMockRecorder { + return m.recorder +} + +// CreateIndex mocks base method. +func (m *MockIndexFactory) CreateIndex(arg0 string, arg1 ...imgutil.IndexOption) (imgutil.ImageIndex, error) { + m.ctrl.T.Helper() + varargs := []interface{}{arg0} + for _, a := range arg1 { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "CreateIndex", varargs...) + ret0, _ := ret[0].(imgutil.ImageIndex) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// CreateIndex indicates an expected call of CreateIndex. 
+func (mr *MockIndexFactoryMockRecorder) CreateIndex(arg0 interface{}, arg1 ...interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]interface{}{arg0}, arg1...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateIndex", reflect.TypeOf((*MockIndexFactory)(nil).CreateIndex), varargs...) +} + +// Exists mocks base method. +func (m *MockIndexFactory) Exists(arg0 string) bool { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Exists", arg0) + ret0, _ := ret[0].(bool) + return ret0 +} + +// Exists indicates an expected call of Exists. +func (mr *MockIndexFactoryMockRecorder) Exists(arg0 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Exists", reflect.TypeOf((*MockIndexFactory)(nil).Exists), arg0) +} + +// FetchIndex mocks base method. +func (m *MockIndexFactory) FetchIndex(arg0 string, arg1 ...imgutil.IndexOption) (imgutil.ImageIndex, error) { + m.ctrl.T.Helper() + varargs := []interface{}{arg0} + for _, a := range arg1 { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "FetchIndex", varargs...) + ret0, _ := ret[0].(imgutil.ImageIndex) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// FetchIndex indicates an expected call of FetchIndex. +func (mr *MockIndexFactoryMockRecorder) FetchIndex(arg0 interface{}, arg1 ...interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]interface{}{arg0}, arg1...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "FetchIndex", reflect.TypeOf((*MockIndexFactory)(nil).FetchIndex), varargs...) +} + +// FindIndex mocks base method. +func (m *MockIndexFactory) FindIndex(arg0 string, arg1 ...imgutil.IndexOption) (imgutil.ImageIndex, error) { + m.ctrl.T.Helper() + varargs := []interface{}{arg0} + for _, a := range arg1 { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "FindIndex", varargs...) 
+ ret0, _ := ret[0].(imgutil.ImageIndex) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// FindIndex indicates an expected call of FindIndex. +func (mr *MockIndexFactoryMockRecorder) FindIndex(arg0 interface{}, arg1 ...interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]interface{}{arg0}, arg1...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "FindIndex", reflect.TypeOf((*MockIndexFactory)(nil).FindIndex), varargs...) +} + +// LoadIndex mocks base method. +func (m *MockIndexFactory) LoadIndex(arg0 string, arg1 ...imgutil.IndexOption) (imgutil.ImageIndex, error) { + m.ctrl.T.Helper() + varargs := []interface{}{arg0} + for _, a := range arg1 { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "LoadIndex", varargs...) + ret0, _ := ret[0].(imgutil.ImageIndex) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// LoadIndex indicates an expected call of LoadIndex. +func (mr *MockIndexFactoryMockRecorder) LoadIndex(arg0 interface{}, arg1 ...interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]interface{}{arg0}, arg1...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "LoadIndex", reflect.TypeOf((*MockIndexFactory)(nil).LoadIndex), varargs...) +} diff --git a/pkg/testmocks/mock_registry_resolver.go b/pkg/testmocks/mock_registry_resolver.go index 5ef77e6a10..b4630806da 100644 --- a/pkg/testmocks/mock_registry_resolver.go +++ b/pkg/testmocks/mock_registry_resolver.go @@ -10,30 +10,30 @@ import ( gomock "github.com/golang/mock/gomock" ) -// MockRegistryResolver is a mock of RegistryResolver interface +// MockRegistryResolver is a mock of RegistryResolver interface. type MockRegistryResolver struct { ctrl *gomock.Controller recorder *MockRegistryResolverMockRecorder } -// MockRegistryResolverMockRecorder is the mock recorder for MockRegistryResolver +// MockRegistryResolverMockRecorder is the mock recorder for MockRegistryResolver. 
type MockRegistryResolverMockRecorder struct { mock *MockRegistryResolver } -// NewMockRegistryResolver creates a new mock instance +// NewMockRegistryResolver creates a new mock instance. func NewMockRegistryResolver(ctrl *gomock.Controller) *MockRegistryResolver { mock := &MockRegistryResolver{ctrl: ctrl} mock.recorder = &MockRegistryResolverMockRecorder{mock} return mock } -// EXPECT returns an object that allows the caller to indicate expected use +// EXPECT returns an object that allows the caller to indicate expected use. func (m *MockRegistryResolver) EXPECT() *MockRegistryResolverMockRecorder { return m.recorder } -// Resolve mocks base method +// Resolve mocks base method. func (m *MockRegistryResolver) Resolve(arg0, arg1 string) (string, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "Resolve", arg0, arg1) @@ -42,7 +42,7 @@ func (m *MockRegistryResolver) Resolve(arg0, arg1 string) (string, error) { return ret0, ret1 } -// Resolve indicates an expected call of Resolve +// Resolve indicates an expected call of Resolve. 
func (mr *MockRegistryResolverMockRecorder) Resolve(arg0, arg1 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Resolve", reflect.TypeOf((*MockRegistryResolver)(nil).Resolve), arg0, arg1) diff --git a/testhelpers/assertions.go b/testhelpers/assertions.go index d66df8495d..858a968318 100644 --- a/testhelpers/assertions.go +++ b/testhelpers/assertions.go @@ -68,7 +68,7 @@ func (a AssertionManager) Equal(actual, expected interface{}) { a.testObject.Helper() if diff := cmp.Diff(actual, expected); diff != "" { - a.testObject.Fatalf(diff) + a.testObject.Fatal(diff) } } @@ -76,7 +76,7 @@ func (a AssertionManager) NotEqual(actual, expected interface{}) { a.testObject.Helper() if diff := cmp.Diff(actual, expected); diff == "" { - a.testObject.Fatalf(diff) + a.testObject.Fatal(diff) } } diff --git a/testhelpers/comparehelpers/deep_compare.go b/testhelpers/comparehelpers/deep_compare.go index 084f5394de..9ac29e4c5f 100644 --- a/testhelpers/comparehelpers/deep_compare.go +++ b/testhelpers/comparehelpers/deep_compare.go @@ -9,13 +9,18 @@ import ( // Note this method searches all objects in 'container' for containee // Contains is defined by the following relationship // basic data types (string, float, int,...): -// container == containee +// +// container == containee +// // maps: -// every key-value pair from containee is in container -// Ex: {"a": 1, "b": 2, "c": 3} contains {"a": 1, "c": 3} +// +// every key-value pair from containee is in container +// Ex: {"a": 1, "b": 2, "c": 3} contains {"a": 1, "c": 3} +// // arrays: -// every element in containee is present and ordered in an array in container -// Ex: [1, 1, 4, 3, 10, 4] contains [1, 3, 4 ] +// +// every element in containee is present and ordered in an array in container +// Ex: [1, 1, 4, 3, 10, 4] contains [1, 3, 4 ] // // Limitaions: // Cannot handle the following types: Pointers, Func @@ -72,8 +77,8 @@ func mapContains(v1, v2 reflect.Value, depth int) bool 
{ } } for _, k := range v1.MapKeys() { - kVal := v1.MapIndex(k) - if deepContains(kVal, v2, depth+1) { + val := v1.MapIndex(k) + if deepContains(val, v2, depth+1) { return true } } @@ -81,10 +86,10 @@ func mapContains(v1, v2 reflect.Value, depth int) bool { } func arrayLikeContains(v1, v2 reflect.Value, depth int) bool { - t2 := v2.Kind() - if t2 == reflect.Interface { + switch v2.Kind() { + case reflect.Interface: return mapContains(v1, v2.Elem(), depth+1) - } else if t2 == reflect.Array || t2 == reflect.Slice { + case reflect.Array, reflect.Slice: v1Index := 0 v2Index := 0 for v1Index < v1.Len() && v2Index < v2.Len() { diff --git a/testhelpers/image_index.go b/testhelpers/image_index.go new file mode 100644 index 0000000000..856096ae85 --- /dev/null +++ b/testhelpers/image_index.go @@ -0,0 +1,269 @@ +package testhelpers + +import ( + "encoding/json" + "errors" + "fmt" + "io" + "net/http" + "os" + "path/filepath" + "testing" + + "github.com/buildpacks/imgutil" + "github.com/buildpacks/imgutil/fakes" + imgutilRemote "github.com/buildpacks/imgutil/remote" + "github.com/google/go-containerregistry/pkg/authn" + "github.com/google/go-containerregistry/pkg/name" + v1 "github.com/google/go-containerregistry/pkg/v1" + "github.com/google/go-containerregistry/pkg/v1/random" + "github.com/google/go-containerregistry/pkg/v1/remote" + "github.com/google/go-containerregistry/pkg/v1/types" +) + +func NewRandomIndexRepoName() string { + return "test-index-" + RandString(10) +} + +func AssertPathExists(t *testing.T, path string) { + t.Helper() + _, err := os.Stat(path) + if os.IsNotExist(err) { + t.Errorf("Expected %q to exist", path) + } else if err != nil { + t.Fatalf("Error stating %q: %v", path, err) + } +} + +func AssertPathDoesNotExists(t *testing.T, path string) { + t.Helper() + _, err := os.Stat(path) + if err == nil { + t.Errorf("Expected %q to not exists", path) + } +} + +func FetchImageIndexDescriptor(t *testing.T, repoName string) v1.ImageIndex { + t.Helper() + + r, 
err := name.ParseReference(repoName, name.WeakValidation) + AssertNil(t, err) + + auth, err := authn.DefaultKeychain.Resolve(r.Context().Registry) + AssertNil(t, err) + + index, err := remote.Index(r, remote.WithTransport(http.DefaultTransport), remote.WithAuth(auth)) + AssertNil(t, err) + + return index +} + +func AssertRemoteImageIndex(t *testing.T, repoName string, mediaType types.MediaType, expectedNumberOfManifests int) { + t.Helper() + + remoteIndex := FetchImageIndexDescriptor(t, repoName) + AssertNotNil(t, remoteIndex) + remoteIndexMediaType, err := remoteIndex.MediaType() + AssertNil(t, err) + AssertEq(t, remoteIndexMediaType, mediaType) + remoteIndexManifest, err := remoteIndex.IndexManifest() + AssertNil(t, err) + AssertNotNil(t, remoteIndexManifest) + AssertEq(t, len(remoteIndexManifest.Manifests), expectedNumberOfManifests) +} + +func CreateRemoteImage(t *testing.T, repoName, tag, baseImage string) *imgutilRemote.Image { + img1RepoName := fmt.Sprintf("%s:%s", repoName, tag) + img1, err := imgutilRemote.NewImage(img1RepoName, authn.DefaultKeychain, imgutilRemote.FromBaseImage(baseImage)) + AssertNil(t, err) + err = img1.Save() + AssertNil(t, err) + return img1 +} + +func ReadIndexManifest(t *testing.T, path string) *v1.IndexManifest { + t.Helper() + + indexPath := filepath.Join(path, "index.json") + AssertPathExists(t, filepath.Join(path, "oci-layout")) + AssertPathExists(t, indexPath) + + // check index file + data, err := os.ReadFile(indexPath) + AssertNil(t, err) + + index := &v1.IndexManifest{} + err = json.Unmarshal(data, index) + AssertNil(t, err) + return index +} + +func RandomCNBIndex(t *testing.T, repoName string, layers, count int64) *imgutil.CNBIndex { + t.Helper() + + randomIndex, err := random.Index(1024, layers, count) + AssertNil(t, err) + options := &imgutil.IndexOptions{ + BaseIndex: randomIndex, + LayoutIndexOptions: imgutil.LayoutIndexOptions{ + XdgPath: os.Getenv("XDG_RUNTIME_DIR"), + }, + } + idx, err := 
imgutil.NewCNBIndex(repoName, *options) + AssertNil(t, err) + return idx +} + +func RandomCNBIndexAndDigest(t *testing.T, repoName string, layers, count int64) (idx imgutil.ImageIndex, digest name.Digest) { + idx = RandomCNBIndex(t, repoName, layers, count) + + imgIdx, ok := idx.(*imgutil.CNBIndex) + AssertEq(t, ok, true) + + mfest, err := imgIdx.IndexManifest() + AssertNil(t, err) + + digest, err = name.NewDigest(fmt.Sprintf("%s@%s", repoName, mfest.Manifests[0].Digest.String())) + AssertNil(t, err) + + return idx, digest +} + +// MockImageIndex wraps a real CNBIndex to record if some key methods are invoke +type MockImageIndex struct { + imgutil.CNBIndex + ErrorOnSave bool + PushCalled bool + PurgeOption bool + DeleteDirCalled bool +} + +// NewMockImageIndex creates a random index with the given number of layers and manifests count +func NewMockImageIndex(t *testing.T, repoName string, layers, count int64) *MockImageIndex { + cnbIdx := RandomCNBIndex(t, repoName, layers, count) + idx := &MockImageIndex{ + CNBIndex: *cnbIdx, + } + return idx +} + +func (i *MockImageIndex) SaveDir() error { + if i.ErrorOnSave { + return errors.New("something failed writing the index on disk") + } + return i.CNBIndex.SaveDir() +} + +func (i *MockImageIndex) Push(ops ...imgutil.IndexOption) error { + var pushOps = &imgutil.IndexOptions{} + for _, op := range ops { + if err := op(pushOps); err != nil { + return err + } + } + + i.PushCalled = true + i.PurgeOption = pushOps.Purge + return nil +} + +func (i *MockImageIndex) DeleteDir() error { + i.DeleteDirCalled = true + return nil +} + +func NewFakeWithRandomUnderlyingV1Image(t *testing.T, repoName string, identifier imgutil.Identifier) *FakeWithRandomUnderlyingImage { + fakeCNBImage := fakes.NewImage(repoName, "", identifier) + underlyingImage, err := random.Image(1024, 1) + AssertNil(t, err) + return &FakeWithRandomUnderlyingImage{ + Image: fakeCNBImage, + underlyingImage: underlyingImage, + } +} + +type FakeWithRandomUnderlyingImage 
struct { + *fakes.Image + underlyingImage v1.Image +} + +func (t *FakeWithRandomUnderlyingImage) UnderlyingImage() v1.Image { + return t.underlyingImage +} + +func (t *FakeWithRandomUnderlyingImage) GetLayer(sha string) (io.ReadCloser, error) { + hash, err := v1.NewHash(sha) + if err != nil { + return nil, err + } + + layer, err := t.UnderlyingImage().LayerByDiffID(hash) + if err != nil { + return nil, err + } + return layer.Uncompressed() +} + +// SetUpRandomRemoteIndexWithPlatforms creates an image index with platform-specific images and pushes it to the registry +// Uses imgutil for both image creation and index management, following the pattern from manifest_create.go +func SetUpRandomRemoteIndexWithPlatforms(t *testing.T, indexRepoName string, platforms []struct{ OS, Arch string }) { + t.Helper() + + // Create platform-specific images using imgutil and collect their identifiers + var imageDigests []string + for _, platform := range platforms { + platformTag := fmt.Sprintf("%s-%s", platform.OS, platform.Arch) + platformImageName := fmt.Sprintf("%s:%s", indexRepoName, platformTag) + + // Use imgutil to create image with proper platform + img, err := imgutilRemote.NewImage(platformImageName, authn.DefaultKeychain, imgutilRemote.WithDefaultPlatform(imgutil.Platform{ + OS: platform.OS, + Architecture: platform.Arch, + })) + AssertNil(t, err) + AssertNil(t, img.Save()) + + // Extract the digest identifier + id, err := img.Identifier() + AssertNil(t, err) + imageDigests = append(imageDigests, id.String()) + } + + // Create a CNBIndex (similar to indexFactory.CreateIndex in manifest_create.go) + tmpDir, err := os.MkdirTemp("", "index-test") + AssertNil(t, err) + defer os.RemoveAll(tmpDir) + + idx, err := imgutil.NewCNBIndex(indexRepoName, imgutil.IndexOptions{ + RemoteIndexOptions: imgutil.RemoteIndexOptions{ + Keychain: authn.DefaultKeychain, + }, + LayoutIndexOptions: imgutil.LayoutIndexOptions{ + XdgPath: tmpDir, + }, + }) + AssertNil(t, err) + + // Add each image 
to the index (similar to addManifestToIndex in common.go) + for i, digestStr := range imageDigests { + // Fetch the image using the digest + imageToAdd, err := imgutilRemote.NewImage(digestStr, authn.DefaultKeychain, imgutilRemote.FromBaseImage(digestStr)) + AssertNil(t, err) + + // Add the underlying v1.Image to the index + idx.AddManifest(imageToAdd.UnderlyingImage()) + + // Set platform metadata for the manifest + digestRef, err := name.NewDigest(digestStr) + AssertNil(t, err) + + err = idx.SetOS(digestRef, platforms[i].OS) + AssertNil(t, err) + err = idx.SetArchitecture(digestRef, platforms[i].Arch) + AssertNil(t, err) + } + + // Push the index to the registry (similar to manifest_create.go with Publish option) + err = idx.Push(imgutil.WithPurge(true), imgutil.WithMediaType(types.OCIImageIndex)) + AssertNil(t, err) +} diff --git a/testhelpers/registry.go b/testhelpers/registry.go index cd2d3efb89..247d1f2dfe 100644 --- a/testhelpers/registry.go +++ b/testhelpers/registry.go @@ -5,7 +5,6 @@ import ( "encoding/base64" "fmt" "io" - "io/ioutil" "net" "net/url" "os" @@ -13,12 +12,13 @@ import ( "testing" "time" - dockertypes "github.com/docker/docker/api/types" - dockercontainer "github.com/docker/docker/api/types/container" - "github.com/docker/go-connections/nat" + "github.com/go-git/go-git/v5" + "github.com/go-git/go-git/v5/plumbing/object" + dockercontainer "github.com/moby/moby/api/types/container" + dockernetwork "github.com/moby/moby/api/types/network" + dockerregistry "github.com/moby/moby/api/types/registry" + "github.com/moby/moby/client" "golang.org/x/crypto/bcrypt" - "gopkg.in/src-d/go-git.v4" - "gopkg.in/src-d/go-git.v4/plumbing/object" "github.com/buildpacks/pack/pkg/archive" ) @@ -120,8 +120,8 @@ func waitForRegistryToBeAvailable(t *testing.T, registryConfig *TestRegistryConf } } -func (rc *TestRegistryConfig) AuthConfig() dockertypes.AuthConfig { - return dockertypes.AuthConfig{ +func (rc *TestRegistryConfig) AuthConfig() dockerregistry.AuthConfig { 
+ return dockerregistry.AuthConfig{ Username: rc.username, Password: rc.password, ServerAddress: RegistryHost(rc.RunRegistryHost, rc.RunRegistryPort), @@ -130,7 +130,7 @@ func (rc *TestRegistryConfig) AuthConfig() dockertypes.AuthConfig { func (rc *TestRegistryConfig) Login(t *testing.T, username string, password string) { Eventually(t, func() bool { - _, err := dockerCli(t).RegistryLogin(context.Background(), dockertypes.AuthConfig{ + _, err := dockerCli(t).RegistryLogin(context.Background(), client.RegistryLoginOptions{ Username: username, Password: password, ServerAddress: RegistryHost(rc.RunRegistryHost, rc.RunRegistryPort), @@ -142,37 +142,45 @@ func (rc *TestRegistryConfig) Login(t *testing.T, username string, password stri func startRegistry(t *testing.T, runRegistryName, username, password string) (string, string, string) { ctx := context.Background() - daemonInfo, err := dockerCli(t).Info(ctx) + daemonInfoResult, err := dockerCli(t).Info(ctx, client.InfoOptions{}) AssertNil(t, err) - registryContainerName := registryContainerNames[daemonInfo.OSType] + registryContainerName := registryContainerNames[daemonInfoResult.Info.OSType] AssertNil(t, PullImageWithAuth(dockerCli(t), registryContainerName, "")) htpasswdTar := generateHtpasswd(t, username, password) defer htpasswdTar.Close() - ctr, err := dockerCli(t).ContainerCreate(ctx, &dockercontainer.Config{ - Image: registryContainerName, - Labels: map[string]string{"author": "pack"}, - Env: []string{ - "REGISTRY_AUTH=htpasswd", - "REGISTRY_AUTH_HTPASSWD_REALM=Registry Realm", - "REGISTRY_AUTH_HTPASSWD_PATH=/registry_test_htpasswd", + ctrResult, err := dockerCli(t).ContainerCreate(ctx, client.ContainerCreateOptions{ + Name: runRegistryName, + Config: &dockercontainer.Config{ + Image: registryContainerName, + Labels: map[string]string{"author": "pack"}, + Env: []string{ + "REGISTRY_AUTH=htpasswd", + "REGISTRY_AUTH_HTPASSWD_REALM=Registry Realm", + "REGISTRY_AUTH_HTPASSWD_PATH=/registry_test_htpasswd", + }, }, - }, 
&dockercontainer.HostConfig{ - AutoRemove: true, - PortBindings: nat.PortMap{ - "5000/tcp": []nat.PortBinding{{HostPort: "0"}}, + HostConfig: &dockercontainer.HostConfig{ + AutoRemove: true, + PortBindings: dockernetwork.PortMap{ + dockernetwork.MustParsePort("5000/tcp"): []dockernetwork.PortBinding{{HostPort: "0"}}, + }, }, - }, nil, nil, runRegistryName) + }) AssertNil(t, err) - err = dockerCli(t).CopyToContainer(ctx, ctr.ID, "/", htpasswdTar, dockertypes.CopyToContainerOptions{}) + + _, err = dockerCli(t).CopyToContainer(ctx, ctrResult.ID, client.CopyToContainerOptions{ + DestinationPath: "/", + Content: htpasswdTar, + }) AssertNil(t, err) - err = dockerCli(t).ContainerStart(ctx, ctr.ID, dockertypes.ContainerStartOptions{}) + _, err = dockerCli(t).ContainerStart(ctx, ctrResult.ID, client.ContainerStartOptions{}) AssertNil(t, err) - runRegistryPort, err := waitForPortBinding(t, ctr.ID, "5000/tcp", 30*time.Second) + runRegistryPort, err := waitForPortBinding(t, ctrResult.ID, "5000/tcp", 30*time.Second) AssertNil(t, err) runRegistryHost := DockerHostname(t) @@ -189,12 +197,14 @@ func waitForPortBinding(t *testing.T, containerID, portSpec string, duration tim for { select { case <-ticker.C: - inspect, err := dockerCli(t).ContainerInspect(context.TODO(), containerID) + inspectResult, err := dockerCli(t).ContainerInspect(context.TODO(), containerID, client.ContainerInspectOptions{}) if err != nil { return "", err } + inspect := inspectResult.Container - portBindings := inspect.NetworkSettings.Ports[nat.Port(portSpec)] + portPort, _ := dockernetwork.ParsePort(portSpec) + portBindings := inspect.NetworkSettings.Ports[portPort] if len(portBindings) > 0 { return portBindings[0].HostPort, nil } @@ -222,12 +232,12 @@ func DockerHostname(t *testing.T) string { // if DOCKER_HOST is non-tcp, we assume that we are // talking to the daemon over a local pipe. 
default: - daemonInfo, err := dockerCli.Info(context.TODO()) + daemonInfoResult, err := dockerCli.Info(context.TODO(), client.InfoOptions{}) if err != nil { t.Fatalf("unable to fetch client.DockerInfo: %s", err) } - if daemonInfo.OSType == "windows" { + if daemonInfoResult.Info.OSType == "windows" { // try to lookup the host IP by helper domain name (https://docs.docker.com/docker-for-windows/networking/#use-cases-and-workarounds) // Note: pack appears to not support /etc/hosts-based insecure-registries addrs, err := net.LookupHost("host.docker.internal") @@ -254,10 +264,10 @@ func generateHtpasswd(t *testing.T, username string, password string) io.ReadClo } func setupDockerConfigWithAuth(t *testing.T, username string, password string, runRegistryHost string, runRegistryPort string) string { - dockerConfigDir, err := ioutil.TempDir("", "pack.test.docker.config.dir") + dockerConfigDir, err := os.MkdirTemp("", "pack.test.docker.config.dir") AssertNil(t, err) - AssertNil(t, ioutil.WriteFile(filepath.Join(dockerConfigDir, "config.json"), []byte(fmt.Sprintf(`{ + AssertNil(t, os.WriteFile(filepath.Join(dockerConfigDir, "config.json"), []byte(fmt.Sprintf(`{ "auths": { "%s": { "auth": "%s" @@ -285,7 +295,7 @@ func (rc *TestRegistryConfig) RmRegistry(t *testing.T) { func (rc *TestRegistryConfig) StopRegistry(t *testing.T) { t.Log("stop registry") t.Helper() - dockerCli(t).ContainerKill(context.Background(), rc.runRegistryName, "SIGKILL") + dockerCli(t).ContainerKill(context.Background(), rc.runRegistryName, client.ContainerKillOptions{Signal: "SIGKILL"}) err := os.RemoveAll(rc.DockerConfigDir) AssertNil(t, err) diff --git a/testhelpers/tar_assertions.go b/testhelpers/tar_assertions.go index 1de4442a44..55b2834a46 100644 --- a/testhelpers/tar_assertions.go +++ b/testhelpers/tar_assertions.go @@ -20,6 +20,8 @@ var gzipMagicHeader = []byte{'\x1f', '\x8b'} type TarEntryAssertion func(t *testing.T, header *tar.Header, data []byte) +type TarEntriesAssertion func(t *testing.T, 
header1 *tar.Header, data1 []byte, header2 *tar.Header, data2 []byte) + func AssertOnTarEntry(t *testing.T, tarPath, entryPath string, assertFns ...TarEntryAssertion) { t.Helper() @@ -48,6 +50,27 @@ func AssertOnNestedTar(nestedEntryPath string, assertions ...TarEntryAssertion) } } +func AssertOnTarEntries(t *testing.T, tarPath string, entryPath1, entryPath2 string, assertFns ...TarEntriesAssertion) { + t.Helper() + + tarFile, err := os.Open(filepath.Clean(tarPath)) + AssertNil(t, err) + defer tarFile.Close() + + header1, data1, err := readTarFileEntry(tarFile, entryPath1) + AssertNil(t, err) + + _, err = tarFile.Seek(0, io.SeekStart) + AssertNil(t, err) + + header2, data2, err := readTarFileEntry(tarFile, entryPath2) + AssertNil(t, err) + + for _, fn := range assertFns { + fn(t, header1, data1, header2, data2) + } +} + func readTarFileEntry(reader io.Reader, entryPath string) (*tar.Header, []byte, error) { var ( gzipReader *gzip.Reader @@ -113,6 +136,19 @@ func SymlinksTo(expectedTarget string) TarEntryAssertion { } } +func AreEquivalentHardLinks() TarEntriesAssertion { + return func(t *testing.T, header1 *tar.Header, _ []byte, header2 *tar.Header, _ []byte) { + t.Helper() + if header1.Typeflag != tar.TypeLink && header2.Typeflag != tar.TypeLink { + t.Fatalf("path '%s' and '%s' are not hardlinks, type flags are '%c' and '%c'", header1.Name, header2.Name, header1.Typeflag, header2.Typeflag) + } + + if header1.Linkname != header2.Name && header2.Linkname != header1.Name { + t.Fatalf("'%s' and '%s' are not the same file", header1.Name, header2.Name) + } + } +} + func HasOwnerAndGroup(expectedUID int, expectedGID int) TarEntryAssertion { return func(t *testing.T, header *tar.Header, _ []byte) { t.Helper() diff --git a/testhelpers/testhelpers.go b/testhelpers/testhelpers.go index 12a4d4a70d..eb4b1068dd 100644 --- a/testhelpers/testhelpers.go +++ b/testhelpers/testhelpers.go @@ -8,7 +8,6 @@ import ( "encoding/json" "fmt" "io" - "io/ioutil" "math/rand" "net/http" "os" @@ 
-22,16 +21,21 @@ import ( "testing" "time" - dockertypes "github.com/docker/docker/api/types" - "github.com/docker/docker/api/types/container" - "github.com/docker/docker/client" + "github.com/buildpacks/imgutil" + v1 "github.com/google/go-containerregistry/pkg/v1" + + "github.com/buildpacks/imgutil/fakes" + "github.com/docker/docker/pkg/jsonmessage" "github.com/docker/docker/pkg/stdcopy" + "github.com/go-git/go-git/v5" "github.com/google/go-cmp/cmp" "github.com/heroku/color" + dcontainer "github.com/moby/moby/api/types/container" + "github.com/moby/moby/client" "github.com/pkg/errors" - "gopkg.in/src-d/go-git.v4" + "github.com/buildpacks/pack/internal/container" "github.com/buildpacks/pack/internal/stringset" "github.com/buildpacks/pack/internal/style" "github.com/buildpacks/pack/pkg/archive" @@ -48,9 +52,9 @@ func RandString(n int) string { } // Assert deep equality (and provide useful difference as a test failure) -func AssertEq(t *testing.T, actual, expected interface{}) { +func AssertEq(t *testing.T, actual, expected interface{}, opts ...cmp.Option) { t.Helper() - if diff := cmp.Diff(expected, actual); diff != "" { + if diff := cmp.Diff(expected, actual, opts...); diff != "" { t.Fatal(diff) } } @@ -177,6 +181,33 @@ func AssertNotContains(t *testing.T, actual, expected string) { } } +type KeyValue[k comparable, v any] struct { + key k + value v +} + +func NewKeyValue[k comparable, v any](key k, value v) KeyValue[k, v] { + return KeyValue[k, v]{key: key, value: value} +} + +func AssertMapContains[key comparable, value any](t *testing.T, actual map[key]value, expected ...KeyValue[key, value]) { + t.Helper() + for _, i := range expected { + if v, ok := actual[i.key]; !ok || !reflect.DeepEqual(v, i.value) { + t.Fatalf("Expected %s to contain elements %s", reflect.ValueOf(actual), reflect.ValueOf(expected)) + } + } +} + +func AssertMapNotContains[key comparable, value any](t *testing.T, actual map[key]value, expected ...KeyValue[key, value]) { + t.Helper() + for _, 
i := range expected { + if v, ok := actual[i.key]; ok && reflect.DeepEqual(v, i.value) { + t.Fatalf("Expected %s to not contain elements %s", reflect.ValueOf(actual), reflect.ValueOf(expected)) + } + } +} + func AssertSliceContains(t *testing.T, slice []string, expected ...string) { t.Helper() _, missing, _ := stringset.Compare(slice, expected) @@ -303,13 +334,18 @@ func hasMatches(actual, exp string) bool { return len(matches) > 0 } -var dockerCliVal client.CommonAPIClient +// IndexOf returns the index of the first occurrence of substr in s, or -1 if not found +func IndexOf(s, substr string) int { + return strings.Index(s, substr) +} + +var dockerCliVal *client.Client var dockerCliOnce sync.Once var dockerCliErr error -func dockerCli(t *testing.T) client.CommonAPIClient { +func dockerCli(t *testing.T) *client.Client { dockerCliOnce.Do(func() { - dockerCliVal, dockerCliErr = client.NewClientWithOpts(client.FromEnv, client.WithVersion("1.38")) + dockerCliVal, dockerCliErr = client.New(client.FromEnv) }) AssertNil(t, dockerCliErr) return dockerCliVal @@ -335,13 +371,13 @@ func Eventually(t *testing.T, test func() bool, every time.Duration, timeout tim } } -func CreateImage(t *testing.T, dockerCli client.CommonAPIClient, repoName, dockerFile string) { +func CreateImage(t *testing.T, dockerCli *client.Client, repoName, dockerFile string) { t.Helper() buildContext := archive.CreateSingleFileTarReader("Dockerfile", dockerFile) defer buildContext.Close() - resp, err := dockerCli.ImageBuild(context.Background(), buildContext, dockertypes.ImageBuildOptions{ + resp, err := dockerCli.ImageBuild(context.Background(), buildContext, client.ImageBuildOptions{ Tags: []string{repoName}, SuppressOutput: true, Remove: true, @@ -354,11 +390,11 @@ func CreateImage(t *testing.T, dockerCli client.CommonAPIClient, repoName, docke AssertNil(t, errors.Wrapf(err, "building image %s", style.Symbol(repoName))) } -func CreateImageFromDir(t *testing.T, dockerCli client.CommonAPIClient, repoName 
string, dir string) { +func CreateImageFromDir(t *testing.T, dockerCli *client.Client, repoName string, dir string) { t.Helper() buildContext := archive.ReadDirAsTar(dir, "/", 0, 0, -1, true, false, nil) - resp, err := dockerCli.ImageBuild(context.Background(), buildContext, dockertypes.ImageBuildOptions{ + resp, err := dockerCli.ImageBuild(context.Background(), buildContext, client.ImageBuildOptions{ Tags: []string{repoName}, Remove: true, ForceRemove: true, @@ -371,7 +407,7 @@ func CreateImageFromDir(t *testing.T, dockerCli client.CommonAPIClient, repoName AssertNil(t, errors.Wrapf(err, "building image %s", style.Symbol(repoName))) } -func CheckImageBuildResult(response dockertypes.ImageBuildResponse, err error) error { +func CheckImageBuildResult(response client.ImageBuildResult, err error) error { if err != nil { return err } @@ -381,7 +417,7 @@ func CheckImageBuildResult(response dockertypes.ImageBuildResponse, err error) e } func checkResponse(responseBody io.Reader) error { - body, err := ioutil.ReadAll(responseBody) + body, err := io.ReadAll(responseBody) if err != nil { return errors.Wrap(err, "reading body") } @@ -410,7 +446,7 @@ func checkResponse(responseBody io.Reader) error { return nil } -func CreateImageOnRemote(t *testing.T, dockerCli client.CommonAPIClient, registryConfig *TestRegistryConfig, repoName, dockerFile string) string { +func CreateImageOnRemote(t *testing.T, dockerCli *client.Client, registryConfig *TestRegistryConfig, repoName, dockerFile string) string { t.Helper() imageName := registryConfig.RepoName(repoName) CreateImage(t, dockerCli, imageName, dockerFile) @@ -418,14 +454,14 @@ func CreateImageOnRemote(t *testing.T, dockerCli client.CommonAPIClient, registr return imageName } -func DockerRmi(dockerCli client.CommonAPIClient, repoNames ...string) error { +func DockerRmi(dockerCli *client.Client, repoNames ...string) error { var err error ctx := context.Background() for _, name := range repoNames { _, e := dockerCli.ImageRemove( ctx, 
name, - dockertypes.ImageRemoveOptions{Force: true, PruneChildren: true}, + client.ImageRemoveOptions{Force: true, PruneChildren: true}, ) if e != nil && err == nil { err = e @@ -434,8 +470,8 @@ func DockerRmi(dockerCli client.CommonAPIClient, repoNames ...string) error { return err } -func PushImage(dockerCli client.CommonAPIClient, ref string, registryConfig *TestRegistryConfig) error { - rc, err := dockerCli.ImagePush(context.Background(), ref, dockertypes.ImagePushOptions{RegistryAuth: registryConfig.RegistryAuth()}) +func PushImage(dockerCli *client.Client, ref string, registryConfig *TestRegistryConfig) error { + rc, err := dockerCli.ImagePush(context.Background(), ref, client.ImagePushOptions{RegistryAuth: registryConfig.RegistryAuth()}) if err != nil { return errors.Wrap(err, "pushing image") } @@ -469,7 +505,7 @@ func HTTPGetE(url string, headers map[string]string) (string, error) { if resp.StatusCode >= 300 { return "", fmt.Errorf("HTTP Status was bad: %s => %d", url, resp.StatusCode) } - b, err := ioutil.ReadAll(resp.Body) + b, err := io.ReadAll(resp.Body) if err != nil { return "", errors.Wrap(err, "reading body") } @@ -478,14 +514,14 @@ func HTTPGetE(url string, headers map[string]string) (string, error) { func ImageID(t *testing.T, repoName string) string { t.Helper() - inspect, _, err := dockerCli(t).ImageInspectWithRaw(context.Background(), repoName) + inspect, err := dockerCli(t).ImageInspect(context.Background(), repoName) AssertNil(t, err) return strings.TrimPrefix(inspect.ID, "sha256:") } func Digest(t *testing.T, repoName string) string { t.Helper() - inspect, _, err := dockerCli(t).ImageInspectWithRaw(context.Background(), repoName) + inspect, err := dockerCli(t).ImageInspect(context.Background(), repoName) AssertNil(t, err) if len(inspect.RepoDigests) < 1 { t.Fatalf("image '%s' has no repo digests", repoName) @@ -499,7 +535,7 @@ func Digest(t *testing.T, repoName string) string { func TopLayerDiffID(t *testing.T, repoName string) string { 
t.Helper() - inspect, _, err := dockerCli(t).ImageInspectWithRaw(context.Background(), repoName) + inspect, err := dockerCli(t).ImageInspect(context.Background(), repoName) AssertNil(t, err) if len(inspect.RootFS.Layers) < 1 { t.Fatalf("image '%s' has no layers", repoName) @@ -523,15 +559,15 @@ func RunE(cmd *exec.Cmd) (string, error) { return string(output), nil } -func PullImageWithAuth(dockerCli client.CommonAPIClient, ref, registryAuth string) error { - rc, err := dockerCli.ImagePull(context.Background(), ref, dockertypes.ImagePullOptions{RegistryAuth: registryAuth}) +func PullImageWithAuth(dockerCli *client.Client, ref, registryAuth string) error { + pullResult, err := dockerCli.ImagePull(context.Background(), ref, client.ImagePullOptions{RegistryAuth: registryAuth}) if err != nil { return err } - if _, err := io.Copy(ioutil.Discard, rc); err != nil { + if _, err := io.Copy(io.Discard, pullResult); err != nil { return err } - return rc.Close() + return pullResult.Close() } func CopyFile(t *testing.T, src, dst string) { @@ -576,12 +612,16 @@ func RecursiveCopy(t *testing.T, src, dst string) { } func RecursiveCopyE(src, dst string) error { - fis, err := ioutil.ReadDir(src) + fis, err := os.ReadDir(src) if err != nil { return err } - for _, fi := range fis { + for _, entry := range fis { + fi, err := entry.Info() + if err != nil { + return err + } if fi.Mode().IsRegular() { err = CopyFileE(filepath.Join(src, fi.Name()), filepath.Join(dst, fi.Name())) if err != nil { @@ -628,10 +668,28 @@ func SkipUnless(t *testing.T, expression bool, reason string) { } } -func RunContainer(ctx context.Context, dockerCli client.CommonAPIClient, id string, stdout io.Writer, stderr io.Writer) error { - bodyChan, errChan := dockerCli.ContainerWait(ctx, id, container.WaitConditionNextExit) +// dockerClientAdapter adapts moby client to internal/container.DockerClient interface +type dockerClientAdapter struct { + *client.Client +} + +func (a *dockerClientAdapter) ContainerWait(ctx 
context.Context, containerID string, options client.ContainerWaitOptions) client.ContainerWaitResult { + return a.Client.ContainerWait(ctx, containerID, options) +} + +func (a *dockerClientAdapter) ContainerAttach(ctx context.Context, container string, options client.ContainerAttachOptions) (client.ContainerAttachResult, error) { + return a.Client.ContainerAttach(ctx, container, options) +} - logs, err := dockerCli.ContainerAttach(ctx, id, dockertypes.ContainerAttachOptions{ +func (a *dockerClientAdapter) ContainerStart(ctx context.Context, container string, options client.ContainerStartOptions) (client.ContainerStartResult, error) { + return a.Client.ContainerStart(ctx, container, options) +} + +func RunContainer(ctx context.Context, dockerCli *client.Client, id string, stdout io.Writer, stderr io.Writer) error { + adapter := &dockerClientAdapter{Client: dockerCli} + bodyChan, errChan := container.ContainerWaitWrapper(ctx, adapter, id, dcontainer.WaitConditionNextExit) + + logsResult, err := dockerCli.ContainerAttach(ctx, id, client.ContainerAttachOptions{ Stream: true, Stdout: true, Stderr: true, @@ -640,13 +698,14 @@ func RunContainer(ctx context.Context, dockerCli client.CommonAPIClient, id stri return err } - if err := dockerCli.ContainerStart(ctx, id, dockertypes.ContainerStartOptions{}); err != nil { + _, err = dockerCli.ContainerStart(ctx, id, client.ContainerStartOptions{}) + if err != nil { return errors.Wrap(err, "container start") } copyErr := make(chan error) go func() { - _, err := stdcopy.StdCopy(stdout, stderr, logs.Reader) + _, err := stdcopy.StdCopy(stdout, stderr, logsResult.Reader) copyErr <- err }() @@ -664,7 +723,7 @@ func RunContainer(ctx context.Context, dockerCli client.CommonAPIClient, id stri func CreateTGZ(t *testing.T, srcDir, tarDir string, mode int64) string { t.Helper() - fh, err := ioutil.TempFile("", "*.tgz") + fh, err := os.CreateTemp("", "*.tgz") AssertNil(t, err) defer fh.Close() @@ -679,7 +738,7 @@ func CreateTGZ(t *testing.T, 
srcDir, tarDir string, mode int64) string { func CreateTAR(t *testing.T, srcDir, tarDir string, mode int64) string { t.Helper() - fh, err := ioutil.TempFile("", "*.tgz") + fh, err := os.CreateTemp("", "*.tgz") AssertNil(t, err) defer fh.Close() @@ -702,9 +761,11 @@ func RecursiveCopyNow(t *testing.T, src, dst string) { err := os.MkdirAll(dst, 0750) AssertNil(t, err) - fis, err := ioutil.ReadDir(src) + fis, err := os.ReadDir(src) AssertNil(t, err) - for _, fi := range fis { + for _, entry := range fis { + fi, err := entry.Info() + AssertNil(t, err) if fi.Mode().IsRegular() { srcFile, err := os.Open(filepath.Join(filepath.Clean(src), fi.Name())) AssertNil(t, err) @@ -755,7 +816,7 @@ func tarFileContents(t *testing.T, tarfile, path string) (exist bool, contents s AssertNil(t, err) if header.Name == path { - buf, err := ioutil.ReadAll(tr) + buf, err := io.ReadAll(tr) AssertNil(t, err) return true, string(buf) } @@ -795,12 +856,14 @@ func tarHasFile(t *testing.T, tarFile, path string) (exist bool) { return false } -func AssertBuildpacksHaveDescriptors(t *testing.T, bps []buildpack.Buildpack, descriptors []dist.BuildpackDescriptor) { - AssertEq(t, len(bps), len(descriptors)) - for _, bp := range bps { +func AssertBuildpacksHaveDescriptors(t *testing.T, modules []buildpack.BuildModule, descriptors []dist.BuildpackDescriptor) { + AssertEq(t, len(modules), len(descriptors)) + for _, mod := range modules { found := false + modDesc, ok := mod.Descriptor().(*dist.BuildpackDescriptor) + AssertEq(t, ok, true) for _, descriptor := range descriptors { - if diff := cmp.Diff(bp.Descriptor(), descriptor); diff == "" { + if diff := cmp.Diff(*modDesc, descriptor); diff == "" { found = true break } @@ -825,6 +888,13 @@ func AssertGitHeadEq(t *testing.T, path1, path2 string) { AssertEq(t, h1.Hash().String(), h2.Hash().String()) } +func AssertBlobsLen(t *testing.T, path string, expected int) { + t.Helper() + fis, err := os.ReadDir(filepath.Join(path, "blobs", "sha256")) + AssertNil(t, 
err) + AssertEq(t, len(fis), expected) +} + func MockWriterAndOutput() (*color.Console, func() string) { r, w, _ := os.Pipe() console := color.NewConsole(w) @@ -836,3 +906,38 @@ func MockWriterAndOutput() (*color.Console, func() string) { return b.String() } } + +func LayerFileName(bp buildpack.BuildModule) string { + return fmt.Sprintf("%s.%s.tar", bp.Descriptor().Info().ID, bp.Descriptor().Info().Version) +} + +type FakeAddedLayerImage struct { + *fakes.Image + addedLayersOrder []string +} + +func (f *FakeAddedLayerImage) AddedLayersOrder() []string { + return f.addedLayersOrder +} + +func (f *FakeAddedLayerImage) AddLayerWithDiffID(path, diffID string) error { + f.addedLayersOrder = append(f.addedLayersOrder, path) + return f.Image.AddLayerWithDiffID(path, diffID) +} + +type FakeWithUnderlyingImage struct { + *fakes.Image + underlyingImage v1.Image +} + +func (t *FakeWithUnderlyingImage) UnderlyingImage() v1.Image { + return t.underlyingImage +} + +func NewFakeWithUnderlyingV1Image(repoName string, identifier imgutil.Identifier, underlyingImage v1.Image) *FakeWithUnderlyingImage { + fakeCNBImage := fakes.NewImage(repoName, "", identifier) + return &FakeWithUnderlyingImage{ + Image: fakeCNBImage, + underlyingImage: underlyingImage, + } +} diff --git a/tools/go.mod b/tools/go.mod index de759c54b5..44c6b9dc69 100644 --- a/tools/go.mod +++ b/tools/go.mod @@ -1,11 +1,207 @@ module github.com/buildpacks/pack/tools -go 1.17 +go 1.25.0 + +toolchain go1.25.5 require ( github.com/golang/mock v1.6.0 - github.com/golangci/golangci-lint v1.43.0 - golang.org/x/tools v0.1.7 + github.com/golangci/golangci-lint/v2 v2.0.2 + golang.org/x/tools v0.31.0 ) -require github.com/stretchr/objx v0.2.0 // indirect +require ( + 4d63.com/gocheckcompilerdirectives v1.3.0 // indirect + 4d63.com/gochecknoglobals v0.2.2 // indirect + github.com/4meepo/tagalign v1.4.2 // indirect + github.com/Abirdcfly/dupword v0.1.3 // indirect + github.com/Antonboom/errname v1.1.0 // indirect + 
github.com/Antonboom/nilnil v1.1.0 // indirect + github.com/Antonboom/testifylint v1.6.0 // indirect + github.com/BurntSushi/toml v1.5.0 // indirect + github.com/Crocmagnon/fatcontext v0.7.1 // indirect + github.com/Djarvur/go-err113 v0.0.0-20210108212216-aea10b59be24 // indirect + github.com/GaijinEntertainment/go-exhaustruct/v3 v3.3.1 // indirect + github.com/Masterminds/semver/v3 v3.3.1 // indirect + github.com/OpenPeeDeeP/depguard/v2 v2.2.1 // indirect + github.com/alecthomas/go-check-sumtype v0.3.1 // indirect + github.com/alexkohler/nakedret/v2 v2.0.5 // indirect + github.com/alexkohler/prealloc v1.0.0 // indirect + github.com/alingse/asasalint v0.0.11 // indirect + github.com/alingse/nilnesserr v0.1.2 // indirect + github.com/ashanbrown/forbidigo v1.6.0 // indirect + github.com/ashanbrown/makezero v1.2.0 // indirect + github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect + github.com/beorn7/perks v1.0.1 // indirect + github.com/bkielbasa/cyclop v1.2.3 // indirect + github.com/blizzy78/varnamelen v0.8.0 // indirect + github.com/bombsimon/wsl/v4 v4.6.0 // indirect + github.com/breml/bidichk v0.3.3 // indirect + github.com/breml/errchkjson v0.4.1 // indirect + github.com/butuzov/ireturn v0.3.1 // indirect + github.com/butuzov/mirror v1.3.0 // indirect + github.com/catenacyber/perfsprint v0.9.1 // indirect + github.com/ccojocar/zxcvbn-go v1.0.2 // indirect + github.com/cespare/xxhash/v2 v2.3.0 // indirect + github.com/charithe/durationcheck v0.0.10 // indirect + github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc // indirect + github.com/charmbracelet/lipgloss v1.1.0 // indirect + github.com/charmbracelet/x/ansi v0.8.0 // indirect + github.com/charmbracelet/x/cellbuf v0.0.13-0.20250311204145-2c3ea96c31dd // indirect + github.com/charmbracelet/x/term v0.2.1 // indirect + github.com/chavacava/garif v0.1.0 // indirect + github.com/ckaznocha/intrange v0.3.1 // indirect + github.com/curioswitch/go-reassign v0.3.0 // indirect + 
github.com/daixiang0/gci v0.13.6 // indirect + github.com/dave/dst v0.27.3 // indirect + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/denis-tingaikin/go-header v0.5.0 // indirect + github.com/ettle/strcase v0.2.0 // indirect + github.com/fatih/color v1.18.0 // indirect + github.com/fatih/structtag v1.2.0 // indirect + github.com/firefart/nonamedreturns v1.0.5 // indirect + github.com/fsnotify/fsnotify v1.5.4 // indirect + github.com/fzipp/gocyclo v0.6.0 // indirect + github.com/ghostiam/protogetter v0.3.12 // indirect + github.com/go-critic/go-critic v0.13.0 // indirect + github.com/go-toolsmith/astcast v1.1.0 // indirect + github.com/go-toolsmith/astcopy v1.1.0 // indirect + github.com/go-toolsmith/astequal v1.2.0 // indirect + github.com/go-toolsmith/astfmt v1.1.0 // indirect + github.com/go-toolsmith/astp v1.1.0 // indirect + github.com/go-toolsmith/strparse v1.1.0 // indirect + github.com/go-toolsmith/typep v1.1.0 // indirect + github.com/go-viper/mapstructure/v2 v2.4.0 // indirect + github.com/go-xmlfmt/xmlfmt v1.1.3 // indirect + github.com/gobwas/glob v0.2.3 // indirect + github.com/gofrs/flock v0.12.1 // indirect + github.com/golang/protobuf v1.5.3 // indirect + github.com/golangci/dupl v0.0.0-20250308024227-f665c8d69b32 // indirect + github.com/golangci/go-printf-func-name v0.1.0 // indirect + github.com/golangci/gofmt v0.0.0-20250106114630-d62b90e6713d // indirect + github.com/golangci/golines v0.0.0-20250217134842-442fd0091d95 // indirect + github.com/golangci/misspell v0.6.0 // indirect + github.com/golangci/plugin-module-register v0.1.1 // indirect + github.com/golangci/revgrep v0.8.0 // indirect + github.com/golangci/unconvert v0.0.0-20240309020433-c5143eacb3ed // indirect + github.com/google/go-cmp v0.7.0 // indirect + github.com/gordonklaus/ineffassign v0.1.0 // indirect + github.com/gostaticanalysis/analysisutil v0.7.1 // indirect + github.com/gostaticanalysis/comment v1.5.0 // indirect + github.com/gostaticanalysis/forcetypeassert 
v0.2.0 // indirect + github.com/gostaticanalysis/nilerr v0.1.1 // indirect + github.com/hashicorp/go-immutable-radix/v2 v2.1.0 // indirect + github.com/hashicorp/go-version v1.7.0 // indirect + github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect + github.com/hashicorp/hcl v1.0.0 // indirect + github.com/hexops/gotextdiff v1.0.3 // indirect + github.com/inconshreveable/mousetrap v1.1.0 // indirect + github.com/jgautheron/goconst v1.7.1 // indirect + github.com/jingyugao/rowserrcheck v1.1.1 // indirect + github.com/jjti/go-spancheck v0.6.4 // indirect + github.com/julz/importas v0.2.0 // indirect + github.com/karamaru-alpha/copyloopvar v1.2.1 // indirect + github.com/kisielk/errcheck v1.9.0 // indirect + github.com/kkHAIKE/contextcheck v1.1.6 // indirect + github.com/kulti/thelper v0.6.3 // indirect + github.com/kunwardeep/paralleltest v1.0.10 // indirect + github.com/lasiar/canonicalheader v1.1.2 // indirect + github.com/ldez/exptostd v0.4.2 // indirect + github.com/ldez/gomoddirectives v0.6.1 // indirect + github.com/ldez/grignotin v0.9.0 // indirect + github.com/ldez/tagliatelle v0.7.1 // indirect + github.com/ldez/usetesting v0.4.2 // indirect + github.com/leonklingele/grouper v1.1.2 // indirect + github.com/lucasb-eyer/go-colorful v1.2.0 // indirect + github.com/macabu/inamedparam v0.2.0 // indirect + github.com/magiconair/properties v1.8.6 // indirect + github.com/maratori/testableexamples v1.0.0 // indirect + github.com/maratori/testpackage v1.1.1 // indirect + github.com/matoous/godox v1.1.0 // indirect + github.com/mattn/go-colorable v0.1.14 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect + github.com/mattn/go-runewidth v0.0.16 // indirect + github.com/matttproud/golang_protobuf_extensions v1.0.1 // indirect + github.com/mgechev/revive v1.7.0 // indirect + github.com/mitchellh/go-homedir v1.1.0 // indirect + github.com/mitchellh/mapstructure v1.5.0 // indirect + github.com/moricho/tparallel v0.3.2 // indirect + github.com/muesli/termenv 
v0.16.0 // indirect + github.com/nakabonne/nestif v0.3.1 // indirect + github.com/nishanths/exhaustive v0.12.0 // indirect + github.com/nishanths/predeclared v0.2.2 // indirect + github.com/nunnatsa/ginkgolinter v0.19.1 // indirect + github.com/olekukonko/tablewriter v0.0.5 // indirect + github.com/pelletier/go-toml v1.9.5 // indirect + github.com/pelletier/go-toml/v2 v2.2.3 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/polyfloyd/go-errorlint v1.7.1 // indirect + github.com/prometheus/client_golang v1.12.1 // indirect + github.com/prometheus/client_model v0.2.0 // indirect + github.com/prometheus/common v0.32.1 // indirect + github.com/prometheus/procfs v0.7.3 // indirect + github.com/quasilyte/go-ruleguard v0.4.4 // indirect + github.com/quasilyte/go-ruleguard/dsl v0.3.22 // indirect + github.com/quasilyte/gogrep v0.5.0 // indirect + github.com/quasilyte/regex/syntax v0.0.0-20210819130434-b3f0c404a727 // indirect + github.com/quasilyte/stdinfo v0.0.0-20220114132959-f7386bf02567 // indirect + github.com/raeperd/recvcheck v0.2.0 // indirect + github.com/rivo/uniseg v0.4.7 // indirect + github.com/rogpeppe/go-internal v1.14.1 // indirect + github.com/ryancurrah/gomodguard v1.4.1 // indirect + github.com/ryanrolds/sqlclosecheck v0.5.1 // indirect + github.com/sanposhiho/wastedassign/v2 v2.1.0 // indirect + github.com/santhosh-tekuri/jsonschema/v6 v6.0.1 // indirect + github.com/sashamelentyev/interfacebloat v1.1.0 // indirect + github.com/sashamelentyev/usestdlibvars v1.28.0 // indirect + github.com/securego/gosec/v2 v2.22.2 // indirect + github.com/sirupsen/logrus v1.9.3 // indirect + github.com/sivchari/containedctx v1.0.3 // indirect + github.com/sonatard/noctx v0.1.0 // indirect + github.com/sourcegraph/go-diff v0.7.0 // indirect + github.com/spf13/afero v1.12.0 // indirect + github.com/spf13/cast v1.5.0 // indirect + github.com/spf13/cobra v1.9.1 // indirect + github.com/spf13/jwalterweatherman v1.1.0 // indirect + 
github.com/spf13/pflag v1.0.6 // indirect + github.com/spf13/viper v1.12.0 // indirect + github.com/ssgreg/nlreturn/v2 v2.2.1 // indirect + github.com/stbenjam/no-sprintf-host-port v0.2.0 // indirect + github.com/stretchr/objx v0.5.2 // indirect + github.com/stretchr/testify v1.10.0 // indirect + github.com/subosito/gotenv v1.4.1 // indirect + github.com/tdakkota/asciicheck v0.4.1 // indirect + github.com/tetafro/godot v1.5.0 // indirect + github.com/timakin/bodyclose v0.0.0-20241222091800-1db5c5ca4d67 // indirect + github.com/timonwong/loggercheck v0.10.1 // indirect + github.com/tomarrell/wrapcheck/v2 v2.10.0 // indirect + github.com/tommy-muehle/go-mnd/v2 v2.5.1 // indirect + github.com/ultraware/funlen v0.2.0 // indirect + github.com/ultraware/whitespace v0.2.0 // indirect + github.com/uudashr/gocognit v1.2.0 // indirect + github.com/uudashr/iface v1.3.1 // indirect + github.com/xen0n/gosmopolitan v1.3.0 // indirect + github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect + github.com/yagipy/maintidx v1.0.0 // indirect + github.com/yeya24/promlinter v0.3.0 // indirect + github.com/ykadowak/zerologlint v0.1.5 // indirect + gitlab.com/bosi/decorder v0.4.2 // indirect + go-simpler.org/musttag v0.13.0 // indirect + go-simpler.org/sloglint v0.9.0 // indirect + go.uber.org/atomic v1.7.0 // indirect + go.uber.org/automaxprocs v1.6.0 // indirect + go.uber.org/multierr v1.6.0 // indirect + go.uber.org/zap v1.24.0 // indirect + golang.org/x/exp/typeparams v0.0.0-20250210185358-939b2ce775ac // indirect + golang.org/x/mod v0.24.0 // indirect + golang.org/x/sync v0.12.0 // indirect + golang.org/x/sys v0.31.0 // indirect + golang.org/x/text v0.23.0 // indirect + google.golang.org/protobuf v1.36.5 // indirect + gopkg.in/ini.v1 v1.67.0 // indirect + gopkg.in/yaml.v2 v2.4.0 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect + honnef.co/go/tools v0.6.1 // indirect + mvdan.cc/gofumpt v0.7.0 // indirect + mvdan.cc/unparam v0.0.0-20250301125049-0df0534333a4 // 
indirect +) diff --git a/tools/go.sum b/tools/go.sum index 47d61c1436..b018109bd2 100644 --- a/tools/go.sum +++ b/tools/go.sum @@ -1,6 +1,7 @@ -4d63.com/gochecknoglobals v0.1.0 h1:zeZSRqj5yCg28tCkIV/z/lWbwvNm5qnKVS15PI8nhD0= -4d63.com/gochecknoglobals v0.1.0/go.mod h1:wfdC5ZjKSPr7CybKEcgJhUOgeAQW1+7WcyK8OvUilfo= -bitbucket.org/creachadair/shell v0.0.6/go.mod h1:8Qqi/cYk7vPnsOePHroKXDJYmb5x7ENhtiFtfZq8K+M= +4d63.com/gocheckcompilerdirectives v1.3.0 h1:Ew5y5CtcAAQeTVKUVFrE7EwHMrTO6BggtEj8BZSjZ3A= +4d63.com/gocheckcompilerdirectives v1.3.0/go.mod h1:ofsJ4zx2QAuIP/NO/NAh1ig6R1Fb18/GI7RVMwz7kAY= +4d63.com/gochecknoglobals v0.2.2 h1:H1vdnwnMaZdQW/N+NrkT1SZMTBmcwHe9Vq8lJcYYTtU= +4d63.com/gochecknoglobals v0.2.2/go.mod h1:lLxwTQjL5eIesRbvnzIP3jZtG140FnTdz+AlMa+ogt0= cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= @@ -14,19 +15,8 @@ cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6 cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= -cloud.google.com/go v0.60.0/go.mod h1:yw2G51M9IfRboUH61Us8GqCeF1PzPblB823Mn2q2eAU= cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= -cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= -cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk= -cloud.google.com/go v0.78.0/go.mod h1:QjdrLG0uq+YwhjoVOLsS1t7TW8fs36kLs4XO5R5ECHg= -cloud.google.com/go v0.79.0/go.mod h1:3bzgcEeQlzbuEAYu4mrWhKqWjmpprinYgKJLgKHnbb8= -cloud.google.com/go v0.81.0/go.mod 
h1:mk/AM35KwGk/Nm2YSeZbxXdrNK3KZOYHmLkOqC2V6E0= -cloud.google.com/go v0.83.0/go.mod h1:Z7MJUsANfY0pYPdw0lbnivPx4/vhy/e2FEkSkF7vAVY= -cloud.google.com/go v0.84.0/go.mod h1:RazrYuxIK6Kb7YrzzhPoLmCVzl7Sup4NrbKPg8KHSUM= -cloud.google.com/go v0.87.0/go.mod h1:TpDYlFy7vuLzZMMZ+B6iRiELaY7z/gJPaqbMx6mlWcY= -cloud.google.com/go v0.90.0/go.mod h1:kRX0mNRHe0e2rC6oNakvwQqzyDmg57xJ+SZU1eT2aDQ= -cloud.google.com/go v0.93.3/go.mod h1:8utlLll2EF5XMAV15woO4lSbWQlk8rer9aLOfLh7+YI= cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= @@ -35,200 +25,198 @@ cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4g cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= -cloud.google.com/go/firestore v1.1.0/go.mod h1:ulACoGHTpvq5r8rxGJ4ddJZBZqakUQqClKRT5SZwBmk= -cloud.google.com/go/firestore v1.6.0/go.mod h1:afJwI0vaXwAG54kI7A//lP/lSPDkQORQuMkv56TxEPU= cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= -cloud.google.com/go/pubsub v1.5.0/go.mod h1:ZEwJccE3z93Z2HWvstpri00jOg7oO4UZDtKhwDwqF0w= -cloud.google.com/go/spanner v1.7.0/go.mod h1:sd3K2gZ9Fd0vMPLXzeCrF6fq4i63Q7aTLW/lBIfBkIk= cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= cloud.google.com/go/storage v1.5.0/go.mod 
h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= -contrib.go.opencensus.io/exporter/stackdriver v0.13.4/go.mod h1:aXENhDJ1Y4lIg4EUaVTwzvYETVNZk10Pu26tevFKLUc= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= -github.com/Antonboom/errname v0.1.5 h1:IM+A/gz0pDhKmlt5KSNTVAvfLMb+65RxavBXpRtCUEg= -github.com/Antonboom/errname v0.1.5/go.mod h1:DugbBstvPFQbv/5uLcRRzfrNqKE9tVdVCqWCLp6Cifo= -github.com/Antonboom/nilnil v0.1.0 h1:DLDavmg0a6G/F4Lt9t7Enrbgb3Oph6LnDE6YVsmTt74= -github.com/Antonboom/nilnil v0.1.0/go.mod h1:PhHLvRPSghY5Y7mX4TW+BHZQYo1A8flE5H20D3IPZBo= +github.com/4meepo/tagalign v1.4.2 h1:0hcLHPGMjDyM1gHG58cS73aQF8J4TdVR96TZViorO9E= +github.com/4meepo/tagalign v1.4.2/go.mod h1:+p4aMyFM+ra7nb41CnFG6aSDXqRxU/w1VQqScKqDARI= +github.com/Abirdcfly/dupword v0.1.3 h1:9Pa1NuAsZvpFPi9Pqkd93I7LIYRURj+A//dFd5tgBeE= +github.com/Abirdcfly/dupword v0.1.3/go.mod h1:8VbB2t7e10KRNdwTVoxdBaxla6avbhGzb8sCTygUMhw= +github.com/Antonboom/errname v1.1.0 h1:A+ucvdpMwlo/myWrkHEUEBWc/xuXdud23S8tmTb/oAE= +github.com/Antonboom/errname v1.1.0/go.mod h1:O1NMrzgUcVBGIfi3xlVuvX8Q/VP/73sseCaAppfjqZw= +github.com/Antonboom/nilnil v1.1.0 h1:jGxJxjgYS3VUUtOTNk8Z1icwT5ESpLH/426fjmQG+ng= +github.com/Antonboom/nilnil v1.1.0/go.mod h1:b7sAlogQjFa1wV8jUW3o4PMzDVFLbTux+xnQdvzdcIE= +github.com/Antonboom/testifylint v1.6.0 h1:6rdILVPt4+rqcvhid8w9wJNynKLUgqHNpFyM67UeXyc= +github.com/Antonboom/testifylint v1.6.0/go.mod h1:k+nEkathI2NFjKO6HvwmSrbzUcQ6FAnbZV+ZRrnXPLI= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/BurntSushi/toml v0.4.1 h1:GaI7EiDXDRfa8VshkTj7Fym7ha+y8/XxIgD2okUIjLw= -github.com/BurntSushi/toml 
v0.4.1/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= +github.com/BurntSushi/toml v1.5.0 h1:W5quZX/G/csjUnuI8SUYlsHs9M38FC7znL0lIO+DvMg= +github.com/BurntSushi/toml v1.5.0/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= +github.com/Crocmagnon/fatcontext v0.7.1 h1:SC/VIbRRZQeQWj/TcQBS6JmrXcfA+BU4OGSVUt54PjM= +github.com/Crocmagnon/fatcontext v0.7.1/go.mod h1:1wMvv3NXEBJucFGfwOJBxSVWcoIO6emV215SMkW9MFU= github.com/Djarvur/go-err113 v0.0.0-20210108212216-aea10b59be24 h1:sHglBQTwgx+rWPdisA5ynNEsoARbiCBOyGcJM4/OzsM= github.com/Djarvur/go-err113 v0.0.0-20210108212216-aea10b59be24/go.mod h1:4UJr5HIiMZrwgkSPdsjy2uOQExX/WEILpIrO9UPGuXs= -github.com/Masterminds/goutils v1.1.0/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU= -github.com/Masterminds/semver v1.4.2/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y= -github.com/Masterminds/semver v1.5.0 h1:H65muMkzWKEuNDnfl9d70GUjFniHKHRbFPGBuZ3QEww= -github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y= -github.com/Masterminds/sprig v2.15.0+incompatible/go.mod h1:y6hNFY5UBTIWBxnzTeuNhlNS5hqE0NB0E6fgfo2Br3o= -github.com/Masterminds/sprig v2.22.0+incompatible/go.mod h1:y6hNFY5UBTIWBxnzTeuNhlNS5hqE0NB0E6fgfo2Br3o= -github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= -github.com/OpenPeeDeeP/depguard v1.0.1 h1:VlW4R6jmBIv3/u1JNlawEvJMM4J+dPORPaZasQee8Us= -github.com/OpenPeeDeeP/depguard v1.0.1/go.mod h1:xsIw86fROiiwelg+jB2uM9PiKihMMmUx/1V+TNhjQvM= -github.com/StackExchange/wmi v1.2.1/go.mod h1:rcmrprowKIVzvc+NUiLncP2uuArMWLCbu9SBzvHz7e8= +github.com/GaijinEntertainment/go-exhaustruct/v3 v3.3.1 h1:Sz1JIXEcSfhz7fUi7xHnhpIE0thVASYjvosApmHuD2k= +github.com/GaijinEntertainment/go-exhaustruct/v3 v3.3.1/go.mod h1:n/LSCXNuIYqVfBlVXyHfMQkZDdp1/mmxfSjADd3z1Zg= +github.com/Masterminds/semver/v3 v3.3.1 
h1:QtNSWtVZ3nBfk8mAOu/B6v7FMJ+NHTIgUPi7rj+4nv4= +github.com/Masterminds/semver/v3 v3.3.1/go.mod h1:4V+yj/TJE1HU9XfppCwVMZq3I84lprf4nC11bSS5beM= +github.com/OpenPeeDeeP/depguard/v2 v2.2.1 h1:vckeWVESWp6Qog7UZSARNqfu/cZqvki8zsuj3piCMx4= +github.com/OpenPeeDeeP/depguard/v2 v2.2.1/go.mod h1:q4DKzC4UcVaAvcfd41CZh0PWpGgzrVxUYBlgKNGquUo= +github.com/alecthomas/assert/v2 v2.11.0 h1:2Q9r3ki8+JYXvGsDyBXwH3LcJ+WK5D0gc5E8vS6K3D0= +github.com/alecthomas/assert/v2 v2.11.0/go.mod h1:Bze95FyfUr7x34QZrjL+XP+0qgp/zg8yS+TtBj1WA3k= +github.com/alecthomas/go-check-sumtype v0.3.1 h1:u9aUvbGINJxLVXiFvHUlPEaD7VDULsrxJb4Aq31NLkU= +github.com/alecthomas/go-check-sumtype v0.3.1/go.mod h1:A8TSiN3UPRw3laIgWEUOHHLPa6/r9MtoigdlP5h3K/E= +github.com/alecthomas/repr v0.4.0 h1:GhI2A8MACjfegCPVq9f1FLvIBS+DrQ2KQBFZP1iFzXc= +github.com/alecthomas/repr v0.4.0/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4= github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= +github.com/alexkohler/nakedret/v2 v2.0.5 h1:fP5qLgtwbx9EJE8dGEERT02YwS8En4r9nnZ71RK+EVU= +github.com/alexkohler/nakedret/v2 v2.0.5/go.mod h1:bF5i0zF2Wo2o4X4USt9ntUWve6JbFv02Ff4vlkmS/VU= github.com/alexkohler/prealloc v1.0.0 h1:Hbq0/3fJPQhNkN0dR95AVrr6R7tou91y0uHG5pOcUuw= github.com/alexkohler/prealloc v1.0.0/go.mod h1:VetnK3dIgFBBKmg0YnD9F9x6Icjd+9cvfHR56wJVlKE= -github.com/andybalholm/brotli v1.0.2/go.mod h1:loMXtMfwqflxFJPmdbJO0a3KNoPuLBgiu3qAvBg8x/Y= -github.com/andybalholm/brotli v1.0.3/go.mod 
h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig= -github.com/antihax/optional v0.0.0-20180407024304-ca021399b1a6/go.mod h1:V8iCPQYkqmusNa815XgQio277wI47sdRh1dUOLdyC6Q= -github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= -github.com/aokoli/goutils v1.0.1/go.mod h1:SijmP0QR8LtwsmDs8Yii5Z/S4trXFGFC2oO5g9DP+DQ= -github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= -github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8= -github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= -github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= -github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= -github.com/ashanbrown/forbidigo v1.2.0 h1:RMlEFupPCxQ1IogYOQUnIQwGEUGK8g5vAPMRyJoSxbc= -github.com/ashanbrown/forbidigo v1.2.0/go.mod h1:vVW7PEdqEFqapJe95xHkTfB1+XvZXBFg8t0sG2FIxmI= -github.com/ashanbrown/makezero v0.0.0-20210520155254-b6261585ddde h1:YOsoVXsZQPA9aOTy1g0lAJv5VzZUvwQuZqug8XPeqfM= -github.com/ashanbrown/makezero v0.0.0-20210520155254-b6261585ddde/go.mod h1:oG9Dnez7/ESBqc4EdrdNlryeo7d0KcW1ftXHm7nU/UU= -github.com/aws/aws-sdk-go v1.23.20/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= -github.com/aws/aws-sdk-go v1.25.37/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= -github.com/aws/aws-sdk-go v1.36.30/go.mod h1:hcU610XS61/+aQV88ixoOzUoG7v3b31pl2zKMmprdro= +github.com/alingse/asasalint v0.0.11 h1:SFwnQXJ49Kx/1GghOFz1XGqHYKp21Kq1nHad/0WQRnw= +github.com/alingse/asasalint v0.0.11/go.mod h1:nCaoMhw7a9kSJObvQyVzNTPBDbNpdocqrSP7t/cW5+I= +github.com/alingse/nilnesserr v0.1.2 h1:Yf8Iwm3z2hUUrP4muWfW83DF4nE3r1xZ26fGWUKCZlo= +github.com/alingse/nilnesserr v0.1.2/go.mod h1:1xJPrXonEtX7wyTq8Dytns5P2hNzoWymVUIaKm4HNFg= +github.com/ashanbrown/forbidigo v1.6.0 
h1:D3aewfM37Yb3pxHujIPSpTf6oQk9sc9WZi8gerOIVIY= +github.com/ashanbrown/forbidigo v1.6.0/go.mod h1:Y8j9jy9ZYAEHXdu723cUlraTqbzjKF1MUyfOKL+AjcU= +github.com/ashanbrown/makezero v1.2.0 h1:/2Lp1bypdmK9wDIq7uWBlDF1iMUpIIS4A+pF6C9IEUU= +github.com/ashanbrown/makezero v1.2.0/go.mod h1:dxlPhHbDMC6N6xICzFBSK+4njQDdK8euNO0qjQMtGY4= +github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k= +github.com/aymanbagabas/go-osc52/v2 v2.0.1/go.mod h1:uYgXzlJ7ZpABp8OJ+exZzJJhRNQ2ASbcXHWsFqH8hp8= +github.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8= +github.com/benbjohnson/clock v1.1.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA= github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= -github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= -github.com/bketelsen/crypt v0.0.4/go.mod h1:aI6NrJ0pMGgvZKL1iVgXLnfIFJtfV+bKCoqOes/6LfM= -github.com/bkielbasa/cyclop v1.2.0 h1:7Jmnh0yL2DjKfw28p86YTd/B4lRGcNuu12sKE35sM7A= -github.com/bkielbasa/cyclop v1.2.0/go.mod h1:qOI0yy6A7dYC4Zgsa72Ppm9kONl0RoIlPbzot9mhmeI= -github.com/blizzy78/varnamelen v0.3.0 h1:80mYO7Y5ppeEefg1Jzu+NBg16iwToOQVnDnNIoWSShs= -github.com/blizzy78/varnamelen v0.3.0/go.mod h1:hbwRdBvoBqxk34XyQ6HA0UH3G0/1TKuv5AC4eaBT0Ec= -github.com/bombsimon/wsl/v3 v3.3.0 h1:Mka/+kRLoQJq7g2rggtgQsjuI/K5Efd87WX96EWFxjM= -github.com/bombsimon/wsl/v3 v3.3.0/go.mod h1:st10JtZYLE4D5sC7b8xV4zTKZwAQjCH/Hy2Pm1FNZIc= -github.com/breml/bidichk v0.1.1 h1:Qpy8Rmgos9qdJxhka0K7ADEE5bQZX9PQUthkgggHpFM= -github.com/breml/bidichk v0.1.1/go.mod h1:zbfeitpevDUGI7V91Uzzuwrn4Vls8MoBMrwtt78jmso= -github.com/butuzov/ireturn v0.1.1 
h1:QvrO2QF2+/Cx1WA/vETCIYBKtRjc30vesdoPUNo1EbY= -github.com/butuzov/ireturn v0.1.1/go.mod h1:Wh6Zl3IMtTpaIKbmwzqi6olnM9ptYQxxVacMsOEFPoc= +github.com/bkielbasa/cyclop v1.2.3 h1:faIVMIGDIANuGPWH031CZJTi2ymOQBULs9H21HSMa5w= +github.com/bkielbasa/cyclop v1.2.3/go.mod h1:kHTwA9Q0uZqOADdupvcFJQtp/ksSnytRMe8ztxG8Fuo= +github.com/blizzy78/varnamelen v0.8.0 h1:oqSblyuQvFsW1hbBHh1zfwrKe3kcSj0rnXkKzsQ089M= +github.com/blizzy78/varnamelen v0.8.0/go.mod h1:V9TzQZ4fLJ1DSrjVDfl89H7aMnTvKkApdHeyESmyR7k= +github.com/bombsimon/wsl/v4 v4.6.0 h1:ew2R/N42su553DKTYqt3HSxaQN+uHQPv4xZ2MBmwaW4= +github.com/bombsimon/wsl/v4 v4.6.0/go.mod h1:uV/+6BkffuzSAVYD+yGyld1AChO7/EuLrCF/8xTiapg= +github.com/breml/bidichk v0.3.3 h1:WSM67ztRusf1sMoqH6/c4OBCUlRVTKq+CbSeo0R17sE= +github.com/breml/bidichk v0.3.3/go.mod h1:ISbsut8OnjB367j5NseXEGGgO/th206dVa427kR8YTE= +github.com/breml/errchkjson v0.4.1 h1:keFSS8D7A2T0haP9kzZTi7o26r7kE3vymjZNeNDRDwg= +github.com/breml/errchkjson v0.4.1/go.mod h1:a23OvR6Qvcl7DG/Z4o0el6BRAjKnaReoPQFciAl9U3s= +github.com/butuzov/ireturn v0.3.1 h1:mFgbEI6m+9W8oP/oDdfA34dLisRFCj2G6o/yiI1yZrY= +github.com/butuzov/ireturn v0.3.1/go.mod h1:ZfRp+E7eJLC0NQmk1Nrm1LOrn/gQlOykv+cVPdiXH5M= +github.com/butuzov/mirror v1.3.0 h1:HdWCXzmwlQHdVhwvsfBb2Au0r3HyINry3bDWLYXiKoc= +github.com/butuzov/mirror v1.3.0/go.mod h1:AEij0Z8YMALaq4yQj9CPPVYOyJQyiexpQEQgihajRfI= +github.com/catenacyber/perfsprint v0.9.1 h1:5LlTp4RwTooQjJCvGEFV6XksZvWE7wCOUvjD2z0vls0= +github.com/catenacyber/perfsprint v0.9.1/go.mod h1:q//VWC2fWbcdSLEY1R3l8n0zQCDPdE4IjZwyY1HMunM= +github.com/ccojocar/zxcvbn-go v1.0.2 h1:na/czXU8RrhXO4EZme6eQJLR4PzcGsahsBOAwU6I3Vg= +github.com/ccojocar/zxcvbn-go v1.0.2/go.mod h1:g1qkXtUSvHP8lhHp5GrSmTz6uWALGRMQdw6Qnz/hi60= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= -github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko= -github.com/cespare/xxhash v1.1.0/go.mod 
h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= -github.com/cespare/xxhash/v2 v2.1.1 h1:6MnRN8NT7+YBpUIWxHtefFZOKTAPgGjpQSxqLNn0+qY= github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= -github.com/charithe/durationcheck v0.0.9 h1:mPP4ucLrf/rKZiIG/a9IPXHGlh8p4CzgpyTy6EEutYk= -github.com/charithe/durationcheck v0.0.9/go.mod h1:SSbRIBVfMjCi/kEB6K65XEA83D6prSM8ap1UCpNKtgg= -github.com/chavacava/garif v0.0.0-20210405164556-e8a0a408d6af h1:spmv8nSH9h5oCQf40jt/ufBCt9j0/58u4G+rkeMqXGI= -github.com/chavacava/garif v0.0.0-20210405164556-e8a0a408d6af/go.mod h1:Qjyv4H3//PWVzTeCezG2b9IRn6myJxJSr4TD/xo6ojU= +github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= +github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/charithe/durationcheck v0.0.10 h1:wgw73BiocdBDQPik+zcEoBG/ob8uyBHf2iyoHGPf5w4= +github.com/charithe/durationcheck v0.0.10/go.mod h1:bCWXb7gYRysD1CU3C+u4ceO49LoGOY1C1L6uouGNreQ= +github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc h1:4pZI35227imm7yK2bGPcfpFEmuY1gc2YSTShr4iJBfs= +github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc/go.mod h1:X4/0JoqgTIPSFcRA/P6INZzIuyqdFY5rm8tb41s9okk= +github.com/charmbracelet/lipgloss v1.1.0 h1:vYXsiLHVkK7fp74RkV7b2kq9+zDLoEU4MZoFqR/noCY= +github.com/charmbracelet/lipgloss v1.1.0/go.mod h1:/6Q8FR2o+kj8rz4Dq0zQc3vYf7X+B0binUUBwA0aL30= +github.com/charmbracelet/x/ansi v0.8.0 h1:9GTq3xq9caJW8ZrBTe0LIe2fvfLR/bYXKTx2llXn7xE= +github.com/charmbracelet/x/ansi v0.8.0/go.mod h1:wdYl/ONOLHLIVmQaxbIYEC/cRKOQyjTkowiI4blgS9Q= +github.com/charmbracelet/x/cellbuf v0.0.13-0.20250311204145-2c3ea96c31dd h1:vy0GVL4jeHEwG5YOXDmi86oYw2yuYUGqz6a8sLwg0X8= +github.com/charmbracelet/x/cellbuf v0.0.13-0.20250311204145-2c3ea96c31dd/go.mod h1:xe0nKWGd3eJgtqZRaN9RjMtK7xUYchjzPr7q6kcvCCs= 
+github.com/charmbracelet/x/term v0.2.1 h1:AQeHeLZ1OqSXhrAWpYUtZyX1T3zVxfpZuEQMIQaGIAQ= +github.com/charmbracelet/x/term v0.2.1/go.mod h1:oQ4enTYFV7QN4m0i9mzHrViD7TQKvNEEkHUMCmsxdUg= +github.com/chavacava/garif v0.1.0 h1:2JHa3hbYf5D9dsgseMKAmc/MZ109otzgNFk5s87H9Pc= +github.com/chavacava/garif v0.1.0/go.mod h1:XMyYCkEL58DF0oyW4qDjjnPWONs2HBqYKI+UIPD+Gww= github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= +github.com/ckaznocha/intrange v0.3.1 h1:j1onQyXvHUsPWujDH6WIjhyH26gkRt/txNlV7LspvJs= +github.com/ckaznocha/intrange v0.3.1/go.mod h1:QVepyz1AkUoFQkpEqksSYpNpUo3c5W7nWh/s6SHIJJk= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= -github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= -github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= -github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= -github.com/cockroachdb/datadriven v0.0.0-20190809214429-80d97fb3cbaa/go.mod h1:zn76sxSg3SzpJ0PPJaLDCu+Bu0Lg3sKTORVIj19EIF8= -github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= -github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk= -github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= -github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= -github.com/coreos/go-systemd v0.0.0-20180511133405-39ca1b05acc7/go.mod 
h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= -github.com/coreos/go-systemd v0.0.0-20190620071333-e64a0ec8b42a/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= -github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= -github.com/coreos/pkg v0.0.0-20160727233714-3ac0863d7acf/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= -github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= -github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE= -github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= -github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= -github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY= -github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= -github.com/daixiang0/gci v0.2.9 h1:iwJvwQpBZmMg31w+QQ6jsyZ54KEATn6/nfARbBNW294= -github.com/daixiang0/gci v0.2.9/go.mod h1:+4dZ7TISfSmqfAGv59ePaHfNzgGtIkHAhhdKggP1JAc= -github.com/davecgh/go-spew v0.0.0-20161028175848-04cdfd42973b/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g= +github.com/curioswitch/go-reassign v0.3.0 h1:dh3kpQHuADL3cobV/sSGETA8DOv457dwl+fbBAhrQPs= +github.com/curioswitch/go-reassign v0.3.0/go.mod h1:nApPCCTtqLJN/s8HfItCcKV0jIPwluBOvZP+dsJGA88= +github.com/daixiang0/gci v0.13.6 h1:RKuEOSkGpSadkGbvZ6hJ4ddItT3cVZ9Vn9Rybk6xjl8= +github.com/daixiang0/gci v0.13.6/go.mod h1:12etP2OniiIdP4q+kjUGrC/rUagga7ODbqsom5Eo5Yk= +github.com/dave/dst v0.27.3 h1:P1HPoMza3cMEquVf9kKy8yXsFirry4zEnWOdYPOoIzY= +github.com/dave/dst v0.27.3/go.mod h1:jHh6EOibnHgcUW3WjKHisiooEkYwqpHLBSX1iOBhEyc= +github.com/dave/jennifer v1.7.1 h1:B4jJJDHelWcDhlRQxWeo0Npa/pYKBLrirAQoTN45txo= +github.com/dave/jennifer v1.7.1/go.mod 
h1:nXbxhEmQfOZhWml3D1cDK5M1FLnMSozpbFN/m3RmGZc= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/denis-tingajkin/go-header v0.4.2 h1:jEeSF4sdv8/3cT/WY8AgDHUoItNSoEZ7qg9dX7pc218= -github.com/denis-tingajkin/go-header v0.4.2/go.mod h1:eLRHAVXzE5atsKAnNRDB90WHCFFnBUn4RN0nRcs1LJA= -github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= -github.com/dustin/go-humanize v0.0.0-20171111073723-bb3d318650d4/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= -github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= +github.com/denis-tingaikin/go-header v0.5.0 h1:SRdnP5ZKvcO9KKRP1KJrhFR3RrlGuD+42t4429eC9k8= +github.com/denis-tingaikin/go-header v0.5.0/go.mod h1:mMenU5bWrok6Wl2UsZjy+1okegmwQ3UgWl4V1D8gjlY= +github.com/dlclark/regexp2 v1.11.0 h1:G/nrcoOa7ZXlpoa/91N3X7mM3r8eIlMBBJZvsz/mxKI= +github.com/dlclark/regexp2 v1.11.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8= github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= -github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= -github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= -github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= -github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod 
h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ= -github.com/envoyproxy/protoc-gen-validate v0.0.14/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= -github.com/esimonov/ifshort v1.0.3 h1:JD6x035opqGec5fZ0TLjXeROD2p5H7oLGn8MKfy9HTM= -github.com/esimonov/ifshort v1.0.3/go.mod h1:yZqNJUrNn20K8Q9n2CrjTKYyVEmX209Hgu+M1LBpeZE= -github.com/ettle/strcase v0.1.1 h1:htFueZyVeE1XNnMEfbqp5r67qAN/4r6ya1ysq8Q+Zcw= -github.com/ettle/strcase v0.1.1/go.mod h1:hzDLsPC7/lwKyBOywSHEP89nt2pDgdy+No1NBA9o9VY= -github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= -github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU= -github.com/fatih/color v1.10.0/go.mod h1:ELkj/draVOlAH/xkhN6mQ50Qd0MPOk5AAr3maGEBuJM= -github.com/fatih/color v1.13.0 h1:8LOYc1KYPPmyKMuN8QV2DNRWNbLo6LZ0iLs8+mlH53w= -github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= +github.com/ettle/strcase v0.2.0 h1:fGNiVF21fHXpX1niBgk0aROov1LagYsOwV/xqKDKR/Q= +github.com/ettle/strcase v0.2.0/go.mod h1:DajmHElDSaX76ITe3/VHVyMin4LWSJN5Z909Wp+ED1A= +github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM= +github.com/fatih/color v1.18.0/go.mod h1:4FelSpRwEGDpQ12mAdzqdOukCy4u8WUtOY6lkT/6HfU= github.com/fatih/structtag v1.2.0 h1:/OdNE99OxoI/PqaW/SuSK9uxxT3f/tcSZgon/ssNSx4= github.com/fatih/structtag v1.2.0/go.mod h1:mBJUNpUnHmRKrKlQQlmCrh5PuhftFbNv8Ys4/aAZl94= -github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= -github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= -github.com/fsnotify/fsnotify v1.5.1 h1:mZcQUHVQUQWoPXXtuf9yuEXKudkV2sx1E06UadKWpgI= -github.com/fsnotify/fsnotify v1.5.1/go.mod h1:T3375wBYaZdLLcVNkcVbzGHY7f1l/uK5T5Ai1i3InKU= -github.com/fullstorydev/grpcurl v1.6.0/go.mod h1:ZQ+ayqbKMJNhzLmbpCiurTVlaK2M/3nqZCxaQ2Ze/sM= 
-github.com/fzipp/gocyclo v0.3.1 h1:A9UeX3HJSXTBzvHzhqoYVuE0eAhe+aM8XBCCwsPMZOc= -github.com/fzipp/gocyclo v0.3.1/go.mod h1:DJHO6AUmbdqj2ET4Z9iArSuwWgYDRryYt2wASxc7x3E= -github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= -github.com/go-critic/go-critic v0.6.1 h1:lS8B9LH/VVsvQQP7Ao5TJyQqteVKVs3E4dXiHMyubtI= -github.com/go-critic/go-critic v0.6.1/go.mod h1:SdNCfU0yF3UBjtaZGw6586/WocupMOJuiqgom5DsQxM= +github.com/firefart/nonamedreturns v1.0.5 h1:tM+Me2ZaXs8tfdDw3X6DOX++wMCOqzYUho6tUTYIdRA= +github.com/firefart/nonamedreturns v1.0.5/go.mod h1:gHJjDqhGM4WyPt639SOZs+G89Ko7QKH5R5BhnO6xJhw= +github.com/frankban/quicktest v1.14.3 h1:FJKSZTDHjyhriyC81FLQ0LY93eSai0ZyR/ZIkd3ZUKE= +github.com/frankban/quicktest v1.14.3/go.mod h1:mgiwOwqx65TmIk1wJ6Q7wvnVMocbUorkibMOrVTHZps= +github.com/fsnotify/fsnotify v1.5.4 h1:jRbGcIw6P2Meqdwuo0H1p6JVLbL5DHKAKlYndzMwVZI= +github.com/fsnotify/fsnotify v1.5.4/go.mod h1:OVB6XrOHzAwXMpEM7uPOzcehqUV2UqJxmVXmkdnm1bU= +github.com/fzipp/gocyclo v0.6.0 h1:lsblElZG7d3ALtGMx9fmxeTKZaLLpU8mET09yN4BBLo= +github.com/fzipp/gocyclo v0.6.0/go.mod h1:rXPyn8fnlpa0R2csP/31uerbiVBugk5whMdlyaLkLoA= +github.com/ghostiam/protogetter v0.3.12 h1:xTPjH97iKph27vXRRKV0OCke5sAMoHPbVeVstdzmCLE= +github.com/ghostiam/protogetter v0.3.12/go.mod h1:WZ0nw9pfzsgxuRsPOFQomgDVSWtDLJRfQJEhsGbmQMA= +github.com/go-critic/go-critic v0.13.0 h1:kJzM7wzltQasSUXtYyTl6UaPVySO6GkaR1thFnJ6afY= +github.com/go-critic/go-critic v0.13.0/go.mod h1:M/YeuJ3vOCQDnP2SU+ZhjgRzwzcBW87JqLpMJLrZDLI= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= github.com/go-kit/kit v0.9.0/go.mod 
h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= -github.com/go-ole/go-ole v1.2.5/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= -github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= -github.com/go-redis/redis v6.15.8+incompatible/go.mod h1:NAIEuMOZ/fxfXJIrKDQDz8wamY7mA7PouImQ2Jvg6kA= -github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w= -github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= +github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= +github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY= +github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-quicktest/qt v1.101.0 h1:O1K29Txy5P2OK0dGo59b7b0LR6wKfIhttaAhHUyn7eI= +github.com/go-quicktest/qt v1.101.0/go.mod h1:14Bz/f7NwaXPtdYEgzsx46kqSxVwTbzVZsDC26tQJow= github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= -github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE= -github.com/go-toolsmith/astcast v1.0.0 h1:JojxlmI6STnFVG9yOImLeGREv8W2ocNUM+iOhR6jE7g= -github.com/go-toolsmith/astcast v1.0.0/go.mod h1:mt2OdQTeAQcY4DQgPSArJjHCcOwlX+Wl/kwN+LbLGQ4= -github.com/go-toolsmith/astcopy v1.0.0 h1:OMgl1b1MEpjFQ1m5ztEO06rz5CUd3oBv9RF7+DyvdG8= -github.com/go-toolsmith/astcopy v1.0.0/go.mod h1:vrgyG+5Bxrnz4MZWPF+pI4R8h3qKRjjyvV/DSez4WVQ= -github.com/go-toolsmith/astequal v1.0.0/go.mod h1:H+xSiq0+LtiDC11+h1G32h7Of5O3CYFJ99GVbS5lDKY= -github.com/go-toolsmith/astequal v1.0.1 h1:JbSszi42Jiqu36Gnf363HWS9MTEAz67vTQLponh3Moc= 
-github.com/go-toolsmith/astequal v1.0.1/go.mod h1:4oGA3EZXTVItV/ipGiOx7NWkY5veFfcsOJVS2YxltLw= -github.com/go-toolsmith/astfmt v1.0.0 h1:A0vDDXt+vsvLEdbMFJAUBI/uTbRw1ffOPnxsILnFL6k= -github.com/go-toolsmith/astfmt v1.0.0/go.mod h1:cnWmsOAuq4jJY6Ct5YWlVLmcmLMn1JUPuQIHCY7CJDw= -github.com/go-toolsmith/astinfo v0.0.0-20180906194353-9809ff7efb21/go.mod h1:dDStQCHtmZpYOmjRP/8gHHnCCch3Zz3oEgCdZVdtweU= -github.com/go-toolsmith/astp v1.0.0 h1:alXE75TXgcmupDsMK1fRAy0YUzLzqPVvBKoyWV+KPXg= -github.com/go-toolsmith/astp v1.0.0/go.mod h1:RSyrtpVlfTFGDYRbrjyWP1pYu//tSFcvdYrA8meBmLI= -github.com/go-toolsmith/pkgload v1.0.0 h1:4DFWWMXVfbcN5So1sBNW9+yeiMqLFGl1wFLTL5R0Tgg= -github.com/go-toolsmith/pkgload v1.0.0/go.mod h1:5eFArkbO80v7Z0kdngIxsRXRMTaX4Ilcwuh3clNrQJc= -github.com/go-toolsmith/strparse v1.0.0 h1:Vcw78DnpCAKlM20kSbAyO4mPfJn/lyYA4BJUDxe2Jb4= +github.com/go-task/slim-sprig/v3 v3.0.0 h1:sUs3vkvUymDpBKi3qH1YSqBQk9+9D/8M2mN1vB6EwHI= +github.com/go-task/slim-sprig/v3 v3.0.0/go.mod h1:W848ghGpv3Qj3dhTPRyJypKRiqCdHZiAzKg9hl15HA8= +github.com/go-toolsmith/astcast v1.1.0 h1:+JN9xZV1A+Re+95pgnMgDboWNVnIMMQXwfBwLRPgSC8= +github.com/go-toolsmith/astcast v1.1.0/go.mod h1:qdcuFWeGGS2xX5bLM/c3U9lewg7+Zu4mr+xPwZIB4ZU= +github.com/go-toolsmith/astcopy v1.1.0 h1:YGwBN0WM+ekI/6SS6+52zLDEf8Yvp3n2seZITCUBt5s= +github.com/go-toolsmith/astcopy v1.1.0/go.mod h1:hXM6gan18VA1T/daUEHCFcYiW8Ai1tIwIzHY6srfEAw= +github.com/go-toolsmith/astequal v1.0.3/go.mod h1:9Ai4UglvtR+4up+bAD4+hCj7iTo4m/OXVTSLnCyTAx4= +github.com/go-toolsmith/astequal v1.1.0/go.mod h1:sedf7VIdCL22LD8qIvv7Nn9MuWJruQA/ysswh64lffQ= +github.com/go-toolsmith/astequal v1.2.0 h1:3Fs3CYZ1k9Vo4FzFhwwewC3CHISHDnVUPC4x0bI2+Cw= +github.com/go-toolsmith/astequal v1.2.0/go.mod h1:c8NZ3+kSFtFY/8lPso4v8LuJjdJiUFVnSuU3s0qrrDY= +github.com/go-toolsmith/astfmt v1.1.0 h1:iJVPDPp6/7AaeLJEruMsBUlOYCmvg0MoCfJprsOmcco= +github.com/go-toolsmith/astfmt v1.1.0/go.mod h1:OrcLlRwu0CuiIBp/8b5PYF9ktGVZUjlNMV634mhwuQ4= +github.com/go-toolsmith/astp v1.1.0 
h1:dXPuCl6u2llURjdPLLDxJeZInAeZ0/eZwFJmqZMnpQA= +github.com/go-toolsmith/astp v1.1.0/go.mod h1:0T1xFGz9hicKs8Z5MfAqSUitoUYS30pDMsRVIDHs8CA= +github.com/go-toolsmith/pkgload v1.2.2 h1:0CtmHq/02QhxcF7E9N5LIFcYFsMR5rdovfqTtRKkgIk= +github.com/go-toolsmith/pkgload v1.2.2/go.mod h1:R2hxLNRKuAsiXCo2i5J6ZQPhnPMOVtU+f0arbFPWCus= github.com/go-toolsmith/strparse v1.0.0/go.mod h1:YI2nUKP9YGZnL/L1/DLFBfixrcjslWct4wyljWhSRy8= -github.com/go-toolsmith/typep v1.0.0/go.mod h1:JSQCQMUPdRlMZFswiq3TGpNp1GMktqkR2Ns5AIQkATU= -github.com/go-toolsmith/typep v1.0.2 h1:8xdsa1+FSIH/RhEkgnD1j2CJOy5mNllW1Q9tRiYwvlk= -github.com/go-toolsmith/typep v1.0.2/go.mod h1:JSQCQMUPdRlMZFswiq3TGpNp1GMktqkR2Ns5AIQkATU= -github.com/go-xmlfmt/xmlfmt v0.0.0-20191208150333-d5b6f63a941b h1:khEcpUM4yFcxg4/FHQWkvVRmgijNXRfzkIDHh23ggEo= -github.com/go-xmlfmt/xmlfmt v0.0.0-20191208150333-d5b6f63a941b/go.mod h1:aUCEOzzezBEjDBbFBoSiya/gduyIiWYRP6CnSFIV8AM= +github.com/go-toolsmith/strparse v1.1.0 h1:GAioeZUK9TGxnLS+qfdqNbA4z0SSm5zVNtCQiyP2Bvw= +github.com/go-toolsmith/strparse v1.1.0/go.mod h1:7ksGy58fsaQkGQlY8WVoBFNyEPMGuJin1rfoPS4lBSQ= +github.com/go-toolsmith/typep v1.1.0 h1:fIRYDyF+JywLfqzyhdiHzRop/GQDxxNhLGQ6gFUNHus= +github.com/go-toolsmith/typep v1.1.0/go.mod h1:fVIw+7zjdsMxDA3ITWnH1yOiw1rnTQKCsF/sk2H/qig= +github.com/go-viper/mapstructure/v2 v2.4.0 h1:EBsztssimR/CONLSZZ04E8qAkxNYq4Qp9LvH92wZUgs= +github.com/go-viper/mapstructure/v2 v2.4.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= +github.com/go-xmlfmt/xmlfmt v1.1.3 h1:t8Ey3Uy7jDSEisW2K3somuMKIpzktkWptA0iFCnRUWY= +github.com/go-xmlfmt/xmlfmt v1.1.3/go.mod h1:aUCEOzzezBEjDBbFBoSiya/gduyIiWYRP6CnSFIV8AM= github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y= github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8= -github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= -github.com/gofrs/flock v0.8.1 h1:+gYjHKf32LDeiEEFhQaotPbLuUXjY5ZqxKgXy7n59aw= 
-github.com/gofrs/flock v0.8.1/go.mod h1:F1TvTiK9OcQqauNUHlbJvyl9Qa1QvF/gOUDKA14jxHU= +github.com/gofrs/flock v0.12.1 h1:MTLVXXHf8ekldpJk3AKicLij9MdwOWkZ+a/jHHZby9E= +github.com/gofrs/flock v0.12.1/go.mod h1:9zxTsyu5xtJ9DK+1tFZyibEV7y3uwDxPPfbxeeHCoD0= github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= -github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4= -github.com/gogo/protobuf v1.3.0/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o= -github.com/gogo/protobuf v1.3.1/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o= -github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= -github.com/golang/groupcache v0.0.0-20160516000752-02826c3e7903/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= @@ -239,10 +227,8 @@ github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= -github.com/golang/mock v1.5.0/go.mod h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8= github.com/golang/mock v1.6.0 h1:ErTB+efbowRARo13NNdxyJji2egdxLGQhRaY+DUumQc= github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= -github.com/golang/protobuf v1.1.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= 
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= @@ -258,34 +244,29 @@ github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QD github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= -github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM= -github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw= github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= -github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= -github.com/golangci/check v0.0.0-20180506172741-cfe4005ccda2 h1:23T5iq8rbUYlhpt5DB4XJkc6BU31uODLD1o1gKvZmD0= -github.com/golangci/check v0.0.0-20180506172741-cfe4005ccda2/go.mod h1:k9Qvh+8juN+UKMCS/3jFtGICgW8O96FVaZsaxdzDkR4= -github.com/golangci/dupl v0.0.0-20180902072040-3e9179ac440a h1:w8hkcTqaFpzKqonE9uMCefW1WDie15eSP/4MssdenaM= -github.com/golangci/dupl v0.0.0-20180902072040-3e9179ac440a/go.mod h1:ryS0uhF+x9jgbj/N71xsEqODy9BN81/GonCZiOzirOk= -github.com/golangci/go-misc v0.0.0-20180628070357-927a3d87b613 h1:9kfjN3AdxcbsZBf8NjltjWihK2QfBBBZuv91cMFfDHw= -github.com/golangci/go-misc v0.0.0-20180628070357-927a3d87b613/go.mod h1:SyvUF2NxV+sN8upjjeVYr5W7tyxaT1JVtvhKhOn2ii8= -github.com/golangci/gofmt v0.0.0-20190930125516-244bba706f1a h1:iR3fYXUjHCR97qWS8ch1y9zPNsgXThGwjKPrYfqMPks= -github.com/golangci/gofmt v0.0.0-20190930125516-244bba706f1a/go.mod h1:9qCChq59u/eW8im404Q2WWTrnBUQKjpNYKMbU4M7EFU= -github.com/golangci/golangci-lint v1.43.0 h1:SLwZFEmDgopqZpfP495zCtV9REUf551JJlJ51Ql7NZA= 
-github.com/golangci/golangci-lint v1.43.0/go.mod h1:VIFlUqidx5ggxDfQagdvd9E67UjMXtTHBkBQ7sHoC5Q= -github.com/golangci/lint-1 v0.0.0-20191013205115-297bf364a8e0 h1:MfyDlzVjl1hoaPzPD4Gpb/QgoRfSBR0jdhwGyAWwMSA= -github.com/golangci/lint-1 v0.0.0-20191013205115-297bf364a8e0/go.mod h1:66R6K6P6VWk9I95jvqGxkqJxVWGFy9XlDwLwVz1RCFg= -github.com/golangci/maligned v0.0.0-20180506175553-b1d89398deca h1:kNY3/svz5T29MYHubXix4aDDuE3RWHkPvopM/EDv/MA= -github.com/golangci/maligned v0.0.0-20180506175553-b1d89398deca/go.mod h1:tvlJhZqDe4LMs4ZHD0oMUlt9G2LWuDGoisJTBzLMV9o= -github.com/golangci/misspell v0.3.5 h1:pLzmVdl3VxTOncgzHcvLOKirdvcx/TydsClUQXTehjo= -github.com/golangci/misspell v0.3.5/go.mod h1:dEbvlSfYbMQDtrpRMQU675gSDLDNa8sCPPChZ7PhiVA= -github.com/golangci/revgrep v0.0.0-20210930125155-c22e5001d4f2 h1:SgM7GDZTxtTTQPU84heOxy34iG5Du7F2jcoZnvp+fXI= -github.com/golangci/revgrep v0.0.0-20210930125155-c22e5001d4f2/go.mod h1:LK+zW4MpyytAWQRz0M4xnzEk50lSvqDQKfx304apFkY= -github.com/golangci/unconvert v0.0.0-20180507085042-28b1c447d1f4 h1:zwtduBRr5SSWhqsYNgcuWO2kFlpdOZbP0+yRjmvPGys= -github.com/golangci/unconvert v0.0.0-20180507085042-28b1c447d1f4/go.mod h1:Izgrg8RkN3rCIMLGE9CyYmU9pY2Jer6DgANEnZ/L/cQ= +github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg= +github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/golangci/dupl v0.0.0-20250308024227-f665c8d69b32 h1:WUvBfQL6EW/40l6OmeSBYQJNSif4O11+bmWEz+C7FYw= +github.com/golangci/dupl v0.0.0-20250308024227-f665c8d69b32/go.mod h1:NUw9Zr2Sy7+HxzdjIULge71wI6yEg1lWQr7Evcu8K0E= +github.com/golangci/go-printf-func-name v0.1.0 h1:dVokQP+NMTO7jwO4bwsRwLWeudOVUPPyAKJuzv8pEJU= +github.com/golangci/go-printf-func-name v0.1.0/go.mod h1:wqhWFH5mUdJQhweRnldEywnR5021wTdZSNgwYceV14s= +github.com/golangci/gofmt v0.0.0-20250106114630-d62b90e6713d h1:viFft9sS/dxoYY0aiOTsLKO2aZQAPT4nlQCsimGcSGE= +github.com/golangci/gofmt v0.0.0-20250106114630-d62b90e6713d/go.mod 
h1:ivJ9QDg0XucIkmwhzCDsqcnxxlDStoTl89jDMIoNxKY= +github.com/golangci/golangci-lint/v2 v2.0.2 h1:dMCC8ikPiLDvHMFy3+XypSAuGDBOLzwWqqamer+bWsY= +github.com/golangci/golangci-lint/v2 v2.0.2/go.mod h1:ptNNMeGBQrbves0Qq38xvfdJg18PzxmT+7KRCOpm6i8= +github.com/golangci/golines v0.0.0-20250217134842-442fd0091d95 h1:AkK+w9FZBXlU/xUmBtSJN1+tAI4FIvy5WtnUnY8e4p8= +github.com/golangci/golines v0.0.0-20250217134842-442fd0091d95/go.mod h1:k9mmcyWKSTMcPPvQUCfRWWQ9VHJ1U9Dc0R7kaXAgtnQ= +github.com/golangci/misspell v0.6.0 h1:JCle2HUTNWirNlDIAUO44hUsKhOFqGPoC4LZxlaSXDs= +github.com/golangci/misspell v0.6.0/go.mod h1:keMNyY6R9isGaSAu+4Q8NMBwMPkh15Gtc8UCVoDtAWo= +github.com/golangci/plugin-module-register v0.1.1 h1:TCmesur25LnyJkpsVrupv1Cdzo+2f7zX0H6Jkw1Ol6c= +github.com/golangci/plugin-module-register v0.1.1/go.mod h1:TTpqoB6KkwOJMV8u7+NyXMrkwwESJLOkfl9TxR1DGFc= +github.com/golangci/revgrep v0.8.0 h1:EZBctwbVd0aMeRnNUsFogoyayvKHyxlV3CdUA46FX2s= +github.com/golangci/revgrep v0.8.0/go.mod h1:U4R/s9dlXZsg8uJmaR1GrloUr14D7qDl8gi2iPXJH8k= +github.com/golangci/unconvert v0.0.0-20240309020433-c5143eacb3ed h1:IURFTjxeTfNFP0hTEi1YKjB/ub8zkpaOqFFMApi2EAs= +github.com/golangci/unconvert v0.0.0-20240309020433-c5143eacb3ed/go.mod h1:XLXN8bNw4CGRPaqgl3bv/lhz7bsGPh4/xSaMTbo2vkQ= github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= -github.com/google/certificate-transparency-go v1.0.21/go.mod h1:QeJfpSbVSfYc7RgB3gJFj9cbuQMMchQxrWXz8Ruopmg= -github.com/google/certificate-transparency-go v1.1.1/go.mod h1:FDKqPvSXawb2ecErVRrD+nfy23RCzyl7eqVCEmlT1Zs= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= @@ -294,512 +275,365 @@ github.com/google/go-cmp 
v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.6 h1:BKbKCqvP6I+rmFHt06ZmyQtvB8xAkWdhFyr0ZUNZcxQ= github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= +github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= -github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= -github.com/google/martian/v3 v3.2.1/go.mod h1:oBOf6HBosgwRXnUGWUB05QECsc6uvmMiJ3+6W4l/CUk= github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod 
h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200507031123-427632fa3b1c/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20241210010833-40e02aabc2ad h1:a6HEuzUHeKH6hwfN/ZoQgRgVIWFJljSWa/zetS2WTvg= +github.com/google/pprof v0.0.0-20241210010833-40e02aabc2ad/go.mod h1:vavhavw2zAxS5dIdcRluK6cSGGPlZynqzFM8NdvU144= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= -github.com/google/trillian v1.3.11/go.mod h1:0tPraVHrSDkA3BO6vKX67zgLXs6SsOAbHEivX+9mPgw= -github.com/google/uuid v0.0.0-20161128191214-064e2069ce9c/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/google/uuid 
v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= -github.com/googleapis/gax-go/v2 v2.1.0/go.mod h1:Q3nei7sK6ybPYH7twZdmQpAd1MKb7pfu6SK+H1/DsU0= -github.com/gookit/color v1.4.2/go.mod h1:fqRyamkC1W8uxl+lxCQxOT09l/vYfZ+QeiX3rKQHCoQ= -github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= -github.com/gordonklaus/ineffassign v0.0.0-20200309095847-7953dde2c7bf/go.mod h1:cuNKsD1zp2v6XfE/orVX2QE1LC+i254ceGcVeDT3pTU= -github.com/gordonklaus/ineffassign v0.0.0-20210225214923-2e10b2664254 h1:Nb2aRlC404yz7gQIfRZxX9/MLvQiqXyiBTJtgAy6yrI= -github.com/gordonklaus/ineffassign v0.0.0-20210225214923-2e10b2664254/go.mod h1:M9mZEtGIsR1oDaZagNPNG9iq9n2HrhZ17dsXk73V3Lw= -github.com/gorhill/cronexpr v0.0.0-20180427100037-88b0669f7d75/go.mod h1:g2644b03hfBX9Ov0ZBDgXXens4rxSxmqFBbhvKv2yVA= -github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So= -github.com/gorilla/websocket v0.0.0-20170926233335-4201258b820c/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= -github.com/gorilla/websocket v1.4.1/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= -github.com/gostaticanalysis/analysisutil v0.0.0-20190318220348-4088753ea4d3/go.mod h1:eEOZF4jCKGi+aprrirO9e7WKB3beBRtWgqGunKl6pKE= -github.com/gostaticanalysis/analysisutil v0.0.3/go.mod h1:eEOZF4jCKGi+aprrirO9e7WKB3beBRtWgqGunKl6pKE= -github.com/gostaticanalysis/analysisutil v0.1.0/go.mod h1:dMhHRU9KTiDcuLGdy87/2gTR8WruwYZrKdRq9m1O6uw= -github.com/gostaticanalysis/analysisutil v0.4.1/go.mod h1:18U/DLpRgIUd459wGxVHE0fRgmo1UgHDcbw7F5idXu0= +github.com/gordonklaus/ineffassign v0.1.0 h1:y2Gd/9I7MdY1oEIt+n+rowjBNDcLQq3RsH5hwJd0f9s= +github.com/gordonklaus/ineffassign v0.1.0/go.mod h1:Qcp2HIAYhR7mNUVSIxZww3Guk4it82ghYcEXIAk+QT0= 
github.com/gostaticanalysis/analysisutil v0.7.1 h1:ZMCjoue3DtDWQ5WyU16YbjbQEQ3VuzwxALrpYd+HeKk= github.com/gostaticanalysis/analysisutil v0.7.1/go.mod h1:v21E3hY37WKMGSnbsw2S/ojApNWb6C1//mXO48CXbVc= -github.com/gostaticanalysis/comment v1.3.0/go.mod h1:xMicKDx7XRXYdVwY9f9wQpDJVnqWxw9wCauCMKp+IBI= github.com/gostaticanalysis/comment v1.4.1/go.mod h1:ih6ZxzTHLdadaiSnF5WY3dxUoXfXAlTaRzuaNDlSado= -github.com/gostaticanalysis/comment v1.4.2 h1:hlnx5+S2fY9Zo9ePo4AhgYsYHbM2+eAv8m/s1JiCd6Q= github.com/gostaticanalysis/comment v1.4.2/go.mod h1:KLUTGDv6HOCotCH8h2erHKmpci2ZoR8VPu34YA2uzdM= -github.com/gostaticanalysis/forcetypeassert v0.0.0-20200621232751-01d4955beaa5 h1:rx8127mFPqXXsfPSo8BwnIU97MKFZc89WHAHt8PwDVY= -github.com/gostaticanalysis/forcetypeassert v0.0.0-20200621232751-01d4955beaa5/go.mod h1:qZEedyP/sY1lTGV1uJ3VhWZ2mqag3IkWsDHVbplHXak= +github.com/gostaticanalysis/comment v1.5.0 h1:X82FLl+TswsUMpMh17srGRuKaaXprTaytmEpgnKIDu8= +github.com/gostaticanalysis/comment v1.5.0/go.mod h1:V6eb3gpCv9GNVqb6amXzEUX3jXLVK/AdA+IrAMSqvEc= +github.com/gostaticanalysis/forcetypeassert v0.2.0 h1:uSnWrrUEYDr86OCxWa4/Tp2jeYDlogZiZHzGkWFefTk= +github.com/gostaticanalysis/forcetypeassert v0.2.0/go.mod h1:M5iPavzE9pPqWyeiVXSFghQjljW1+l/Uke3PXHS6ILY= github.com/gostaticanalysis/nilerr v0.1.1 h1:ThE+hJP0fEp4zWLkWHWcRyI2Od0p7DlgYG3Uqrmrcpk= github.com/gostaticanalysis/nilerr v0.1.1/go.mod h1:wZYb6YI5YAxxq0i1+VJbY0s2YONW0HU0GPE3+5PWN4A= github.com/gostaticanalysis/testutil v0.3.1-0.20210208050101-bfb5c8eec0e4/go.mod h1:D+FIZ+7OahH3ePw/izIEeH5I06eKs1IKI4Xr64/Am3M= -github.com/gostaticanalysis/testutil v0.4.0 h1:nhdCmubdmDF6VEatUNjgUZBJKWRqugoISdUv3PPQgHY= -github.com/gostaticanalysis/testutil v0.4.0/go.mod h1:bLIoPefWXrRi/ssLFWX1dx7Repi5x3CuviD3dgAZaBU= -github.com/gregjones/httpcache v0.0.0-20190611155906-901d90724c79/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA= -github.com/grpc-ecosystem/go-grpc-middleware v1.0.1-0.20190118093823-f849b5445de4/go.mod 
h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= -github.com/grpc-ecosystem/go-grpc-middleware v1.2.2/go.mod h1:EaizFBKfUKtMIF5iaDEhniwNedqGo9FuLFzppDr3uwI= -github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk= -github.com/grpc-ecosystem/grpc-gateway v1.9.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= -github.com/grpc-ecosystem/grpc-gateway v1.12.1/go.mod h1:8XEsbTttt/W+VvjtQhLACqCisSPWTxCZ7sBRjU6iH9c= -github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= -github.com/hashicorp/consul/api v1.1.0/go.mod h1:VmuI/Lkw1nC05EYQWNKwWGbkg+FbDBtguAZLlVdkD9Q= -github.com/hashicorp/consul/api v1.10.1/go.mod h1:XjsvQN+RJGWI2TWy1/kqaE16HrR2J/FWgkYjdZQsX9M= -github.com/hashicorp/consul/sdk v0.1.1/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8= -github.com/hashicorp/consul/sdk v0.8.0/go.mod h1:GBvyrGALthsZObzUGsfgHZQDXjg4lOjagTIwIR1vPms= -github.com/hashicorp/errwrap v1.0.0 h1:hLrqtEDnRye3+sgx6z4qVLNuviH3MR5aQ0ykNJa/UYA= -github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= -github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= -github.com/hashicorp/go-hclog v0.12.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= -github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= -github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM= -github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= -github.com/hashicorp/go-multierror v1.1.0/go.mod h1:spPvp8C1qA32ftKqdAHm4hHTbPw+vmowP0z+KUhOZdA= -github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= -github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= -github.com/hashicorp/go-rootcerts v1.0.0/go.mod 
h1:K6zTfqpRlCUIjkwsN4Z+hiSfzSTQa6eBIzfwKfwNnHU= -github.com/hashicorp/go-rootcerts v1.0.2/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8= -github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU= -github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4= -github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= -github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= -github.com/hashicorp/go-version v1.2.1 h1:zEfKbn2+PDgroKdiOzqiE8rsmLqU2uwi5PB5pBJ3TkI= +github.com/gostaticanalysis/testutil v0.5.0 h1:Dq4wT1DdTwTGCQQv3rl3IvD5Ld0E6HiY+3Zh0sUGqw8= +github.com/gostaticanalysis/testutil v0.5.0/go.mod h1:OLQSbuM6zw2EvCcXTz1lVq5unyoNft372msDY0nY5Hs= +github.com/hashicorp/go-immutable-radix/v2 v2.1.0 h1:CUW5RYIcysz+D3B+l1mDeXrQ7fUvGGCwJfdASSzbrfo= +github.com/hashicorp/go-immutable-radix/v2 v2.1.0/go.mod h1:hgdqLXA4f6NIjRVisM1TJ9aOJVNRqKZj+xDGF6m7PBw= +github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/Co8= +github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/go-version v1.2.1/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= -github.com/hashicorp/go.net v0.0.1/go.mod h1:hjKkEWcCURg++eb33jQU7oqQcI9XDCnUzHA0oac0k90= +github.com/hashicorp/go-version v1.7.0 h1:5tqGy27NaOTB8yJKUZELlFAS/LTKJkrmONwQKeRZfjY= +github.com/hashicorp/go-version v1.7.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= -github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= +github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k= +github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod 
h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM= github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= -github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= -github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0mNTz8vQ= -github.com/hashicorp/mdns v1.0.1/go.mod h1:4gW7WsVCke5TE7EPeYliwHlRUyBtfCwuFwuMg2DmyNY= -github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I= -github.com/hashicorp/memberlist v0.2.2/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOnAH9VT3Sh9MUE= -github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc= -github.com/hashicorp/serf v0.9.5/go.mod h1:UWDWwZeL5cuWDJdl0C6wrvrUwEqtQ4ZKBKKENpqIUyk= -github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= -github.com/huandu/xstrings v1.0.0/go.mod h1:4qWG/gcEcfX4z/mBDHJ++3ReCw9ibxbsNJbcucJdbSo= -github.com/huandu/xstrings v1.2.0/go.mod h1:DvyZB1rfVYsBIigL8HwpZgxHwXozlTgGqn63UyNX5k4= +github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM= +github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg= github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= -github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= -github.com/imdario/mergo v0.3.4/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= -github.com/imdario/mergo v0.3.8/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= -github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM= -github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= -github.com/jgautheron/goconst v1.5.1 
h1:HxVbL1MhydKs8R8n/HE5NPvzfaYmQJA3o879lE4+WcM= -github.com/jgautheron/goconst v1.5.1/go.mod h1:aAosetZ5zaeC/2EfMeRswtxUFBpe2Hr7HzkgX4fanO4= -github.com/jhump/protoreflect v1.6.1/go.mod h1:RZQ/lnuN+zqeRVpQigTwO6o0AJUkxbnSnpuG7toUTG4= +github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= +github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= +github.com/jgautheron/goconst v1.7.1 h1:VpdAG7Ca7yvvJk5n8dMwQhfEZJh95kl/Hl9S1OI5Jkk= +github.com/jgautheron/goconst v1.7.1/go.mod h1:aAosetZ5zaeC/2EfMeRswtxUFBpe2Hr7HzkgX4fanO4= github.com/jingyugao/rowserrcheck v1.1.1 h1:zibz55j/MJtLsjP1OF4bSdgXxwL1b+Vn7Tjzq7gFzUs= github.com/jingyugao/rowserrcheck v1.1.1/go.mod h1:4yvlZSDb3IyDTUZJUmpZfm2Hwok+Dtp+nu2qOq+er9c= -github.com/jirfag/go-printf-func-name v0.0.0-20200119135958-7558a9eaa5af h1:KA9BjwUk7KlCh6S9EAGWBt1oExIUv9WyNCiRz5amv48= -github.com/jirfag/go-printf-func-name v0.0.0-20200119135958-7558a9eaa5af/go.mod h1:HEWGJkRDzjJY2sqdDwxccsGicWEf9BQOZsq2tV+xzM0= -github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= -github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= -github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= -github.com/jmoiron/sqlx v1.2.0/go.mod h1:1FEQNm3xlJgrMD+FBdI9+xvCksHtbpVBBw5dYhBSsks= -github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo= -github.com/jonboulle/clockwork v0.2.0/go.mod h1:Pkfl5aHPm1nk2H9h0bjmnJD/BcgbGXUBGnn1kMkgxc8= -github.com/josharian/txtarfs v0.0.0-20210218200122-0702f000015a/go.mod h1:izVPOvVRsHiKkeGCT6tYBNWyDVuzj9wAaBb5R9qamfw= +github.com/jjti/go-spancheck v0.6.4 h1:Tl7gQpYf4/TMU7AT84MN83/6PutY21Nb9fuQjFTpRRc= +github.com/jjti/go-spancheck v0.6.4/go.mod h1:yAEYdKJ2lRkDA8g7X+oKUHXOWVAXSBJRv04OhF+QUjk= +github.com/jpillora/backoff v1.0.0/go.mod 
h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= -github.com/json-iterator/go v1.1.7/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= -github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= -github.com/juju/ratelimit v1.0.1/go.mod h1:qapgC/Gy+xNh9UxzV13HGGl/6UXNN+ct+vwSgWNm/qk= github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= -github.com/julz/importas v0.0.0-20210419104244-841f0c0fe66d h1:XeSMXURZPtUffuWAaq90o6kLgZdgu+QA8wk4MPC8ikI= -github.com/julz/importas v0.0.0-20210419104244-841f0c0fe66d/go.mod h1:oSFU2R4XK/P7kNBrnL/FEQlDGN1/6WoxXEjSSXO0DV0= -github.com/k0kubun/colorstring v0.0.0-20150214042306-9440f1994b88/go.mod h1:3w7q1U84EfirKl04SVQ/s7nPm1ZPhiXd34z40TNz36k= -github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q= -github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00= -github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= -github.com/kisielk/errcheck v1.6.0 h1:YTDO4pNy7AUN/021p+JGHycQyYNIyMoenM1YDVK6RlY= -github.com/kisielk/errcheck v1.6.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= -github.com/kisielk/gotool v1.0.0 h1:AV2c/EiW3KqPNT9ZKl07ehoAGi4C5/01Cfbblndcapg= +github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= 
+github.com/julz/importas v0.2.0 h1:y+MJN/UdL63QbFJHws9BVC5RpA2iq0kpjrFajTGivjQ= +github.com/julz/importas v0.2.0/go.mod h1:pThlt589EnCYtMnmhmRYY/qn9lCf/frPOK+WMx3xiJY= +github.com/karamaru-alpha/copyloopvar v1.2.1 h1:wmZaZYIjnJ0b5UoKDjUHrikcV0zuPyyxI4SVplLd2CI= +github.com/karamaru-alpha/copyloopvar v1.2.1/go.mod h1:nFmMlFNlClC2BPvNaHMdkirmTJxVCY0lhxBtlfOypMM= +github.com/kisielk/errcheck v1.9.0 h1:9xt1zI9EBfcYBvdU1nVrzMzzUPUtPKs9bVSIM3TAb3M= +github.com/kisielk/errcheck v1.9.0/go.mod h1:kQxWMMVZgIkDq7U8xtG/n2juOjbLgZtedi0D+/VL/i8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= -github.com/klauspost/compress v1.13.4/go.mod h1:8dP1Hq4DHOhN9w426knH3Rhby4rFm6D8eO+e+Dq5Gzg= -github.com/klauspost/compress v1.13.5/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= +github.com/kkHAIKE/contextcheck v1.1.6 h1:7HIyRcnyzxL9Lz06NGhiKvenXq7Zw6Q0UQu/ttjfJCE= +github.com/kkHAIKE/contextcheck v1.1.6/go.mod h1:3dDbMRNBFaq8HFXWC1JyvDSPm43CmE6IuHam8Wr0rkg= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= -github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= -github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= -github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod 
h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= -github.com/kulti/thelper v0.4.0 h1:2Nx7XbdbE/BYZeoip2mURKUdtHQRuy6Ug+wR7K9ywNM= -github.com/kulti/thelper v0.4.0/go.mod h1:vMu2Cizjy/grP+jmsvOFDx1kYP6+PD1lqg4Yu5exl2U= -github.com/kunwardeep/paralleltest v1.0.3 h1:UdKIkImEAXjR1chUWLn+PNXqWUGs//7tzMeWuP7NhmI= -github.com/kunwardeep/paralleltest v1.0.3/go.mod h1:vLydzomDFpk7yu5UX02RmP0H8QfRPOV/oFhWN85Mjb4= -github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= -github.com/kyoh86/exportloopref v0.1.8 h1:5Ry/at+eFdkX9Vsdw3qU4YkvGtzuVfzT4X7S77LoN/M= -github.com/kyoh86/exportloopref v0.1.8/go.mod h1:1tUcJeiioIs7VWe5gcOObrux3lb66+sBqGZrRkMwPgg= -github.com/ldez/gomoddirectives v0.2.2 h1:p9/sXuNFArS2RLc+UpYZSI4KQwGMEDWC/LbtF5OPFVg= -github.com/ldez/gomoddirectives v0.2.2/go.mod h1:cpgBogWITnCfRq2qGoDkKMEVSaarhdBr6g8G04uz6d0= -github.com/ldez/tagliatelle v0.2.0 h1:693V8Bf1NdShJ8eu/s84QySA0J2VWBanVBa2WwXD/Wk= -github.com/ldez/tagliatelle v0.2.0/go.mod h1:8s6WJQwEYHbKZDsp/LjArytKOG8qaMrKQQ3mFukHs88= -github.com/letsencrypt/pkcs11key/v4 v4.0.0/go.mod h1:EFUvBDay26dErnNb70Nd0/VW3tJiIbETBPTl9ATXQag= -github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= -github.com/lib/pq v1.8.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= -github.com/lib/pq v1.9.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= -github.com/lib/pq v1.10.3/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= -github.com/logrusorgru/aurora v0.0.0-20181002194514-a7b3b318ed4e/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4= -github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I= -github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= -github.com/magiconair/properties v1.8.5 
h1:b6kJs+EmPFMYGkow9GiUyCyOvIwYetYJ3fSaWak/Gls= -github.com/magiconair/properties v1.8.5/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= -github.com/maratori/testpackage v1.0.1 h1:QtJ5ZjqapShm0w5DosRjg0PRlSdAdlx+W6cCKoALdbQ= -github.com/maratori/testpackage v1.0.1/go.mod h1:ddKdw+XG0Phzhx8BFDTKgpWP4i7MpApTE5fXSKAqwDU= -github.com/matoous/godox v0.0.0-20210227103229-6504466cf951 h1:pWxk9e//NbPwfxat7RXkts09K+dEBJWakUWwICVqYbA= -github.com/matoous/godox v0.0.0-20210227103229-6504466cf951/go.mod h1:1BELzlh859Sh1c6+90blK8lbYy0kwQf1bYlBhBysy1s= +github.com/kulti/thelper v0.6.3 h1:ElhKf+AlItIu+xGnI990no4cE2+XaSu1ULymV2Yulxs= +github.com/kulti/thelper v0.6.3/go.mod h1:DsqKShOvP40epevkFrvIwkCMNYxMeTNjdWL4dqWHZ6I= +github.com/kunwardeep/paralleltest v1.0.10 h1:wrodoaKYzS2mdNVnc4/w31YaXFtsc21PCTdvWJ/lDDs= +github.com/kunwardeep/paralleltest v1.0.10/go.mod h1:2C7s65hONVqY7Q5Efj5aLzRCNLjw2h4eMc9EcypGjcY= +github.com/lasiar/canonicalheader v1.1.2 h1:vZ5uqwvDbyJCnMhmFYimgMZnJMjwljN5VGY0VKbMXb4= +github.com/lasiar/canonicalheader v1.1.2/go.mod h1:qJCeLFS0G/QlLQ506T+Fk/fWMa2VmBUiEI2cuMK4djI= +github.com/ldez/exptostd v0.4.2 h1:l5pOzHBz8mFOlbcifTxzfyYbgEmoUqjxLFHZkjlbHXs= +github.com/ldez/exptostd v0.4.2/go.mod h1:iZBRYaUmcW5jwCR3KROEZ1KivQQp6PHXbDPk9hqJKCQ= +github.com/ldez/gomoddirectives v0.6.1 h1:Z+PxGAY+217f/bSGjNZr/b2KTXcyYLgiWI6geMBN2Qc= +github.com/ldez/gomoddirectives v0.6.1/go.mod h1:cVBiu3AHR9V31em9u2kwfMKD43ayN5/XDgr+cdaFaKs= +github.com/ldez/grignotin v0.9.0 h1:MgOEmjZIVNn6p5wPaGp/0OKWyvq42KnzAt/DAb8O4Ow= +github.com/ldez/grignotin v0.9.0/go.mod h1:uaVTr0SoZ1KBii33c47O1M8Jp3OP3YDwhZCmzT9GHEk= +github.com/ldez/tagliatelle v0.7.1 h1:bTgKjjc2sQcsgPiT902+aadvMjCeMHrY7ly2XKFORIk= +github.com/ldez/tagliatelle v0.7.1/go.mod h1:3zjxUpsNB2aEZScWiZTHrAXOl1x25t3cRmzfK1mlo2I= +github.com/ldez/usetesting v0.4.2 h1:J2WwbrFGk3wx4cZwSMiCQQ00kjGR0+tuuyW0Lqm4lwA= +github.com/ldez/usetesting v0.4.2/go.mod h1:eEs46T3PpQ+9RgN9VjpY6qWdiw2/QmfiDeWmdZdrjIQ= 
+github.com/leonklingele/grouper v1.1.2 h1:o1ARBDLOmmasUaNDesWqWCIFH3u7hoFlM84YrjT3mIY= +github.com/leonklingele/grouper v1.1.2/go.mod h1:6D0M/HVkhs2yRKRFZUoGjeDy7EZTfFBE9gl4kjmIGkA= +github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY= +github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0= +github.com/macabu/inamedparam v0.2.0 h1:VyPYpOc10nkhI2qeNUdh3Zket4fcZjEWe35poddBCpE= +github.com/macabu/inamedparam v0.2.0/go.mod h1:+Pee9/YfGe5LJ62pYXqB89lJ+0k5bsR8Wgz/C0Zlq3U= +github.com/magiconair/properties v1.8.6 h1:5ibWZ6iY0NctNGWo87LalDlEZ6R41TqbbDamhfG/Qzo= +github.com/magiconair/properties v1.8.6/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= +github.com/maratori/testableexamples v1.0.0 h1:dU5alXRrD8WKSjOUnmJZuzdxWOEQ57+7s93SLMxb2vI= +github.com/maratori/testableexamples v1.0.0/go.mod h1:4rhjL1n20TUTT4vdh3RDqSizKLyXp7K2u6HgraZCGzE= +github.com/maratori/testpackage v1.1.1 h1:S58XVV5AD7HADMmD0fNnziNHqKvSdDuEKdPD1rNTU04= +github.com/maratori/testpackage v1.1.1/go.mod h1:s4gRK/ym6AMrqpOa/kEbQTV4Q4jb7WeLZzVhVVVOQMc= +github.com/matoous/godox v1.1.0 h1:W5mqwbyWrwZv6OQ5Z1a/DHGMOvXYCBP3+Ht7KMoJhq4= +github.com/matoous/godox v1.1.0/go.mod h1:jgE/3fUXiTurkdHOLT5WEkThTSuE7yxHv5iWPa80afs= github.com/matryer/is v1.4.0 h1:sosSmIWwkYITGrxZ25ULNDeKiMNzFSr4V/eqBQP0PeE= github.com/matryer/is v1.4.0/go.mod h1:8I/i5uYgLzgsgEloJE1U6xx5HkBQpAZvepWuujKwMRU= -github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= -github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= -github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= -github.com/mattn/go-colorable v0.1.8/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= -github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= -github.com/mattn/go-colorable v0.1.11 h1:nQ+aFkoE2TMGc0b68U2OKSexC+eq46+XwZzWXHRmPYs= 
-github.com/mattn/go-colorable v0.1.11/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= -github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= -github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= -github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= -github.com/mattn/go-isatty v0.0.10/go.mod h1:qgIWMr58cqv1PHHyhnkY9lrL7etaEgOFcMEpPG5Rm84= -github.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOAqxQCu2WE= -github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= -github.com/mattn/go-isatty v0.0.14 h1:yVuAays6BHfxijgZPzw+3Zlu5yQgKGP2/hcQbHb7S9Y= -github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= -github.com/mattn/go-runewidth v0.0.2/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= -github.com/mattn/go-runewidth v0.0.4/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= -github.com/mattn/go-runewidth v0.0.6/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI= -github.com/mattn/go-runewidth v0.0.9 h1:Lm995f3rfxdpd6TSmuVCHVb/QhupuXlYr8sCI/QdE+0= +github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE= +github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stgPZH1UqBm1s8= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI= -github.com/mattn/go-sqlite3 v1.9.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc= -github.com/mattn/goveralls v0.0.2/go.mod h1:8d1ZMHsd7fW6IRPKQh46F2WRpyib5/X4FOpevwGNQEw= +github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc= +github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= 
github.com/matttproud/golang_protobuf_extensions v1.0.1 h1:4hp9jkHxhMHkqkrB3Ix0jegS5sx/RkqARlsWZ6pIwiU= github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= -github.com/mbilski/exhaustivestruct v1.2.0 h1:wCBmUnSYufAHO6J4AVWY6ff+oxWxsVFrwgOdMUQePUo= -github.com/mbilski/exhaustivestruct v1.2.0/go.mod h1:OeTBVxQWoEmB2J2JCHmXWPJ0aksxSUOUy+nvtVEfzXc= -github.com/mgechev/dots v0.0.0-20210922191527-e955255bf517 h1:zpIH83+oKzcpryru8ceC6BxnoG8TBrhgAvRg8obzup0= -github.com/mgechev/dots v0.0.0-20210922191527-e955255bf517/go.mod h1:KQ7+USdGKfpPjXk4Ga+5XxQM4Lm4e3gAogrreFAYpOg= -github.com/mgechev/revive v1.1.2 h1:MiYA/o9M7REjvOF20QN43U8OtXDDHQFKLCtJnxLGLog= -github.com/mgechev/revive v1.1.2/go.mod h1:bnXsMr+ZTH09V5rssEI+jHAZ4z+ZdyhgO/zsy3EhK+0= -github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= -github.com/miekg/dns v1.1.26/go.mod h1:bPDLeHnStXmXAq1m/Ch/hvfNHr14JKNPMBo3VZKjuso= -github.com/miekg/dns v1.1.35/go.mod h1:KNUDUusw/aVsxyTYZM1oqvCicbwhgbNgztCETuNZ7xM= -github.com/miekg/pkcs11 v1.0.2/go.mod h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs= -github.com/miekg/pkcs11 v1.0.3/go.mod h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs= -github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc= -github.com/mitchellh/cli v1.1.0/go.mod h1:xcISNoH86gajksDmfB23e/pu+B+GeFRMYmoHXxx3xhI= -github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw= -github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= +github.com/mgechev/revive v1.7.0 h1:JyeQ4yO5K8aZhIKf5rec56u0376h8AlKNQEmjfkjKlY= +github.com/mgechev/revive v1.7.0/go.mod h1:qZnwcNhoguE58dfi96IJeSTPeZQejNeoMQLUZGi4SW4= github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= -github.com/mitchellh/go-ps v1.0.0/go.mod 
h1:J4lOc8z8yJs6vUwklHw2XEIiT4z4C40KtWVN3nvg8Pg= -github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= -github.com/mitchellh/gox v0.4.0/go.mod h1:Sd9lOJ0+aimLBi73mGofS1ycjY8lL3uZM3JPS42BGNg= -github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0QubkSMEySY= -github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= -github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= -github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= -github.com/mitchellh/mapstructure v1.4.2 h1:6h7AQ0yhTcIsmFmnAwQls75jp2Gzs4iB8W7pjMO+rqo= -github.com/mitchellh/mapstructure v1.4.2/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= -github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= -github.com/mitchellh/reflectwalk v1.0.1/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= +github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= +github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= -github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826/go.mod h1:TaXosZuwdSHYgviHp1DAtfrULt5eUgsSMsZf+YrPgl8= -github.com/moricho/tparallel v0.2.1 h1:95FytivzT6rYzdJLdtfn6m1bfFJylOJK41+lgv/EHf4= -github.com/moricho/tparallel v0.2.1/go.mod h1:fXEIZxG2vdfl0ZF8b42f5a78EhjjD5mX8qUplsoSU4k= -github.com/mozilla/scribe 
v0.0.0-20180711195314-fb71baf557c1/go.mod h1:FIczTrinKo8VaLxe6PWTPEXRXDIHz2QAwiaBaP5/4a8= -github.com/mozilla/tls-observatory v0.0.0-20210609171429-7bc42856d2e5/go.mod h1:FUqVoUPHSEdDR0MnFM3Dh8AU0pZHLXUD127SAJGER/s= +github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= +github.com/moricho/tparallel v0.3.2 h1:odr8aZVFA3NZrNybggMkYO3rgPRcqjeQUlBBFVxKHTI= +github.com/moricho/tparallel v0.3.2/go.mod h1:OQ+K3b4Ln3l2TZveGCywybl68glfLEwFGqvnjok8b+U= +github.com/muesli/termenv v0.16.0 h1:S5AlUN9dENB57rsbnkPyfdGuWIlkmzJjbFf0Tf5FWUc= +github.com/muesli/termenv v0.16.0/go.mod h1:ZRfOIKPFDYQoDFF4Olj7/QJbW60Ol/kL1pU3VfY/Cnk= github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= -github.com/mwitkow/go-proto-validators v0.0.0-20180403085117-0950a7990007/go.mod h1:m2XC9Qq0AlmmVksL6FktJCdTYyLk7V3fKyp0sl1yWQo= -github.com/mwitkow/go-proto-validators v0.2.0/go.mod h1:ZfA1hW+UH/2ZHOWvQ3HnQaU0DtnpXu850MZiy+YUgcc= +github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= github.com/nakabonne/nestif v0.3.1 h1:wm28nZjhQY5HyYPx+weN3Q65k6ilSBxDb8v5S81B81U= github.com/nakabonne/nestif v0.3.1/go.mod h1:9EtoZochLn5iUprVDmDjqGKPofoUEBL8U4Ngq6aY7OE= -github.com/nbutton23/zxcvbn-go v0.0.0-20210217022336-fa2cb2858354 h1:4kuARK6Y6FxaNu/BnU2OAaLF86eTVhP2hjTB6iMvItA= -github.com/nbutton23/zxcvbn-go v0.0.0-20210217022336-fa2cb2858354/go.mod h1:KSVJerMDfblTH7p5MZaTt+8zaT2iEk3AkVb9PQdZuE8= -github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs= -github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= -github.com/nishanths/exhaustive v0.2.3 h1:+ANTMqRNrqwInnP9aszg/0jDo+zbXa4x66U19Bx/oTk= -github.com/nishanths/exhaustive v0.2.3/go.mod h1:bhIX678Nx8inLM9PbpvK1yv6oGtoP8BfaIeMzgBNKvc= -github.com/nishanths/predeclared 
v0.0.0-20190419143655-18a43bb90ffc/go.mod h1:62PewwiQTlm/7Rj+cxVYqZvDIUc+JjZq6GHAC1fsObQ= -github.com/nishanths/predeclared v0.2.1 h1:1TXtjmy4f3YCFjTxRd8zcFHOmoUir+gp0ESzjFzG2sw= -github.com/nishanths/predeclared v0.2.1/go.mod h1:HvkGJcA3naj4lOwnFXFDkFxVtSqQMB9sbB1usJ+xjQE= -github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= -github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE= -github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU= -github.com/olekukonko/tablewriter v0.0.0-20170122224234-a0225b3f23b5/go.mod h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo= -github.com/olekukonko/tablewriter v0.0.1/go.mod h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo= -github.com/olekukonko/tablewriter v0.0.2/go.mod h1:rSAaSIOAGT9odnlyGlUfAJaoc5w2fSBUmeGDbRWPxyQ= +github.com/nishanths/exhaustive v0.12.0 h1:vIY9sALmw6T/yxiASewa4TQcFsVYZQQRUQJhKRf3Swg= +github.com/nishanths/exhaustive v0.12.0/go.mod h1:mEZ95wPIZW+x8kC4TgC+9YCUgiST7ecevsVDTgc2obs= +github.com/nishanths/predeclared v0.2.2 h1:V2EPdZPliZymNAn79T8RkNApBjMmVKh5XRpLm/w98Vk= +github.com/nishanths/predeclared v0.2.2/go.mod h1:RROzoN6TnGQupbC+lqggsOlcgysk3LMK/HI84Mp280c= +github.com/nunnatsa/ginkgolinter v0.19.1 h1:mjwbOlDQxZi9Cal+KfbEJTCz327OLNfwNvoZ70NJ+c4= +github.com/nunnatsa/ginkgolinter v0.19.1/go.mod h1:jkQ3naZDmxaZMXPWaS9rblH+i+GWXQCaS/JFIWcOH2s= github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec= github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY= -github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= -github.com/onsi/ginkgo v1.10.3/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= -github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= -github.com/onsi/ginkgo v1.16.4 h1:29JGrr5oVBm5ulCWet69zQkzWipVXIol6ygQUe/EzNc= -github.com/onsi/ginkgo v1.16.4/go.mod 
h1:dX+/inL/fNMqNlz0e9LfyB9TswhZpCVdJM/Z6Vvnwo0= -github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= -github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= -github.com/onsi/gomega v1.16.0 h1:6gjqkI8iiRHMvdccRJM8rVKjCWk6ZIm6FTm3ddIe4/c= -github.com/onsi/gomega v1.16.0/go.mod h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAlGdZY= -github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= -github.com/otiai10/copy v1.2.0 h1:HvG945u96iNadPoG2/Ja2+AUJeW5YuFQMixq9yirC+k= +github.com/onsi/ginkgo/v2 v2.22.2 h1:/3X8Panh8/WwhU/3Ssa6rCKqPLuAkVY2I0RoyDLySlU= +github.com/onsi/ginkgo/v2 v2.22.2/go.mod h1:oeMosUL+8LtarXBHu/c0bx2D/K9zyQ6uX3cTyztHwsk= +github.com/onsi/gomega v1.36.2 h1:koNYke6TVk6ZmnyHrCXba/T/MoLBXFjeC1PtvYgw0A8= +github.com/onsi/gomega v1.36.2/go.mod h1:DdwyADRjrc825LhMEkD76cHR5+pUnjhUN8GlHlRPHzY= github.com/otiai10/copy v1.2.0/go.mod h1:rrF5dJ5F0t/EWSYODDu4j9/vEeYHMkc8jt0zJChqQWw= +github.com/otiai10/copy v1.14.0 h1:dCI/t1iTdYGtkvCuBG2BgR6KZa83PTclw4U5n2wAllU= +github.com/otiai10/copy v1.14.0/go.mod h1:ECfuL02W+/FkTWZWgQqXPWZgW9oeKCSQ5qVfSc4qc4w= github.com/otiai10/curr v0.0.0-20150429015615-9b4961190c95/go.mod h1:9qAhocn7zKJG+0mI8eUu6xqkFDYS2kb2saOteoSB3cE= github.com/otiai10/curr v1.0.0/go.mod h1:LskTG5wDwr8Rs+nNQ+1LlxRjAtTZZjtJW4rMXl6j4vs= github.com/otiai10/mint v1.3.0/go.mod h1:F5AjcsTsWUqX+Na9fpHb52P8pcRX2CI6A3ctIT91xUo= github.com/otiai10/mint v1.3.1/go.mod h1:/yxELlJQ0ufhjUwhshSj+wFjZ78CnZ48/1wtmBH1OTc= -github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= -github.com/pborman/uuid v1.2.0/go.mod h1:X/NO0urCmaxf9VXbdlT7C2Yzkj2IKimNn4k+gtPdI/k= -github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= -github.com/pelletier/go-toml v1.9.3/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= -github.com/pelletier/go-toml v1.9.4 
h1:tjENF6MfZAg8e4ZmZTeWaWiT2vXtsoO6+iuOjFhECwM= -github.com/pelletier/go-toml v1.9.4/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= -github.com/peterbourgon/diskv v2.0.1+incompatible/go.mod h1:uqqh8zWWbv1HBMNONnaR/tNboyR3/BZd58JJSHlUSCU= -github.com/phayes/checkstyle v0.0.0-20170904204023-bfd46e6a821d h1:CdDQnGF8Nq9ocOS/xlSptM1N3BbrA6/kmaep5ggwaIA= -github.com/phayes/checkstyle v0.0.0-20170904204023-bfd46e6a821d/go.mod h1:3OzsM7FXDQlpCiw2j81fOmAwQLnZnLGXVKUzeKQXIAw= +github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8= +github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= +github.com/pelletier/go-toml/v2 v2.2.3 h1:YmeHyLY8mFWbdkNWwpr+qIL2bEqT0o95WSdkNHvL12M= +github.com/pelletier/go-toml/v2 v2.2.3/go.mod h1:MfCQTFTvCcUyyvvwm1+G6H/jORL20Xlb6rzQu9GuUkc= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZI= -github.com/pmezard/go-difflib v0.0.0-20151028094244-d8ed2627bdf0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/polyfloyd/go-errorlint v0.0.0-20210722154253-910bb7978349 h1:Kq/3kL0k033ds3tyez5lFPrfQ74fNJ+OqCclRipubwA= -github.com/polyfloyd/go-errorlint v0.0.0-20210722154253-910bb7978349/go.mod h1:wi9BfjxjF/bwiZ701TzmfKu6UKC357IOAtNr0Td0Lvw= -github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= -github.com/posener/complete v1.2.3/go.mod h1:WZIdtGGp+qx0sLrYKtIRAruyNpv6hFCicSgv7Sy7s/s= 
+github.com/polyfloyd/go-errorlint v1.7.1 h1:RyLVXIbosq1gBdk/pChWA8zWYLsq9UEw7a1L5TVMCnA= +github.com/polyfloyd/go-errorlint v1.7.1/go.mod h1:aXjNb1x2TNhoLsk26iv1yl7a+zTnXPhwEMtEXukiLR8= +github.com/prashantv/gostub v1.1.0 h1:BTyx3RfQjRHnUWaGF9oQos79AlQ5k8WNktv7VGvVH4g= +github.com/prashantv/gostub v1.1.0/go.mod h1:A5zLQHz7ieHGG7is6LLXLz7I8+3LZzsrV0P1IAHhP5U= github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= -github.com/prometheus/client_golang v1.7.1 h1:NTGy1Ja9pByO+xAeH/qiWnLrKtr3hJPNjaVUwnjpdpA= github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M= +github.com/prometheus/client_golang v1.11.0/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0= +github.com/prometheus/client_golang v1.12.1 h1:ZiaPsmm9uiBeaSMRznKsCDNtPCS0T3JVDGF+06gjBzk= +github.com/prometheus/client_golang v1.12.1/go.mod h1:3Z9XVyYiZYEO+YQWt3RD2R3jrbd179Rt297l4aS6nDY= github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.2.0 h1:uq5h0d+GuxiXLJLNABMgp2qUWDPiLvgCzz2dUR+/W/M= github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= -github.com/prometheus/common v0.10.0 h1:RyRA7RzGXQZiW+tGMr7sxa85G1z0yOpM1qq5c8lNawc= github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo= +github.com/prometheus/common v0.26.0/go.mod h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9VFqTh1DIvc= +github.com/prometheus/common 
v0.32.1 h1:hWIdL3N2HoUx3B8j3YN9mWor0qhY/NlEKZEaXxuIRh4= +github.com/prometheus/common v0.32.1/go.mod h1:vu+V0TpY+O6vW9J44gczi3Ap/oXXR10b+M/gUGO4Hls= github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= -github.com/prometheus/procfs v0.6.0 h1:mxy4L2jP6qMonqmq+aTtOx1ifVWUgG/TAmntgbh3xv4= github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= -github.com/pseudomuto/protoc-gen-doc v1.3.2/go.mod h1:y5+P6n3iGrbKG+9O04V5ld71in3v/bX88wUwgt+U8EA= -github.com/pseudomuto/protokit v0.2.0/go.mod h1:2PdH30hxVHsup8KpBTOXTBeMVhJZVio3Q8ViKSAXT0Q= -github.com/quasilyte/go-consistent v0.0.0-20190521200055-c6f3937de18c/go.mod h1:5STLWrekHfjyYwxBRVRXNOSewLJ3PWfDJd1VyTS21fI= -github.com/quasilyte/go-ruleguard v0.3.1-0.20210203134552-1b5a410e1cc8/go.mod h1:KsAh3x0e7Fkpgs+Q9pNLS5XpFSvYCEVl5gP9Pp1xp30= -github.com/quasilyte/go-ruleguard v0.3.13 h1:O1G41cq1jUr3cJmqp7vOUT0SokqjzmS9aESWJuIDRaY= -github.com/quasilyte/go-ruleguard v0.3.13/go.mod h1:Ul8wwdqR6kBVOCt2dipDBkE+T6vAV/iixkrKuRTN1oQ= -github.com/quasilyte/go-ruleguard/dsl v0.3.0/go.mod h1:KeCP03KrjuSO0H1kTuZQCWlQPulDV6YMIXmpQss17rU= -github.com/quasilyte/go-ruleguard/dsl v0.3.10/go.mod h1:KeCP03KrjuSO0H1kTuZQCWlQPulDV6YMIXmpQss17rU= -github.com/quasilyte/go-ruleguard/rules v0.0.0-20201231183845-9e62ed36efe1/go.mod h1:7JTjp89EGyU1d6XfBiXihJNG37wB2VRkd125Q1u7Plc= -github.com/quasilyte/go-ruleguard/rules v0.0.0-20210428214800-545e0d2e0bf7/go.mod h1:4cgAphtvu7Ftv7vOT2ZOYhC6CvBxZixcasr8qIOTA50= -github.com/quasilyte/regex/syntax v0.0.0-20200407221936-30656e2c4a95 h1:L8QM9bvf68pVdQ3bCFZMDmnt9yqcMBro1pC7F+IPYMY= -github.com/quasilyte/regex/syntax v0.0.0-20200407221936-30656e2c4a95/go.mod h1:rlzQ04UMyJXu/aOvhd8qT+hvDrFpiwqp8MRXDY9szc0= -github.com/rogpeppe/fastuuid 
v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg= -github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= +github.com/prometheus/procfs v0.7.3 h1:4jVXhlkAyzOScmCkXBTOLRLTz8EeU+eyjrwB/EPq0VU= +github.com/prometheus/procfs v0.7.3/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= +github.com/quasilyte/go-ruleguard v0.4.4 h1:53DncefIeLX3qEpjzlS1lyUmQoUEeOWPFWqaTJq9eAQ= +github.com/quasilyte/go-ruleguard v0.4.4/go.mod h1:Vl05zJ538vcEEwu16V/Hdu7IYZWyKSwIy4c88Ro1kRE= +github.com/quasilyte/go-ruleguard/dsl v0.3.22 h1:wd8zkOhSNr+I+8Qeciml08ivDt1pSXe60+5DqOpCjPE= +github.com/quasilyte/go-ruleguard/dsl v0.3.22/go.mod h1:KeCP03KrjuSO0H1kTuZQCWlQPulDV6YMIXmpQss17rU= +github.com/quasilyte/gogrep v0.5.0 h1:eTKODPXbI8ffJMN+W2aE0+oL0z/nh8/5eNdiO34SOAo= +github.com/quasilyte/gogrep v0.5.0/go.mod h1:Cm9lpz9NZjEoL1tgZ2OgeUKPIxL1meE7eo60Z6Sk+Ng= +github.com/quasilyte/regex/syntax v0.0.0-20210819130434-b3f0c404a727 h1:TCg2WBOl980XxGFEZSS6KlBGIV0diGdySzxATTWoqaU= +github.com/quasilyte/regex/syntax v0.0.0-20210819130434-b3f0c404a727/go.mod h1:rlzQ04UMyJXu/aOvhd8qT+hvDrFpiwqp8MRXDY9szc0= +github.com/quasilyte/stdinfo v0.0.0-20220114132959-f7386bf02567 h1:M8mH9eK4OUR4lu7Gd+PU1fV2/qnDNfzT635KRSObncs= +github.com/quasilyte/stdinfo v0.0.0-20220114132959-f7386bf02567/go.mod h1:DWNGW8A4Y+GyBgPuaQJuWiy0XYftx4Xm/y5Jqk9I6VQ= +github.com/raeperd/recvcheck v0.2.0 h1:GnU+NsbiCqdC2XX5+vMZzP+jAJC5fht7rcVTAhX74UI= +github.com/raeperd/recvcheck v0.2.0/go.mod h1:n04eYkwIR0JbgD73wT8wL4JjPC3wm0nFtzBnWNocnYU= +github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= +github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ= +github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= -github.com/rogpeppe/go-internal v1.6.2/go.mod 
h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= -github.com/rs/cors v1.7.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU= -github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g= -github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= -github.com/ryancurrah/gomodguard v1.2.3 h1:ww2fsjqocGCAFamzvv/b8IsRduuHHeK2MHTcTxZTQX8= -github.com/ryancurrah/gomodguard v1.2.3/go.mod h1:rYbA/4Tg5c54mV1sv4sQTP5WOPBcoLtnBZ7/TEhXAbg= -github.com/ryanrolds/sqlclosecheck v0.3.0 h1:AZx+Bixh8zdUBxUA1NxbxVAS78vTPq4rCb8OUZI9xFw= -github.com/ryanrolds/sqlclosecheck v0.3.0/go.mod h1:1gREqxyTGR3lVtpngyFo3hZAgk0KCtEdgEkHwDbigdA= -github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= -github.com/sagikazarmark/crypt v0.1.0/go.mod h1:B/mN0msZuINBtQ1zZLEQcegFJJf9vnYIR88KRMEuODE= -github.com/sanposhiho/wastedassign/v2 v2.0.6 h1:+6/hQIHKNJAUixEj6EmOngGIisyeI+T3335lYTyxRoA= -github.com/sanposhiho/wastedassign/v2 v2.0.6/go.mod h1:KyZ0MWTwxxBmfwn33zh3k1dmsbF2ud9pAAGfoLfjhtI= -github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= -github.com/securego/gosec/v2 v2.9.1 h1:anHKLS/ApTYU6NZkKa/5cQqqcbKZURjvc+MtR++S4EQ= -github.com/securego/gosec/v2 v2.9.1/go.mod h1:oDcDLcatOJxkCGaCaq8lua1jTnYf6Sou4wdiJ1n4iHc= -github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= -github.com/shazow/go-diff v0.0.0-20160112020656-b6b7b6733b8c h1:W65qqJCIOVP4jpqPQ0YvHYKwcMEMVWIzWC5iNQQfBTU= -github.com/shazow/go-diff v0.0.0-20160112020656-b6b7b6733b8c/go.mod h1:/PevMnwAxekIXwN8qQyfc5gl2NlkB3CQlkizAbOkeBs= -github.com/shirou/gopsutil/v3 v3.21.10/go.mod h1:t75NhzCZ/dYyPQjyQmrAYP6c8+LCdFANeBMdLPCNnew= +github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ= +github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc= 
+github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/ryancurrah/gomodguard v1.4.1 h1:eWC8eUMNZ/wM/PWuZBv7JxxqT5fiIKSIyTvjb7Elr+g= +github.com/ryancurrah/gomodguard v1.4.1/go.mod h1:qnMJwV1hX9m+YJseXEBhd2s90+1Xn6x9dLz11ualI1I= +github.com/ryanrolds/sqlclosecheck v0.5.1 h1:dibWW826u0P8jNLsLN+En7+RqWWTYrjCB9fJfSfdyCU= +github.com/ryanrolds/sqlclosecheck v0.5.1/go.mod h1:2g3dUjoS6AL4huFdv6wn55WpLIDjY7ZgUR4J8HOO/XQ= +github.com/sanposhiho/wastedassign/v2 v2.1.0 h1:crurBF7fJKIORrV85u9UUpePDYGWnwvv3+A96WvwXT0= +github.com/sanposhiho/wastedassign/v2 v2.1.0/go.mod h1:+oSmSC+9bQ+VUAxA66nBb0Z7N8CK7mscKTDYC6aIek4= +github.com/santhosh-tekuri/jsonschema/v6 v6.0.1 h1:PKK9DyHxif4LZo+uQSgXNqs0jj5+xZwwfKHgph2lxBw= +github.com/santhosh-tekuri/jsonschema/v6 v6.0.1/go.mod h1:JXeL+ps8p7/KNMjDQk3TCwPpBy0wYklyWTfbkIzdIFU= +github.com/sashamelentyev/interfacebloat v1.1.0 h1:xdRdJp0irL086OyW1H/RTZTr1h/tMEOsumirXcOJqAw= +github.com/sashamelentyev/interfacebloat v1.1.0/go.mod h1:+Y9yU5YdTkrNvoX0xHc84dxiN1iBi9+G8zZIhPVoNjQ= +github.com/sashamelentyev/usestdlibvars v1.28.0 h1:jZnudE2zKCtYlGzLVreNp5pmCdOxXUzwsMDBkR21cyQ= +github.com/sashamelentyev/usestdlibvars v1.28.0/go.mod h1:9nl0jgOfHKWNFS43Ojw0i7aRoS4j6EBye3YBhmAIRF8= +github.com/securego/gosec/v2 v2.22.2 h1:IXbuI7cJninj0nRpZSLCUlotsj8jGusohfONMrHoF6g= +github.com/securego/gosec/v2 v2.22.2/go.mod h1:UEBGA+dSKb+VqM6TdehR7lnQtIIMorYJ4/9CW1KVQBE= +github.com/sergi/go-diff v1.2.0 h1:XU+rvMAioB0UC3q1MFrIQy4Vo5/4VsRDQQXHsEya6xQ= +github.com/sergi/go-diff v1.2.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= github.com/shurcooL/go v0.0.0-20180423040247-9e1955d9fb6e/go.mod h1:TDJrrUr11Vxrven61rcy3hJMUqaf/CLWYhHNPmT14Lk= github.com/shurcooL/go-goon v0.0.0-20170922171312-37c2f522c041/go.mod h1:N5mDOmsrJOB+vfqUK+7DmDyjhSLIIBnXo9lvZJj3MWQ= -github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= github.com/sirupsen/logrus v1.2.0/go.mod 
h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= -github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= -github.com/sirupsen/logrus v1.8.1 h1:dJKuHgqk1NNQlqoA6BTlM1Wf9DOH3NBjQyu0h9+AZZE= -github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= -github.com/sivchari/tenv v1.4.7 h1:FdTpgRlTue5eb5nXIYgS/lyVXSjugU8UUVDwhP1NLU8= -github.com/sivchari/tenv v1.4.7/go.mod h1:5nF+bITvkebQVanjU6IuMbvIot/7ReNsUV7I5NbprB0= -github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= -github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= -github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM= -github.com/sonatard/noctx v0.0.1 h1:VC1Qhl6Oxx9vvWo3UDgrGXYCeKCe3Wbw7qAWL6FrmTY= -github.com/sonatard/noctx v0.0.1/go.mod h1:9D2D/EoULe8Yy2joDHJj7bv3sZoq9AaSb8B4lqBjiZI= -github.com/sourcegraph/go-diff v0.6.1 h1:hmA1LzxW0n1c3Q4YbrFgg4P99GSnebYa3x8gr0HZqLQ= -github.com/sourcegraph/go-diff v0.6.1/go.mod h1:iBszgVvyxdc8SFZ7gm69go2KDdt3ag071iBaWPF6cjs= -github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= -github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ= -github.com/spf13/afero v1.6.0 h1:xoax2sJ2DT8S8xA2paPFjDCScCNeWsg75VG0DLRreiY= -github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I= -github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= -github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= -github.com/spf13/cast v1.4.1 h1:s0hze+J0196ZfEMTs80N7UlFt0BDuQ7Q+JDnHiMWKdA= -github.com/spf13/cast v1.4.1/go.mod 
h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= -github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= -github.com/spf13/cobra v0.0.5/go.mod h1:3K3wKZymM7VvHMDS9+Akkh4K60UwM26emMESw8tLCHU= -github.com/spf13/cobra v1.2.1 h1:+KmjbUw1hriSNMF55oPrkZcb27aECyrj8V2ytv7kWDw= -github.com/spf13/cobra v1.2.1/go.mod h1:ExllRjgxM/piMAM+3tAZvg8fsklGAf3tPfi+i8t68Nk= -github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo= +github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= +github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= +github.com/sivchari/containedctx v1.0.3 h1:x+etemjbsh2fB5ewm5FeLNi5bUjK0V8n0RB+Wwfd0XE= +github.com/sivchari/containedctx v1.0.3/go.mod h1:c1RDvCbnJLtH4lLcYD/GqwiBSSf4F5Qk0xld2rBqzJ4= +github.com/sonatard/noctx v0.1.0 h1:JjqOc2WN16ISWAjAk8M5ej0RfExEXtkEyExl2hLW+OM= +github.com/sonatard/noctx v0.1.0/go.mod h1:0RvBxqY8D4j9cTTTWE8ylt2vqj2EPI8fHmrxHdsaZ2c= +github.com/sourcegraph/go-diff v0.7.0 h1:9uLlrd5T46OXs5qpp8L/MTltk0zikUGi0sNNyCpA8G0= +github.com/sourcegraph/go-diff v0.7.0/go.mod h1:iBszgVvyxdc8SFZ7gm69go2KDdt3ag071iBaWPF6cjs= +github.com/spf13/afero v1.12.0 h1:UcOPyRBYczmFn6yvphxkn9ZEOY65cpwGKb5mL36mrqs= +github.com/spf13/afero v1.12.0/go.mod h1:ZTlWwG4/ahT8W7T0WQ5uYmjI9duaLQGy3Q2OAl4sk/4= +github.com/spf13/cast v1.5.0 h1:rj3WzYc11XZaIZMPKmwP96zkFEnnAmV8s6XbB2aY32w= +github.com/spf13/cast v1.5.0/go.mod h1:SpXXQ5YoyJw6s3/6cMTQuxvgRl3PCJiyaX9p6b155UU= +github.com/spf13/cobra v1.9.1 h1:CXSaggrXdbHK9CF+8ywj8Amf7PBRmPCOJugH954Nnlo= +github.com/spf13/cobra v1.9.1/go.mod h1:nDyEzZ8ogv936Cinf6g1RU9MRY64Ir93oCnqb9wxYW0= github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk= github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo= -github.com/spf13/pflag v1.0.1/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= -github.com/spf13/pflag v1.0.3/go.mod 
h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= -github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= -github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s= -github.com/spf13/viper v1.8.1/go.mod h1:o0Pch8wJ9BVSWGQMbra6iw0oQ5oktSIBaujf1rJH9Ns= -github.com/spf13/viper v1.9.0 h1:yR6EXjTp0y0cLN8OZg1CRZmOBdI88UcGkhgyJhu6nZk= -github.com/spf13/viper v1.9.0/go.mod h1:+i6ajR7OX2XaiBkrcZJFK21htRk7eDeLg7+O6bhUPP4= +github.com/spf13/pflag v1.0.6 h1:jFzHGLGAlb3ruxLB8MhbI6A8+AQX/2eW4qeyNZXNp2o= +github.com/spf13/pflag v1.0.6/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/viper v1.12.0 h1:CZ7eSOd3kZoaYDLbXnmzgQI5RlciuXBMA+18HwHRfZQ= +github.com/spf13/viper v1.12.0/go.mod h1:b6COn30jlNxbm/V2IqWiNWkJ+vZNiMNksliPCiuKtSI= github.com/ssgreg/nlreturn/v2 v2.2.1 h1:X4XDI7jstt3ySqGU86YGAURbxw3oTDPK9sPEi6YEwQ0= github.com/ssgreg/nlreturn/v2 v2.2.1/go.mod h1:E/iiPB78hV7Szg2YfRgyIrk1AD6JVMTRkkxBiELzh2I= +github.com/stbenjam/no-sprintf-host-port v0.2.0 h1:i8pxvGrt1+4G0czLr/WnmyH7zbZ8Bg8etvARQ1rpyl4= +github.com/stbenjam/no-sprintf-host-port v0.2.0/go.mod h1:eL0bQ9PasS0hsyTyfTjjG+E80QIyPnBVQbYZyv20Jfk= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/objx v0.2.0 h1:Hbg2NidpLE8veEBkEZTL3CvlkUIVzuU9jDplZO54c48= -github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= -github.com/stretchr/testify v0.0.0-20170130113145-4d4bfba8f1d1/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= -github.com/stretchr/testify v1.1.4/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= 
+github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= -github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/subosito/gotenv v1.2.0 h1:Slr1R9HxAlEKefgq5jn9U+DnETlIUa6HfgEzj0g5d7s= -github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= -github.com/sylvia7788/contextcheck v1.0.4 h1:MsiVqROAdr0efZc/fOCt0c235qm9XJqHtWwM+2h2B04= -github.com/sylvia7788/contextcheck v1.0.4/go.mod h1:vuPKJMQ7MQ91ZTqfdyreNKwZjyUg6KO+IebVyQDedZQ= -github.com/tdakkota/asciicheck v0.0.0-20200416200610-e657995f937b h1:HxLVTlqcHhFAz3nWUcuvpH7WuOMv8LQoCWmruLfFH2U= -github.com/tdakkota/asciicheck v0.0.0-20200416200610-e657995f937b/go.mod h1:yHp0ai0Z9gUljN3o0xMhYJnH/IcvkdTBOX2fmJ93JEM= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= +github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/subosito/gotenv v1.4.1 h1:jyEFiXpy21Wm81FBN71l9VoMMV8H8jG+qIK3GCpY6Qs= +github.com/subosito/gotenv v1.4.1/go.mod 
h1:ayKnFf/c6rvx/2iiLrJUk1e6plDbT3edrFNGqEflhK0= +github.com/tdakkota/asciicheck v0.4.1 h1:bm0tbcmi0jezRA2b5kg4ozmMuGAFotKI3RZfrhfovg8= +github.com/tdakkota/asciicheck v0.4.1/go.mod h1:0k7M3rCfRXb0Z6bwgvkEIMleKH3kXNz9UqJ9Xuqopr8= github.com/tenntenn/modver v1.0.1 h1:2klLppGhDgzJrScMpkj9Ujy3rXPUspSjAcev9tSEBgA= github.com/tenntenn/modver v1.0.1/go.mod h1:bePIyQPb7UeioSRkw3Q0XeMhYZSMx9B8ePqg6SAMGH0= github.com/tenntenn/text/transform v0.0.0-20200319021203-7eef512accb3 h1:f+jULpRQGxTSkNYKJ51yaw6ChIqO+Je8UqsTKN/cDag= github.com/tenntenn/text/transform v0.0.0-20200319021203-7eef512accb3/go.mod h1:ON8b8w4BN/kE1EOhwT0o+d62W65a6aPw1nouo9LMgyY= -github.com/tetafro/godot v1.4.11 h1:BVoBIqAf/2QdbFmSwAWnaIqDivZdOV0ZRwEm6jivLKw= -github.com/tetafro/godot v1.4.11/go.mod h1:LR3CJpxDVGlYOWn3ZZg1PgNZdTUvzsZWu8xaEohUpn8= -github.com/timakin/bodyclose v0.0.0-20200424151742-cb6215831a94 h1:ig99OeTyDwQWhPe2iw9lwfQVF1KB3Q4fpP3X7/2VBG8= -github.com/timakin/bodyclose v0.0.0-20200424151742-cb6215831a94/go.mod h1:Qimiffbc6q9tBWlVV6x0P9sat/ao1xEkREYPPj9hphk= -github.com/tklauser/go-sysconf v0.3.9/go.mod h1:11DU/5sG7UexIrp/O6g35hrWzu0JxlwQ3LSFUzyeuhs= -github.com/tklauser/numcpus v0.3.0/go.mod h1:yFGUr7TUHQRAhyqBcEg0Ge34zDBAsIvJJcyE6boqnA8= -github.com/tmc/grpc-websocket-proxy v0.0.0-20170815181823-89b8d40f7ca8/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= -github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= -github.com/tmc/grpc-websocket-proxy v0.0.0-20200427203606-3cfed13b9966/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= -github.com/tomarrell/wrapcheck/v2 v2.4.0 h1:mU4H9KsqqPZUALOUbVOpjy8qNQbWLoLI9fV68/1tq30= -github.com/tomarrell/wrapcheck/v2 v2.4.0/go.mod h1:68bQ/eJg55BROaRTbMjC7vuhL2OgfoG8bLp9ZyoBfyY= -github.com/tomasen/realip v0.0.0-20180522021738-f0c99a92ddce/go.mod h1:o8v6yHRoik09Xen7gje4m9ERNah1d1PPsVq1VEx9vE4= -github.com/tommy-muehle/go-mnd/v2 v2.4.0 
h1:1t0f8Uiaq+fqKteUR4N9Umr6E99R+lDnLnq7PwX2PPE= -github.com/tommy-muehle/go-mnd/v2 v2.4.0/go.mod h1:WsUAkMJMYww6l/ufffCD3m+P7LEvr8TnZn9lwVDlgzw= -github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0= -github.com/ultraware/funlen v0.0.3 h1:5ylVWm8wsNwH5aWo9438pwvsK0QiqVuUrt9bn7S/iLA= -github.com/ultraware/funlen v0.0.3/go.mod h1:Dp4UiAus7Wdb9KUZsYWZEWiRzGuM2kXM1lPbfaF6xhA= -github.com/ultraware/whitespace v0.0.4 h1:If7Va4cM03mpgrNH9k49/VOicWpGoG70XPBFFODYDsg= -github.com/ultraware/whitespace v0.0.4/go.mod h1:aVMh/gQve5Maj9hQ/hg+F75lr/X5A89uZnzAmWSineA= -github.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA= -github.com/urfave/cli v1.22.1/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= -github.com/uudashr/gocognit v1.0.5 h1:rrSex7oHr3/pPLQ0xoWq108XMU8s678FJcQ+aSfOHa4= -github.com/uudashr/gocognit v1.0.5/go.mod h1:wgYz0mitoKOTysqxTDMOUXg+Jb5SvtihkfmugIZYpEA= -github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= -github.com/valyala/fasthttp v1.30.0/go.mod h1:2rsYD01CKFrjjsvFxx75KlEUNpWNBY9JWD3K/7o2Cus= -github.com/valyala/quicktemplate v1.7.0/go.mod h1:sqKJnoaOF88V07vkO+9FL8fb9uZg/VPSJnLYn+LmLk8= -github.com/valyala/tcplisten v1.0.0/go.mod h1:T0xQ8SeCZGxckz9qRXTfG43PvQ/mcWh7FwZEA7Ioqkc= -github.com/viki-org/dnscache v0.0.0-20130720023526-c70c1f23c5d8/go.mod h1:dniwbG03GafCjFohMDmz6Zc6oCuiqgH6tGNyXTkHzXE= -github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU= -github.com/xo/terminfo v0.0.0-20210125001918-ca9a967f8778/go.mod h1:2MuV+tbUrU1zIOPMxZ5EncGwgmMJsa+9ucAQZXxsObs= -github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q= -github.com/yeya24/promlinter v0.1.0 h1:goWULN0jH5Yajmu/K+v1xCqIREeB+48OiJ2uu2ssc7U= -github.com/yeya24/promlinter v0.1.0/go.mod h1:rs5vtZzeBHqqMwXqFScncpCF6u06lezhZepno9AB1Oc= 
-github.com/yudai/gojsondiff v1.0.0/go.mod h1:AY32+k2cwILAkW1fbgxQ5mUmMiZFgLIV+FBNExI05xg= -github.com/yudai/golcs v0.0.0-20170316035057-ecda9a501e82/go.mod h1:lgjkn3NuSvDfVJdfcVVdX+jpBxNmX4rDAzaS45IcYoM= -github.com/yudai/pp v2.0.1+incompatible/go.mod h1:PuxR/8QJ7cyCkFp/aUDS+JY727OFEZkTdatxwunjIkc= +github.com/tetafro/godot v1.5.0 h1:aNwfVI4I3+gdxjMgYPus9eHmoBeJIbnajOyqZYStzuw= +github.com/tetafro/godot v1.5.0/go.mod h1:2oVxTBSftRTh4+MVfUaUXR6bn2GDXCaMcOG4Dk3rfio= +github.com/timakin/bodyclose v0.0.0-20241222091800-1db5c5ca4d67 h1:9LPGD+jzxMlnk5r6+hJnar67cgpDIz/iyD+rfl5r2Vk= +github.com/timakin/bodyclose v0.0.0-20241222091800-1db5c5ca4d67/go.mod h1:mkjARE7Yr8qU23YcGMSALbIxTQ9r9QBVahQOBRfU460= +github.com/timonwong/loggercheck v0.10.1 h1:uVZYClxQFpw55eh+PIoqM7uAOHMrhVcDoWDery9R8Lg= +github.com/timonwong/loggercheck v0.10.1/go.mod h1:HEAWU8djynujaAVX7QI65Myb8qgfcZ1uKbdpg3ZzKl8= +github.com/tomarrell/wrapcheck/v2 v2.10.0 h1:SzRCryzy4IrAH7bVGG4cK40tNUhmVmMDuJujy4XwYDg= +github.com/tomarrell/wrapcheck/v2 v2.10.0/go.mod h1:g9vNIyhb5/9TQgumxQyOEqDHsmGYcGsVMOx/xGkqdMo= +github.com/tommy-muehle/go-mnd/v2 v2.5.1 h1:NowYhSdyE/1zwK9QCLeRb6USWdoif80Ie+v+yU8u1Zw= +github.com/tommy-muehle/go-mnd/v2 v2.5.1/go.mod h1:WsUAkMJMYww6l/ufffCD3m+P7LEvr8TnZn9lwVDlgzw= +github.com/ultraware/funlen v0.2.0 h1:gCHmCn+d2/1SemTdYMiKLAHFYxTYz7z9VIDRaTGyLkI= +github.com/ultraware/funlen v0.2.0/go.mod h1:ZE0q4TsJ8T1SQcjmkhN/w+MceuatI6pBFSxxyteHIJA= +github.com/ultraware/whitespace v0.2.0 h1:TYowo2m9Nfj1baEQBjuHzvMRbp19i+RCcRYrSWoFa+g= +github.com/ultraware/whitespace v0.2.0/go.mod h1:XcP1RLD81eV4BW8UhQlpaR+SDc2givTvyI8a586WjW8= +github.com/uudashr/gocognit v1.2.0 h1:3BU9aMr1xbhPlvJLSydKwdLN3tEUUrzPSSM8S4hDYRA= +github.com/uudashr/gocognit v1.2.0/go.mod h1:k/DdKPI6XBZO1q7HgoV2juESI2/Ofj9AcHPZhBBdrTU= +github.com/uudashr/iface v1.3.1 h1:bA51vmVx1UIhiIsQFSNq6GZ6VPTk3WNMZgRiCe9R29U= +github.com/uudashr/iface v1.3.1/go.mod h1:4QvspiRd3JLPAEXBQ9AiZpLbJlrWWgRChOKDJEuQTdg= +github.com/xen0n/gosmopolitan 
v1.3.0 h1:zAZI1zefvo7gcpbCOrPSHJZJYA9ZgLfJqtKzZ5pHqQM= +github.com/xen0n/gosmopolitan v1.3.0/go.mod h1:rckfr5T6o4lBtM1ga7mLGKZmLxswUoH1zxHgNXOsEt4= +github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e h1:JVG44RsyaB9T2KIHavMF/ppJZNG9ZpyihvCd0w101no= +github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e/go.mod h1:RbqR21r5mrJuqunuUZ/Dhy/avygyECGrLceyNeo4LiM= +github.com/yagipy/maintidx v1.0.0 h1:h5NvIsCz+nRDapQ0exNv4aJ0yXSI0420omVANTv3GJM= +github.com/yagipy/maintidx v1.0.0/go.mod h1:0qNf/I/CCZXSMhsRsrEPDZ+DkekpKLXAJfsTACwgXLk= +github.com/yeya24/promlinter v0.3.0 h1:JVDbMp08lVCP7Y6NP3qHroGAO6z2yGKQtS5JsjqtoFs= +github.com/yeya24/promlinter v0.3.0/go.mod h1:cDfJQQYv9uYciW60QT0eeHlFodotkYZlL+YcPQN+mW4= +github.com/ykadowak/zerologlint v0.1.5 h1:Gy/fMz1dFQN9JZTPjv1hxEk+sRWm05row04Yoolgdiw= +github.com/ykadowak/zerologlint v0.1.5/go.mod h1:KaUskqF3e/v59oPmdq1U1DnKcuHokl2/K1U4pmIELKg= github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= -github.com/yuin/goldmark v1.4.0/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= -go.etcd.io/bbolt v1.3.3/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= -go.etcd.io/bbolt v1.3.4/go.mod h1:G5EMThwa9y8QZGBClrRx5EY+Yw9kAhnjy3bSjsnlVTQ= -go.etcd.io/etcd v0.0.0-20200513171258-e048e166ab9c/go.mod h1:xCI7ZzBfRuGgBXyXO6yfWfDmlWd35khcWpUa4L0xI/k= -go.etcd.io/etcd/api/v3 v3.5.0/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs= -go.etcd.io/etcd/client/pkg/v3 v3.5.0/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g= -go.etcd.io/etcd/client/v2 v2.305.0/go.mod h1:h9puh54ZTgAKtEbut2oe9P4L/oqKCVB6xsXlzd7alYQ= -go.mozilla.org/mozlog 
v0.0.0-20170222151521-4bb13139d403/go.mod h1:jHoPAGnDrCy6kaI2tAze5Prf0Nr0w/oNkROt2lw3n3o= +github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +gitlab.com/bosi/decorder v0.4.2 h1:qbQaV3zgwnBZ4zPMhGLW4KZe7A7NwxEhJx39R3shffo= +gitlab.com/bosi/decorder v0.4.2/go.mod h1:muuhHoaJkA9QLcYHq4Mj8FJUwDZ+EirSHRiaTcTf6T8= +go-simpler.org/assert v0.9.0 h1:PfpmcSvL7yAnWyChSjOz6Sp6m9j5lyK8Ok9pEL31YkQ= +go-simpler.org/assert v0.9.0/go.mod h1:74Eqh5eI6vCK6Y5l3PI8ZYFXG4Sa+tkr70OIPJAUr28= +go-simpler.org/musttag v0.13.0 h1:Q/YAW0AHvaoaIbsPj3bvEI5/QFP7w696IMUpnKXQfCE= +go-simpler.org/musttag v0.13.0/go.mod h1:FTzIGeK6OkKlUDVpj0iQUXZLUO1Js9+mvykDQy9C5yM= +go-simpler.org/sloglint v0.9.0 h1:/40NQtjRx9txvsB/RN022KsUJU+zaaSb/9q9BSefSrE= +go-simpler.org/sloglint v0.9.0/go.mod h1:G/OrAF6uxj48sHahCzrbarVMptL2kjWTaUeC8+fOGww= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= -go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= -go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= -go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= -go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= -go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= -go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= +go.uber.org/atomic v1.7.0 h1:ADUqmZGgLDDfbSL9ZmPxKTybcoEYHgpYfELNoN+7hsw= go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= 
-go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= -go.uber.org/multierr v1.3.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4= -go.uber.org/multierr v1.4.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4= +go.uber.org/automaxprocs v1.6.0 h1:O3y2/QNTOdbF+e/dpXNNW7Rx2hZ4sTIPyybbxyNqTUs= +go.uber.org/automaxprocs v1.6.0/go.mod h1:ifeIMSnPZuznNm6jmdzmU3/bfk01Fe2fotchwEFJ8r8= +go.uber.org/goleak v1.1.11 h1:wy28qYRKZgnJTxGxvye5/wgWr1EKjmUDGYox5mGlRlI= +go.uber.org/goleak v1.1.11/go.mod h1:cwTWslyiVhfpKIDGSZEM2HlOvcqm+tG4zioyIeLoqMQ= +go.uber.org/multierr v1.6.0 h1:y6IPFStTAIT5Ytl7/XYmHvzXQ7S3g/IeZW9hyZ5thw4= go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= -go.uber.org/tools v0.0.0-20190618225709-2cfd321de3ee/go.mod h1:vJERXedbb3MVM5f9Ejo0C68/HhF8uaILCdgjnY+goOA= -go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= -go.uber.org/zap v1.13.0/go.mod h1:zwrFLgMcdUuIBviXEYEH1YKNaOBnKXsx2IPda5bBwHM= -go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo= -golang.org/x/crypto v0.0.0-20180501155221-613d6eafa307/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +go.uber.org/zap v1.24.0 h1:FiJd5l1UOLj0wCgbSE0rwwXHzEdAZS6hiiSnxJN/D60= +go.uber.org/zap v1.24.0/go.mod h1:2kMP+WWQ8aoFoedH3T2sq6iJ2yDWpHbP0f6MQbS9Gkg= golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= -golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= -golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod 
h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190923035154-9ee001bba392/go.mod h1:/lpIB1dKB+9EgE3H3cr1v9wB50oz8l4C4h62xy7jSTY= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20201221181555-eec23a3978ad/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= -golang.org/x/crypto v0.0.0-20210513164829-c07d793c2f9a/go.mod h1:P+XmwS30IXTQdn5tA2iutPOUgjI07+tq3H3K9MVA1s8= -golang.org/x/crypto v0.0.0-20210817164053-32db794688a5/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc= +golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -810,7 +644,12 @@ golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u0 golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= -golang.org/x/exp v0.0.0-20200331195152-e8c3332aa8e5/go.mod h1:4M0jN8W1tt0AVLNr8HDosyJCDCDuyL9N9+3m7wDWgKw= +golang.org/x/exp 
v0.0.0-20240909161429-701f63a606c0 h1:e66Fs6Z+fZTbFBAxKfP3PALWBtpfqks2bwGcexMxgtk= +golang.org/x/exp v0.0.0-20240909161429-701f63a606c0/go.mod h1:2TbTHSBQa924w8M6Xs1QcRcFwyucIwBGpK1p2f1YFFY= +golang.org/x/exp/typeparams v0.0.0-20220428152302-39d4317da171/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk= +golang.org/x/exp/typeparams v0.0.0-20230203172020-98cc5a0785f9/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk= +golang.org/x/exp/typeparams v0.0.0-20250210185358-939b2ce775ac h1:TSSpLIG4v+p0rPv1pNOQtl1I8knsO4S9trOxNMOLVP4= +golang.org/x/exp/typeparams v0.0.0-20250210185358-939b2ce775ac/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk= golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= @@ -823,8 +662,6 @@ golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHl golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= @@ -833,18 +670,20 @@ golang.org/x/mod 
v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzB golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.5.0 h1:UG21uOlmZabA4fW5i7ZX6bjw1xELEGg/ZLgZq9auk/Q= -golang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= +golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3/go.mod h1:3p9vT2HGsQu2K1YbXdKPJLVgG5VJdoTa1poYQBtP1AY= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/mod v0.7.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.9.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.13.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= +golang.org/x/mod v0.24.0 h1:ZfthKaKaT4NrhGVZHO1/WDTwGES4De8KtWO0SIbNJMU= +golang.org/x/mod v0.24.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod 
h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20181201002055-351d144fa1fc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= @@ -856,9 +695,6 @@ golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLL golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190923162816-aa69164e4478/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20191002035440-2ec189313ef0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= @@ -866,49 +702,37 @@ golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLL golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net 
v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200421231249-e086a090c8fd/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= -golang.org/x/net v0.0.0-20210428140749-89ef3d95e781/go.mod 
h1:OJAsFXCWl8Ukc7SiCT/9KSuxbyM7479/AVlXFRxuMCk= -golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20210510120150-4163338589ed/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d h1:20cMwl2fHAzkJMEA+8J4JgqBQcQGzbisXo31MIeenXI= -golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY= +golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc= +golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= +golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= +golang.org/x/net v0.16.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= +golang.org/x/net v0.37.0 h1:1zLorHbz+LYj7MQlSf1+2tPIIgibq2eL5xkrGk6f+2c= +golang.org/x/net v0.37.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= 
-golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210402161424-2e8d93401602/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210805134026-6f1e6394065a/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210819190943-2bc19b11175f/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190412183630-56d357773e84/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync 
v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -916,16 +740,16 @@ golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= +golang.org/x/sync v0.4.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= +golang.org/x/sync v0.12.0 h1:MHc5BpPuC30uJk597Ri8TV3CNZcTLu6B6z4lJy+g6Jw= +golang.org/x/sync v0.12.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= 
-golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -934,98 +758,79 @@ golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190922100055-0a153f010e69/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190924154521-2837fb4f24fe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200124204421-9fbb57f87de9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200420163511-1957bb5e6d1f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys 
v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210403161142-5e06dd20ab57/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210514084401-e8d321eab015/go.mod 
h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210603125802-9665404d3644/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210816074244-15123e1e1f71/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210823070655-63515b42dcdf/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210915083310-ed5796bab164/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210917161153-d61c044b1678/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20211013075003-97ac67df715c h1:taxlMj0D/1sOAuv/CbSD+MMDof2vbyPTqz5FNYKpXt8= -golang.org/x/sys v0.0.0-20211013075003-97ac67df715c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= +golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211105183446-c75c47738b0c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= 
+golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.31.0 h1:ioabZlmFYtWhL+TRYpcnNlLwhyxaM9kWTDEmfnprqik= +golang.org/x/sys v0.31.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc= +golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U= +golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= +golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU= +golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text 
v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= -golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= +golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= +golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= +golang.org/x/text v0.23.0 h1:D71I7dUrlY+VX0gQShAThNGHFxZ13dGLBHQLVl1mJlY= +golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20200416051211-89c76fbcd5d1/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20180525024113-a5b4c53f6e8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools 
v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190110163146-51295c7ec13a/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= -golang.org/x/tools v0.0.0-20190307163923-6a08e3108db3/go.mod h1:25r3+/G6/xytQM8iWZKq3Hn0kr0rgFKPUNVEL/dr3z4= golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190311215038-5c2858a9cfe5/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190321232350-e250d351ecad/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190322203728-c1a832b0ad89/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= @@ -1033,26 +838,16 @@ golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgw golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools 
v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20190907020128-2ca718005c18/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20190910044552-dd2b5c81c578/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20190916130336-e45ffcd953cc/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191010075000-0337d82405ff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191108193012-7d206e10da11/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191112195655-aa38f8e97acc/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191216052735-49a3e744a425/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools 
v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200117220505-0cba7a3a9ee9/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= @@ -1065,64 +860,37 @@ golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjs golang.org/x/tools v0.0.0-20200324003944-a576cf524670/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= golang.org/x/tools v0.0.0-20200329025819-fd4102a86c65/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= -golang.org/x/tools v0.0.0-20200414032229-332987a829c3/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200422022333-3d57cf2e726e/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200426102838-f3a5411a4c3b/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools 
v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200622203043-20e05c1c8ffa/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200624225443-88f3c62a19ff/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200625211823-6506e20df31f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200626171337-aa94e735be7f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200630154851-b2d8b0336632/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200706234117-b22de6825cf7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200724022722-7017fd6b1305/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200812195022-5ae4c3c160a0/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200820010801-b793a1359eac/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200831203904-5a2aa26beb65/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= -golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= -golang.org/x/tools v0.0.0-20201001104356-43ebab892c4c/go.mod h1:z6u4i615ZeAfBE4XtMziQW1fSVJXACjjbWkB/mvPzlU= -golang.org/x/tools v0.0.0-20201002184944-ecd9fd270d5d/go.mod h1:z6u4i615ZeAfBE4XtMziQW1fSVJXACjjbWkB/mvPzlU= golang.org/x/tools v0.0.0-20201023174141-c8cfbd0f21e6/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools 
v0.0.0-20201028025901-8cd080b735b3/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201114224030-61ea331ec02b/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201118003311-bd56c0adb394/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201230224404-63754364767c/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20210101214203-2dba1e4ea05c/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20210104081019-d8d6ddbec6ee/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= golang.org/x/tools v0.1.1-0.20210205202024-ef80cdb6ec6d/go.mod h1:9bzcO0MWcOuT0tm1iBGzDVPshzfwoVvREIui8C+MHqU= golang.org/x/tools v0.1.1-0.20210302220138-2ac05c832e1a/go.mod h1:9bzcO0MWcOuT0tm1iBGzDVPshzfwoVvREIui8C+MHqU= golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.5/go.mod 
h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.1.6/go.mod h1:LGqMHiF4EqQNHR1JncWGqT5BVaXmza+X+BDGol+dOxo= -golang.org/x/tools v0.1.7 h1:6j8CgantCy3yc8JGBqkDLMKWqZ0RDU2g1HVgacojGWQ= -golang.org/x/tools v0.1.7/go.mod h1:LGqMHiF4EqQNHR1JncWGqT5BVaXmza+X+BDGol+dOxo= +golang.org/x/tools v0.1.10/go.mod h1:Uh6Zz+xoGYZom868N8YTex3t7RhtHDBrE8Gzo9bV56E= +golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= +golang.org/x/tools v0.3.0/go.mod h1:/rWhSS2+zyEVwoJf8YAX6L2f0ntZ7Kn/mGgAWcipA5k= +golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= +golang.org/x/tools v0.7.0/go.mod h1:4pg6aUX35JBAogB10C9AtvVL+qowtN4pT3CGSQex14s= +golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58= +golang.org/x/tools v0.14.0/go.mod h1:uYBEerGOWcJyEORxN+Ek8+TT266gXkNlHdJBwexUsBg= +golang.org/x/tools v0.31.0 h1:0EedkvKDbh+qistFTd0Bcwe/YLh4vHwWEkiI0toFIBU= +golang.org/x/tools v0.31.0/go.mod h1:naFTU+Cev749tSJRXJlna0T3WxKvb1kWEx15xA4SdmQ= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= -google.golang.org/api v0.10.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= 
google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= @@ -1135,29 +903,13 @@ google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0M google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= -google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg= -google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE= -google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= -google.golang.org/api v0.41.0/go.mod h1:RkxM5lITDfTzmyKFPt+wGrCJbVfniCr2ool8kTBzRTU= -google.golang.org/api v0.43.0/go.mod h1:nQsDGjRXMo4lvh5hP0TKqF244gqhGcr/YSIykhUk/94= -google.golang.org/api v0.44.0/go.mod h1:EBOGZqzyhtvMDoxwS97ctnh0zUmYY6CxqXsc1AvkYD8= -google.golang.org/api v0.47.0/go.mod h1:Wbvgpq1HddcWVtzsVLyfLp8lDg6AA241LmgIL59tHXo= -google.golang.org/api v0.48.0/go.mod h1:71Pr1vy+TAZRPkPs/xlCf5SsU8WjuAWv1Pfjbtukyy4= -google.golang.org/api v0.50.0/go.mod h1:4bNT5pAuq5ji4SRZm+5QIkjny9JAyVD/3gaSihNefaw= -google.golang.org/api v0.51.0/go.mod h1:t4HdrdoNgyN5cbEfm7Lum0lcLDLiise1F8qDKX00sOU= -google.golang.org/api v0.54.0/go.mod h1:7C4bFFOvVDGXjfDTAsgGwDgAxRDeQ4X8NvUedIt6z3k= -google.golang.org/api v0.56.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.6.1/go.mod 
h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= -google.golang.org/appengine v1.6.2/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/genproto v0.0.0-20170818010345-ee236bd376b0/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= -google.golang.org/genproto v0.0.0-20181107211654-5fc9ac540362/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= @@ -1165,7 +917,6 @@ google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRn google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= -google.golang.org/genproto v0.0.0-20190927181202-20e1ac93f88c/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod 
h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= @@ -1179,70 +930,26 @@ google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfG google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200423170343-7949de9c1215/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= -google.golang.org/genproto v0.0.0-20200626011028-ee7919e894b5/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20200707001353-8e8330bf89df/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod 
h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210303154014-9728d6b83eeb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210402141018-6c239bbf2bb1/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A= -google.golang.org/genproto v0.0.0-20210513213006-bf773b8c8384/go.mod h1:P3QM42oQyzQSnHPnZ/vqoCdDmzH28fzWByN9asMeM8A= -google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= -google.golang.org/genproto v0.0.0-20210604141403-392c879c8b08/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= -google.golang.org/genproto v0.0.0-20210608205507-b6d2f5bf0d7d/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= -google.golang.org/genproto v0.0.0-20210624195500-8bfb893ecb84/go.mod h1:SzzZ/N+nwJDaO1kznhnlzqS8ocJICar6hYhVyhi++24= -google.golang.org/genproto v0.0.0-20210713002101-d411969a0d9a/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= -google.golang.org/genproto v0.0.0-20210716133855-ce7ef5c701ea/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= -google.golang.org/genproto 
v0.0.0-20210728212813-7823e685a01f/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= -google.golang.org/genproto v0.0.0-20210805201207-89edb61ffb67/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= -google.golang.org/genproto v0.0.0-20210813162853-db860fec028c/go.mod h1:cFeNkxwySK631ADgubI+/XFU/xp8FD5KIVV4rj8UC5w= -google.golang.org/genproto v0.0.0-20210821163610-241b8fcbd6c8/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= -google.golang.org/genproto v0.0.0-20210828152312-66f60bf46e71/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= -google.golang.org/grpc v1.8.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= -google.golang.org/grpc v1.23.1/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= -google.golang.org/grpc v1.24.0/go.mod h1:XDChyiUovWa60DnaeDeZmSW86xtLtjtZbwvSiRnRtcA= google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= -google.golang.org/grpc v1.29.0/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= -google.golang.org/grpc v1.31.1/go.mod 
h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= -google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= -google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= -google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= -google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= -google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= -google.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= -google.golang.org/grpc v1.37.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= -google.golang.org/grpc v1.37.1/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= -google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= -google.golang.org/grpc v1.39.0/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= -google.golang.org/grpc v1.39.1/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= -google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= -google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= @@ -1255,40 +962,27 @@ google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGj google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.27.1 
h1:SnqbnDw1V7RiZcXPx5MEeqPv2s79L9i7BJUlG/+RurQ= -google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.36.5 h1:tPhr+woSbjfYvY6/GPufUoYizxw1cF/yFoxJ2fmpwlM= +google.golang.org/protobuf v1.36.5/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU= -gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/cheggaaa/pb.v1 v1.0.25/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw= -gopkg.in/cheggaaa/pb.v1 v1.0.28/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= -gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= -gopkg.in/gcfg.v1 v1.2.3/go.mod h1:yesOnuUOFQAhST5vPY4nbZsb/huCgGGXlipJsBn0b3o= -gopkg.in/ini.v1 v1.62.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= -gopkg.in/ini.v1 v1.63.2 h1:tGK/CyBg7SMzb60vP1M03vNZ3VDu3wGQJwn7Sxi9r3c= -gopkg.in/ini.v1 v1.63.2/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= -gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo= -gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ= 
-gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= -gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI= -gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74= +gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA= +gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.6/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b h1:h8qDotaEPuJATrMmW04NCwg7v22aHH28wwpauUhK9Oo= -gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod 
h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= @@ -1296,17 +990,12 @@ honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWh honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= -honnef.co/go/tools v0.2.1 h1:/EPr//+UMMXwMTkXvCCoaJDq8cpjMO80Ou+L4PDo2mY= -honnef.co/go/tools v0.2.1/go.mod h1:lPVVZ2BS5TfnjLyizF7o7hv7j9/L+8cZY2hLyjP9cGY= -mvdan.cc/gofumpt v0.1.1 h1:bi/1aS/5W00E2ny5q65w9SnKpWEF/UIOqDYBILpo9rA= -mvdan.cc/gofumpt v0.1.1/go.mod h1:yXG1r1WqZVKWbVRtBWKWX9+CxGYfA51nSomhM0woR48= -mvdan.cc/interfacer v0.0.0-20180901003855-c20040233aed h1:WX1yoOaKQfddO/mLzdV4wptyWgoH/6hwLs7QHTixo0I= -mvdan.cc/interfacer v0.0.0-20180901003855-c20040233aed/go.mod h1:Xkxe497xwlCKkIaQYRfC7CSLworTXY9RMqwhhCm+8Nc= -mvdan.cc/lint v0.0.0-20170908181259-adc824a0674b h1:DxJ5nJdkhDlLok9K6qO+5290kphDJbHOQO1DFFFTeBo= -mvdan.cc/lint v0.0.0-20170908181259-adc824a0674b/go.mod h1:2odslEg/xrtNQqCYg2/jCoyKnw3vv5biOc3JnIcYfL4= -mvdan.cc/unparam v0.0.0-20210104141923-aac4ce9116a7 h1:HT3e4Krq+IE44tiN36RvVEb6tvqeIdtsVSsxmNPqlFU= -mvdan.cc/unparam v0.0.0-20210104141923-aac4ce9116a7/go.mod h1:hBpJkZE8H/sb+VRFvw2+rBpHNsTBcvSpk61hr8mzXZE= +honnef.co/go/tools v0.6.1 h1:R094WgE8K4JirYjBaOpz/AvTyUu/3wbmAoskKN/pxTI= +honnef.co/go/tools v0.6.1/go.mod h1:3puzxxljPCe8RGJX7BIy1plGbxEOZni5mR2aXe3/uk4= +mvdan.cc/gofumpt v0.7.0 h1:bg91ttqXmi9y2xawvkuMXyvAA/1ZGJqYAEGjXuP0JXU= +mvdan.cc/gofumpt v0.7.0/go.mod h1:txVFJy/Sc/mvaycET54pV8SW8gWxTlUuGHVEcncmNUo= +mvdan.cc/unparam v0.0.0-20250301125049-0df0534333a4 h1:WjUu4yQoT5BHT1w8Zu56SP8367OuBV5jvo+4Ulppyf8= +mvdan.cc/unparam v0.0.0-20250301125049-0df0534333a4/go.mod h1:rthT7OuvRbaGcd5ginj6dA2oLE7YNlta9qhBNNdCaLE= rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= rsc.io/quote/v3 v3.1.0/go.mod 
h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= -sigs.k8s.io/yaml v1.1.0/go.mod h1:UJmg0vDUVViEyp3mgSv9WPwZCDxu4rQW1olrI1uml+o= diff --git a/tools/test-fork.sh b/tools/test-fork.sh new file mode 100755 index 0000000000..50389e6be4 --- /dev/null +++ b/tools/test-fork.sh @@ -0,0 +1,77 @@ +#!/usr/bin/env bash + +readonly wfdir=".github/workflows" + +# $1 - registry repo name + +echo "Parse registry: $1" +firstPart=$(echo "$1" | cut -d/ -f1) +secondPart=$(echo "$1" | cut -d/ -f2) +thirdPart=$(echo "$1" | cut -d/ -f3) + +registry="" +username="" +reponame="" +if [[ -z $thirdPart ]]; then # assume Docker Hub + registry="index.docker.io" + username=$firstPart + reponame=$secondPart +else + registry=$firstPart + username=$secondPart + reponame=$thirdPart +fi + +echo "Using registry $registry and username $username" +if [[ $reponame != "pack" ]]; then + echo "Repo name must be 'pack'" + exit 1 +fi + +echo "Disabling workflows that should not run on the forked repository" +disable=( + delivery-archlinux-git.yml + delivery-archlinux.yml + delivery-chocolatey.yml + delivery-homebrew.yml + delivery-release-dispatch.yml + delivery-ubuntu.yml + privileged-pr-process.yml +) +for d in "${disable[@]}"; do + if [ -e "$wfdir/$d" ]; then + mv "$wfdir/$d" "$wfdir/$d.disabled" + fi +done + +echo "Removing upstream maintainers from the benchmark alert CC" +sed -i '' "/alert-comment-cc-users:/d" $wfdir/benchmark.yml + +echo "Removing the architectures that require self-hosted runner from the build strategies." 
+sed -i '' "/config: \[.*\]/ s/windows-lcow, //g" $wfdir/build.yml +sed -i '' "/- config: windows-lcow/,+4d" $wfdir/build.yml + +echo "Replacing the registry account with owned one (assumes DOCKER_PASSWORD and DOCKER_USERNAME have been added to GitHub secrets, if not using ghcr.io)" +sed -i '' "s/buildpacksio\/pack/$registry\/$username\/$reponame/g" $wfdir/check-latest-release.yml +sed -i '' "/REGISTRY_NAME: 'index.docker.io'/ s/index.docker.io/$registry/g" $wfdir/delivery-docker.yml +sed -i '' "/USER_NAME: 'buildpacksio'/ s/buildpacksio/$username/g" $wfdir/delivery-docker.yml + +if [[ $registry != "index.docker.io" ]]; then + echo "Updating login action to specify the registry" + sed -i '' "s/username: \${{ secrets.DOCKER_USERNAME }}/registry: $registry\n username: $username/g" $wfdir/delivery-docker.yml +fi + +if [[ $registry == *"ghcr.io"* ]]; then + echo "Updating login action to use GitHub token for ghcr.io" + sed -i '' "s/secrets.DOCKER_PASSWORD/secrets.GITHUB_TOKEN/g" $wfdir/delivery-docker.yml + + echo "Adding workflow permissions to push images to ghcr.io" + LF=$'\n' + sed -i '' "/runs-on: ubuntu-latest/ a\\ + permissions:\\ + contents: read\\ + packages: write\\ + attestations: write\\ + id-token: write${LF}" $wfdir/delivery-docker.yml + LF="" +fi diff --git a/tools/tools.go b/tools/tools.go index 0add5c56f9..19c71eab77 100644 --- a/tools/tools.go +++ b/tools/tools.go @@ -1,10 +1,9 @@ //go:build tools -// +build tools package tools import ( _ "github.com/golang/mock/mockgen" - _ "github.com/golangci/golangci-lint/cmd/golangci-lint" + _ "github.com/golangci/golangci-lint/v2/cmd/golangci-lint" _ "golang.org/x/tools/cmd/goimports" ) diff --git a/version.go b/version.go deleted file mode 100644 index 6d858250f9..0000000000 --- a/version.go +++ /dev/null @@ -1,6 +0,0 @@ -package pack - -var ( - // Version is the version of `pack`. It is injected at compile time. - Version = "0.0.0" -)