From ca96939bcd19584b50db2a1d26694e4f96d0ea7e Mon Sep 17 00:00:00 2001
From: Joaquin Santana
Date: Tue, 19 Nov 2024 12:42:01 +0100
Subject: [PATCH] chore: build dist (#1)

---
 dist/index.js | 56329 +++++++++++++++++++++++++++++++++++-------------
 1 file changed, 41183 insertions(+), 15146 deletions(-)

diff --git a/dist/index.js b/dist/index.js
index 29994b0..f2f3fad 100644
--- a/dist/index.js
+++ b/dist/index.js
@@ -7783,6 +7783,15252 @@ exports.parse = __nccwpck_require__(33848)
 exports.stringify = __nccwpck_require__(66303)
+/***/ }),
+
+/***/ 74482:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.getDiffString = exports.getChanges = exports.parseChanges = exports.getAllDiffs = exports.getGitFileData = exports.findRepoRoot = exports.resolvePath = void 0;
+const child_process_1 = __nccwpck_require__(32081);
+const types_1 = __nccwpck_require__(80721);
+const logger_1 = __nccwpck_require__(44869);
+const fs_1 = __nccwpck_require__(57147);
+const path = __nccwpck_require__(71017);
+class InstallationError extends Error {
+    constructor(message) {
+        super(message);
+        this.name = 'InstallationError';
+    }
+}
+/**
+ * Get the absolute path of a relative path
+ * @param {string} dir the directory containing git changes, not necessarily the root git directory
+ * @returns {string} the absolute path, resolved against the directory the command was executed in
+ */
+function resolvePath(dir) {
+    const absoluteDir = path.resolve(process.cwd(), dir);
+    return absoluteDir;
+}
+exports.resolvePath = resolvePath;
+/**
+ * Get the git root directory.
+ * Errors if the directory provided is not a git directory.
+ * @param {string} dir an absolute directory
+ * @returns {string} the absolute path of the git directory root
+ */
+function findRepoRoot(dir) {
+    try {
+        return (0, child_process_1.execSync)('git rev-parse --show-toplevel', { cwd: dir })
+            .toString()
+            .trimRight(); // remove the trailing \n
+    }
+    catch (err) {
+        logger_1.logger.error(`The directory provided is not a git directory: ${dir}`);
+        throw err;
+    }
+}
+exports.findRepoRoot = findRepoRoot;
+/**
+ * Given a git diff, return the old/new mode, status, and path.
+ * Errors if there is a parsing error.
+ * @param {string} gitDiffPattern A single file diff. Renames and copies are broken up into separate diffs.
+ * See https://git-scm.com/docs/git-diff#Documentation/git-diff.txt-git-diff-filesltpatterngt82308203 for more details
+ * @returns indexable git diff fields: old/new mode, status, and path
+ */
+function parseGitDiff(gitDiffPattern) {
+    try {
+        const fields = gitDiffPattern.split(' ');
+        const newMode = fields[1];
+        const oldMode = fields[0].substring(1);
+        const statusAndPath = fields[4].split('\t');
+        const status = statusAndPath[0];
+        const relativePath = statusAndPath[1];
+        return { oldMode, newMode, status, relativePath };
+    }
+    catch (err) {
+        logger_1.logger.warn(`\`git diff --raw\` may have changed formats: \n ${gitDiffPattern}`);
+        throw err;
+    }
+}
+/**
+ * Get the GitHub mode, file content, and relative path asynchronously
+ * Rejects if there is a git diff error, or if the file contents could not be loaded.
+ * @param {string} gitRootDir the root of the local GitHub repository
+ * @param {string} gitDiffPattern A single file diff. Renames and copies are broken up into separate diffs. See https://git-scm.com/docs/git-diff#Documentation/git-diff.txt-git-diff-filesltpatterngt82308203 for more details
+ * @returns {Promise} the current mode, the relative path of the file in the Git Repository, and the file status.
+ */
+function getGitFileData(gitRootDir, gitDiffPattern) {
+    return new Promise((resolve, reject) => {
+        try {
+            const { oldMode, newMode, status, relativePath } = parseGitDiff(gitDiffPattern);
+            // if the file is deleted, do not attempt to read it
+            if (status === 'D') {
+                resolve({ path: relativePath, fileData: new types_1.FileData(null, oldMode) });
+            }
+            else {
+                // else read the file
+                (0, fs_1.readFile)(gitRootDir + '/' + relativePath, {
+                    encoding: 'utf-8',
+                }, (err, content) => {
+                    if (err) {
+                        logger_1.logger.error(`Error loading file ${relativePath} in git directory ${gitRootDir}`);
+                        reject(err);
+                    }
+                    resolve({
+                        path: relativePath,
+                        fileData: new types_1.FileData(content, newMode),
+                    });
+                });
+            }
+        }
+        catch (err) {
+            reject(err);
+        }
+    });
+}
+exports.getGitFileData = getGitFileData;
+/**
+ * Get all the diffs using `git diff` of a git directory.
+ * Errors if the directory provided is not a git directory.
+ * @param {string} gitRootDir a git directory
+ * @returns {string[]} a list of git diffs
+ */
+function getAllDiffs(gitRootDir) {
+    (0, child_process_1.execSync)('git add -A', { cwd: gitRootDir });
+    const diffs = (0, child_process_1.execSync)('git diff --raw --staged --no-renames', {
+        cwd: gitRootDir,
+    })
+        .toString() // strictly return buffer for mocking purposes. sinon ts doesn't infer {encoding: 'utf-8'}
+        .trimRight() // remove the trailing new line
+        .split('\n')
+        .filter(line => !!line.trim());
+    (0, child_process_1.execSync)('git reset .', { cwd: gitRootDir });
+    return diffs;
+}
+exports.getAllDiffs = getAllDiffs;
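+// A minimal usage sketch of the helpers above (added for illustration; the
+// repository path is an assumed example, not part of the library):
+//
+//   const root = findRepoRoot(resolvePath('./my-project'));
+//   const rawDiffs = getAllDiffs(root);
+//   // each entry looks like ':100644 100644 <old-sha> <new-sha> M\tsrc/file.ts'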
+/**
+ * Get the git changes of the current project asynchronously.
+ * Rejects if any of the files fails to load (if not deleted),
+ * or if there is a git diff parse error
+ * @param {string[]} diffs the git diff raw output (which only shows relative paths)
+ * @param {string} gitDir the root of the local GitHub repository
+ * @returns {Promise} the changeset
+ */
+async function parseChanges(diffs, gitDir) {
+    try {
+        // get updated file contents
+        const changes = new Map();
+        const changePromises = [];
+        for (let i = 0; i < diffs.length; i++) {
+            // TODO - handle memory constraint
+            changePromises.push(getGitFileData(gitDir, diffs[i]));
+        }
+        const gitFileDatas = await Promise.all(changePromises);
+        for (let i = 0; i < gitFileDatas.length; i++) {
+            changes.set(gitFileDatas[i].path, gitFileDatas[i].fileData);
+        }
+        return changes;
+    }
+    catch (err) {
+        logger_1.logger.error('Error parsing git changes');
+        throw err;
+    }
+}
+exports.parseChanges = parseChanges;
+/**
+ * Throws an error if git is not installed
+ * @returns {void} void if git is installed
+ */
+function validateGitInstalled() {
+    try {
+        (0, child_process_1.execSync)('git --version');
+    }
+    catch (err) {
+        logger_1.logger.error('git not installed');
+        throw new InstallationError('git command is not recognized. Make sure git is installed.');
+    }
+}
+/**
+ * Load the change set asynchronously.
+ * @param dir the directory containing git changes
+ * @returns {Promise} the change set
+ */
+function getChanges(dir) {
+    try {
+        validateGitInstalled();
+        const absoluteDir = resolvePath(dir);
+        const gitRootDir = findRepoRoot(absoluteDir);
+        const diffs = getAllDiffs(gitRootDir);
+        return parseChanges(diffs, gitRootDir);
+    }
+    catch (err) {
+        if (!(err instanceof InstallationError)) {
+            logger_1.logger.error('Error loading git changes.');
+        }
+        throw err;
+    }
+}
+exports.getChanges = getChanges;
+/**
+ * Get the diff of the changes in a directory as a string.
+ * Errors if git is not installed, if the directory is not a git directory,
+ * or if there is a git diff error
+ * @param {string} dir the directory containing git changes
+ * @returns {string} the diff
+ */
+function getDiffString(dir) {
+    try {
+        validateGitInstalled();
+        const absoluteDir = resolvePath(dir);
+        const gitRootDir = findRepoRoot(absoluteDir);
+        (0, child_process_1.execSync)('git add -A', { cwd: gitRootDir });
+        const diff = (0, child_process_1.execSync)('git diff --staged --no-renames', {
+            cwd: gitRootDir,
+        })
+            .toString() // strictly return buffer for mocking purposes. sinon ts doesn't infer {encoding: 'utf-8'}
+            .trimRight(); // remove the trailing new line
+        (0, child_process_1.execSync)('git reset .', { cwd: gitRootDir });
+        return diff;
+    }
+    catch (err) {
+        if (!(err instanceof InstallationError)) {
+            logger_1.logger.error('Error loading git changes.');
+        }
+        throw err;
+    }
+}
+exports.getDiffString = getDiffString;
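+// Sketch of the two entry points above (the directory is an assumed example):
+//
+//   const changes = await getChanges('./my-project'); // Map<path, FileData>
+//   const diff = getDiffString('./my-project');       // unified diff as a string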
+//# sourceMappingURL=handle-git-dir-change.js.map
+
+/***/ }),
+
+/***/ 13265:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.addReviewCommentsDefaults = exports.addPullRequestDefaults = void 0;
+const DEFAULT_BRANCH_NAME = 'code-suggestions';
+const DEFAULT_PRIMARY_BRANCH = 'main';
+const DEFAULT_PAGE_SIZE = 100;
+/**
+ * Add defaults to GitHub Pull Request options.
+ * Preserves the empty string.
+ * For ECMAScript, null/undefined values are preserved for required fields.
+ * Recommended with an object validation function to check empty strings and incorrect types.
+ * @param {PullRequestUserOptions} options the user-provided github pull request options
+ * @returns {CreatePullRequest} GitHub context with defaults applied
+ */
+function addPullRequestDefaults(options) {
+    const pullRequestSettings = {
+        upstreamOwner: options.upstreamOwner,
+        upstreamRepo: options.upstreamRepo,
+        description: options.description,
+        title: options.title,
+        message: options.message,
+        force: options.force || false,
+        branch: typeof options.branch === 'string' ? options.branch : DEFAULT_BRANCH_NAME,
+        primary: typeof options.primary === 'string'
+            ? options.primary
+            : DEFAULT_PRIMARY_BRANCH,
+        maintainersCanModify: options.maintainersCanModify === false ? false : true,
+        filesPerCommit: options.filesPerCommit,
+    };
+    return pullRequestSettings;
+}
+exports.addPullRequestDefaults = addPullRequestDefaults;
+/**
+ * Format user input for pull request review comments
+ * @param options The user's options input for review comments
+ * @returns the formatted version of user input for pull request review comments
+ */
+function addReviewCommentsDefaults(options) {
+    const createReviewComment = {
+        repo: options.repo,
+        owner: options.owner,
+        pullNumber: options.pullNumber,
+        // preserve a pageSize of 0; only apply the default for null/undefined
+        pageSize: options.pageSize === null || options.pageSize === undefined
+            ? DEFAULT_PAGE_SIZE
+            : options.pageSize,
+    };
+    return createReviewComment;
+}
+exports.addReviewCommentsDefaults = addReviewCommentsDefaults;
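+// Sketch of the defaulting behavior (input values are assumed examples):
+//
+//   addPullRequestDefaults({upstreamOwner: 'me', upstreamRepo: 'demo',
+//       title: 't', description: 'd', message: 'm'});
+//   // -> branch: 'code-suggestions', primary: 'main', force: false,
+//   //    maintainersCanModify: true, plus the fields passed through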
+//# sourceMappingURL=default-options-handler.js.map
+
+/***/ }),
+
+/***/ 98535:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.CommitError = void 0;
+class CommitError extends Error {
+    constructor(message, cause) {
+        super(message);
+        this.cause = cause;
+    }
+}
+exports.CommitError = CommitError;
+//# sourceMappingURL=errors.js.map
+
+/***/ }),
+
+/***/ 42602:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+/*
+ * // Copyright 2020 Google LLC
+ * //
+ * // Licensed under the Apache License, Version 2.0 (the "License");
+ * // you may not use this file except in compliance with the License.
+ * // You may obtain a copy of the License at
+ * //
+ * // https://www.apache.org/licenses/LICENSE-2.0
+ * //
+ * // Unless required by applicable law or agreed to in writing, software
+ * // distributed under the License is distributed on an "AS IS" BASIS,
+ * // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * // See the License for the specific language governing permissions and
+ * // limitations under the License.
+ * //
+ * //Modifications made by Joaquin Santana on 18/11/24, 22:09
+ */
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.branch = exports.createBranch = exports.existsBranchWithName = exports.getBranchHead = exports.createRef = void 0;
+const logger_1 = __nccwpck_require__(44869);
+const REF_PREFIX = 'refs/heads/';
+const DEFAULT_PRIMARY_BRANCH = 'main';
+/**
+ * Create a new branch reference with the ref prefix
+ * @param {string} branchName name of the branch
+ */
+function createRef(branchName) {
+    return REF_PREFIX + branchName;
+}
+exports.createRef = createRef;
+/**
+ * Get the HEAD commit SHA of a repository branch.
+ * Throws an error if the branch cannot be found
+ * @param {Octokit} octokit The authenticated octokit instance
+ * @param {RepoDomain} origin The domain information of the remote origin repository
+ * @param {string} branch the name of the branch
+ * @returns {Promise} branch commit HEAD SHA
+ */
+async function getBranchHead(octokit, origin, branch) {
+    const branchData = (await octokit.repos.getBranch({
+        owner: origin.owner,
+        repo: origin.repo,
+        branch,
+    })).data;
+    // @ts-ignore Gitea adaptation: the commit SHA lives under `commit.id`
+    logger_1.logger.info(`Successfully found branch HEAD sha "${branchData.commit.id}".`);
+    // @ts-ignore Gitea adaptation
+    return branchData.commit.id;
+}
+exports.getBranchHead = getBranchHead;
+/**
+ * Determine if there is a branch with the provided name in the remote GitHub repository
+ * @param {Octokit} octokit The authenticated octokit instance
+ * @param {RepoDomain} remote The domain information of the remote repository
+ * @param {string} name The branch name to look for on the repository
+ * @returns {Promise} whether the branch already exists in the remote GitHub repository
+ */
+async function existsBranchWithName(octokit, remote, name) {
+    try {
+        const data = (await octokit.request('GET /repos/{owner}/{repo}/branches/{branch}', {
+            owner: remote.owner,
+            repo: remote.repo,
+            branch: name,
+        })).data;
+        // @ts-ignore Gitea adaptation
+        return !!data.commit.id;
+    }
+    catch (err) {
+        if (err.status === 404)
+            return false;
+        else
+            throw err;
+    }
+}
+exports.existsBranchWithName = existsBranchWithName;
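+// Sketch of the branch helpers above (owner/repo/branch names are assumed
+// examples; `octokit` is an authenticated instance):
+//
+//   const sha = await getBranchHead(octokit, {owner: 'me', repo: 'demo'}, 'main');
+//   const exists = await existsBranchWithName(octokit, {owner: 'me', repo: 'demo'}, 'feature-x');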
+/**
+ * Create a branch on the remote repository if there is not an existing branch
+ * @param {Octokit} octokit The authenticated octokit instance
+ * @param {RepoDomain} remote The domain information of the remote origin repository
+ * @param {string} name The branch name to create on the origin repository
+ * @param {string} baseSha the sha that the base of the reference points to
+ * @param {boolean} duplicate whether there is an existing branch or not
+ * @returns {Promise}
+ */
+async function createBranch(octokit, remote, name, baseSha, duplicate) {
+    if (!duplicate) {
+        const refData = (await octokit.request('POST /repos/{owner}/{repo}/branches', {
+            owner: remote.owner,
+            repo: remote.repo,
+            new_branch_name: name,
+            old_ref_name: baseSha,
+        })).data;
+        logger_1.logger.info(`Successfully created branch at ${refData.commit.url}`);
+    }
+    else {
+        logger_1.logger.info('Skipping branch creation step...');
+    }
+}
+exports.createBranch = createBranch;
+/**
+ * Create a GitHub branch given a remote origin.
+ * Throws an exception if octokit fails, or if the base branch is invalid
+ * @param {Octokit} octokit The authenticated octokit instance
+ * @param {RepoDomain} origin The domain information of the remote origin repository
+ * @param {RepoDomain} upstream The domain information of the remote upstream repository
+ * @param {string} name The branch name to create on the origin repository
+ * @param {string} baseBranch the name of the branch to base the new branch off of. Default is main
+ * @returns {Promise} the base SHA that subsequent commits on the origin branch are based on
+ */
+async function branch(octokit, origin, upstream, name, baseBranch = DEFAULT_PRIMARY_BRANCH) {
+    // create branch from primary branch HEAD SHA
+    try {
+        const baseSha = await getBranchHead(octokit, upstream, baseBranch);
+        const duplicate = await existsBranchWithName(octokit, origin, name);
+        await createBranch(octokit, origin, name, baseSha, duplicate);
+        return baseSha;
+    }
+    catch (err) {
+        logger_1.logger.error('Error when creating branch');
+        throw err;
+    }
+}
+exports.branch = branch;
+//# sourceMappingURL=branch.js.map
+
+/***/ }),
+
+/***/ 12796:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+/*
+ * // Copyright 2020 Google LLC
+ * //
+ * // Licensed under the Apache License, Version 2.0 (the "License");
+ * // you may not use this file except in compliance with the License.
+ * // You may obtain a copy of the License at
+ * //
+ * // https://www.apache.org/licenses/LICENSE-2.0
+ * //
+ * // Unless required by applicable law or agreed to in writing, software
+ * // distributed under the License is distributed on an "AS IS" BASIS,
+ * // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * // See the License for the specific language governing permissions and
+ * // limitations under the License.
+ * //
+ * //Modifications made by Joaquin Santana on 18/11/24, 22:09
+ */
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.commitAndPush = exports.updateRef = exports.createTree = exports.generateTreeObjects = void 0;
+const logger_1 = __nccwpck_require__(44869);
+const create_commit_1 = __nccwpck_require__(18746);
+const errors_1 = __nccwpck_require__(98535);
+const git = __nccwpck_require__(85114);
+const DEFAULT_FILES_PER_COMMIT = 100;
+/**
+ * Generate and return a GitHub tree object structure
+ * containing the target change data
+ * See https://developer.github.com/v3/git/trees/#tree-object
+ * @param {Changes} changes the set of repository changes
+ * @returns {TreeObject[]} The new GitHub changes
+ */
+function generateTreeObjects(changes) {
+    const tree = [];
+    changes.forEach((fileData, path) => {
+        if (fileData.content === null) {
+            // if there is no file content then the file is deleted
+            tree.push({
+                path,
+                mode: fileData.mode,
+                type: 'blob',
+                sha: null,
+            });
+        }
+        else {
+            // update the file with its content
+            tree.push({
+                path,
+                mode: fileData.mode,
+                type: 'blob',
+                content: fileData.content,
+            });
+        }
+    });
+    return tree;
+}
+exports.generateTreeObjects = generateTreeObjects;
+function* inGroupsOf(all, groupSize) {
+    for (let i = 0; i < all.length; i += groupSize) {
+        yield all.slice(i, i + groupSize);
+    }
+}
+/**
+ * Upload and create a remote GitHub tree
+ * and resolves with the new tree SHA.
+ * Rejects if GitHub V3 API fails with the GitHub error response
+ * @param {Octokit} octokit The authenticated octokit instance
+ * @param {RepoDomain} origin the remote repository to push changes to
+ * @param {string} refHead the base of the new commit(s)
+ * @param {TreeObject[]} tree the set of GitHub changes to upload
+ * @param gitConfig
+ * @returns {Promise} the GitHub tree SHA
+ * @throws {CommitError}
+ */
+async function createTree(octokit, origin, refHead, tree, gitConfig) {
+    const oldTreeSha = (await octokit.git.getCommit({
+        owner: origin.owner,
+        repo: origin.repo,
+        commit_sha: refHead,
+    })) // @ts-ignore
+        .data.commit.tree.sha;
+    logger_1.logger.info('Got the latest commit tree');
+    try {
+        const oldTree = await git.readTree({ ...gitConfig, oid: oldTreeSha });
+        const transformTree = await Promise.all(tree.map(async (value) => {
+            if (value.content) {
+                value.sha = await git.writeBlob({
+                    ...gitConfig,
+                    blob: Buffer.from(value.content),
+                });
+            }
+            const treeEntry = {
+                mode: value.mode,
+                path: value.path,
+                oid: value.sha || '',
+                type: value.type,
+            };
+            return treeEntry;
+        }));
+        // add all the old tree entries to the new tree if the path is not already in the new tree
+        oldTree.tree.forEach(value => {
+            if (!transformTree.find(treeEntry => treeEntry.path === value.path)) {
+                transformTree.push(value);
+            }
+        });
+        const treeSha = await git.writeTree({ ...gitConfig, tree: transformTree });
+        logger_1.logger.info(`Successfully created a tree with the desired changes with SHA ${treeSha}`);
+        return treeSha;
+    }
+    catch (e) {
+        throw new errors_1.CommitError(`Error adding to tree: ${refHead}`, e);
+    }
+}
+exports.createTree = createTree;
+/**
+ * Update a reference to a SHA
+ * Rejects if GitHub V3 API fails with the GitHub error response
+ * @param {BranchDomain} origin the remote branch to push changes to
+ * @param {string} newSha the ref to update the commit HEAD to
+ * @param {boolean} force to force the commit changes given refHead
+ * @param gitConfig
+ * @returns {Promise}
+ */
+async function updateRef(origin, newSha, force, gitConfig) {
+    logger_1.logger.info(`Updating reference heads/${origin.branch} to ${newSha}`);
+    try {
+        await git.writeRef({
+            ...gitConfig,
+            ref: `refs/heads/${origin.branch}`,
+            value: newSha,
+            force,
+        });
+        logger_1.logger.info(`Successfully updated reference ${origin.branch} to ${newSha}`);
+    }
+    catch (e) {
+        throw new errors_1.CommitError(`Error updating ref heads/${origin.branch} to ${newSha}`, e);
+    }
+}
+exports.updateRef = updateRef;
+/**
+ * Given a set of changes, apply the commit(s) on top of the given branch's head and upload it to GitHub
+ * Rejects if GitHub V3 API fails with the GitHub error response
+ * @param {Octokit} octokit The authenticated octokit instance
+ * @param {string} refHead the base of the new commit(s)
+ * @param {Changes} changes the set of repository changes
+ * @param originBranch
+ * @param {string} commitMessage the message of the new commit
+ * @param {boolean} force to force the commit changes given refHead
+ * @param options
+ * @returns {Promise}
+ * @throws {CommitError}
+ */
+async function commitAndPush(octokit, refHead, changes, originBranch, commitMessage, force, options) {
+    var _a;
+    const filesPerCommit = (_a = options === null || options === void 0 ? void 0 : options.filesPerCommit) !== null && _a !== void 0 ? _a : DEFAULT_FILES_PER_COMMIT;
+    const tree = generateTreeObjects(changes);
+    for (const treeGroup of inGroupsOf(tree, filesPerCommit)) {
+        const treeSha = await createTree(octokit, originBranch, refHead, treeGroup, options === null || options === void 0 ? void 0 : options.gitConfig);
+        refHead = await (0, create_commit_1.createCommit)(refHead, treeSha, commitMessage, options);
+    }
+    await updateRef(originBranch, refHead, force, options === null || options === void 0 ? void 0 : options.gitConfig);
+    await git.push({ ...options === null || options === void 0 ? void 0 : options.gitConfig, force: force });
+    logger_1.logger.info('Pushed to remote repository successfully');
+}
+exports.commitAndPush = commitAndPush;
+//# sourceMappingURL=commit-and-push.js.map
+
+/***/ }),
+
+/***/ 18746:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+/*
+ * // Copyright 2020 Google LLC
+ * //
+ * // Licensed under the Apache License, Version 2.0 (the "License");
+ * // you may not use this file except in compliance with the License.
+ * // You may obtain a copy of the License at
+ * //
+ * // https://www.apache.org/licenses/LICENSE-2.0
+ * //
+ * // Unless required by applicable law or agreed to in writing, software
+ * // distributed under the License is distributed on an "AS IS" BASIS,
+ * // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * // See the License for the specific language governing permissions and
+ * // limitations under the License.
+ * //
+ * //Modifications made by Joaquin Santana on 18/11/24, 22:09
+ */
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.createCommit = void 0;
+const logger_1 = __nccwpck_require__(44869);
+const errors_1 = __nccwpck_require__(98535);
+const git = __nccwpck_require__(85114);
+/**
+ * Create a commit with a repo snapshot SHA on top of the reference HEAD
+ * and resolve with the SHA of the commit.
+ * Rejects if GitHub V3 API fails with the GitHub error response
+ * @param {string} refHead the base of the new commit(s)
+ * @param {string} treeSha the tree SHA that this commit will point to
+ * @param {string} message the message of the new commit
+ * @param options
+ * @returns {Promise} the new commit SHA
+ * @see https://docs.github.com/en/rest/git/commits?apiVersion=2022-11-28#create-a-commit
+ */
+async function createCommit(refHead, treeSha, message, options = {}) {
+    try {
+        const signature = options.signer
+            ? await options.signer.generateSignature({
+                message,
+                tree: treeSha,
+                parents: [refHead],
+                author: options.author,
+                committer: options.committer,
+            })
+            : undefined;
+        await git.fetch({
+            ...options.gitConfig,
+        });
+        logger_1.logger.info('fetched the latest changes from the remote repository');
+        const sha = await git.commit({
+            ...options.gitConfig,
+            message,
+            tree: treeSha,
+            parent: [refHead],
+            signingKey: signature,
+            author: options.author,
+            committer: options.committer,
+        });
+        logger_1.logger.info(`Successfully created commit. See commit at ${sha}`);
+        return sha;
+    }
+    catch (e) {
+        throw new errors_1.CommitError(`Error creating commit for: ${treeSha}`, e);
+    }
+}
+exports.createCommit = createCommit;
+//# sourceMappingURL=create-commit.js.map
+
+/***/ }),
+
+/***/ 34248:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.fork = void 0;
+const logger_1 = __nccwpck_require__(44869);
+/**
+ * Fork the GitHub owner's repository.
+ * Returns the fork owner and fork repo when the fork creation request to GitHub succeeds.
+ * Otherwise throws an error.
+ *
+ * If the fork already exists, no new fork is created and no error occurs; the existing fork data is returned
+ * with the `updated_at` + any historical repo changes.
+ * @param {Octokit} octokit The authenticated octokit instance
+ * @param {RepoDomain} upstream upstream repository information
+ * @returns {Promise} the forked repository name, as well as the owner of that fork
+ */
+async function fork(octokit, upstream) {
+    try {
+        const forkedRepo = (await octokit.repos.createFork({
+            owner: upstream.owner,
+            repo: upstream.repo,
+        })).data;
+        const origin = {
+            repo: forkedRepo.name,
+            owner: forkedRepo.owner.login,
+        };
+        logger_1.logger.info(`Create fork request was successful for ${origin.owner}/${origin.repo}`);
+        return origin;
+    }
+    catch (err) {
+        logger_1.logger.error('Error when forking');
+        throw err;
+    }
+}
+exports.fork = fork;
+//# sourceMappingURL=fork.js.map
+
+/***/ }),
+
+/***/ 26871:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.addLabels = void 0;
+const logger_1 = __nccwpck_require__(44869);
+/**
+ * Add labels to an issue or pull request on the upstream repository
+ * Throws an error if the GitHub API fails
+ * @param {Octokit} octokit The authenticated octokit instance
+ * @param {RepoDomain} upstream The upstream repository
+ * @param {BranchDomain} origin The remote origin information that contains the origin branch
+ * @param {number} issue_number The issue number to add labels to. Can also be a PR number
+ * @param {string[]} labels The list of labels to apply to the issue/pull request. Defaults to []; the function will no-op
+ * @returns {Promise} The list of resulting labels after the addition of the given labels
+ */
+async function addLabels(octokit, upstream, origin, issue_number, labels) {
+    if (!labels || labels.length === 0) {
+        return [];
+    }
+    const labelsResponseData = (await octokit.issues.addLabels({
+        owner: upstream.owner,
+        repo: origin.repo,
+        issue_number: issue_number,
+        labels: labels,
+    })).data;
+    logger_1.logger.info(`Successfully added labels ${labels} to issue: ${issue_number}`);
+    return labelsResponseData.map(l => l.name);
+}
+exports.addLabels = addLabels;
+//# sourceMappingURL=labels.js.map
+
+/***/ }),
+
+/***/ 24703:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+/*
+ * // Copyright 2020 Google LLC
+ * //
+ * // Licensed under the Apache License, Version 2.0 (the "License");
+ * // you may not use this file except in compliance with the License.
+ * // You may obtain a copy of the License at
+ * //
+ * // https://www.apache.org/licenses/LICENSE-2.0
+ * //
+ * // Unless required by applicable law or agreed to in writing, software
+ * // distributed under the License is distributed on an "AS IS" BASIS,
+ * // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * // See the License for the specific language governing permissions and
+ * // limitations under the License.
+ * //
+ * //Modifications made by Joaquin Santana on 18/11/24, 22:09
+ */
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.openPullRequest = void 0;
+const logger_1 = __nccwpck_require__(44869);
+const DEFAULT_PRIMARY = 'main';
+/**
+ * Create a GitHub PR on the upstream organization's repo
+ * Throws an error if the GitHub API fails
+ * @param {Octokit} octokit The authenticated octokit instance
+ * @param {RepoDomain} upstream The upstream repository
+ * @param {BranchDomain} origin The remote origin information that contains the origin branch
+ * @param {Description} description The pull request title and detailed description
+ * @param {boolean} maintainersCanModify Whether or not maintainers can modify the pull request. Default is true
+ * @param {string} upstreamPrimary The upstream repository's primary branch. Default is main.
+ * @param draft Open a DRAFT pull request. Defaults to false.
+ * @returns {Promise}
+ */
+async function openPullRequest(octokit, upstream, origin, description, maintainersCanModify = true, upstreamPrimary = DEFAULT_PRIMARY, draft = false) {
+    const head = `${origin.owner}:${origin.branch}`;
+    const existingPullRequest = (await octokit.pulls.list({
+        owner: upstream.owner,
+        repo: origin.repo,
+        head,
+        state: 'open',
+    })).data.find(pr => `${pr.head.repo.owner.login}:${pr.head.label}` === head);
+    if (existingPullRequest) {
+        logger_1.logger.info(`Found existing pull request for reference ${origin.owner}:${origin.branch}. Skipping creating a new pull request.`);
+        return existingPullRequest.number;
+    }
+    const pullResponseData = (await octokit.pulls.create({
+        owner: upstream.owner,
+        repo: origin.repo,
+        title: description.title,
+        head: `${origin.owner}:${origin.branch}`,
+        base: upstreamPrimary,
+        body: description.body,
+        maintainer_can_modify: maintainersCanModify,
+        draft: draft,
+    })).data;
+    logger_1.logger.info(`Successfully opened pull request available at URL: ${pullResponseData.url}.`);
+    return pullResponseData.number;
+}
+exports.openPullRequest = openPullRequest;
+//# sourceMappingURL=open-pull-request.js.map
+
+/***/ }),
+
+/***/ 62476:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.getPullRequestHunks = exports.getCurrentPullRequestPatches = exports.createPullRequestReview = exports.makeInlineSuggestions = exports.buildReviewComments = exports.buildSummaryComment = void 0;
+const logger_1 = __nccwpck_require__(44869);
+const diff_utils_1 = __nccwpck_require__(85556);
+const hunk_utils_1 = __nccwpck_require__(61689);
+function hunkErrorMessage(hunk) {
+    return ` * lines ${hunk.oldStart}-${hunk.oldEnd}`;
+}
+function fileErrorMessage(filename, hunks) {
+    return `* ${filename}\n` + hunks.map(hunkErrorMessage).join('\n');
+}
+/**
+ * Build an error message based on invalid hunks.
+ * Returns an empty string if the provided hunks are empty.
+ * @param invalidHunks a map of filename to hunks that are not suggestable
+ */
+function buildSummaryComment(invalidHunks) {
+    if (invalidHunks.size === 0) {
+        return '';
+    }
+    return ('Some suggestions could not be made:\n' +
+        Array.from(invalidHunks, ([filename, hunks]) => fileErrorMessage(filename, hunks)).join('\n'));
+}
+exports.buildSummaryComment = buildSummaryComment;
+const COMFORT_PREVIEW_HEADER = 'application/vnd.github.comfort-fade-preview+json';
+/**
+ * Convert the patch suggestions into GitHub parameter objects.
+ * Use this to generate review comments
+ * For information see:
+ * https://developer.github.com/v3/pulls/comments/#create-a-review-comment-for-a-pull-request
+ * @param suggestions
+ */
+function buildReviewComments(suggestions) {
+    const fileComments = [];
+    suggestions.forEach((hunks, fileName) => {
+        hunks.forEach(hunk => {
+            const newContent = hunk.newContent.join('\n');
+            if (hunk.oldStart === hunk.oldEnd) {
+                const singleComment = {
+                    path: fileName,
+                    body: `\`\`\`suggestion\n${newContent}\n\`\`\``,
+                    line: hunk.oldEnd,
+                    side: 'RIGHT',
+                };
+                fileComments.push(singleComment);
+            }
+            else {
+                const comment = {
+                    path: fileName,
+                    body: `\`\`\`suggestion\n${newContent}\n\`\`\``,
+                    start_line: hunk.oldStart,
+                    line: hunk.oldEnd,
+                    side: 'RIGHT',
+                    start_side: 'RIGHT',
+                };
+                fileComments.push(comment);
+            }
+        });
+    });
+    return fileComments;
+}
+exports.buildReviewComments = buildReviewComments;
+/**
+ * Make a request to GitHub to make review comments
+ * @param octokit an authenticated octokit instance
+ * @param suggestions code suggestions patches
+ * @param remote the repository domain
+ * @param pullNumber the pull request number to make a review on
+ */
+async function makeInlineSuggestions(octokit, suggestions, outOfScopeSuggestions, remote, pullNumber) {
+    const comments = buildReviewComments(suggestions);
+    if (!comments.length) {
+        logger_1.logger.info('No valid suggestions to make');
+    }
+    if (!comments.length && !outOfScopeSuggestions.size) {
+        logger_1.logger.info('No suggestions were generated. Exiting...');
+        return null;
+    }
+    const summaryComment = buildSummaryComment(outOfScopeSuggestions);
+    if (summaryComment) {
+        logger_1.logger.warn('Some suggestions could not be made');
+    }
+    // apply the suggestions to the latest sha
+    // the latest Pull Request hunk range includes
+    // all previous commit valid hunk ranges
+    const headSha = (await octokit.pulls.get({
+        owner: remote.owner,
+        repo: remote.repo,
+        pull_number: pullNumber,
+    })).data.head.sha;
+    const reviewNumber = (await octokit.pulls.createReview({
+        owner: remote.owner,
+        repo: remote.repo,
+        pull_number: pullNumber,
+        commit_id: headSha,
+        event: 'COMMENT',
+        body: summaryComment,
+        headers: { accept: COMFORT_PREVIEW_HEADER },
+        // Octokit type definitions don't support multiline comments, but the GitHub API does
+        comments: comments,
+    })).data.id;
+    logger_1.logger.info(`Successfully created a review on pull request: ${pullNumber}.`);
+    return reviewNumber;
+}
+exports.makeInlineSuggestions = makeInlineSuggestions;
+/**
+ * Comment on a Pull Request
+ * @param {Octokit} octokit authenticated octokit instance
+ * @param {RepoDomain} remote the Pull Request repository
+ * @param {number} pullNumber the Pull Request number
+ * @param {number} pageSize the number of files to comment on // TODO pagination
+ * @param {Map} diffContents the old and new contents of the files to suggest
+ * @returns the created review's id, or null if no review was made
+ */
+async function createPullRequestReview(octokit, remote, pullNumber, pageSize, diffContents) {
+    try {
+        // get the hunks from the pull request
+        const pullRequestHunks = await exports.getPullRequestHunks(octokit, remote, pullNumber, pageSize);
+        // get the hunks from the suggested change
+        const allSuggestedHunks = typeof diffContents === 'string'
+            ? (0, diff_utils_1.parseAllHunks)(diffContents)
+            : (0, hunk_utils_1.getRawSuggestionHunks)(diffContents);
+        // split hunks by commentable and uncommentable
+        const { validHunks, invalidHunks } = (0, hunk_utils_1.partitionSuggestedHunksByScope)(pullRequestHunks, allSuggestedHunks);
+        // create pull request review
+        const reviewNumber = await exports.makeInlineSuggestions(octokit, validHunks, invalidHunks, remote, pullNumber);
+        return reviewNumber;
+    }
+    catch (err) {
+        logger_1.logger.error('Failed to suggest');
+        throw err;
+    }
+}
+exports.createPullRequestReview = createPullRequestReview;
+/**
+ * For a pull request, get each remote file's patch text asynchronously
+ * Also get the list of files whose patch data could not be returned
+ * @param {Octokit} octokit the authenticated octokit instance
+ * @param {RepoDomain} remote the remote repository domain information
+ * @param {number} pullNumber the pull request number
+ * @param {number} pageSize the number of results to return per page
+ * @returns {Promise} the stringified patch data for each file and the list of files whose patch data could not be resolved
+ */
+async function getCurrentPullRequestPatches(octokit, remote, pullNumber, pageSize) {
+    // TODO: support pagination
+    const filesMissingPatch = [];
+    const files = (await octokit.pulls.listFiles({
+        owner: remote.owner,
+        repo: remote.repo,
+        pull_number: pullNumber,
+        per_page: pageSize,
+    })).data;
+    const patches = new Map();
+    if (files.length === 0) {
+        logger_1.logger.error(`0 file results have been returned from the list files query for Pull Request #${pullNumber}. Cannot make suggestions on an empty Pull Request`);
+        throw Error('Empty Pull Request');
+    }
+    files.forEach(file => {
+        if (file.patch === undefined) {
+            // files whose patch is too large do not return the patch text by default
+            // TODO handle file patches that are too large
+            logger_1.logger.warn(`File ${file.filename} may have a patch that is too large to return in the patch object.`);
+            filesMissingPatch.push(file.filename);
+        }
+        else {
+            patches.set(file.filename, file.patch);
+        }
+    });
+    if (patches.size === 0) {
+        logger_1.logger.warn('0 patches have been returned. This could be because the patch results were too large to return.');
+    }
+    return { patches, filesMissingPatch };
+}
+exports.getCurrentPullRequestPatches = getCurrentPullRequestPatches;
+/**
+ * For a pull request, get each remote file's current patch range to identify the scope of each patch as a Map.
+ * @param {Octokit} octokit the authenticated octokit instance
+ * @param {RepoDomain} remote the remote repository domain information
+ * @param {number} pullNumber the pull request number
+ * @param {number} pageSize the number of files to return per pull request list files query
+ * @returns {Promise} the scope of each file in the pull request
+ */
+async function getPullRequestHunks(octokit, remote, pullNumber, pageSize) {
+    const files = (await octokit.pulls.listFiles({
+        owner: remote.owner,
+        repo: remote.repo,
+        pull_number: pullNumber,
+        per_page: pageSize,
+    })).data;
+    const pullRequestHunks = new Map();
+    if (files.length === 0) {
+        logger_1.logger.error(`0 file results have been returned from the list files query for Pull Request #${pullNumber}. Cannot make suggestions on an empty Pull Request`);
+        throw Error('Empty Pull Request');
+    }
+    files.forEach(file => {
+        if (file.patch === undefined) {
+            // files whose patch is too large do not return the patch text by default
+            // TODO handle file patches that are too large
+            logger_1.logger.warn(`File ${file.filename} may have a patch that is too large to return in the patch object.`);
+        }
+        else {
+            const hunks = (0, diff_utils_1.parsePatch)(file.patch);
+            pullRequestHunks.set(file.filename, hunks);
+        }
+    });
+    if (pullRequestHunks.size === 0) {
+        logger_1.logger.warn('0 patches have been returned. This could be because the patch results were too large to return.');
+    }
+    return pullRequestHunks;
+}
+exports.getPullRequestHunks = getPullRequestHunks;
+//# sourceMappingURL=review-pull-request.js.map
+
+/***/ }),
+
+/***/ 39007:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+/*
+ * // Copyright 2020 Google LLC
+ * //
+ * // Licensed under the Apache License, Version 2.0 (the "License");
+ * // you may not use this file except in compliance with the License.
+ * // You may obtain a copy of the License at
+ * //
+ * // https://www.apache.org/licenses/LICENSE-2.0
+ * //
+ * // Unless required by applicable law or agreed to in writing, software
+ * // distributed under the License is distributed on an "AS IS" BASIS,
+ * // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * // See the License for the specific language governing permissions and
+ * // limitations under the License.
+ * //
+ * //Modifications made by Joaquin Santana on 18/11/24, 22:09
+ */
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.parseTextFiles = exports.createPullRequest = exports.reviewPullRequest = exports.CommitError = exports.getDiffString = exports.getChanges = void 0;
+const types_1 = __nccwpck_require__(80721);
+const logger_1 = __nccwpck_require__(44869);
+const default_options_handler_1 = __nccwpck_require__(13265);
+const retry = __nccwpck_require__(33415);
+const web_1 = __nccwpck_require__(26672);
+const git = __nccwpck_require__(85114);
+const fs = __nccwpck_require__(60843);
+const review_pull_request_1 = __nccwpck_require__(62476);
+const branch_1 = __nccwpck_require__(42602);
+const fork_1 = __nccwpck_require__(34248);
+const commit_and_push_1 = __nccwpck_require__(12796);
+const open_pull_request_1 = __nccwpck_require__(24703);
+const labels_1 = __nccwpck_require__(26871);
+const os = __nccwpck_require__(70612);
+const path = __nccwpck_require__(49411);
+var handle_git_dir_change_1 = __nccwpck_require__(74482);
+Object.defineProperty(exports, "getChanges", ({ enumerable: true, get: function () { return handle_git_dir_change_1.getChanges; } }));
+Object.defineProperty(exports, "getDiffString", ({ enumerable: true, get: function () { return handle_git_dir_change_1.getDiffString; } }));
+var errors_1 = __nccwpck_require__(98535);
+Object.defineProperty(exports, "CommitError", ({ enumerable: true, get: function () { return errors_1.CommitError; } }));
+/**
+ * Given a set of suggestions, make all the multiline inline review comments on a given pull request given
+ * that they are in scope of the pull request. Out-of-scope suggestions are not made.
+ *
+ * In-scope suggestions are specifically: the suggestion for a file must correspond to a file in the remote pull request
+ * and the diff hunk computed for a file's contents must produce a range that is a subset of the pull request's files hunks.
+ *
+ * If a file is too large to load in the review, it is skipped in the suggestion phase.
+ *
+ * If changes are empty then the workflow will not run.
+ * Rethrows an HttpError if Octokit GitHub API returns an error. HttpError Octokit access_token and client_secret headers redact all sensitive information.
+ * @param octokit The authenticated octokit instance, instantiated with an access token having permission to create a fork on the target repository.
+ * @param diffContents A set of changes. The changes may be empty.
+ * @param options The configuration for interacting with GitHub provided by the user.
+ * @returns the created review's id number, or null if there are no changes to be made.
+ */
+async function reviewPullRequest(octokit, diffContents, options) {
+    (0, logger_1.setupLogger)(options.logger);
+    // if null, undefined, or the empty map, then no changes have been provided.
+    // Do not execute the GitHub workflow
+    if (diffContents === null ||
+        diffContents === undefined ||
+        (typeof diffContents !== 'string' && diffContents.size === 0)) {
+        logger_1.logger.info('Empty changes provided. No suggestions to be made. Cancelling workflow.');
+        return null;
+    }
+    const gitHubConfigs = (0, default_options_handler_1.addReviewCommentsDefaults)(options);
+    const remote = {
+        owner: gitHubConfigs.owner,
+        repo: gitHubConfigs.repo,
+    };
+    const reviewNumber = await (0, review_pull_request_1.createPullRequestReview)(octokit, remote, gitHubConfigs.pullNumber, gitHubConfigs.pageSize, diffContents);
+    return reviewNumber;
+}
+exports.reviewPullRequest = reviewPullRequest;
+/**
+ * Make a new GitHub Pull Request with a set of changes applied on top of primary branch HEAD.
+ * The changes are committed into a new branch based on the upstream repository options using the authenticated Octokit account.
+ * Then a Pull Request is made from that branch.
+ *
+ * Also throws an error if git data from the fork is not ready in 5 minutes.
+ *
+ * From the docs
+ * https://developer.github.com/v3/repos/forks/#create-a-fork
+ * """
+ * Forking a Repository happens asynchronously.
+ * You may have to wait a short period of time before you can access the git objects.
+ * If this takes longer than 5 minutes, be sure to contact GitHub Support or GitHub Premium Support.
+ * """
+ *
+ * If changes are empty then the workflow will not run.
+ * Rethrows an HttpError if Octokit GitHub API returns an error. HttpError Octokit access_token and client_secret headers redact all sensitive information.
+ * @param {Octokit} octokit The authenticated octokit instance, instantiated with an access token having permission to create a fork on the target repository
+ * @param {Changes | null | undefined} changes A set of changes. The changes may be empty
+ * @param {CreatePullRequestUserOptions} options The configuration for interacting with GitHub provided by the user.
+ * @returns {Promise} the pull request number. Returns 0 if unsuccessful.
+ * @throws {CommitError} on failure during the commit process
+ */
+async function createPullRequest(octokit, changes, options) {
+    (0, logger_1.setupLogger)(options.logger);
+    // if null, undefined, or the empty map, then no changes have been provided.
+    // Do not execute the GitHub workflow
+    if (changes === null || changes === undefined || changes.size === 0) {
+        logger_1.logger.info('Empty change set provided. No changes need to be made. Cancelling workflow.');
+        return 0;
+    }
+    const gitHubConfigs = (0, default_options_handler_1.addPullRequestDefaults)(options);
+    logger_1.logger.info('Starting GitHub PR workflow...');
+    const upstream = {
+        owner: gitHubConfigs.upstreamOwner,
+        repo: gitHubConfigs.upstreamRepo,
+    };
+    const origin = options.fork === false ? upstream : await (0, fork_1.fork)(octokit, upstream);
+    if (options.fork) {
+        // try to sync the fork
+        await retry(async () => await octokit.repos.mergeUpstream({
+            owner: origin.owner,
+            repo: origin.repo,
+            branch: gitHubConfigs.primary,
+        }), {
+            retries: options.retry,
+            factor: 2.8411,
+            minTimeout: 3000,
+            randomize: false,
+            onRetry: (e, attempt) => {
+                e.message = `Error syncing upstream: ${e.message}`;
+                logger_1.logger.error(e);
+                logger_1.logger.info(`Retry attempt #${attempt}...`);
+            },
+        });
+    }
+    const originBranch = {
+        ...origin,
+        branch: gitHubConfigs.branch,
+    };
+    // The `retry` flag defaults to `5` to maintain compatibility
+    options.retry = options.retry === undefined ? 5 : options.retry;
+    const refHeadSha = await retry(async () => await (0, branch_1.branch)(octokit, origin, upstream, originBranch.branch, gitHubConfigs.primary), {
+        retries: options.retry,
+        factor: 2.8411,
+        minTimeout: 3000,
+        randomize: false,
+        onRetry: (e, attempt) => {
+            e.message = `Error creating Pull Request: ${e.message}`;
+            logger_1.logger.error(e);
+            logger_1.logger.info(`Retry attempt #${attempt}...`);
+        },
+    });
+    const tempDirectory = await fs.mkdtemp(path.join(os.tmpdir(), 'git-'));
+    logger_1.logger.info(`Cloning repository to ${tempDirectory}`);
+    const gitConfig = {
+        fs,
+        http: web_1.default,
+        dir: tempDirectory,
+        onAuth: () => ({
+            username: options.username,
+            password: options.password,
+        }),
+    };
+    const octokitBaseUrl = octokit.request.endpoint.DEFAULTS.baseUrl;
+    const baseUrl = octokitBaseUrl.substring(0, octokitBaseUrl.indexOf('/api'));
+    const url = `${baseUrl}/${origin.owner}/${origin.repo}.git`;
+    await git.clone({
+        ...gitConfig,
+        url: url,
+        ref: originBranch.branch,
+    });
+    logger_1.logger.info(`repository cloned to branch ${originBranch.branch}`);
+    options = options !== null && options !== void 0 ? options : {};
+    options.gitConfig = gitConfig;
+    await (0, commit_and_push_1.commitAndPush)(octokit, refHeadSha, changes, originBranch, gitHubConfigs.message, gitHubConfigs.force, options);
+    const description = {
+        body: gitHubConfigs.description,
+        title: gitHubConfigs.title,
+    };
+    const prNumber = await (0, open_pull_request_1.openPullRequest)(octokit, upstream, originBranch, description, gitHubConfigs.maintainersCanModify, gitHubConfigs.primary, options.draft);
+    logger_1.logger.info(`Successfully opened pull request: ${prNumber}.`);
+    // addLabels will no-op if options.labels is undefined or empty.
+    await (0, labels_1.addLabels)(octokit, upstream, originBranch, prNumber, options.labels);
+    return prNumber;
+}
+exports.createPullRequest = createPullRequest;
+/**
+ * Convert a Map or a {[path: string]: string} object, where the key is the relative file path in the repository
+ * and the value is the text content, into a Changes map whose values also carry the file mode '100644'.
+ * @param {Object | Map} textFiles a map/object where the key is the relative file path and the value is the text file content
+ * @returns {Changes} Map of the file path to the string file content and the file mode '100644'
+ */
+function parseTextFiles(textFiles) {
+    const changes = new Map();
+    if (textFiles instanceof Map) {
+        textFiles.forEach((content, path) => {
+            if (typeof path !== 'string' ||
+                (content !== null && typeof content !== 'string')) {
+                throw TypeError('The file changeset provided must have a string key and a string/null value');
+            }
+            changes.set(path, new types_1.FileData(content));
+        });
+    }
+    else {
+        for (const [path, content] of Object.entries(textFiles)) {
+            if (typeof path !== 'string' ||
+                (content !== null && typeof content !== 'string')) {
+                throw TypeError('The file changeset provided must have a string key and a string/null value');
+            }
+            changes.set(path, new types_1.FileData(content));
+        }
+    }
+    return changes;
+}
+exports.parseTextFiles = parseTextFiles;
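+// Sketch of parseTextFiles (file names and contents are assumed examples):
+//
+//   parseTextFiles({'src/a.txt': 'hello', 'src/b.txt': null});
+//   // -> Map { 'src/a.txt' => FileData('hello', '100644'),
+//   //          'src/b.txt' => FileData(null, '100644') }  // null marks a deletion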
+//# sourceMappingURL=index.js.map
+
+/***/ }),
+
+/***/ 44869:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.setupLogger = exports.logger = void 0;
+class NullLogger {
+    constructor() {
+        this.error = () => { };
+        this.warn = () => { };
+        this.info = () => { };
+        this.debug = () => { };
+        this.trace = () => { };
+    }
+}
+let logger = new NullLogger();
+exports.logger = logger;
+function setupLogger(userLogger) {
+    if (userLogger) {
+        exports.logger = logger = userLogger;
+    }
+    else {
+        exports.logger = logger = new NullLogger();
+    }
+}
+exports.setupLogger = setupLogger;
+//# sourceMappingURL=logger.js.map
+
+/***/ }),
+
+/***/ 80721:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+/*
+ * // Copyright 2020 Google LLC
+ * //
+ * // Licensed under the Apache License, Version 2.0 (the "License");
+ * // you may not use this file except in compliance with the License.
+ * // You may obtain a copy of the License at
+ * //
+ * // https://www.apache.org/licenses/LICENSE-2.0
+ * //
+ * // Unless required by applicable law or agreed to in writing, software
+ * // distributed under the License is distributed on an "AS IS" BASIS,
+ * // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * // See the License for the specific language governing permissions and
+ * // limitations under the License.
+ * //
+ * //Modifications made by Joaquin Santana on 18/11/24, 22:09
+ */
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.PatchSyntaxError = exports.FileData = void 0;
+/**
+ * The content and the mode of a file.
+ * The default file mode is '100644', a regular text file.
+ * If `content` is not null, then `content` must be the entire file content.
+ * See https://developer.github.com/v3/git/trees/#tree-object for details on mode.
+ */
+class FileData {
+    constructor(content, mode = '100644') {
+        this.mode = mode;
+        this.content = content;
+    }
+}
+exports.FileData = FileData;
+class PatchSyntaxError extends Error {
+    constructor(message) {
+        super(message);
+        this.name = 'PatchSyntaxError';
+    }
+}
+exports.PatchSyntaxError = PatchSyntaxError;
+//# sourceMappingURL=types.js.map
+
+/***/ }),
+
+/***/ 85556:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.getSuggestedHunks = exports.parseAllHunks = exports.parsePatch = void 0;
+const parseDiff = __nccwpck_require__(94833);
+const diff_1 = __nccwpck_require__(71672);
+// This header is ignored for calculating patch ranges, but is necessary
+// for parsing a diff
+const _DIFF_HEADER = `diff --git a/file.ext b/file.ext
+index cac8fbc..87f387c 100644
+--- a/file.ext
++++ b/file.ext
+`;
+/**
+ * Given a patch expressed in GNU diff format, return the range of lines
+ * from the original content that are changed.
+ * @param patch Patch expressed in GNU diff format.
+ * @returns Hunk[]
+ */
+function parsePatch(patch) {
+    return parseAllHunks(_DIFF_HEADER + patch).get('file.ext') || [];
+}
+exports.parsePatch = parsePatch;
+/**
+ * Given a diff expressed in GNU diff format, return the range of lines
+ * from the original content that are changed.
+ * @param diff Diff expressed in GNU diff format.
+ * @returns Map
+ */
+function parseAllHunks(diff) {
+    const hunksByFile = new Map();
+    parseDiff(diff).forEach(file => {
+        const filename = file.to ? file.to : file.from;
+        const chunks = file.chunks.map(chunk => {
+            let oldStart = chunk.oldStart;
+            let newStart = chunk.newStart;
+            let normalLines = 0;
+            let changeSeen = false;
+            const newLines = [];
+            let previousLine = null;
+            let nextLine = null;
+            chunk.changes.forEach(change => {
+                // strip off leading '+', '-', or ' ' and trailing carriage return
+                const content = change.content.substring(1).replace(/[\n\r]+$/g, '');
+                if (change.type === 'normal') {
+                    normalLines++;
+                    if (changeSeen) {
+                        if (nextLine === null) {
+                            nextLine = content;
+                        }
+                    }
+                    else {
+                        previousLine = content;
+                    }
+                }
+                else {
+                    if (change.type === 'add') {
+                        // strip off leading '+' and trailing carriage return
+                        newLines.push(content);
+                    }
+                    if (!changeSeen) {
+                        oldStart += normalLines;
+                        newStart += normalLines;
+                        changeSeen = true;
+                    }
+                }
+            });
+            const newEnd = newStart + chunk.newLines - normalLines - 1;
+            const oldEnd = oldStart + chunk.oldLines - normalLines - 1;
+            let hunk = {
+                oldStart: oldStart,
+                oldEnd: oldEnd,
+                newStart: newStart,
+                newEnd: newEnd,
+                newContent: newLines,
+            };
+            if (previousLine) {
+                hunk = { ...hunk, previousLine: previousLine };
+            }
+            if (nextLine) {
+                hunk = { ...hunk, nextLine: nextLine };
+            }
+            return hunk;
+        });
+        hunksByFile.set(filename, chunks);
+    });
+    return hunksByFile;
+}
+exports.parseAllHunks = parseAllHunks;
+/**
+ * Given two texts, return the range of lines that are changed.
+ * @param oldContent The original content.
+ * @param newContent The new content.
+ * @returns Hunk[]
+ */
+function getSuggestedHunks(oldContent, newContent) {
+    const diff = (0, diff_1.createPatch)('unused', oldContent, newContent);
+    return parseAllHunks(diff).get('unused') || [];
+}
+exports.getSuggestedHunks = getSuggestedHunks;
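+// Sketch of the hunk shape produced above (inputs are assumed examples):
+//
+//   getSuggestedHunks('a\nb\n', 'a\nc\n');
+//   // -> [{ oldStart: 2, oldEnd: 2, newStart: 2, newEnd: 2,
+//   //       newContent: ['c'], previousLine: 'a' }]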
+
+/***/ }),
+
+/***/ 61689:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.partitionSuggestedHunksByScope = exports.getRawSuggestionHunks = exports.adjustHunkDown = exports.adjustHunkUp = void 0;
+const diff_utils_1 = __nccwpck_require__(85556);
+const logger_1 = __nccwpck_require__(44869);
+/**
+ * Shift a Hunk up one line so it starts one line earlier.
+ * @param {Hunk} hunk
+ * @returns {Hunk | null} the adjusted Hunk or null if there is no preceding line.
+ */
+function adjustHunkUp(hunk) {
+    if (!hunk.previousLine) {
+        return null;
+    }
+    return {
+        oldStart: hunk.oldStart - 1,
+        oldEnd: hunk.oldEnd,
+        newStart: hunk.newStart - 1,
+        newEnd: hunk.newEnd,
+        newContent: [hunk.previousLine, ...hunk.newContent],
+    };
+}
+exports.adjustHunkUp = adjustHunkUp;
+/**
+ * Shift a Hunk down one line so it ends one line later.
+ * @param {Hunk} hunk
+ * @returns {Hunk | null} the adjusted Hunk or null if there is no following line.
+ */
+function adjustHunkDown(hunk) {
+    if (!hunk.nextLine) {
+        return null;
+    }
+    return {
+        oldStart: hunk.oldStart,
+        oldEnd: hunk.oldEnd + 1,
+        newStart: hunk.newStart,
+        newEnd: hunk.newEnd + 1,
+        newContent: hunk.newContent.concat(hunk.nextLine),
+    };
+}
+exports.adjustHunkDown = adjustHunkDown;
+/**
+ * Given a map where the key is the file name and the value is the
+ * old content and new content of the file,
+ * compute the hunk for each file whose old and new contents differ.
+ * Do not compute the hunk if the old content is the same as the new content.
+ * The hunk list is sorted and each interval is disjoint.
+ * @param {Map<string, FileDiffContent>} diffContents a map of the original file contents and the new file contents
+ * @returns the hunks for each file whose old and new contents differ
+ */
+function getRawSuggestionHunks(diffContents) {
+    const fileHunks = new Map();
+    diffContents.forEach((fileDiffContent, fileName) => {
+        // if identical, don't calculate the hunk and continue in the loop
+        if (fileDiffContent.oldContent === fileDiffContent.newContent) {
+            return;
+        }
+        const hunks = (0, diff_utils_1.getSuggestedHunks)(fileDiffContent.oldContent, fileDiffContent.newContent);
+        fileHunks.set(fileName, hunks);
+    });
+    logger_1.logger.info('Parsed ranges of old and new patch');
+    return fileHunks;
+}
+exports.getRawSuggestionHunks = getRawSuggestionHunks;
+function hunkOverlaps(validHunk, suggestedHunk) {
+    return (suggestedHunk.oldStart >= validHunk.newStart &&
+        suggestedHunk.oldEnd <= validHunk.newEnd);
+}
+function partitionFileHunks(pullRequestHunks, suggestedHunks) {
+    // check ranges: the entirety of the old range of the suggested
+    // hunk must fit inside the new range of the valid Hunks
+    let i = 0;
+    let candidateHunk = pullRequestHunks[i];
+    const validFileHunks = [];
+    const invalidFileHunks = [];
+    suggestedHunks.forEach(suggestedHunk => {
+        while (candidateHunk && suggestedHunk.oldStart > candidateHunk.newEnd) {
+            i++;
+            candidateHunk = pullRequestHunks[i];
+        }
+        if (!candidateHunk) {
+            invalidFileHunks.push(suggestedHunk);
+            return;
+        }
+        // if deletion only or addition only
+        if (suggestedHunk.newEnd < suggestedHunk.newStart ||
+            suggestedHunk.oldEnd < suggestedHunk.oldStart) {
+            // try using previous line
+            let adjustedHunk = adjustHunkUp(suggestedHunk);
+            if (adjustedHunk && hunkOverlaps(candidateHunk, adjustedHunk)) {
+                validFileHunks.push(adjustedHunk);
+                return;
+            }
+            // try using next line
+            adjustedHunk = adjustHunkDown(suggestedHunk);
+            if (adjustedHunk && hunkOverlaps(candidateHunk, adjustedHunk)) {
+                validFileHunks.push(adjustedHunk);
+                return;
+            }
+        }
+        else if (hunkOverlaps(candidateHunk, suggestedHunk)) {
+            validFileHunks.push(suggestedHunk);
+            return;
+        }
+        invalidFileHunks.push(suggestedHunk);
+    });
+    return { validFileHunks, invalidFileHunks };
+}
+/**
+ * Split suggested hunks into commentable and non-commentable hunks. Compares the new line ranges
+ * from pullRequestHunks against the old line ranges from allSuggestedHunks.
+ * @param pullRequestHunks {Map<string, Hunk[]>} The parsed hunks from the pull request that represent the valid lines to comment.
+ * @param allSuggestedHunks {Map<string, Hunk[]>} The hunks that represent suggested changes.
+ * @returns {PartitionedHunks} split hunks
+ */
+function partitionSuggestedHunksByScope(pullRequestHunks, allSuggestedHunks) {
+    const validHunks = new Map();
+    const invalidHunks = new Map();
+    allSuggestedHunks.forEach((suggestedHunks, filename) => {
+        const pullRequestFileHunks = pullRequestHunks.get(filename);
+        if (!pullRequestFileHunks) {
+            // file is not in the original PR
+            invalidHunks.set(filename, suggestedHunks);
+            return;
+        }
+        const { validFileHunks, invalidFileHunks } = partitionFileHunks(pullRequestFileHunks, suggestedHunks);
+        if (validFileHunks.length > 0) {
+            validHunks.set(filename, validFileHunks);
+        }
+        if (invalidFileHunks.length > 0) {
+            invalidHunks.set(filename, invalidFileHunks);
+        }
+    });
+    return { validHunks, invalidHunks };
+}
+exports.partitionSuggestedHunksByScope = partitionSuggestedHunksByScope;
+//# sourceMappingURL=hunk-utils.js.map
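+// ---------------------------------------------------------------------------
+// Editor's note: illustrative sketch for the hunk helpers above; not part of
+// the built bundle. A pure insertion covers zero old lines (oldEnd < oldStart),
+// so partitionFileHunks widens it with adjustHunkUp/adjustHunkDown until it
+// overlaps a commentable range:
+//
+//   const insertion = { oldStart: 11, oldEnd: 10, newStart: 11, newEnd: 12,
+//                       newContent: ['a', 'b'], previousLine: 'line ten' };
+//   adjustHunkUp(insertion);
+//   // -> { oldStart: 10, oldEnd: 10, newStart: 10, newEnd: 12,
+//   //      newContent: ['line ten', 'a', 'b'] }
+// ---------------------------------------------------------------------------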
+
+/***/ }),
+
+/***/ 65577:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.DefaultChangelogNotes = void 0;
+// eslint-disable-next-line @typescript-eslint/no-var-requires
+const conventionalChangelogWriter = __nccwpck_require__(86207);
+// eslint-disable-next-line @typescript-eslint/no-var-requires
+const presetFactory = __nccwpck_require__(88761);
+const DEFAULT_HOST = 'https://github.com';
+class DefaultChangelogNotes {
+    constructor(options = {}) {
+        this.commitPartial = options.commitPartial;
+        this.headerPartial = options.headerPartial;
+        this.mainTemplate = options.mainTemplate;
+    }
+    async buildNotes(commits, options) {
+        const context = {
+            host: options.host || DEFAULT_HOST,
+            owner: options.owner,
+            repository: options.repository,
+            version: options.version,
+            previousTag: options.previousTag,
+            currentTag: options.currentTag,
+            linkCompare: !!options.previousTag,
+        };
+        const config = {};
+        if (options.changelogSections) {
+            config.types = options.changelogSections;
+        }
+        const preset = await presetFactory(config);
+        preset.writerOpts.commitPartial =
+            this.commitPartial || preset.writerOpts.commitPartial;
+        preset.writerOpts.headerPartial =
+            this.headerPartial || preset.writerOpts.headerPartial;
+        preset.writerOpts.mainTemplate =
+            this.mainTemplate || preset.writerOpts.mainTemplate;
+        const changelogCommits = commits.map(commit => {
+            const notes = commit.notes
+                .filter(note => note.title === 'BREAKING CHANGE')
+                .map(note => replaceIssueLink(note, context.host, context.owner, context.repository));
+            return {
+                body: '',
+                subject: htmlEscape(commit.bareMessage),
+                type: commit.type,
+                scope: commit.scope,
+                notes,
+                references: commit.references,
+                mentions: [],
+                merge: null,
+                revert: null,
+                header: commit.message,
+                footer: commit.notes
+                    .filter(note => note.title === 'RELEASE AS')
+                    .map(note => `Release-As: ${note.text}`)
+                    .join('\n'),
+                hash: commit.sha,
+            };
+        });
+        return conventionalChangelogWriter
+            .parseArray(changelogCommits, context, preset.writerOpts)
+            .trim();
+    }
+}
+exports.DefaultChangelogNotes = DefaultChangelogNotes;
+function replaceIssueLink(note, host, owner, repo) {
+    note.text = note.text.replace(/\(#(\d+)\)/, `([#$1](${host}/${owner}/${repo}/issues/$1))`);
+    return note;
+}
+function htmlEscape(message) {
+    return message.replace(/``[^`].*[^`]``|`[^`]*`|<|>/g, match => match.length > 1 ? match : match === '<' ? '&lt;' : '&gt;');
+}
+//# sourceMappingURL=default.js.map
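+// ---------------------------------------------------------------------------
+// Editor's note: illustrative behavior sketch for htmlEscape above; not part
+// of the built bundle. Angle brackets are escaped only outside inline code
+// spans, so backticked generics survive untouched:
+//
+//   htmlEscape('drop <table> support');    // -> 'drop &lt;table&gt; support'
+//   htmlEscape('accept `Map<K, V>` keys'); // -> 'accept `Map<K, V>` keys'
+// ---------------------------------------------------------------------------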
+
+/***/ }),
+
+/***/ 75355:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.GitHubChangelogNotes = void 0;
+class GitHubChangelogNotes {
+    constructor(github) {
+        this.github = github;
+    }
+    async buildNotes(_commits, options) {
+        const body = await this.github.generateReleaseNotes(options.currentTag, options.targetBranch, options.previousTag);
+        const date = new Date().toLocaleDateString('en-CA');
+        const header = `## ${options.version} (${date})`;
+        return `${header}\n\n${body}`;
+    }
+}
+exports.GitHubChangelogNotes = GitHubChangelogNotes;
+//# sourceMappingURL=github.js.map
+
+/***/ }),
+
+/***/ 50726:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.parseConventionalCommits = void 0;
+// eslint-disable-next-line @typescript-eslint/no-var-requires
+const visit = __nccwpck_require__(80199);
+// eslint-disable-next-line @typescript-eslint/no-var-requires
+const visitWithAncestors = __nccwpck_require__(13246);
+const NUMBER_REGEX = /^[0-9]+$/;
+const logger_1 = __nccwpck_require__(18792);
+const parser = __nccwpck_require__(74523);
+// eslint-disable-next-line @typescript-eslint/no-var-requires
+const conventionalCommitsFilter = __nccwpck_require__(55003);
+function getBlankConventionalCommit() {
+    return {
+        body: '',
+        subject: '',
+        type: '',
+        scope: null,
+        notes: [],
+        references: [],
+        mentions: [],
+        merge: null,
+        revert: null,
+        header: '',
+        footer: null,
+    };
+}
+// Converts conventional commit AST into conventional-changelog's
+// output format, see: https://www.npmjs.com/package/conventional-commits-parser
+function toConventionalChangelogFormat(ast) {
+    const commits = [];
+    const headerCommit = getBlankConventionalCommit();
+    // Separate the body and summary nodes; this simplifies the subsequent
+    // tree walking logic:
+    let body;
+    let summary;
+    visit(ast, ['body', 'summary'], (node) => {
+        switch (node.type) {
+            case 'body':
+                body = node;
+                break;
+            case 'summary':
+                summary = node;
+                break;
+        }
+    });
+    // <type>, "(", <scope>, ")", ["!"], ":", <whitespace>*, <text>
+    visit(summary, (node) => {
+        switch (node.type) {
+            case 'type':
+                headerCommit.type = node.value;
+                headerCommit.header += node.value;
+                break;
+            case 'scope':
+                headerCommit.scope = node.value;
+                headerCommit.header += `(${node.value})`;
+                break;
+            case 'breaking-change':
+                headerCommit.header += '!';
+                break;
+            case 'text':
+                headerCommit.subject = node.value;
+                headerCommit.header += `: ${node.value}`;
+                break;
+            default:
+                break;
+        }
+    });
+    // [<any body-text except pre-footer>]
+    if (body) {
+        visit(body, ['text', 'newline'], (node) => {
+            headerCommit.body += node.value;
+        });
+    }
+    // Extract BREAKING CHANGE notes, regardless of whether they fall in
+    // summary, body, or footer:
+    const breaking = {
+        title: 'BREAKING CHANGE',
+        text: '', // "text" will be populated if a BREAKING CHANGE token is parsed.
+    };
+    visitWithAncestors(ast, ['breaking-change'], (node, ancestors) => {
+        let hitBreakingMarker = false;
+        let parent = ancestors.pop();
+        if (!parent) {
+            return;
+        }
+        switch (parent.type) {
+            case 'summary':
+                breaking.text = headerCommit.subject;
+                break;
+            case 'body':
+                breaking.text = '';
+                // We treat text from the BREAKING CHANGE marker forward as
+                // the breaking change notes:
+                visit(parent, ['breaking-change', 'text', 'newline'], (node) => {
+                    if (node.type === 'breaking-change') {
+                        hitBreakingMarker = true;
+                        return;
+                    }
+                    if (!hitBreakingMarker)
+                        return;
+                    breaking.text += node.value;
+                });
+                break;
+            case 'token':
+                // If the '!' breaking change marker is used, the breaking change
+                // will be identified when the footer is parsed as a commit:
+                if (!node.value.includes('BREAKING'))
+                    return;
+                parent = ancestors.pop();
+                visit(parent, ['text', 'newline'], (node) => {
+                    breaking.text = node.value;
+                });
+                break;
+        }
+    });
+    // Add additional breaking change detection from commit body
+    if (body) {
+        const bodyString = String(body);
+        const breakingChangeMatch = bodyString.match(/BREAKING-CHANGE:\s*(.*)/);
+        if (breakingChangeMatch && breakingChangeMatch[1]) {
+            if (breaking.text) {
+                breaking.text += '\n';
+            }
+            breaking.text += breakingChangeMatch[1].trim();
+        }
+    }
+    if (breaking.text !== '')
+        headerCommit.notes.push(breaking);
+    // Populates references array from footers:
+    // references: [{
+    //    action: 'Closes',
+    //    owner: null,
+    //    repository: null,
+    //    issue: '1', raw: '#1',
+    //    prefix: '#'
+    // }]
+    visit(ast, ['footer'], (node) => {
+        const reference = {
+            prefix: '#',
+            action: '',
+            issue: '',
+        };
+        let hasRefSeparator = false;
+        visit(node, ['type', 'separator', 'text'], (node) => {
+            switch (node.type) {
+                case 'type':
+                    // refs, closes, etc:
+                    // TODO(@bcoe): conventional-changelog does not currently use
+                    // "reference.action" in its templates:
+                    reference.action = node.value;
+                    break;
+                case 'separator':
+                    // Footer of the form "Refs #99":
+                    if (node.value.includes('#'))
+                        hasRefSeparator = true;
+                    break;
+                case 'text':
+                    // Footer of the form "Refs: #99"
+                    if (node.value.charAt(0) === '#') {
+                        hasRefSeparator = true;
+                        reference.issue = node.value.substring(1);
+                        // TODO(@bcoe): what about references like "Refs: #99, #102"?
+                    }
+                    else {
+                        reference.issue = node.value;
+                    }
+                    break;
+            }
+        });
+        // TODO(@bcoe): how should references like "Refs: v8:8940" work?
+        if (hasRefSeparator && reference.issue.match(NUMBER_REGEX)) {
+            headerCommit.references.push(reference);
+        }
+    });
+    /*
+     * Split footers that resemble commits into additional commits, e.g.,
+     * chore: multiple commits
+     * chore(recaptchaenterprise): migrate recaptchaenterprise to the Java microgenerator
+     * Committer: @miraleung
+     * PiperOrigin-RevId: 345559154
+     * ...
+     */
+    visitWithAncestors(ast, ['type'], (node, ancestors) => {
+        let parent = ancestors.pop();
+        if (!parent) {
+            return;
+        }
+        if (parent.type === 'token') {
+            parent = ancestors.pop();
+            let footerText = '';
+            const semanticFooter = node.value.toLowerCase() === 'release-as';
+            visit(parent, ['type', 'scope', 'breaking-change', 'separator', 'text', 'newline'], (node) => {
+                switch (node.type) {
+                    case 'scope':
+                        footerText += `(${node.value})`;
+                        break;
+                    case 'separator':
+                        // Footers of the form "Fixes #99" should not be parsed.
+ if (node.value.includes('#')) + return; + footerText += `${node.value} `; + break; + default: + footerText += node.value; + break; + } + }); + // Any footers that carry semantic meaning, e.g., Release-As, should + // be added to the footer field, for the benefits of post-processing: + if (semanticFooter) { + let releaseAs = ''; + visit(parent, ['text'], (node) => { + releaseAs = node.value; + }); + // record Release-As footer as a note + headerCommit.notes.push({ + title: 'RELEASE AS', + text: releaseAs, + }); + if (!headerCommit.footer) + headerCommit.footer = ''; + headerCommit.footer += `\n${footerText.toLowerCase()}`.trimStart(); + } + try { + for (const commit of toConventionalChangelogFormat(parser.parser(footerText))) { + commits.push(commit); + } + } + catch (err) { + // Footer does not appear to be an additional commit. + } + } + }); + commits.push(headerCommit); + return commits; +} +// TODO(@bcoe): now that we walk the actual AST of conventional commits +// we should be able to move post processing into +// to-conventional-changelog.ts. +function postProcessCommits(commit) { + var _a; + commit.notes.forEach(note => { + let text = ''; + let i = 0; + let extendedContext = false; + for (const chunk of note.text.split(/\r?\n/)) { + if (i > 0 && hasExtendedContext(chunk) && !extendedContext) { + text = `${text.trim()}\n`; + extendedContext = true; + } + if (chunk === '') + break; + else if (extendedContext) { + text += ` ${chunk}\n`; + } + else { + text += `${chunk} `; + } + i++; + } + note.text = text.trim(); + }); + const breakingChangeMatch = (_a = commit.body) === null || _a === void 0 ? void 0 : _a.match(/BREAKING-CHANGE:\s*(.*)/); + if (breakingChangeMatch && breakingChangeMatch[1]) { + const existingNote = commit.notes.find(note => note.title === 'BREAKING CHANGE'); + if (existingNote) { + existingNote.text += `\n${breakingChangeMatch[1].trim()}`; + } + else { + commit.notes.push({ + title: 'BREAKING CHANGE', + text: breakingChangeMatch[1].trim(), + }); + } + } + return commit; +} +// If someone wishes to include additional contextual information for a +// BREAKING CHANGE using markdown, they can do so by starting the line after the initial +// breaking change description with either: +// +// 1. a fourth-level header. +// 2. a bulleted list (using either '*' or '-'). +// +// BREAKING CHANGE: there were breaking changes +// #### Deleted Endpoints +// - endpoint 1 +// - endpoint 2 +function hasExtendedContext(line) { + if (line.match(/^#### |^[*-] /)) + return true; + return false; +} +function parseCommits(message) { + return conventionalCommitsFilter(toConventionalChangelogFormat(parser.parser(message))).map(postProcessCommits); +} +/** + * Splits a commit message into multiple messages based on conventional commit format and nested commit blocks. + * This function is capable of: + * 1. Separating conventional commits (feat, fix, docs, etc.) within the main message. + * 2. Extracting nested commits enclosed in BEGIN_NESTED_COMMIT/END_NESTED_COMMIT blocks. + * 3. Preserving the original message structure outside of nested commit blocks. + * 4. Handling multiple nested commits and conventional commits in a single message. 
+ * + * @param message The input commit message string + * @returns An array of individual commit messages + */ +function splitMessages(message) { + const parts = message.split('BEGIN_NESTED_COMMIT'); + const messages = [parts.shift()]; + for (const part of parts) { + const [newMessage, ...rest] = part.split('END_NESTED_COMMIT'); + messages.push(newMessage); + messages[0] = messages[0] + rest.join('END_NESTED_COMMIT'); + } + const conventionalCommits = messages[0] + .split(/\r?\n\r?\n(?=(?:feat|fix|docs|style|refactor|perf|test|build|ci|chore|revert)(?:\(.*?\))?: )/) + .filter(Boolean); + return [...conventionalCommits, ...messages.slice(1)]; +} +/** + * Given a list of raw commits, parse and expand into conventional commits. + * + * @param commits {Commit[]} The input commits + * + * @returns {ConventionalCommit[]} Parsed and expanded commits. There may be + * more commits returned as a single raw commit may contain multiple release + * messages. + */ +function parseConventionalCommits(commits, logger = logger_1.logger) { + const conventionalCommits = []; + for (const commit of commits) { + for (const commitMessage of splitMessages(preprocessCommitMessage(commit))) { + try { + for (const parsedCommit of parseCommits(commitMessage)) { + const breaking = parsedCommit.notes.filter(note => note.title === 'BREAKING CHANGE') + .length > 0; + conventionalCommits.push({ + sha: commit.sha, + message: parsedCommit.header, + files: commit.files, + pullRequest: commit.pullRequest, + type: parsedCommit.type, + scope: parsedCommit.scope, + bareMessage: parsedCommit.subject, + notes: parsedCommit.notes, + references: parsedCommit.references, + breaking, + }); + } + } + catch (_err) { + logger.debug(`commit could not be parsed: ${commit.sha} ${commit.message.split('\n')[0]}`); + logger.debug(`error message: ${_err}`); + } + } + } + return conventionalCommits; +} +exports.parseConventionalCommits = parseConventionalCommits; +function preprocessCommitMessage(commit) { + // look for 'BEGIN_COMMIT_OVERRIDE' section of pull request body + if (commit.pullRequest) { + const overrideMessage = (commit.pullRequest.body.split('BEGIN_COMMIT_OVERRIDE')[1] || '') + .split('END_COMMIT_OVERRIDE')[0] + .trim(); + if (overrideMessage) { + return overrideMessage; + } + } + return commit.message; +} +//# sourceMappingURL=commit.js.map + +/***/ }), + +/***/ 10818: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
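+// ---------------------------------------------------------------------------
+// Editor's note: illustrative sketch for splitMessages in the commit module
+// above; not part of the built bundle. One raw message can expand into several
+// releasable commits; surrounding blank lines are preserved on the pieces:
+//
+//   splitMessages('feat: a\n\nfix: b\n\nBEGIN_NESTED_COMMIT\nchore: c\nEND_NESTED_COMMIT');
+//   // -> ['feat: a', 'fix: b\n\n', '\nchore: c\n']
+// ---------------------------------------------------------------------------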
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.FileNotFoundError = exports.DuplicateReleaseError = exports.AuthError = exports.GitHubAPIError = exports.MissingRequiredFileError = exports.ConfigurationError = void 0; +class ConfigurationError extends Error { + constructor(message, releaserName, repository) { + super(`${releaserName} (${repository}): ${message}`); + this.releaserName = releaserName; + this.repository = repository; + this.name = ConfigurationError.name; + } +} +exports.ConfigurationError = ConfigurationError; +class MissingRequiredFileError extends ConfigurationError { + constructor(file, releaserName, repository) { + super(`Missing required file: ${file}`, releaserName, repository); + this.file = file; + this.name = MissingRequiredFileError.name; + } +} +exports.MissingRequiredFileError = MissingRequiredFileError; +class GitHubAPIError extends Error { + constructor(requestError, message) { + super(message !== null && message !== void 0 ? message : requestError.message); + this.status = requestError.status; + this.body = GitHubAPIError.parseErrorBody(requestError); + this.name = GitHubAPIError.name; + this.cause = requestError; + this.stack = requestError.stack; + } + static parseErrorBody(requestError) { + const body = requestError.response; + return (body === null || body === void 0 ? void 0 : body.data) || undefined; + } + static parseErrors(requestError) { + var _a; + return ((_a = GitHubAPIError.parseErrorBody(requestError)) === null || _a === void 0 ? void 0 : _a.errors) || []; + } +} +exports.GitHubAPIError = GitHubAPIError; +class AuthError extends GitHubAPIError { + constructor(requestError) { + super(requestError, 'unauthorized'); + this.status = 401; + this.name = AuthError.name; + } +} +exports.AuthError = AuthError; +class DuplicateReleaseError extends GitHubAPIError { + constructor(requestError, tag) { + super(requestError); + this.tag = tag; + this.name = DuplicateReleaseError.name; + } +} +exports.DuplicateReleaseError = DuplicateReleaseError; +class FileNotFoundError extends Error { + constructor(path) { + super(`Failed to find file: ${path}`); + this.path = path; + this.name = FileNotFoundError.name; + } +} +exports.FileNotFoundError = FileNotFoundError; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 32217: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
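+// ---------------------------------------------------------------------------
+// Editor's note: illustrative sketch for the error classes above; not part of
+// the built bundle. ConfigurationError prefixes the releaser and repository
+// ('acme/widgets' here is a hypothetical repo):
+//
+//   const err = new ConfigurationError('missing manifest', 'node', 'acme/widgets');
+//   err.message; // -> 'node (acme/widgets): missing manifest'
+//   err.name;    // -> 'ConfigurationError'
+// ---------------------------------------------------------------------------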
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.getChangelogTypes = exports.unregisterChangelogNotes = exports.registerChangelogNotes = exports.buildChangelogNotes = void 0;
+const github_1 = __nccwpck_require__(75355);
+const default_1 = __nccwpck_require__(65577);
+const errors_1 = __nccwpck_require__(10818);
+const changelogNotesFactories = {
+    github: options => new github_1.GitHubChangelogNotes(options.github),
+    default: options => new default_1.DefaultChangelogNotes(options),
+};
+function buildChangelogNotes(options) {
+    const builder = changelogNotesFactories[options.type];
+    if (builder) {
+        return builder(options);
+    }
+    throw new errors_1.ConfigurationError(`Unknown changelog type: ${options.type}`, 'core', `${options.github.repository.owner}/${options.github.repository.repo}`);
+}
+exports.buildChangelogNotes = buildChangelogNotes;
+function registerChangelogNotes(name, changelogNotesBuilder) {
+    changelogNotesFactories[name] = changelogNotesBuilder;
+}
+exports.registerChangelogNotes = registerChangelogNotes;
+function unregisterChangelogNotes(name) {
+    delete changelogNotesFactories[name];
+}
+exports.unregisterChangelogNotes = unregisterChangelogNotes;
+function getChangelogTypes() {
+    return Object.keys(changelogNotesFactories).sort();
+}
+exports.getChangelogTypes = getChangelogTypes;
+//# sourceMappingURL=changelog-notes-factory.js.map
+
+/***/ }),
+
+/***/ 74208:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+// Copyright 2022 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.getPluginTypes = exports.unregisterPlugin = exports.registerPlugin = exports.buildPlugin = void 0;
+const linked_versions_1 = __nccwpck_require__(78087);
+const cargo_workspace_1 = __nccwpck_require__(43073);
+const node_workspace_1 = __nccwpck_require__(27400);
+const maven_workspace_1 = __nccwpck_require__(77153);
+const errors_1 = __nccwpck_require__(10818);
+const sentence_case_1 = __nccwpck_require__(33483);
+const group_priority_1 = __nccwpck_require__(24055);
+const pluginFactories = {
+    'linked-versions': options => 
+    // NOTE: linked-versions already has different merge behavior;
+    // see test/plugins/compatibility/linked-versions-workspace.ts
+    new linked_versions_1.LinkedVersions(options.github, options.targetBranch, options.repositoryConfig, options.type.groupName, options.type.components, {
+        ...options,
+        ...options.type,
+    }),
+    'cargo-workspace': options => {
+        var _a;
+        return new cargo_workspace_1.CargoWorkspace(options.github, options.targetBranch, options.repositoryConfig, {
+            ...options,
+            ...options.type,
+            merge: (_a = options.type.merge) !== null && _a !== void 0 ? 
_a : !options.separatePullRequests, + }); + }, + 'node-workspace': options => { + var _a; + return new node_workspace_1.NodeWorkspace(options.github, options.targetBranch, options.repositoryConfig, { + ...options, + ...options.type, + merge: (_a = options.type.merge) !== null && _a !== void 0 ? _a : !options.separatePullRequests, + }); + }, + 'maven-workspace': options => { + var _a; + return new maven_workspace_1.MavenWorkspace(options.github, options.targetBranch, options.repositoryConfig, { + ...options, + ...options.type, + merge: (_a = options.type.merge) !== null && _a !== void 0 ? _a : !options.separatePullRequests, + }); + }, + 'sentence-case': options => new sentence_case_1.SentenceCase(options.github, options.targetBranch, options.repositoryConfig, options.type.specialWords), + 'group-priority': options => new group_priority_1.GroupPriority(options.github, options.targetBranch, options.repositoryConfig, options.type.groups), +}; +function buildPlugin(options) { + if (!options.separatePullRequests) { + options.separatePullRequests = false; + } + if (typeof options.type === 'object') { + const builder = pluginFactories[options.type.type]; + if (builder) { + return builder({ + ...options.type, + ...options, + }); + } + throw new errors_1.ConfigurationError(`Unknown plugin type: ${options.type.type}`, 'core', `${options.github.repository.owner}/${options.github.repository.repo}`); + } + else { + const builder = pluginFactories[options.type]; + if (builder) { + return builder(options); + } + throw new errors_1.ConfigurationError(`Unknown plugin type: ${options.type}`, 'core', `${options.github.repository.owner}/${options.github.repository.repo}`); + } +} +exports.buildPlugin = buildPlugin; +function registerPlugin(name, pluginBuilder) { + pluginFactories[name] = pluginBuilder; +} +exports.registerPlugin = registerPlugin; +function unregisterPlugin(name) { + delete pluginFactories[name]; +} +exports.unregisterPlugin = unregisterPlugin; +function getPluginTypes() { + return Object.keys(pluginFactories).sort(); +} +exports.getPluginTypes = getPluginTypes; +//# sourceMappingURL=plugin-factory.js.map + +/***/ }), + +/***/ 2694: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
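+// ---------------------------------------------------------------------------
+// Editor's note: illustrative sketch for the plugin factory above; not part of
+// the built bundle. Third-party plugins register a builder under a new type
+// name (`MyPlugin` and `baseOptions` are hypothetical):
+//
+//   registerPlugin('my-plugin', options =>
+//       new MyPlugin(options.github, options.targetBranch, options.repositoryConfig));
+//   getPluginTypes();                                   // now includes 'my-plugin'
+//   buildPlugin({ ...baseOptions, type: 'my-plugin' }); // -> MyPlugin instance
+// ---------------------------------------------------------------------------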
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getVersioningStrategyTypes = exports.unregisterVersioningStrategy = exports.registerVersioningStrategy = exports.buildVersioningStrategy = void 0; +const default_1 = __nccwpck_require__(77033); +const always_bump_patch_1 = __nccwpck_require__(42320); +const always_bump_minor_1 = __nccwpck_require__(39864); +const always_bump_major_1 = __nccwpck_require__(17161); +const service_pack_1 = __nccwpck_require__(23897); +const errors_1 = __nccwpck_require__(10818); +const prerelease_1 = __nccwpck_require__(21475); +const versioningTypes = { + default: options => new default_1.DefaultVersioningStrategy(options), + 'always-bump-patch': options => new always_bump_patch_1.AlwaysBumpPatch(options), + 'always-bump-minor': options => new always_bump_minor_1.AlwaysBumpMinor(options), + 'always-bump-major': options => new always_bump_major_1.AlwaysBumpMajor(options), + 'service-pack': options => new service_pack_1.ServicePackVersioningStrategy(options), + prerelease: options => new prerelease_1.PrereleaseVersioningStrategy(options), +}; +function buildVersioningStrategy(options) { + const builder = versioningTypes[options.type || 'default']; + if (builder) { + return builder(options); + } + throw new errors_1.ConfigurationError(`Unknown versioning strategy type: ${options.type}`, 'core', `${options.github.repository.owner}/${options.github.repository.repo}`); +} +exports.buildVersioningStrategy = buildVersioningStrategy; +function registerVersioningStrategy(name, versioningStrategyBuilder) { + versioningTypes[name] = versioningStrategyBuilder; +} +exports.registerVersioningStrategy = registerVersioningStrategy; +function unregisterVersioningStrategy(name) { + delete versioningTypes[name]; +} +exports.unregisterVersioningStrategy = unregisterVersioningStrategy; +function getVersioningStrategyTypes() { + return Object.keys(versioningTypes).sort(); +} +exports.getVersioningStrategyTypes = getVersioningStrategyTypes; +//# sourceMappingURL=versioning-strategy-factory.js.map + +/***/ }), + +/***/ 53693: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getReleaserTypes = exports.unregisterReleaseType = exports.registerReleaseType = exports.buildStrategy = void 0; +const errors_1 = __nccwpck_require__(10818); +const changelog_notes_factory_1 = __nccwpck_require__(32217); +const versioning_strategy_factory_1 = __nccwpck_require__(2694); +const bazel_1 = __nccwpck_require__(8275); +const dart_1 = __nccwpck_require__(33247); +const dotnet_yoshi_1 = __nccwpck_require__(5764); +const elixir_1 = __nccwpck_require__(25267); +const expo_1 = __nccwpck_require__(49813); +const go_1 = __nccwpck_require__(22494); +const go_yoshi_1 = __nccwpck_require__(70958); +const helm_1 = __nccwpck_require__(78249); +const java_1 = __nccwpck_require__(71922); +const java_yoshi_1 = __nccwpck_require__(62492); +const java_yoshi_mono_repo_1 = __nccwpck_require__(39129); +const krm_blueprint_1 = __nccwpck_require__(51900); +const maven_1 = __nccwpck_require__(26609); +const node_1 = __nccwpck_require__(97754); +const ocaml_1 = __nccwpck_require__(29068); +const php_1 = __nccwpck_require__(48526); +const php_yoshi_1 = __nccwpck_require__(93584); +const python_1 = __nccwpck_require__(47434); +const ruby_1 = __nccwpck_require__(8534); +const ruby_yoshi_1 = __nccwpck_require__(74720); +const rust_1 = __nccwpck_require__(5861); +const sfdx_1 = __nccwpck_require__(49658); +const simple_1 = __nccwpck_require__(13671); +const terraform_module_1 = __nccwpck_require__(95720); +const always_bump_patch_1 = __nccwpck_require__(42320); +const dependency_manifest_1 = __nccwpck_require__(94636); +const service_pack_1 = __nccwpck_require__(23897); +__exportStar(__nccwpck_require__(32217), exports); +__exportStar(__nccwpck_require__(74208), exports); +__exportStar(__nccwpck_require__(2694), exports); +const releasers = { + 'dotnet-yoshi': options => new dotnet_yoshi_1.DotnetYoshi(options), + go: options => new go_1.Go(options), + 'go-yoshi': options => new go_yoshi_1.GoYoshi(options), + java: options => new java_1.Java(options), + maven: options => new maven_1.Maven(options), + 'java-yoshi': options => new java_yoshi_1.JavaYoshi(options), + 'java-yoshi-mono-repo': options => new java_yoshi_mono_repo_1.JavaYoshiMonoRepo(options), + 'java-backport': options => new java_yoshi_1.JavaYoshi({ + ...options, + versioningStrategy: new always_bump_patch_1.AlwaysBumpPatch(), + }), + 'java-bom': options => new java_yoshi_1.JavaYoshi({ + ...options, + versioningStrategy: new dependency_manifest_1.DependencyManifest({ + bumpMinorPreMajor: options.bumpMinorPreMajor, + bumpPatchForMinorPreMajor: options.bumpPatchForMinorPreMajor, + }), + }), + 'java-lts': options => new java_yoshi_1.JavaYoshi({ + ...options, + versioningStrategy: new service_pack_1.ServicePackVersioningStrategy(), + }), + 'krm-blueprint': options => new krm_blueprint_1.KRMBlueprint(options), + node: options => new node_1.Node(options), + expo: options => new expo_1.Expo(options), + ocaml: options => new ocaml_1.OCaml(options), + php: options => new php_1.PHP(options), + 'php-yoshi': options => new 
php_yoshi_1.PHPYoshi(options), + python: options => new python_1.Python(options), + ruby: options => new ruby_1.Ruby(options), + 'ruby-yoshi': options => new ruby_yoshi_1.RubyYoshi(options), + rust: options => new rust_1.Rust(options), + salesforce: options => new sfdx_1.Sfdx(options), + sfdx: options => new sfdx_1.Sfdx(options), + simple: options => new simple_1.Simple(options), + 'terraform-module': options => new terraform_module_1.TerraformModule(options), + helm: options => new helm_1.Helm(options), + elixir: options => new elixir_1.Elixir(options), + dart: options => new dart_1.Dart(options), + bazel: options => new bazel_1.Bazel(options), +}; +async function buildStrategy(options) { + var _a; + const targetBranch = (_a = options.targetBranch) !== null && _a !== void 0 ? _a : options.github.repository.defaultBranch; + const versioningStrategy = (0, versioning_strategy_factory_1.buildVersioningStrategy)({ + github: options.github, + type: options.versioning, + bumpMinorPreMajor: options.bumpMinorPreMajor, + bumpPatchForMinorPreMajor: options.bumpPatchForMinorPreMajor, + prereleaseType: options.prereleaseType, + prerelease: options.prerelease, + }); + const changelogNotes = (0, changelog_notes_factory_1.buildChangelogNotes)({ + type: options.changelogType || 'default', + github: options.github, + changelogSections: options.changelogSections, + }); + const strategyOptions = { + skipGitHubRelease: options.skipGithubRelease, + ...options, + targetBranch, + versioningStrategy, + changelogNotes, + }; + const builder = releasers[options.releaseType]; + if (builder) { + return builder(strategyOptions); + } + throw new errors_1.ConfigurationError(`Unknown release type: ${options.releaseType}`, 'core', `${options.github.repository.owner}/${options.github.repository.repo}`); +} +exports.buildStrategy = buildStrategy; +function registerReleaseType(name, strategyBuilder) { + releasers[name] = strategyBuilder; +} +exports.registerReleaseType = registerReleaseType; +function unregisterReleaseType(name) { + delete releasers[name]; +} +exports.unregisterReleaseType = unregisterReleaseType; +function getReleaserTypes() { + return Object.keys(releasers).sort(); +} +exports.getReleaserTypes = getReleaserTypes; +//# sourceMappingURL=factory.js.map + +/***/ }), + +/***/ 58752: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * // Copyright 2020 Google LLC + * // + * // Licensed under the Apache License, Version 2.0 (the "License"); + * // you may not use this file except in compliance with the License. + * // You may obtain a copy of the License at + * // + * // https://www.apache.org/licenses/LICENSE-2.0 + * // + * // Unless required by applicable law or agreed to in writing, software + * // distributed under the License is distributed on an "AS IS" BASIS, + * // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * // See the License for the specific language governing permissions and + * // limitations under the License. 
+ * // + * //Modifications made by Joaquin Santana on 19/11/24, 11:00 + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.sleepInMs = exports.GitHub = exports.GH_GRAPHQL_URL = exports.GH_API_URL = void 0; +const code_suggester_1 = __nccwpck_require__(39007); +const rest_1 = __nccwpck_require__(55375); +const request_1 = __nccwpck_require__(36234); +const graphql_1 = __nccwpck_require__(88467); +const request_error_1 = __nccwpck_require__(10537); +const errors_1 = __nccwpck_require__(10818); +const MAX_ISSUE_BODY_SIZE = 65536; +const MAX_SLEEP_SECONDS = 20; +exports.GH_API_URL = 'https://api.github.com'; +exports.GH_GRAPHQL_URL = 'https://api.github.com'; +const logger_1 = __nccwpck_require__(18792); +const manifest_1 = __nccwpck_require__(24026); +const signoff_commit_message_1 = __nccwpck_require__(74069); +const git_file_utils_1 = __nccwpck_require__(32997); +const https_proxy_agent_1 = __nccwpck_require__(77219); +const http_proxy_agent_1 = __nccwpck_require__(23764); +class GitHub { + constructor(options) { + var _a; + /** + * Get the list of file paths modified in a given commit. + * + * @param {string} sha The commit SHA + * @returns {string[]} File paths + * @throws {GitHubAPIError} on an API error + */ + this.getCommitFiles = wrapAsync(async (sha) => { + this.logger.debug(`Backfilling file list for commit: ${sha}`); + const files = []; + for await (const resp of this.octokit.paginate.iterator('GET /repos/{owner}/{repo}/commits?sha={ref}', { + owner: this.repository.owner, + repo: this.repository.repo, + ref: sha, + })) { + // Paginate plugin doesn't have types for listing files on a commit + const data = resp.data.at(0); + for (const f of data.files || []) { + if (f.filename) { + files.push(f.filename); + } + } + } + if (files.length >= 3000) { + this.logger.warn(`Found ${files.length} files. This may not include all the files.`); + } + else { + this.logger.debug(`Found ${files.length} files`); + } + return files; + }); + this.graphqlRequest = wrapAsync(async (opts, options) => { + var _a; + let maxRetries = (_a = options === null || options === void 0 ? void 0 : options.maxRetries) !== null && _a !== void 0 ? _a : 5; + let seconds = 1; + while (maxRetries >= 0) { + try { + const response = await this.graphql(opts); + if (response) { + return response; + } + this.logger.trace('no GraphQL response, retrying'); + } + catch (err) { + if (err.status !== 502) { + throw err; + } + if (maxRetries === 0) { + this.logger.warn('ran out of retries and response is required'); + throw err; + } + this.logger.info(`received 502 error, ${maxRetries} attempts remaining`); + } + maxRetries -= 1; + if (maxRetries >= 0) { + this.logger.trace(`sleeping ${seconds} seconds`); + await (0, exports.sleepInMs)(1000 * seconds); + seconds = Math.min(seconds * 2, MAX_SLEEP_SECONDS); + } + } + this.logger.trace('ran out of retries'); + return undefined; + }); + /** + * Removes labels from an issue/pull request. + * + * @param {string[]} labels The labels to remove. + * @param {number} number The issue/pull request number. 
+ */ + this.removeIssueLabels = wrapAsync(async (labels, number) => { + if (labels.length === 0) { + return; + } + //gitea utilizes ids to delete a label + const labelMap = new Map(); + (await this.getRepositoryLabels()).forEach(label => { + labelMap.set(label.name, label.id); + }); + this.logger.debug(`removing labels: ${labels} from issue/pull ${number}`); + await Promise.all(labels.map(label => this.octokit.issues.removeLabel({ + owner: this.repository.owner, + repo: this.repository.repo, + issue_number: number, + // @ts-ignore - types are incorrect + name: labelMap.get(label) || -1, + }))); + }); + /** + * Returns a list of paths to all files with a given name. + * + * If a prefix is specified, only return paths that match + * the provided prefix. + * + * @param filename The name of the file to find + * @param ref Git reference to search files in + * @param prefix Optional path prefix used to filter results + * @throws {GitHubAPIError} on an API error + */ + this.findFilesByFilenameAndRef = wrapAsync(async (filename, ref, prefix) => { + if (prefix) { + prefix = normalizePrefix(prefix); + } + this.logger.debug(`finding files by filename: ${filename}, ref: ${ref}, prefix: ${prefix}`); + return await this.fileCache.findFilesByFilename(filename, ref, prefix); + }); + /** + * Returns a list of paths to all files matching a glob pattern. + * + * If a prefix is specified, only return paths that match + * the provided prefix. + * + * @param glob The glob to match + * @param ref Git reference to search files in + * @param prefix Optional path prefix used to filter results + * @throws {GitHubAPIError} on an API error + */ + this.findFilesByGlobAndRef = wrapAsync(async (glob, ref, prefix) => { + if (prefix) { + prefix = normalizePrefix(prefix); + } + this.logger.debug(`finding files by glob: ${glob}, ref: ${ref}, prefix: ${prefix}`); + return await this.fileCache.findFilesByGlob(glob, ref, prefix); + }); + /** + * Open a pull request + * + * @param {PullRequest} pullRequest Pull request data to update + * @param {string} targetBranch The base branch of the pull request + * @param {string} message The commit message for the commit + * @param {Update[]} updates The files to update + * @param {CreatePullRequestOptions} options The pull request options + * @throws {GitHubAPIError} on an API error + */ + this.createPullRequest = wrapAsync(async (pullRequest, targetBranch, message, updates, options) => { + // Update the files for the release if not already supplied + const changes = await this.buildChangeSet(updates, targetBranch); + const prNumber = await (0, code_suggester_1.createPullRequest)(this.octokit, changes, { + upstreamOwner: this.repository.owner, + upstreamRepo: this.repository.repo, + title: pullRequest.title, + branch: pullRequest.headBranchName, + description: pullRequest.body, + primary: targetBranch, + username: this.repository.gitUsername, + password: this.repository.gitPassword, + force: true, + fork: !!(options === null || options === void 0 ? void 0 : options.fork), + message, + author: { + name: 'Gitea Actions [Bot]', + email: 'noreply@gitea.com', + }, + logger: this.logger, + draft: !!(options === null || options === void 0 ? 
void 0 : options.draft), + labels: pullRequest.labels, + }); + return await this.getPullRequest(prNumber); + }); + /** + * Fetch a pull request given the pull number + * @param {number} number The pull request number + * @returns {PullRequest} + */ + this.getPullRequest = wrapAsync(async (number) => { + const response = await this.octokit.pulls.get({ + owner: this.repository.owner, + repo: this.repository.repo, + pull_number: number, + }); + return { + headBranchName: response.data.head.ref, + baseBranchName: response.data.base.ref, + number: response.data.number, + title: response.data.title, + body: response.data.body || '', + files: [], + labels: response.data.labels + .map(label => label.name) + .filter(name => !!name), + }; + }); + /** + * Update a pull request's title and body. + * @param {number} number The pull request number + * @param {ReleasePullRequest} releasePullRequest Pull request data to update + * @param {string} targetBranch The target branch of the pull request + * @param {string} options.signoffUser Optional. Commit signoff message + * @param {boolean} options.fork Optional. Whether to open the pull request from + * a fork or not. Defaults to `false` + * @param {PullRequestOverflowHandler} options.pullRequestOverflowHandler Optional. + * Handles extra large pull request body messages. + */ + this.updatePullRequest = wrapAsync(async (number, releasePullRequest, targetBranch, options) => { + // Update the files for the release if not already supplied + const changes = await this.buildChangeSet(releasePullRequest.updates, targetBranch); + let message = releasePullRequest.title.toString(); + if (options === null || options === void 0 ? void 0 : options.signoffUser) { + message = (0, signoff_commit_message_1.signoffCommitMessage)(message, options.signoffUser); + } + const title = releasePullRequest.title.toString(); + const body = ((options === null || options === void 0 ? void 0 : options.pullRequestOverflowHandler) + ? await options.pullRequestOverflowHandler.handleOverflow(releasePullRequest) + : releasePullRequest.body) + .toString() + .slice(0, MAX_ISSUE_BODY_SIZE); + const prNumber = await (0, code_suggester_1.createPullRequest)(this.octokit, changes, { + upstreamOwner: this.repository.owner, + upstreamRepo: this.repository.repo, + title, + branch: releasePullRequest.headRefName, + description: body, + primary: targetBranch, + username: this.repository.gitUsername, + password: this.repository.gitPassword, + force: true, + fork: (options === null || options === void 0 ? void 0 : options.fork) !== false, + message, + author: { + name: 'Gitea Actions [Bot]', + email: 'git@3caravelle.com', + }, + logger: this.logger, + draft: releasePullRequest.draft, + }); + if (prNumber !== number) { + this.logger.warn(`updated code for ${prNumber}, but update requested for ${number}`); + } + const response = await this.octokit.pulls.update({ + owner: this.repository.owner, + repo: this.repository.repo, + pull_number: number, + title: releasePullRequest.title.toString(), + body, + state: 'open', + }); + return { + headBranchName: response.data.head.ref, + baseBranchName: response.data.base.ref, + number: response.data.number, + title: response.data.title, + body: response.data.body || '', + files: [], + labels: response.data.labels + .map(label => label.name) + .filter(name => !!name), + }; + }); + /** + * Returns a list of paths to all files with a given file + * extension. + * + * If a prefix is specified, only return paths that match + * the provided prefix. 
+     *
+     * @param extension The file extension used to filter results.
+     *   Example: `js`, `java`
+     * @param ref Git reference to search files in
+     * @param prefix Optional path prefix used to filter results
+     * @returns {string[]} List of file paths
+     * @throws {GitHubAPIError} on an API error
+     */
+        this.findFilesByExtensionAndRef = wrapAsync(async (extension, ref, prefix) => {
+            if (prefix) {
+                prefix = normalizePrefix(prefix);
+            }
+            return this.fileCache.findFilesByExtension(extension, ref, prefix);
+        });
+        /**
+         * Create a GitHub release
+         *
+         * @param {Release} release Release parameters
+         * @param {ReleaseOptions} options Release option parameters
+         * @throws {DuplicateReleaseError} if the release tag already exists
+         * @throws {GitHubAPIError} on other API errors
+         */
+        this.createRelease = wrapAsync(async (release, options = {}) => {
+            const resp = await this.octokit.repos.createRelease({
+                name: release.name,
+                owner: this.repository.owner,
+                repo: this.repository.repo,
+                tag_name: release.tag.toString(),
+                body: release.notes,
+                draft: !!options.draft,
+                prerelease: !!options.prerelease,
+                target_commitish: release.sha,
+            });
+            return {
+                id: resp.data.id,
+                name: resp.data.name || undefined,
+                tagName: resp.data.tag_name,
+                sha: resp.data.target_commitish,
+                notes: resp.data.body_text ||
+                    resp.data.body ||
+                    resp.data.body_html ||
+                    undefined,
+                url: resp.data.html_url,
+                draft: resp.data.draft,
+                uploadUrl: resp.data.upload_url,
+            };
+        }, e => {
+            if (e instanceof request_error_1.RequestError) {
+                if (e.status === 422 &&
+                    errors_1.GitHubAPIError.parseErrors(e).some(error => {
+                        return error.code === 'already_exists';
+                    })) {
+                    throw new errors_1.DuplicateReleaseError(e, 'tagName');
+                }
+            }
+        });
+        /**
+         * Makes a comment on an issue/pull request.
+         *
+         * @param {string} comment - The body of the comment to post.
+         * @param {number} number - The issue or pull request number.
+         * @throws {GitHubAPIError} on an API error
+         */
+        this.commentOnIssue = wrapAsync(async (comment, number) => {
+            this.logger.debug(`adding comment to https://github.com/${this.repository.owner}/${this.repository.repo}/issues/${number}`);
+            const resp = await this.octokit.issues.createComment({
+                owner: this.repository.owner,
+                repo: this.repository.repo,
+                issue_number: number,
+                body: comment,
+            });
+            return resp.data.html_url;
+        });
+        /**
+         * Adds labels to an issue/pull request.
+         *
+         * @param {string[]} labels The labels to add.
+         * @param {number} number The issue/pull request number.
+         */
+        this.addIssueLabels = wrapAsync(async (labels, number) => {
+            if (labels.length === 0) {
+                return;
+            }
+            this.logger.debug(`adding labels: ${labels} to issue/pull ${number}`);
+            await this.octokit.issues.addLabels({
+                owner: this.repository.owner,
+                repo: this.repository.repo,
+                issue_number: number,
+                labels,
+            });
+        });
+        this.repository = options.repository;
+        this.octokit = options.octokitAPIs.octokit;
+        this.request = options.octokitAPIs.request;
+        this.graphql = options.octokitAPIs.graphql;
+        this.fileCache = new git_file_utils_1.RepositoryFileCache(this.octokit, this.repository);
+        this.logger = (_a = options.logger) !== null && _a !== void 0 ? _a : logger_1.logger;
+    }
+    static createDefaultAgent(baseUrl, defaultProxy) {
+        if (!defaultProxy) {
+            return undefined;
+        }
+        const { host, port } = defaultProxy;
+        if (new URL(baseUrl).protocol.replace(':', '') === 'http') {
+            return new http_proxy_agent_1.HttpProxyAgent(`http://${host}:${port}`);
+        }
+        else {
+            return new https_proxy_agent_1.HttpsProxyAgent(`https://${host}:${port}`);
+        }
+    }
+    /**
+     * Build a new GitHub client with auto-detected default branch.
+     *
+     * @param {GitHubCreateOptions} options Configuration options
+     * @param {string} options.owner The repository owner.
+     * @param {string} options.repo The repository name.
+     * @param {string} options.defaultBranch Optional. The repository's default branch.
+     *   Defaults to the value fetched via the API.
+     * @param {string} options.apiUrl Optional. The base url of the GitHub API.
+     * @param {string} options.graphqlUrl Optional. The base url of the GraphQL API.
+     * @param {OctokitAPIs} options.octokitAPIs Optional. Override the internal
+     *   client instances with a pre-authenticated instance.
+     * @param {string} options.token Optional. A GitHub API token used for authentication.
+     */
+    static async create(options) {
+        var _a, _b, _c, _d;
+        const apiUrl = (_a = options.apiUrl) !== null && _a !== void 0 ? _a : exports.GH_API_URL;
+        const graphqlUrl = (_b = options.graphqlUrl) !== null && _b !== void 0 ? _b : exports.GH_GRAPHQL_URL;
+        const releasePleaseVersion = (__nccwpck_require__(67034)/* .version */ .i8);
+        const apis = (_c = options.octokitAPIs) !== null && _c !== void 0 ? _c : {
+            octokit: new rest_1.Octokit({
+                baseUrl: apiUrl,
+                auth: options.token,
+                request: {
+                    agent: this.createDefaultAgent(apiUrl, options.proxy),
+                },
+            }),
+            request: request_1.request.defaults({
+                baseUrl: apiUrl,
+                headers: {
+                    'user-agent': `release-please/${releasePleaseVersion}`,
+                    Authorization: `token ${options.token}`,
+                },
+            }),
+            graphql: graphql_1.graphql.defaults({
+                baseUrl: graphqlUrl,
+                request: {
+                    agent: this.createDefaultAgent(graphqlUrl, options.proxy),
+                },
+                headers: {
+                    'user-agent': `release-please/${releasePleaseVersion}`,
+                    Authorization: `token ${options.token}`,
+                    'content-type': 'application/vnd.github.v3+json',
+                },
+            }),
+        };
+        const opts = {
+            repository: {
+                owner: options.owner,
+                repo: options.repo,
+                gitUsername: options.gitUsername,
+                gitPassword: options.gitPassword,
+                defaultBranch: (_d = options.defaultBranch) !== null && _d !== void 0 ? _d : (await GitHub.defaultBranch(options.owner, options.repo, apis.octokit)),
+            },
+            octokitAPIs: apis,
+            logger: options.logger,
+        };
+        return new GitHub(opts);
+    }
+    /**
+     * Returns the default branch for a given repository.
+     *
+     * @param {string} owner The GitHub repository owner
+     * @param {string} repo The GitHub repository name
+     * @param {OctokitType} octokit An authenticated octokit instance
+     * @returns {string} Name of the default branch
+     */
+    static async defaultBranch(owner, repo, octokit) {
+        const { data } = await octokit.repos.get({
+            repo,
+            owner,
+        });
+        return data.default_branch;
+    }
+    /**
+     * Returns the list of commits to the default branch after the provided filter
+     * query has been satisfied.
+     *
+     * @param {string} targetBranch Target branch of commit
+     * @param {CommitFilter} filter Callback function that returns whether a
+     *   commit/pull request matches certain criteria
+     * @param {CommitIteratorOptions} options Query options
+     * @param {number} options.maxResults Limit the number of results searched.
+     *   Defaults to unlimited.
+ * @param {boolean} options.backfillFiles If set, use the REST API for + * fetching the list of touched files in this commit. Defaults to `false`. + * @returns {Commit[]} List of commits to current branch + * @throws {GitHubAPIError} on an API error + */ + async commitsSince(targetBranch, filter, options = {}) { + const commits = []; + const generator = this.mergeCommitIterator(targetBranch, options); + for await (const commit of generator) { + if (filter(commit)) { + break; + } + commits.push(commit); + } + return commits; + } + /** + * Iterate through commit history with a max number of results scanned. + * + * @param {string} targetBranch target branch of commit + * @param {CommitIteratorOptions} options Query options + * @param {number} options.maxResults Limit the number of results searched. + * Defaults to unlimited. + * @param {boolean} options.backfillFiles If set, use the REST API for + * fetching the list of touched files in this commit. Defaults to `false`. + * @yields {Commit} + * @throws {GitHubAPIError} on an API error + */ + async *mergeCommitIterator(targetBranch, options = {}) { + var _a; + const maxResults = (_a = options.maxResults) !== null && _a !== void 0 ? _a : Number.MAX_SAFE_INTEGER; + let cursor = undefined; + let results = 0; + while (results < maxResults) { + const response = await this.mergeCommitsGraphQL(targetBranch, cursor, options); + // no response usually means that the branch can't be found + if (!response) { + break; + } + for (let i = 0; i < response.data.length; i++) { + results += 1; + yield response.data[i]; + } + if (!response.pageInfo.hasNextPage) { + break; + } + cursor = response.pageInfo.endCursor; + } + } + async getRepositoryLabels() { + return (await this.octokit.request('GET /repos/{owner}/{repo}/labels', { + owner: this.repository.owner, + repo: this.repository.repo, + })).data; + } + /** + * Iterate through merged pull requests with a max number of results scanned. + * + * @param {string} targetBranch The base branch of the pull request + * @param {string} status The status of the pull request + * @param {number} maxResults Limit the number of results searched. Defaults to + * unlimited. + * @param {boolean} includeFiles Whether to fetch the list of files included in + * the pull request. Defaults to `true`. + * @yields {PullRequest} + * @throws {GitHubAPIError} on an API error + */ + async *pullRequestIterator(targetBranch, status = 'MERGED', maxResults = Number.MAX_SAFE_INTEGER, includeFiles = true) { + const generator = includeFiles + ? this.pullRequestIteratorWithFiles(targetBranch, status, maxResults) + : this.pullRequestIteratorWithoutFiles(targetBranch, status, maxResults); + for await (const pullRequest of generator) { + yield pullRequest; + } + } + /** + * Helper implementation of pullRequestIterator that includes files via + * the graphQL API. 
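+ *
+ * @example
+ * // Illustrative sketch (not in the original source): callers use the
+ * // public `pullRequestIterator`, which delegates here when
+ * // `includeFiles` is true; `github` is a constructed GitHub client.
+ * for await (const pr of github.pullRequestIterator('main', 'MERGED', 50, true)) {
+ *   console.log(pr.number, pr.files);
+ * }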
+ * + * @param {string} targetBranch The base branch of the pull request + * @param {string} status The status of the pull request + * @param {number} maxResults Limit the number of results searched + */ + async *pullRequestIteratorWithFiles(targetBranch, status = 'MERGED', maxResults = Number.MAX_SAFE_INTEGER) { + let cursor = undefined; + let results = 0; + while (results < maxResults) { + const response = await this.pullRequestsGraphQL(targetBranch, status, cursor); + // no response usually means we ran out of results + if (!response) { + break; + } + for (let i = 0; i < response.data.length; i++) { + results += 1; + yield response.data[i]; + } + if (!response.pageInfo.hasNextPage) { + break; + } + cursor = response.pageInfo.endCursor; + } + } + /** + * Return a list of merged pull requests. The list is not guaranteed to be sorted + * by merged_at, but is generally most recent first. + * + * @param {string} targetBranch - Base branch of the pull request. Defaults to + * the configured default branch. + * @param {number} page - Page of results. Defaults to 1. + * @param {number} perPage - Number of results per page. Defaults to 100. + * @returns {PullRequestHistory | null} - List of merged pull requests + * @throws {GitHubAPIError} on an API error + */ + async pullRequestsGraphQL(targetBranch, states = 'MERGED', cursor) { + var _a; + this.logger.debug(`Fetching ${states} pull requests on branch ${targetBranch} with cursor ${cursor}`); + const response = await this.graphqlRequest({ + query: `query mergedPullRequests($owner: String!, $repo: String!, $num: Int!, $maxFilesChanged: Int, $targetBranch: String!, $states: [PullRequestState!], $cursor: String) { + repository(owner: $owner, name: $repo) { + pullRequests(first: $num, after: $cursor, baseRefName: $targetBranch, states: $states, orderBy: {field: CREATED_AT, direction: DESC}) { + nodes { + number + title + baseRefName + headRefName + labels(first: 10) { + nodes { + name + } + } + body + mergeCommit { + oid + } + files(first: $maxFilesChanged) { + nodes { + path + } + pageInfo { + endCursor + hasNextPage + } + } + } + pageInfo { + endCursor + hasNextPage + } + } + } + }`, + cursor, + owner: this.repository.owner, + repo: this.repository.repo, + num: 25, + targetBranch, + states, + maxFilesChanged: 64, + }); + if (!((_a = response === null || response === void 0 ? void 0 : response.repository) === null || _a === void 0 ? void 0 : _a.pullRequests)) { + this.logger.warn(`Could not find merged pull requests for branch ${targetBranch} - it likely does not exist.`); + return null; + } + const pullRequests = (response.repository.pullRequests.nodes || + []); + return { + pageInfo: response.repository.pullRequests.pageInfo, + data: pullRequests.map(pullRequest => { + var _a, _b, _c; + return { + sha: (_a = pullRequest.mergeCommit) === null || _a === void 0 ? void 0 : _a.oid, + number: pullRequest.number, + baseBranchName: pullRequest.baseRefName, + headBranchName: pullRequest.headRefName, + labels: (((_b = pullRequest.labels) === null || _b === void 0 ? void 0 : _b.nodes) || []).map(l => l.name), + title: pullRequest.title, + body: pullRequest.body + '', + files: (((_c = pullRequest.files) === null || _c === void 0 ? void 0 : _c.nodes) || []).map(node => node.path), + }; + }), + }; + } + /** + * Iterate through releases with a max number of results scanned. + * + * @param {ReleaseIteratorOptions} options Query options + * @param {number} options.maxResults Limit the number of results searched. + * Defaults to unlimited. 
+ * @yields {GitHubRelease} + * @throws {GitHubAPIError} on an API error + */ + async *releaseIterator(options = {}) { + var _a; + const maxResults = (_a = options.maxResults) !== null && _a !== void 0 ? _a : Number.MAX_SAFE_INTEGER; + let results = 0; + let cursor = undefined; + while (true) { + const response = await this.releaseGraphQL(cursor); + if (!response) { + break; + } + for (let i = 0; i < response.data.length; i++) { + if ((results += 1) > maxResults) { + break; + } + yield response.data[i]; + } + if (results > maxResults || !response.pageInfo.hasNextPage) { + break; + } + cursor = response.pageInfo.endCursor; + } + } + /** + * Helper implementation of pullRequestIterator that excludes files + * via the REST API. + * + * @param {string} targetBranch The base branch of the pull request + * @param {string} status The status of the pull request + * @param {number} maxResults Limit the number of results searched + */ + async *pullRequestIteratorWithoutFiles(targetBranch, status = 'MERGED', maxResults = Number.MAX_SAFE_INTEGER) { + const statusMap = { + OPEN: 'open', + CLOSED: 'closed', + MERGED: 'closed', + }; + let results = 0; + for await (const { data: pulls } of this.octokit.paginate.iterator('GET /repos/{owner}/{repo}/pulls', { + state: statusMap[status], + owner: this.repository.owner, + repo: this.repository.repo, + base: targetBranch, + sort: 'updated', + direction: 'desc', + })) { + for (const pull of pulls) { + // The REST API does not have an option for "merged" + // pull requests - they are closed with a `merged_at` timestamp + if ((status !== 'MERGED' || pull.merged_at) && + pull.base.ref === targetBranch) { + results += 1; + yield { + headBranchName: pull.head.ref, + baseBranchName: pull.base.ref, + number: pull.number, + title: pull.title, + body: pull.body || '', + labels: pull.labels.map(label => label.name), + files: [], + sha: pull.merge_commit_sha || undefined, + }; + if (results >= maxResults) { + break; + } + } + } + if (results >= maxResults) { + break; + } + } + } + /** + * Iterate through tags with a max number of results scanned. + * + * @param {TagIteratorOptions} options Query options + * @param {number} options.maxResults Limit the number of results searched. + * Defaults to unlimited. 
+ * @yields {GitHubTag} + * @throws {GitHubAPIError} on an API error + */ + async *tagIterator(options = {}) { + const maxResults = options.maxResults || Number.MAX_SAFE_INTEGER; + let results = 0; + for await (const response of this.octokit.paginate.iterator('GET /repos/{owner}/{repo}/tags', { + owner: this.repository.owner, + repo: this.repository.repo, + })) { + for (const tag of response.data) { + if ((results += 1) > maxResults) { + break; + } + yield { + name: tag.name, + sha: tag.commit.sha, + }; + } + if (results > maxResults) + break; + } + } + /** + * Fetch the contents of a file from the configured branch + * + * @param {string} path The path to the file in the repository + * @returns {GitHubFileContents} + * @throws {GitHubAPIError} on other API errors + */ + async getFileContents(path) { + return await this.getFileContentsOnBranch(path, this.repository.defaultBranch); + } + /** + * Fetch the contents of a file + * + * @param {string} path The path to the file in the repository + * @param {string} branch The branch to fetch from + * @returns {GitHubFileContents} + * @throws {FileNotFoundError} if the file cannot be found + * @throws {GitHubAPIError} on other API errors + */ + async getFileContentsOnBranch(path, branch) { + this.logger.debug(`Fetching ${path} from branch ${branch}`); + try { + return await this.fileCache.getFileContents(path, branch); + } + catch (e) { + if (e instanceof git_file_utils_1.FileNotFoundError) { + throw new errors_1.FileNotFoundError(path); + } + throw e; + } + } + async getFileJson(path, branch) { + const content = await this.getFileContentsOnBranch(path, branch); + return JSON.parse(content.parsedContent); + } + /** + * Returns a list of paths to all files with a given name. + * + * If a prefix is specified, only return paths that match + * the provided prefix. + * + * @param filename The name of the file to find + * @param prefix Optional path prefix used to filter results + * @returns {string[]} List of file paths + * @throws {GitHubAPIError} on an API error + */ + async findFilesByFilename(filename, prefix) { + return this.findFilesByFilenameAndRef(filename, this.repository.defaultBranch, prefix); + } + /** + * Returns a list of paths to all files matching a glob pattern. + * + * If a prefix is specified, only return paths that match + * the provided prefix. + * + * @param glob The glob to match + * @param prefix Optional path prefix used to filter results + * @returns {string[]} List of file paths + * @throws {GitHubAPIError} on an API error + */ + async findFilesByGlob(glob, prefix) { + return this.findFilesByGlobAndRef(glob, this.repository.defaultBranch, prefix); + } + /** + * Open a pull request + * + * @deprecated This logic is handled by the Manifest class now as it + * can be more complicated if the release notes are too big + * @param {ReleasePullRequest} releasePullRequest Pull request data to update + * @param {string} targetBranch The base branch of the pull request + * @param {GitHubPR} options The pull request options + * @throws {GitHubAPIError} on an API error + */ + async createReleasePullRequest(releasePullRequest, targetBranch, options) { + let message = releasePullRequest.title.toString(); + if (options === null || options === void 0 ? void 0 : options.signoffUser) { + message = (0, signoff_commit_message_1.signoffCommitMessage)(message, options.signoffUser); + } + const pullRequestLabels = (options === null || options === void 0 ? void 0 : options.skipLabeling) + ? 
[] + : releasePullRequest.labels; + return await this.createPullRequest({ + headBranchName: releasePullRequest.headRefName, + baseBranchName: targetBranch, + number: -1, + title: releasePullRequest.title.toString(), + body: releasePullRequest.body.toString().slice(0, MAX_ISSUE_BODY_SIZE), + labels: pullRequestLabels, + files: [], + }, targetBranch, message, releasePullRequest.updates, { + fork: options === null || options === void 0 ? void 0 : options.fork, + draft: releasePullRequest.draft, + }); + } + /** + * Given a set of proposed updates, build a changeset to suggest. + * + * @param {Update[]} updates The proposed updates + * @param {string} defaultBranch The target branch + * @return {Changes} The changeset to suggest. + * @throws {GitHubAPIError} on an API error + */ + async buildChangeSet(updates, defaultBranch) { + const changes = new Map(); + for (const update of updates) { + let content; + try { + content = await this.getFileContentsOnBranch(update.path, defaultBranch); + } + catch (err) { + if (!(err instanceof errors_1.FileNotFoundError)) + throw err; + // if the file is missing and create = false, just continue + // to the next update, otherwise create the file. + if (!update.createIfMissing) { + this.logger.warn(`file ${update.path} did not exist`); + continue; + } + } + const contentText = content + ? Buffer.from(content.content, 'base64').toString('utf8') + : undefined; + const updatedContent = update.updater.updateContent(contentText, this.logger); + if (updatedContent) { + changes.set(update.path, { + content: updatedContent, + originalContent: (content === null || content === void 0 ? void 0 : content.parsedContent) || null, + mode: (content === null || content === void 0 ? void 0 : content.mode) || git_file_utils_1.DEFAULT_FILE_MODE, + }); + } + } + return changes; + } + /** + * Returns a list of paths to all files with a given file + * extension. + * + * If a prefix is specified, only return paths that match + * the provided prefix. + * + * @param extension The file extension used to filter results. + * Example: `js`, `java` + * @param prefix Optional path prefix used to filter results + * @returns {string[]} List of file paths + * @throws {GitHubAPIError} on an API error + */ + async findFilesByExtension(extension, prefix) { + return this.findFilesByExtensionAndRef(extension, this.repository.defaultBranch, prefix); + } + async mergeCommitsGraphQL(targetBranch, cursor, options = {}) { + var _a, _b, _c, _d, _e, _f, _g; + var _h; + this.logger.debug(`Fetching merge commits on branch ${targetBranch} with cursor: ${cursor}`); + const query = `query pullRequestsSince($owner: String!, $repo: String!, $num: Int!, $maxFilesChanged: Int, $targetBranch: String!, $cursor: String) { + repository(owner: $owner, name: $repo) { + ref(qualifiedName: $targetBranch) { + target { + ... 
on Commit {
+ history(first: $num, after: $cursor) {
+ nodes {
+ associatedPullRequests(first: 10) {
+ nodes {
+ number
+ title
+ baseRefName
+ headRefName
+ labels(first: 10) {
+ nodes {
+ name
+ }
+ }
+ body
+ mergeCommit {
+ oid
+ }
+ files(first: $maxFilesChanged) {
+ nodes {
+ path
+ }
+ pageInfo {
+ endCursor
+ hasNextPage
+ }
+ }
+ }
+ }
+ sha: oid
+ message
+ }
+ pageInfo {
+ hasNextPage
+ endCursor
+ }
+ }
+ }
+ }
+ }
+ }
+ }`;
+ const params = {
+ cursor,
+ owner: this.repository.owner,
+ repo: this.repository.repo,
+ limit: options.maxResults,
+ targetBranch,
+ maxFilesChanged: 100, // max is 100
+ };
+ const response = await this.octokit.repos.listCommits({ ...params });
+ if (!response) {
+ this.logger.warn(`Did not receive a response for query: ${query}`, params);
+ return null;
+ }
+ // if the branch does not exist, return null
+ if (response.status !== 200) {
+ this.logger.warn(`Could not find commits for branch ${targetBranch} - it likely does not exist.`);
+ return null;
+ }
+ const commits = [];
+ for (const commit of response.data) {
+ const prs = [];
+ try {
+ const parameters = {
+ commit_sha: commit.sha,
+ owner: this.repository.owner,
+ repo: this.repository.repo,
+ };
+ const commitPullRequest = await this.octokit.request('GET /repos/{owner}/{repo}/commits/{commit_sha}/pull', parameters);
+ const pr = commitPullRequest.data;
+ prs.push({
+ number: pr.number,
+ title: pr.title,
+ baseRefName: pr.base.ref,
+ headRefName: pr.head.ref,
+ labels: {
+ nodes: pr.labels.map(label => {
+ return { name: label.name };
+ }),
+ },
+ body: pr.body || '',
+ mergeCommit: { oid: pr.merge_commit_sha || '' },
+ files: { nodes: [], pageInfo: { hasNextPage: false } },
+ });
+ }
+ catch (e) { } // no associated pull request found for this commit; continue without PR data
+ const sha = commit.sha;
+ const message = commit.commit.message;
+ commits.push({ sha, message, associatedPullRequests: { nodes: prs } });
+ }
+ // Count the number of pull requests associated with each merge commit. This is
+ // used in the next step to make sure we only find pull requests with a
+ // merge commit that contains 1 merged commit.
+ const mergeCommitCount = {};
+ for (const commit of commits) {
+ for (const pr of commit.associatedPullRequests.nodes) {
+ if ((_a = pr.mergeCommit) === null || _a === void 0 ? void 0 : _a.oid) {
+ (_b = mergeCommitCount[_h = pr.mergeCommit.oid]) !== null && _b !== void 0 ? _b : (mergeCommitCount[_h] = 0);
+ mergeCommitCount[pr.mergeCommit.oid]++;
+ }
+ }
+ }
+ const commitData = [];
+ for (const graphCommit of commits) {
+ const commit = {
+ sha: graphCommit.sha,
+ message: graphCommit.message,
+ };
+ const mergePullRequest = graphCommit.associatedPullRequests.nodes.find(pr => {
+ return (
+ // Only match the pull request with a merge commit if there is a
+ // single merged commit in the PR. This means merge commits and squash
+ // merges will be matched, but rebase merged PRs will only be matched
+ // if they contain a single commit. This is so PRs that are rebased
+ // and merged will have files backfilled from each commit instead of
+ // the whole PR.
+ pr.mergeCommit &&
+ pr.mergeCommit.oid === graphCommit.sha &&
+ mergeCommitCount[pr.mergeCommit.oid] === 1);
+ });
+ const pullRequest = mergePullRequest || graphCommit.associatedPullRequests.nodes[0];
+ if (pullRequest) {
+ commit.pullRequest = {
+ sha: commit.sha,
+ number: pullRequest.number,
+ baseBranchName: pullRequest.baseRefName,
+ headBranchName: pullRequest.headRefName,
+ mergeCommitOid: (_c = pullRequest.mergeCommit) === null || _c === void 0 ?
void 0 : _c.oid, + title: pullRequest.title, + body: pullRequest.body, + labels: pullRequest.labels.nodes.map(node => node.name), + files: (((_d = pullRequest.files) === null || _d === void 0 ? void 0 : _d.nodes) || []).map(node => node.path), + }; + } + if (mergePullRequest) { + if (((_f = (_e = mergePullRequest.files) === null || _e === void 0 ? void 0 : _e.pageInfo) === null || _f === void 0 ? void 0 : _f.hasNextPage) && + options.backfillFiles) { + this.logger.info(`PR #${mergePullRequest.number} has many files, backfilling`); + commit.files = await this.getCommitFiles(graphCommit.sha); + } + else { + // We cannot directly fetch files on commits via graphql, only provide file + // information for commits with associated pull requests + commit.files = (((_g = mergePullRequest.files) === null || _g === void 0 ? void 0 : _g.nodes) || []).map(node => node.path); + } + } + else if (options.backfillFiles) { + // In this case, there is no squashed merge commit. This could be a simple + // merge commit, a rebase merge commit, or a direct commit to the branch. + // Fallback to fetching the list of commits from the REST API. In the future + // we can perhaps lazy load these. + commit.files = await this.getCommitFiles(graphCommit.sha); + } + commitData.push(commit); + } + return { + // pageInfo: history.pageInfo, + pageInfo: { hasNextPage: false, endCursor: undefined }, + data: commitData, + }; + } + /** + * Generate release notes from GitHub at tag + * @param {string} tagName Name of new release tag + * @param {string} targetCommitish Target commitish for new tag + * @param {string} previousTag Optional. Name of previous tag to analyze commits since + */ + async generateReleaseNotes(tagName, targetCommitish, previousTag) { + const resp = await this.octokit.repos.generateReleaseNotes({ + owner: this.repository.owner, + repo: this.repository.repo, + tag_name: tagName, + previous_tag_name: previousTag, + target_commitish: targetCommitish, + }); + return resp.data.body; + } + /** + * Create a single file on a new branch based on an existing + * branch. This will force-push to that branch. + * @param {string} filename Filename with path in the repository + * @param {string} contents Contents of the file + * @param {string} newBranchName Name of the new branch + * @param {string} baseBranchName Name of the base branch (where + * new branch is forked from) + * @returns {string} HTML URL of the new file + */ + async createFileOnNewBranch(filename, contents, newBranchName, baseBranchName) { + // create or update new branch to match base branch + await this.forkBranch(newBranchName, baseBranchName); + // use the single file upload API + const { data: { content }, } = await this.octokit.repos.createOrUpdateFileContents({ + owner: this.repository.owner, + repo: this.repository.repo, + path: filename, + // contents need to be base64 encoded + content: Buffer.from(contents, 'binary').toString('base64'), + message: 'Saving release notes', + branch: newBranchName, + }); + if (!(content === null || content === void 0 ? void 0 : content.html_url)) { + throw new Error(`Failed to write to file: ${filename} on branch: ${newBranchName}`); + } + return content.html_url; + } + /** + * Helper to fetch the SHA of a branch + * @param {string} branchName The name of the branch + * @return {string | undefined} Returns the SHA of the branch + * or undefined if it can't be found. 
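+ *
+ * @example
+ * // Sketch added for illustration (not in the original source); the branch
+ * // name is a placeholder. A missing branch yields `undefined` rather than
+ * // an error, so callers can probe before creating or updating refs.
+ * const sha = await github.getBranchSha('release-please--branches--main');
+ * if (sha === undefined) {
+ *   // branch does not exist yet
+ * }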
+ */
+ async getBranchSha(branchName) {
+ this.logger.debug(`Looking up SHA for branch: ${branchName}`);
+ try {
+ const { data: { object: { sha }, }, } = await this.octokit.git.getRef({
+ owner: this.repository.owner,
+ repo: this.repository.repo,
+ ref: `heads/${branchName}`,
+ });
+ this.logger.debug(`SHA for branch: ${sha}`);
+ return sha;
+ }
+ catch (e) {
+ if (e instanceof request_error_1.RequestError && e.status === 404) {
+ this.logger.debug(`Branch: ${branchName} does not exist`);
+ return undefined;
+ }
+ throw e;
+ }
+ }
+ /**
+ * Helper to fork a branch from an existing branch. Uses `force` so
+ * it will overwrite the contents of `targetBranchName` to match
+ * the current contents of `baseBranchName`.
+ *
+ * @param {string} targetBranchName The name of the new forked branch
+ * @param {string} baseBranchName The base branch from which to fork.
+ * @returns {string} The branch SHA
+ * @throws {ConfigurationError} if the base branch cannot be found.
+ */
+ async forkBranch(targetBranchName, baseBranchName) {
+ const baseBranchSha = await this.getBranchSha(baseBranchName);
+ if (!baseBranchSha) {
+ // this is highly unlikely to be thrown as we will have
+ // already attempted to read from the branch
+ throw new errors_1.ConfigurationError(`Unable to find base branch: ${baseBranchName}`, 'core', `${this.repository.owner}/${this.repository.repo}`);
+ }
+ // see if targetBranchName exists
+ if (await this.getBranchSha(targetBranchName)) {
+ // branch already exists, update it to match the base branch
+ const branchSha = await this.updateBranchSha(targetBranchName, baseBranchSha);
+ this.logger.debug(`Updated ${targetBranchName} to match ${baseBranchName} at ${branchSha}`);
+ return branchSha;
+ }
+ else {
+ // branch does not exist, create a new branch from the base branch
+ const branchSha = await this.createNewBranch(targetBranchName, baseBranchSha);
+ this.logger.debug(`Forked ${targetBranchName} from ${baseBranchName} at ${branchSha}`);
+ return branchSha;
+ }
+ }
+ /**
+ * Helper to create a new branch from a given SHA.
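+ * Note: the underlying `git.createRef` call fails if the ref already
+ * exists, which is why `forkBranch` above updates an existing branch
+ * via `updateBranchSha` instead of re-creating it.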
+ * @param {string} branchName The new branch name + * @param {string} branchSha The SHA of the branch + * @returns {string} The SHA of the new branch + */ + async createNewBranch(branchName, branchSha) { + this.logger.debug(`Creating new branch: ${branchName} at ${branchSha}`); + const { data: { object: { sha }, }, } = await this.octokit.git.createRef({ + owner: this.repository.owner, + repo: this.repository.repo, + ref: `refs/heads/${branchName}`, + sha: branchSha, + }); + this.logger.debug(`New branch: ${branchName} at ${sha}`); + return sha; + } + async updateBranchSha(branchName, branchSha) { + this.logger.debug(`Updating branch ${branchName} to ${branchSha}`); + const { data: { object: { sha }, }, } = await this.octokit.git.updateRef({ + owner: this.repository.owner, + repo: this.repository.repo, + ref: `heads/${branchName}`, + sha: branchSha, + force: true, + }); + this.logger.debug(`Updated branch: ${branchName} to ${sha}`); + return sha; + } + async releaseGraphQL(cursor) { + const response = await this.octokit.rest.repos.listReleases({ + owner: this.repository.owner, + repo: this.repository.repo, + }); + if (!response.data.length) { + this.logger.warn('Could not find releases.'); + return null; + } + const releases = response.data; + return { + pageInfo: { + hasNextPage: false, + endCursor: undefined, + }, + data: releases + .filter(release => !!release.target_commitish) + .map(release => { + if (!release.tag_name || !release.target_commitish) { + this.logger.debug(release); + } + return { + name: release.name || undefined, + tagName: release.tag_name ? release.tag_name : 'unknown', + sha: release.target_commitish, + notes: release.body, + url: release.url, + draft: release.draft, + }; + }), + }; + } +} +exports.GitHub = GitHub; +/** + * Normalize a provided prefix by removing leading and trailing + * slashes. + * + * @param prefix String to normalize + */ +function normalizePrefix(prefix) { + const normalized = prefix.replace(/^[/\\]/, '').replace(/[/\\]$/, ''); + if (normalized === manifest_1.ROOT_PROJECT_PATH) { + return ''; + } + return normalized; +} +/** + * Wrap an async method with error handling + * + * @param fn Async function that can throw Errors + * @param errorHandler An optional error handler for rethrowing custom exceptions + */ +/* eslint-disable @typescript-eslint/no-explicit-any */ +const wrapAsync = (fn, errorHandler) => { + return async (...args) => { + try { + return await fn(...args); + } + catch (e) { + if (errorHandler) { + errorHandler(e); + } + if (e instanceof request_error_1.RequestError) { + throw new errors_1.GitHubAPIError(e); + } + throw e; + } + }; +}; +const sleepInMs = (ms) => new Promise(resolve => setTimeout(resolve, ms)); +exports.sleepInMs = sleepInMs; +//# sourceMappingURL=github.js.map + +/***/ }), + +/***/ 75833: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
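+// Usage sketch (added for illustration; `owner`, `repo`, and `token` are
+// placeholders): the typical flow through the API re-exported below.
+//
+//   const github = await GitHub.create({ owner, repo, token });
+//   const manifest = await Manifest.fromManifest(github, github.repository.defaultBranch);
+//   await manifest.createPullRequests(); // open/refresh release PRs
+//   await manifest.createReleases();     // tag merged, untagged release PRs
+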
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.VERSION = exports.manifestSchema = exports.configSchema = exports.GitHub = exports.setLogger = exports.registerVersioningStrategy = exports.getVersioningStrategyTypes = exports.registerPlugin = exports.getPluginTypes = exports.registerChangelogNotes = exports.getChangelogTypes = exports.registerReleaseType = exports.getReleaserTypes = exports.Manifest = exports.Errors = void 0; +exports.Errors = __nccwpck_require__(10818); +var manifest_1 = __nccwpck_require__(24026); +Object.defineProperty(exports, "Manifest", ({ enumerable: true, get: function () { return manifest_1.Manifest; } })); +var factory_1 = __nccwpck_require__(53693); +Object.defineProperty(exports, "getReleaserTypes", ({ enumerable: true, get: function () { return factory_1.getReleaserTypes; } })); +Object.defineProperty(exports, "registerReleaseType", ({ enumerable: true, get: function () { return factory_1.registerReleaseType; } })); +var changelog_notes_factory_1 = __nccwpck_require__(32217); +Object.defineProperty(exports, "getChangelogTypes", ({ enumerable: true, get: function () { return changelog_notes_factory_1.getChangelogTypes; } })); +Object.defineProperty(exports, "registerChangelogNotes", ({ enumerable: true, get: function () { return changelog_notes_factory_1.registerChangelogNotes; } })); +var plugin_factory_1 = __nccwpck_require__(74208); +Object.defineProperty(exports, "getPluginTypes", ({ enumerable: true, get: function () { return plugin_factory_1.getPluginTypes; } })); +Object.defineProperty(exports, "registerPlugin", ({ enumerable: true, get: function () { return plugin_factory_1.registerPlugin; } })); +var versioning_strategy_factory_1 = __nccwpck_require__(2694); +Object.defineProperty(exports, "getVersioningStrategyTypes", ({ enumerable: true, get: function () { return versioning_strategy_factory_1.getVersioningStrategyTypes; } })); +Object.defineProperty(exports, "registerVersioningStrategy", ({ enumerable: true, get: function () { return versioning_strategy_factory_1.registerVersioningStrategy; } })); +var logger_1 = __nccwpck_require__(18792); +Object.defineProperty(exports, "setLogger", ({ enumerable: true, get: function () { return logger_1.setLogger; } })); +var github_1 = __nccwpck_require__(58752); +Object.defineProperty(exports, "GitHub", ({ enumerable: true, get: function () { return github_1.GitHub; } })); +exports.configSchema = __nccwpck_require__(1383); +exports.manifestSchema = __nccwpck_require__(94592); +// x-release-please-start-version +exports.VERSION = '16.15.0'; +// x-release-please-end +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 24026: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/* + * // Copyright 2020 Google LLC + * // + * // Licensed under the Apache License, Version 2.0 (the "License"); + * // you may not use this file except in compliance with the License. + * // You may obtain a copy of the License at + * // + * // https://www.apache.org/licenses/LICENSE-2.0 + * // + * // Unless required by applicable law or agreed to in writing, software + * // distributed under the License is distributed on an "AS IS" BASIS, + * // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * // See the License for the specific language governing permissions and + * // limitations under the License. 
+ * // + * //Modifications made by Joaquin Santana on 19/11/24, 11:00 + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Manifest = exports.MANIFEST_PULL_REQUEST_TITLE_PATTERN = exports.SNOOZE_LABEL = exports.DEFAULT_SNAPSHOT_LABELS = exports.DEFAULT_RELEASE_LABELS = exports.DEFAULT_LABELS = exports.DEFAULT_COMPONENT_NAME = exports.ROOT_PROJECT_PATH = exports.DEFAULT_RELEASE_PLEASE_MANIFEST = exports.DEFAULT_RELEASE_PLEASE_CONFIG = void 0; +const version_1 = __nccwpck_require__(25112); +const commit_1 = __nccwpck_require__(50726); +const logger_1 = __nccwpck_require__(18792); +const commit_split_1 = __nccwpck_require__(95086); +const tag_name_1 = __nccwpck_require__(91203); +const branch_name_1 = __nccwpck_require__(94148); +const pull_request_title_1 = __nccwpck_require__(28866); +const factory_1 = __nccwpck_require__(53693); +const merge_1 = __nccwpck_require__(51886); +const release_please_manifest_1 = __nccwpck_require__(90094); +const errors_1 = __nccwpck_require__(10818); +const pull_request_overflow_handler_1 = __nccwpck_require__(58010); +const signoff_commit_message_1 = __nccwpck_require__(74069); +const commit_exclude_1 = __nccwpck_require__(88719); +exports.DEFAULT_RELEASE_PLEASE_CONFIG = 'release-please-config.json'; +exports.DEFAULT_RELEASE_PLEASE_MANIFEST = '.release-please-manifest.json'; +exports.ROOT_PROJECT_PATH = '.'; +exports.DEFAULT_COMPONENT_NAME = ''; +exports.DEFAULT_LABELS = ['autorelease: pending']; +exports.DEFAULT_RELEASE_LABELS = ['autorelease: tagged']; +exports.DEFAULT_SNAPSHOT_LABELS = ['autorelease: snapshot']; +exports.SNOOZE_LABEL = 'autorelease: snooze'; +const DEFAULT_RELEASE_SEARCH_DEPTH = 400; +const DEFAULT_COMMIT_SEARCH_DEPTH = 500; +exports.MANIFEST_PULL_REQUEST_TITLE_PATTERN = 'chore: release ${branch}'; +class Manifest { + /** + * Create a Manifest from explicit config in code. This assumes that the + * repository has a single component at the root path. + * + * @param {GitHub} github GitHub client + * @param {string} targetBranch The releaseable base branch + * @param {RepositoryConfig} repositoryConfig Parsed configuration of path => release configuration + * @param {ReleasedVersions} releasedVersions Parsed versions of path => latest release version + * @param {ManifestOptions} manifestOptions Optional. Manifest options + * @param {string} manifestOptions.bootstrapSha If provided, use this SHA + * as the point to consider commits after + * @param {boolean} manifestOptions.alwaysLinkLocal Option for the node-workspace + * plugin + * @param {boolean} manifestOptions.updatePeerDependencies Option for the node-workspace + * plugin + * @param {boolean} manifestOptions.separatePullRequests If true, create separate pull + * requests instead of a single manifest release pull request + * @param {boolean} manifestOptions.alwaysUpdate If true, always updates pull requests instead of + * only when the release notes change + * @param {PluginType[]} manifestOptions.plugins Any plugins to use for this repository + * @param {boolean} manifestOptions.fork If true, create pull requests from a fork. Defaults + * to `false` + * @param {string} manifestOptions.signoff Add a Signed-off-by annotation to the commit + * @param {string} manifestOptions.manifestPath Path to the versions manifest + * @param {string[]} manifestOptions.labels Labels that denote a pending, untagged release + * pull request. 
Defaults to `[autorelease: pending]` + * @param {string[]} manifestOptions.releaseLabels Labels to apply to a tagged release + * pull request. Defaults to `[autorelease: tagged]` + */ + constructor(github, targetBranch, repositoryConfig, releasedVersions, manifestOptions) { + var _a, _b; + this.repository = github.repository; + this.github = github; + this.targetBranch = targetBranch; + this.repositoryConfig = repositoryConfig; + this.releasedVersions = releasedVersions; + this.manifestPath = + (manifestOptions === null || manifestOptions === void 0 ? void 0 : manifestOptions.manifestPath) || exports.DEFAULT_RELEASE_PLEASE_MANIFEST; + this.separatePullRequests = + (_a = manifestOptions === null || manifestOptions === void 0 ? void 0 : manifestOptions.separatePullRequests) !== null && _a !== void 0 ? _a : Object.keys(repositoryConfig).length === 1; + this.alwaysUpdate = (manifestOptions === null || manifestOptions === void 0 ? void 0 : manifestOptions.alwaysUpdate) || false; + this.fork = (manifestOptions === null || manifestOptions === void 0 ? void 0 : manifestOptions.fork) || false; + this.signoffUser = manifestOptions === null || manifestOptions === void 0 ? void 0 : manifestOptions.signoff; + this.releaseLabels = + (manifestOptions === null || manifestOptions === void 0 ? void 0 : manifestOptions.releaseLabels) || exports.DEFAULT_RELEASE_LABELS; + this.labels = (manifestOptions === null || manifestOptions === void 0 ? void 0 : manifestOptions.labels) || exports.DEFAULT_LABELS; + this.skipLabeling = (manifestOptions === null || manifestOptions === void 0 ? void 0 : manifestOptions.skipLabeling) || false; + this.sequentialCalls = (manifestOptions === null || manifestOptions === void 0 ? void 0 : manifestOptions.sequentialCalls) || false; + this.snapshotLabels = + (manifestOptions === null || manifestOptions === void 0 ? void 0 : manifestOptions.snapshotLabels) || exports.DEFAULT_SNAPSHOT_LABELS; + this.bootstrapSha = manifestOptions === null || manifestOptions === void 0 ? void 0 : manifestOptions.bootstrapSha; + this.lastReleaseSha = manifestOptions === null || manifestOptions === void 0 ? void 0 : manifestOptions.lastReleaseSha; + this.draft = manifestOptions === null || manifestOptions === void 0 ? void 0 : manifestOptions.draft; + this.draftPullRequest = manifestOptions === null || manifestOptions === void 0 ? void 0 : manifestOptions.draftPullRequest; + this.groupPullRequestTitlePattern = + manifestOptions === null || manifestOptions === void 0 ? void 0 : manifestOptions.groupPullRequestTitlePattern; + this.releaseSearchDepth = + (manifestOptions === null || manifestOptions === void 0 ? void 0 : manifestOptions.releaseSearchDepth) || DEFAULT_RELEASE_SEARCH_DEPTH; + this.commitSearchDepth = + (manifestOptions === null || manifestOptions === void 0 ? void 0 : manifestOptions.commitSearchDepth) || DEFAULT_COMMIT_SEARCH_DEPTH; + this.logger = (_b = manifestOptions === null || manifestOptions === void 0 ? void 0 : manifestOptions.logger) !== null && _b !== void 0 ? _b : logger_1.logger; + this.plugins = ((manifestOptions === null || manifestOptions === void 0 ? 
void 0 : manifestOptions.plugins) || []).map(pluginType => (0, factory_1.buildPlugin)({ + type: pluginType, + github: this.github, + targetBranch: this.targetBranch, + repositoryConfig: this.repositoryConfig, + manifestPath: this.manifestPath, + separatePullRequests: this.separatePullRequests, + })); + this.pullRequestOverflowHandler = new pull_request_overflow_handler_1.FilePullRequestOverflowHandler(this.github, this.logger); + } + /** + * Create a Manifest from config files in the repository. + * + * @param {GitHub} github GitHub client + * @param {string} targetBranch The releaseable base branch + * @param {string} configFile Optional. The path to the manifest config file + * @param {string} manifestFile Optional. The path to the manifest versions file + * @param manifestOptionOverrides + * @param {string} path The single path to check. Optional + * @param releaseAs + * @returns {Manifest} + */ + static async fromManifest(github, targetBranch, configFile = exports.DEFAULT_RELEASE_PLEASE_CONFIG, manifestFile = exports.DEFAULT_RELEASE_PLEASE_MANIFEST, manifestOptionOverrides = {}, path, releaseAs) { + const [{ config: repositoryConfig, options: manifestOptions }, releasedVersions,] = await Promise.all([ + parseConfig(github, configFile, targetBranch, path, releaseAs), + parseReleasedVersions(github, manifestFile, targetBranch), + ]); + return new Manifest(github, targetBranch, repositoryConfig, releasedVersions, { + manifestPath: manifestFile, + ...manifestOptions, + ...manifestOptionOverrides, + }); + } + /** + * Create a Manifest from explicit config in code. This assumes that the + * repository has a single component at the root path. + * + * @param {GitHub} github GitHub client + * @param {string} targetBranch The releaseable base branch + * @param {ReleaserConfig} config Release strategy options + * @param {ManifestOptions} manifestOptions Optional. Manifest options + * @param {string} manifestOptions.bootstrapSha If provided, use this SHA + * as the point to consider commits after + * @param {boolean} manifestOptions.alwaysLinkLocal Option for the node-workspace + * plugin + * @param {boolean} manifestOptions.updatePeerDependencies Option for the node-workspace + * plugin + * @param {boolean} manifestOptions.separatePullRequests If true, create separate pull + * requests instead of a single manifest release pull request + * @param {PluginType[]} manifestOptions.plugins Any plugins to use for this repository + * @param {boolean} manifestOptions.fork If true, create pull requests from a fork. Defaults + * to `false` + * @param {string} manifestOptions.signoff Add a Signed-off-by annotation to the commit + * @param {string} manifestOptions.manifestPath Path to the versions manifest + * @param {string[]} manifestOptions.labels Labels that denote a pending, untagged release + * pull request. Defaults to `[autorelease: pending]` + * @param {string[]} manifestOptions.releaseLabels Labels to apply to a tagged release + * pull request. 
Defaults to `[autorelease: tagged]` + * @returns {Manifest} + */ + static async fromConfig(github, targetBranch, config, manifestOptions, path = exports.ROOT_PROJECT_PATH) { + const repositoryConfig = {}; + repositoryConfig[path] = config; + const strategy = await (0, factory_1.buildStrategy)({ + github, + ...config, + }); + const component = await strategy.getBranchComponent(); + const releasedVersions = {}; + const latestVersion = await latestReleaseVersion(github, targetBranch, version => isPublishedVersion(strategy, version), config, component, manifestOptions === null || manifestOptions === void 0 ? void 0 : manifestOptions.logger); + if (latestVersion) { + releasedVersions[path] = latestVersion; + } + return new Manifest(github, targetBranch, repositoryConfig, releasedVersions, { + separatePullRequests: true, + ...manifestOptions, + }); + } + /** + * Build all candidate pull requests for this repository. + * + * Iterates through each path and builds a candidate pull request for component. + * Applies any configured plugins. + * + * @returns {ReleasePullRequest[]} The candidate pull requests to open or update. + */ + async buildPullRequests() { + var _a; + this.logger.info('Building pull requests'); + const pathsByComponent = await this.getPathsByComponent(); + const strategiesByPath = await this.getStrategiesByPath(); + // Collect all the SHAs of the latest release packages + this.logger.info('Collecting release commit SHAs'); + let releasesFound = 0; + const expectedReleases = Object.keys(strategiesByPath).length; + // SHAs by path + const releaseShasByPath = {}; + // Releases by path + const releasesByPath = {}; + this.logger.debug(`release search depth: ${this.releaseSearchDepth}`); + for await (const release of this.github.releaseIterator({ + maxResults: this.releaseSearchDepth, + })) { + const tagName = tag_name_1.TagName.parse(release.tagName); + if (!tagName) { + this.logger.warn(`Unable to parse release name: ${release.name}`); + continue; + } + const component = tagName.component || exports.DEFAULT_COMPONENT_NAME; + const path = pathsByComponent[component]; + if (!path) { + this.logger.warn(`Found release tag with component '${component}', but not configured in manifest`); + continue; + } + const expectedVersion = this.releasedVersions[path]; + if (!expectedVersion) { + this.logger.warn(`Unable to find expected version for path '${path}' in manifest`); + continue; + } + if (expectedVersion.toString() === tagName.version.toString()) { + this.logger.debug(`Found release for path ${path}, ${release.tagName}`); + releaseShasByPath[path] = release.sha; + releasesByPath[path] = { + name: release.name, + tag: tagName, + sha: release.sha, + notes: release.notes || '', + }; + releasesFound += 1; + } + if (releasesFound >= expectedReleases) { + break; + } + } + if (releasesFound < expectedReleases) { + this.logger.warn(`Expected ${expectedReleases} releases, only found ${releasesFound}`); + // Fall back to looking for missing releases using expected tags + const missingPaths = Object.keys(strategiesByPath).filter(path => !releasesByPath[path]); + this.logger.warn(`Missing ${missingPaths.length} paths: ${missingPaths}`); + const missingReleases = await this.backfillReleasesFromTags(missingPaths, strategiesByPath); + for (const path in missingReleases) { + releaseShasByPath[path] = missingReleases[path].sha; + releasesByPath[path] = missingReleases[path]; + releasesFound++; + } + } + const needsBootstrap = releasesFound < expectedReleases; + if (releasesFound < expectedReleases) { + 
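+ // Even after backfilling from tags, some paths may still have no
+ // release; `needsBootstrap` then lets the commit walk below continue
+ // back to the configured `bootstrapSha` instead of stopping once all
+ // release SHAs have been seen.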
this.logger.warn(`Expected ${expectedReleases} releases, only found ${releasesFound}`); + } + for (const path in releasesByPath) { + const release = releasesByPath[path]; + this.logger.debug(`release for path: ${path}, version: ${release.tag.version.toString()}, sha: ${release.sha}`); + } + // iterate through commits and collect commits until we have + // seen all release commits + this.logger.info('Collecting commits since all latest releases'); + const commits = []; + this.logger.debug(`commit search depth: ${this.commitSearchDepth}`); + const commitGenerator = this.github.mergeCommitIterator(this.targetBranch, { + maxResults: this.commitSearchDepth, + backfillFiles: true, + }); + const releaseShas = new Set(Object.values(releaseShasByPath)); + this.logger.debug(releaseShas); + const expectedShas = releaseShas.size; + // sha => release pull request + const releasePullRequestsBySha = {}; + let releaseCommitsFound = 0; + for await (const commit of commitGenerator) { + if (releaseShas.has(commit.sha)) { + if (commit.pullRequest) { + releasePullRequestsBySha[commit.sha] = commit.pullRequest; + } + else { + this.logger.warn(`Release SHA ${commit.sha} did not have an associated pull request`); + } + releaseCommitsFound += 1; + } + if (this.lastReleaseSha && this.lastReleaseSha === commit.sha) { + this.logger.info(`Using configured lastReleaseSha ${this.lastReleaseSha} as last commit.`); + break; + } + else if (needsBootstrap && commit.sha === this.bootstrapSha) { + this.logger.info(`Needed bootstrapping, found configured bootstrapSha ${this.bootstrapSha}`); + break; + } + else if (!needsBootstrap && releaseCommitsFound >= expectedShas) { + // found enough commits + break; + } + commits.push({ + sha: commit.sha, + message: commit.message, + files: commit.files, + pullRequest: commit.pullRequest, + }); + } + if (releaseCommitsFound < expectedShas) { + this.logger.warn(`Expected ${expectedShas} commits, only found ${releaseCommitsFound}`); + } + // split commits by path + this.logger.info(`Splitting ${commits.length} commits by path`); + const cs = new commit_split_1.CommitSplit({ + includeEmpty: true, + packagePaths: Object.keys(this.repositoryConfig), + }); + const splitCommits = cs.split(commits); + // limit paths to ones since the last release + let commitsPerPath = {}; + for (const path in this.repositoryConfig) { + commitsPerPath[path] = commitsAfterSha(path === exports.ROOT_PROJECT_PATH ? 
commits : splitCommits[path], releaseShasByPath[path]);
+ }
+ const commitExclude = new commit_exclude_1.CommitExclude(this.repositoryConfig);
+ commitsPerPath = commitExclude.excludeCommits(commitsPerPath);
+ // backfill latest release tags from manifest
+ for (const path in this.repositoryConfig) {
+ const latestRelease = releasesByPath[path];
+ if (!latestRelease &&
+ this.releasedVersions[path] &&
+ this.releasedVersions[path].toString() !== '0.0.0') {
+ const version = this.releasedVersions[path];
+ const strategy = strategiesByPath[path];
+ const component = await strategy.getComponent();
+ this.logger.info(`No latest release found for path: ${path}, component: ${component}, but a previous version (${version.toString()}) was specified in the manifest.`);
+ releasesByPath[path] = {
+ tag: new tag_name_1.TagName(version, component, this.repositoryConfig[path].tagSeparator, this.repositoryConfig[path].includeVInTag),
+ sha: '',
+ notes: '',
+ };
+ }
+ }
+ let strategies = strategiesByPath;
+ for (const plugin of this.plugins) {
+ strategies = await plugin.preconfigure(strategies, commitsPerPath, releasesByPath);
+ }
+ let newReleasePullRequests = [];
+ for (const path in this.repositoryConfig) {
+ const config = this.repositoryConfig[path];
+ this.logger.info(`Building candidate release pull request for path: ${path}`);
+ this.logger.debug(`type: ${config.releaseType}`);
+ this.logger.debug(`targetBranch: ${this.targetBranch}`);
+ let pathCommits = (0, commit_1.parseConventionalCommits)(commitsPerPath[path], this.logger);
+ // The processCommits hook can be implemented by plugins to
+ // post-process commits. This can be used to perform cleanup, e.g.,
+ // sentence casing all commit messages:
+ for (const plugin of this.plugins) {
+ pathCommits = plugin.processCommits(pathCommits);
+ }
+ this.logger.debug(`commits: ${pathCommits.length}`);
+ const latestReleasePullRequest = releasePullRequestsBySha[releaseShasByPath[path]];
+ if (!latestReleasePullRequest) {
+ this.logger.warn('No latest release pull request found.');
+ }
+ const strategy = strategies[path];
+ const latestRelease = releasesByPath[path];
+ const releasePullRequest = await strategy.buildReleasePullRequest(pathCommits, latestRelease, (_a = config.draftPullRequest) !== null && _a !== void 0 ?
_a : this.draftPullRequest, this.labels); + if (releasePullRequest) { + // Update manifest, but only for valid release version - this will skip SNAPSHOT from java strategy + if (releasePullRequest.version && + isPublishedVersion(strategy, releasePullRequest.version)) { + const versionsMap = new Map(); + versionsMap.set(path, releasePullRequest.version); + releasePullRequest.updates.push({ + path: this.manifestPath, + createIfMissing: false, + updater: new release_please_manifest_1.ReleasePleaseManifest({ + version: releasePullRequest.version, + versionsMap, + }), + }); + } + newReleasePullRequests.push({ + path, + config, + pullRequest: releasePullRequest, + }); + } + } + // Combine pull requests into 1 unless configured for separate + // pull requests + if (!this.separatePullRequests) { + const mergeOptions = { + pullRequestTitlePattern: this.groupPullRequestTitlePattern, + }; + // Find the first repositoryConfig item that has a set value + // for the options that can be passed to the merge plugin + for (const path in this.repositoryConfig) { + const config = this.repositoryConfig[path]; + if ('pullRequestHeader' in config && + !('pullRequestHeader' in mergeOptions)) { + mergeOptions.pullRequestHeader = config.pullRequestHeader; + } + if ('pullRequestFooter' in config && + !('pullRequestFooter' in mergeOptions)) { + mergeOptions.pullRequestFooter = config.pullRequestFooter; + } + if ('componentNoSpace' in config && + !('componentNoSpace' in mergeOptions)) { + mergeOptions.componentNoSpace = config.componentNoSpace; + } + } + this.plugins.push(new merge_1.Merge(this.github, this.targetBranch, this.repositoryConfig, mergeOptions)); + } + for (const plugin of this.plugins) { + this.logger.debug(`running plugin: ${plugin.constructor.name}`); + newReleasePullRequests = await plugin.run(newReleasePullRequests); + } + return newReleasePullRequests.map(pullRequestWithConfig => pullRequestWithConfig.pullRequest); + } + async backfillReleasesFromTags(missingPaths, strategiesByPath) { + const releasesByPath = {}; + const allTags = await this.getAllTags(); + for (const path of missingPaths) { + const expectedVersion = this.releasedVersions[path]; + if (!expectedVersion) { + this.logger.warn(`No version for path ${path}`); + continue; + } + const component = await strategiesByPath[path].getComponent(); + const expectedTag = new tag_name_1.TagName(expectedVersion, component, this.repositoryConfig[path].tagSeparator, this.repositoryConfig[path].includeVInTag); + this.logger.debug(`looking for tagName: ${expectedTag.toString()}`); + const foundTag = allTags[expectedTag.toString()]; + if (foundTag) { + this.logger.debug(`found: ${foundTag.name} ${foundTag.sha}`); + releasesByPath[path] = { + name: foundTag.name, + tag: expectedTag, + sha: foundTag.sha, + notes: '', + }; + } + else { + if (strategiesByPath[exports.ROOT_PROJECT_PATH] && + this.repositoryConfig[path].skipGithubRelease) { + this.logger.debug('could not find release, checking root package'); + const rootComponent = await strategiesByPath[exports.ROOT_PROJECT_PATH].getComponent(); + const rootTag = new tag_name_1.TagName(expectedVersion, rootComponent, this.repositoryConfig[exports.ROOT_PROJECT_PATH].tagSeparator, this.repositoryConfig[exports.ROOT_PROJECT_PATH].includeVInTag); + const foundTag = allTags[rootTag.toString()]; + if (foundTag) { + this.logger.debug(`found rootTag: ${foundTag.name} ${foundTag.sha}`); + releasesByPath[path] = { + name: foundTag.name, + tag: rootTag, + sha: foundTag.sha, + notes: '', + }; + } + } + } + } + return 
releasesByPath; + } + async getAllTags() { + const allTags = {}; + for await (const tag of this.github.tagIterator()) { + allTags[tag.name] = tag; + } + return allTags; + } + /** + * Opens/updates all candidate release pull requests for this repository. + * + * @returns {PullRequest[]} Pull request numbers of release pull requests + */ + async createPullRequests() { + const candidatePullRequests = await this.buildPullRequests(); + if (candidatePullRequests.length === 0) { + return []; + } + // if there are any merged, pending release pull requests, don't open + // any new release PRs + const mergedPullRequestsGenerator = this.findMergedReleasePullRequests(); + for await (const _ of mergedPullRequestsGenerator) { + this.logger.warn('There are untagged, merged release PRs outstanding - aborting'); + return []; + } + // collect open and snoozed release pull requests + const openPullRequests = await this.findOpenReleasePullRequests(); + const snoozedPullRequests = await this.findSnoozedReleasePullRequests(); + if (this.sequentialCalls) { + const pullRequests = []; + for (const pullRequest of candidatePullRequests) { + const resultPullRequest = await this.createOrUpdatePullRequest(pullRequest, openPullRequests, snoozedPullRequests); + if (resultPullRequest) + pullRequests.push(resultPullRequest); + } + return pullRequests; + } + else { + const promises = []; + for (const pullRequest of candidatePullRequests) { + promises.push(this.createOrUpdatePullRequest(pullRequest, openPullRequests, snoozedPullRequests)); + } + const pullNumbers = await Promise.all(promises); + // reject any pull numbers that were not created or updated + return pullNumbers.filter(number => !!number); + } + } + async findOpenReleasePullRequests() { + this.logger.info('Looking for open release pull requests'); + const openPullRequests = []; + const generator = this.github.pullRequestIterator(this.targetBranch, 'OPEN', Number.MAX_SAFE_INTEGER, false); + for await (const openPullRequest of generator) { + if (hasAllLabels(this.labels, openPullRequest.labels) || + hasAllLabels(this.snapshotLabels, openPullRequest.labels)) { + const body = await this.pullRequestOverflowHandler.parseOverflow(openPullRequest); + if (body) { + // maybe replace with overflow body + openPullRequests.push({ + ...openPullRequest, + body: body.toString(), + }); + } + } + } + this.logger.info(`found ${openPullRequests.length} open release pull requests.`); + return openPullRequests; + } + async findSnoozedReleasePullRequests() { + this.logger.info('Looking for snoozed release pull requests'); + const snoozedPullRequests = []; + const closedGenerator = this.github.pullRequestIterator(this.targetBranch, 'CLOSED', 200, false); + for await (const closedPullRequest of closedGenerator) { + if (hasAllLabels([exports.SNOOZE_LABEL], closedPullRequest.labels) && + branch_name_1.BranchName.parse(closedPullRequest.headBranchName, this.logger)) { + const body = await this.pullRequestOverflowHandler.parseOverflow(closedPullRequest); + if (body) { + // maybe replace with overflow body + snoozedPullRequests.push({ + ...closedPullRequest, + body: body.toString(), + }); + } + } + } + this.logger.info(`found ${snoozedPullRequests.length} snoozed release pull requests.`); + return snoozedPullRequests; + } + async createOrUpdatePullRequest(pullRequest, openPullRequests, snoozedPullRequests) { + // look for existing, open pull request + const existing = openPullRequests.find(openPullRequest => openPullRequest.headBranchName === pullRequest.headRefName); + if (existing) { + 
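+ // An open release PR already exists for this head branch: update it in
+ // place, either unconditionally (`alwaysUpdate`) or only when the
+ // release notes actually changed.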
return this.alwaysUpdate + ? await this.updateExistingPullRequest(existing, pullRequest) + : await this.maybeUpdateExistingPullRequest(existing, pullRequest); + } + // look for closed, snoozed pull request + const snoozed = snoozedPullRequests.find(openPullRequest => openPullRequest.headBranchName === pullRequest.headRefName); + if (snoozed) { + return this.alwaysUpdate + ? await this.updateExistingPullRequest(snoozed, pullRequest) + : await this.maybeUpdateSnoozedPullRequest(snoozed, pullRequest); + } + const body = await this.pullRequestOverflowHandler.handleOverflow(pullRequest); + const message = this.signoffUser + ? (0, signoff_commit_message_1.signoffCommitMessage)(pullRequest.title.toString(), this.signoffUser) + : pullRequest.title.toString(); + const newPullRequest = await this.github.createPullRequest({ + headBranchName: pullRequest.headRefName, + baseBranchName: this.targetBranch, + number: -1, + title: pullRequest.title.toString(), + body, + labels: this.skipLabeling ? [] : pullRequest.labels, + files: [], + }, this.targetBranch, message, pullRequest.updates, { + fork: this.fork, + draft: pullRequest.draft, + }); + return newPullRequest; + } + /// only update an existing pull request if it has release note changes + async maybeUpdateExistingPullRequest(existing, pullRequest) { + // If unchanged, no need to push updates + if (existing.body === pullRequest.body.toString()) { + this.logger.info(`PR https://github.com/${this.repository.owner}/${this.repository.repo}/pull/${existing.number} remained the same`); + return undefined; + } + return await this.updateExistingPullRequest(existing, pullRequest); + } + /// only update a snoozed pull request if it has release note changes + async maybeUpdateSnoozedPullRequest(snoozed, pullRequest) { + // If unchanged, no need to push updates + if (snoozed.body === pullRequest.body.toString()) { + this.logger.info(`PR https://github.com/${this.repository.owner}/${this.repository.repo}/pull/${snoozed.number} remained the same`); + return undefined; + } + const updatedPullRequest = await this.updateExistingPullRequest(snoozed, pullRequest); + // TODO: consider leaving the snooze label + await this.github.removeIssueLabels([exports.SNOOZE_LABEL], snoozed.number); + return updatedPullRequest; + } + /// force an update to an existing pull request + async updateExistingPullRequest(existing, pullRequest) { + return await this.github.updatePullRequest(existing.number, pullRequest, this.targetBranch, { + fork: this.fork, + signoffUser: this.signoffUser, + pullRequestOverflowHandler: this.pullRequestOverflowHandler, + }); + } + async *findMergedReleasePullRequests() { + // Find merged release pull requests + const pullRequestGenerator = this.github.pullRequestIterator(this.targetBranch, 'MERGED', 200, false); + for await (const pullRequest of pullRequestGenerator) { + if (!hasAllLabels(this.labels, pullRequest.labels)) { + continue; + } + this.logger.debug(`Found pull request #${pullRequest.number}: '${pullRequest.title}'`); + // if the pull request body overflows, handle it + const pullRequestBody = await this.pullRequestOverflowHandler.parseOverflow(pullRequest); + if (!pullRequestBody) { + this.logger.debug('could not parse pull request body as a release PR'); + continue; + } + // replace with the complete fetched body + yield { + ...pullRequest, + body: pullRequestBody.toString(), + }; + } + } + /** + * Find merged, untagged releases and build candidate releases to tag. 
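+ * Note: this only assembles the candidate list; the GitHub releases
+ * themselves are created by `createReleases()` below.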
+ * + * @returns {CandidateRelease[]} List of release candidates + */ + async buildReleases() { + var _a; + this.logger.info('Building releases'); + const strategiesByPath = await this.getStrategiesByPath(); + // Find merged release pull requests + const generator = await this.findMergedReleasePullRequests(); + const candidateReleases = []; + for await (const pullRequest of generator) { + for (const path in this.repositoryConfig) { + const config = this.repositoryConfig[path]; + this.logger.info(`Building release for path: ${path}`); + this.logger.debug(`type: ${config.releaseType}`); + this.logger.debug(`targetBranch: ${this.targetBranch}`); + const strategy = strategiesByPath[path]; + const releases = await strategy.buildReleases(pullRequest, { + groupPullRequestTitlePattern: this.groupPullRequestTitlePattern, + }); + for (const release of releases) { + candidateReleases.push({ + ...release, + path, + pullRequest, + draft: (_a = config.draft) !== null && _a !== void 0 ? _a : this.draft, + prerelease: config.prerelease && + (!!release.tag.version.preRelease || + release.tag.version.major === 0), + }); + } + } + } + return candidateReleases; + } + /** + * Find merged, untagged releases. For each release, create a GitHub release, + * comment on the pull request used to generated it and update the pull request + * labels. + * + * @returns {GitHubRelease[]} List of created GitHub releases + */ + async createReleases() { + const releasesByPullRequest = {}; + const pullRequestsByNumber = {}; + for (const release of await this.buildReleases()) { + pullRequestsByNumber[release.pullRequest.number] = release.pullRequest; + if (releasesByPullRequest[release.pullRequest.number]) { + releasesByPullRequest[release.pullRequest.number].push(release); + } + else { + releasesByPullRequest[release.pullRequest.number] = [release]; + } + } + if (this.sequentialCalls) { + const resultReleases = []; + for (const pullNumber in releasesByPullRequest) { + const releases = await this.createReleasesForPullRequest(releasesByPullRequest[pullNumber], pullRequestsByNumber[pullNumber]); + resultReleases.push(...releases); + } + return resultReleases; + } + else { + const promises = []; + for (const pullNumber in releasesByPullRequest) { + promises.push(this.createReleasesForPullRequest(releasesByPullRequest[pullNumber], pullRequestsByNumber[pullNumber])); + } + const releases = await Promise.all(promises); + return releases.reduce((collection, r) => collection.concat(r), []); + } + } + async createReleasesForPullRequest(releases, pullRequest) { + this.logger.info(`Creating ${releases.length} releases for pull #${pullRequest.number}`); + const duplicateReleases = []; + const githubReleases = []; + let error; + for (const release of releases) { + // stop releasing once we hit an error + if (error) + continue; + try { + githubReleases.push(await this.createRelease(release)); + } + catch (err) { + if (err instanceof errors_1.DuplicateReleaseError) { + this.logger.warn(`Duplicate release tag: ${release.tag.toString()}`); + duplicateReleases.push(err); + } + else { + error = err; + } + } + } + if (githubReleases.length > 0) { + // comment on pull request about the successful releases + const releaseList = githubReleases + .map(({ tagName, url }) => `- [${tagName}](${url})`) + .join('\n'); + const comment = `🤖 Created releases:\n\n${releaseList}\n\n:sunflower:`; + await this.github.commentOnIssue(comment, pullRequest.number); + } + if (error) { + throw error; + } + if (duplicateReleases.length > 0) { + if 
(duplicateReleases.length + githubReleases.length === + releases.length) { + // we've either tagged all releases or they were duplicates: + // adjust tags on pullRequest + await this.github.removeIssueLabels(this.labels, pullRequest.number); + await this.github.addIssueLabels(this.releaseLabels, pullRequest.number); + } + if (githubReleases.length === 0) { + // If all releases were duplicate, throw a duplicate error + throw duplicateReleases[0]; + } + } + else { + // adjust tags on pullRequest + await this.github.removeIssueLabels(this.labels, pullRequest.number); + await this.github.addIssueLabels(this.releaseLabels, pullRequest.number); + } + return githubReleases; + } + async createRelease(release) { + const githubRelease = await this.github.createRelease(release, { + draft: release.draft, + prerelease: release.prerelease, + }); + return { + ...githubRelease, + path: release.path, + version: release.tag.version.toString(), + major: release.tag.version.major, + minor: release.tag.version.minor, + patch: release.tag.version.patch, + }; + } + async getStrategiesByPath() { + if (!this._strategiesByPath) { + this.logger.info('Building strategies by path'); + this._strategiesByPath = {}; + for (const path in this.repositoryConfig) { + const config = this.repositoryConfig[path]; + this.logger.debug(`${path}: ${config.releaseType}`); + const strategy = await (0, factory_1.buildStrategy)({ + ...config, + github: this.github, + path, + targetBranch: this.targetBranch, + }); + this._strategiesByPath[path] = strategy; + } + } + return this._strategiesByPath; + } + async getPathsByComponent() { + if (!this._pathsByComponent) { + this._pathsByComponent = {}; + const strategiesByPath = await this.getStrategiesByPath(); + for (const path in this.repositoryConfig) { + const strategy = strategiesByPath[path]; + const component = (await strategy.getComponent()) || ''; + if (this._pathsByComponent[component]) { + this.logger.warn(`Multiple paths for ${component}: ${this._pathsByComponent[component]}, ${path}`); + } + this._pathsByComponent[component] = path; + } + } + return this._pathsByComponent; + } +} +exports.Manifest = Manifest; +/** + * Helper to convert parsed JSON releaser config into ReleaserConfig for + * the Manifest. + * + * @param {ReleaserPackageConfig} config Parsed configuration from JSON file. 
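+ * Kebab-case JSON keys (e.g. 'release-type', 'bump-minor-pre-major') are
+ * mapped onto the camelCase fields of ReleaserConfig.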
+ * @returns {ReleaserConfig} + */ +function extractReleaserConfig(config) { + var _a, _b, _c; + return { + releaseType: config['release-type'], + bumpMinorPreMajor: config['bump-minor-pre-major'], + bumpPatchForMinorPreMajor: config['bump-patch-for-minor-pre-major'], + prereleaseType: config['prerelease-type'], + versioning: config['versioning'], + changelogSections: config['changelog-sections'], + changelogPath: config['changelog-path'], + changelogHost: config['changelog-host'], + releaseAs: config['release-as'], + skipGithubRelease: config['skip-github-release'], + draft: config.draft, + prerelease: config.prerelease, + draftPullRequest: config['draft-pull-request'], + component: config['component'], + packageName: config['package-name'], + versionFile: config['version-file'], + extraFiles: config['extra-files'], + includeComponentInTag: config['include-component-in-tag'], + includeVInTag: config['include-v-in-tag'], + changelogType: config['changelog-type'], + pullRequestTitlePattern: config['pull-request-title-pattern'], + pullRequestHeader: config['pull-request-header'], + pullRequestFooter: config['pull-request-footer'], + componentNoSpace: config['component-no-space'], + tagSeparator: config['tag-separator'], + separatePullRequests: config['separate-pull-requests'], + labels: (_a = config['label']) === null || _a === void 0 ? void 0 : _a.split(','), + releaseLabels: (_b = config['release-label']) === null || _b === void 0 ? void 0 : _b.split(','), + extraLabels: (_c = config['extra-label']) === null || _c === void 0 ? void 0 : _c.split(','), + skipSnapshot: config['skip-snapshot'], + initialVersion: config['initial-version'], + excludePaths: config['exclude-paths'], + }; +} +/** + * Helper to convert fetch the manifest config from the repository and + * parse into configuration for the Manifest. + * + * @param {GitHub} github GitHub client + * @param {string} configFile Path in the repository to the manifest config + * @param {string} branch Branch to fetch the config file from + * @param {string} onlyPath Optional. Use only the given package + * @param {string} releaseAs Optional. Override release-as and use the given version + */ +async function parseConfig(github, configFile, branch, onlyPath, releaseAs) { + const config = await fetchManifestConfig(github, configFile, branch); + const defaultConfig = extractReleaserConfig(config); + const repositoryConfig = {}; + for (const path in config.packages) { + if (onlyPath && onlyPath !== path) + continue; + repositoryConfig[path] = mergeReleaserConfig(defaultConfig, extractReleaserConfig(config.packages[path])); + if (releaseAs) { + repositoryConfig[path].releaseAs = releaseAs; + } + } + const configLabel = config['label']; + const configReleaseLabel = config['release-label']; + const configSnapshotLabel = config['snapshot-label']; + const configExtraLabel = config['extra-label']; + const manifestOptions = { + bootstrapSha: config['bootstrap-sha'], + lastReleaseSha: config['last-release-sha'], + alwaysLinkLocal: config['always-link-local'], + separatePullRequests: config['separate-pull-requests'], + alwaysUpdate: config['always-update'], + groupPullRequestTitlePattern: config['group-pull-request-title-pattern'], + plugins: config['plugins'], + signoff: config['signoff'], + labels: configLabel === null || configLabel === void 0 ? void 0 : configLabel.split(','), + releaseLabels: configReleaseLabel === null || configReleaseLabel === void 0 ? 
void 0 : configReleaseLabel.split(','), + snapshotLabels: configSnapshotLabel === null || configSnapshotLabel === void 0 ? void 0 : configSnapshotLabel.split(','), + extraLabels: configExtraLabel === null || configExtraLabel === void 0 ? void 0 : configExtraLabel.split(','), + releaseSearchDepth: config['release-search-depth'], + commitSearchDepth: config['commit-search-depth'], + sequentialCalls: config['sequential-calls'], + }; + return { config: repositoryConfig, options: manifestOptions }; +} +/** + * Helper to fetch manifest config + * + * @param {GitHub} github + * @param {string} configFile + * @param {string} branch + * @returns {ManifestConfig} + * @throws {ConfigurationError} if missing the manifest config file + */ +async function fetchManifestConfig(github, configFile, branch) { + try { + return await github.getFileJson(configFile, branch); + } + catch (e) { + if (e instanceof errors_1.FileNotFoundError) { + throw new errors_1.ConfigurationError(`Missing required manifest config: ${configFile}`, 'base', `${github.repository.owner}/${github.repository.repo}`); + } + else if (e instanceof SyntaxError) { + throw new errors_1.ConfigurationError(`Failed to parse manifest config JSON: ${configFile}\n${e.message}`, 'base', `${github.repository.owner}/${github.repository.repo}`); + } + throw e; + } +} +/** + * Helper to parse the manifest versions file. + * + * @param {GitHub} github GitHub client + * @param {string} manifestFile Path in the repository to the versions file + * @param {string} branch Branch to fetch the versions file from + * @returns {Record} + */ +async function parseReleasedVersions(github, manifestFile, branch) { + const manifestJson = await fetchReleasedVersions(github, manifestFile, branch); + const releasedVersions = {}; + for (const path in manifestJson) { + releasedVersions[path] = version_1.Version.parse(manifestJson[path]); + } + return releasedVersions; +} +/** + * Helper to fetch manifest config + * + * @param {GitHub} github + * @param {string} manifestFile + * @param {string} branch + * @throws {ConfigurationError} if missing the manifest config file + */ +async function fetchReleasedVersions(github, manifestFile, branch) { + try { + return await github.getFileJson(manifestFile, branch); + } + catch (e) { + if (e instanceof errors_1.FileNotFoundError) { + throw new errors_1.ConfigurationError(`Missing required manifest versions: ${manifestFile}`, 'base', `${github.repository.owner}/${github.repository.repo}`); + } + else if (e instanceof SyntaxError) { + throw new errors_1.ConfigurationError(`Failed to parse manifest versions JSON: ${manifestFile}\n${e.message}`, 'base', `${github.repository.owner}/${github.repository.repo}`); + } + throw e; + } +} +function isPublishedVersion(strategy, version) { + return strategy.isPublishedVersion + ? strategy.isPublishedVersion(version) + : true; +} +/** + * Find the most recent matching release tag on the branch we're + * configured for. + * + * @param github GitHub client instance. + * @param {string} targetBranch Name of the scanned branch. + * @param releaseFilter Validator function for release version. Used to filter-out SNAPSHOT releases for Java strategy. + * @param {string} prefix Limit the release to a specific component. + */ +async function latestReleaseVersion(github, targetBranch, releaseFilter, config, prefix, logger = logger_1.logger) { + const branchPrefix = prefix + ? prefix.endsWith('-') + ? 
prefix.replace(/-$/, '') + : prefix + : undefined; + logger.info(`Looking for latest release on branch: ${targetBranch} with prefix: ${prefix}`); + // collect set of recent commit SHAs seen to verify that the release + // is in the current branch + const commitShas = new Set(); + const candidateReleaseVersions = []; + // only look at the last 250 or so commits to find the latest tag - we + // don't want to scan the entire repository history if this repo has never + // been released + const generator = github.mergeCommitIterator(targetBranch, { + maxResults: 250, + }); + for await (const commitWithPullRequest of generator) { + commitShas.add(commitWithPullRequest.sha); + const mergedPullRequest = commitWithPullRequest.pullRequest; + if (!(mergedPullRequest === null || mergedPullRequest === void 0 ? void 0 : mergedPullRequest.mergeCommitOid)) { + logger.trace(`skipping commit: ${commitWithPullRequest.sha} missing merged pull request`); + continue; + } + const branchName = branch_name_1.BranchName.parse(mergedPullRequest.headBranchName, logger); + if (!branchName) { + logger.trace(`skipping commit: ${commitWithPullRequest.sha} unrecognized branch name: ${mergedPullRequest.headBranchName}`); + continue; + } + // If branchPrefix is specified, ensure it is found in the branch name. + // If branchPrefix is not specified, component should also be undefined. + if (branchName.getComponent() !== branchPrefix) { + logger.trace(`skipping commit: ${commitWithPullRequest.sha} branch component ${branchName.getComponent()} doesn't match expected prefix: ${branchPrefix}`); + continue; + } + const pullRequestTitle = pull_request_title_1.PullRequestTitle.parse(mergedPullRequest.title, config.pullRequestTitlePattern, config.componentNoSpace, logger); + if (!pullRequestTitle) { + logger.trace(`skipping commit: ${commitWithPullRequest.sha} couldn't parse pull request title: ${mergedPullRequest.title}`); + continue; + } + const version = pullRequestTitle.getVersion(); + if (version && releaseFilter(version)) { + logger.debug(`Found latest release pull request: ${mergedPullRequest.number} version: ${version}`); + candidateReleaseVersions.push(version); + break; + } + } + // If not found from recent pull requests, look at releases. Iterate + // through releases finding valid tags, then cross reference + const releaseGenerator = github.releaseIterator(); + for await (const release of releaseGenerator) { + const tagName = tag_name_1.TagName.parse(release.tagName); + if (!tagName) { + continue; + } + if (tagMatchesConfig(tagName, branchPrefix, config.includeComponentInTag)) { + logger.debug(`found release for ${prefix}`, tagName.version); + if (!commitShas.has(release.sha)) { + logger.debug(`SHA not found in recent commits to branch ${targetBranch}, skipping`); + continue; + } + candidateReleaseVersions.push(tagName.version); + } + } + logger.debug(`found ${candidateReleaseVersions.length} possible releases.`, candidateReleaseVersions); + if (candidateReleaseVersions.length > 0) { + // Find largest release number (sort descending then return first) + return candidateReleaseVersions.sort((a, b) => b.compare(a))[0]; + } + // If not found from recent pull requests or releases, look at tags. 
Iterate + // through tags and cross reference against SHAs in this branch + const tagGenerator = github.tagIterator(); + const candidateTagVersion = []; + for await (const tag of tagGenerator) { + const tagName = tag_name_1.TagName.parse(tag.name); + if (!tagName) { + continue; + } + if (tagMatchesConfig(tagName, branchPrefix, config.includeComponentInTag)) { + if (!commitShas.has(tag.sha)) { + logger.debug(`SHA not found in recent commits to branch ${targetBranch}, skipping`); + continue; + } + candidateTagVersion.push(tagName.version); + } + } + logger.debug(`found ${candidateTagVersion.length} possible tags.`, candidateTagVersion); + // Find largest release number (sort descending then return first) + return candidateTagVersion.sort((a, b) => b.compare(a))[0]; +} +function mergeReleaserConfig(defaultConfig, pathConfig) { + var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _0, _1, _2, _3, _4, _5, _6; + return { + releaseType: (_b = (_a = pathConfig.releaseType) !== null && _a !== void 0 ? _a : defaultConfig.releaseType) !== null && _b !== void 0 ? _b : 'node', + bumpMinorPreMajor: (_c = pathConfig.bumpMinorPreMajor) !== null && _c !== void 0 ? _c : defaultConfig.bumpMinorPreMajor, + bumpPatchForMinorPreMajor: (_d = pathConfig.bumpPatchForMinorPreMajor) !== null && _d !== void 0 ? _d : defaultConfig.bumpPatchForMinorPreMajor, + prereleaseType: (_e = pathConfig.prereleaseType) !== null && _e !== void 0 ? _e : defaultConfig.prereleaseType, + versioning: (_f = pathConfig.versioning) !== null && _f !== void 0 ? _f : defaultConfig.versioning, + changelogSections: (_g = pathConfig.changelogSections) !== null && _g !== void 0 ? _g : defaultConfig.changelogSections, + changelogPath: (_h = pathConfig.changelogPath) !== null && _h !== void 0 ? _h : defaultConfig.changelogPath, + changelogHost: (_j = pathConfig.changelogHost) !== null && _j !== void 0 ? _j : defaultConfig.changelogHost, + changelogType: (_k = pathConfig.changelogType) !== null && _k !== void 0 ? _k : defaultConfig.changelogType, + releaseAs: (_l = pathConfig.releaseAs) !== null && _l !== void 0 ? _l : defaultConfig.releaseAs, + skipGithubRelease: (_m = pathConfig.skipGithubRelease) !== null && _m !== void 0 ? _m : defaultConfig.skipGithubRelease, + draft: (_o = pathConfig.draft) !== null && _o !== void 0 ? _o : defaultConfig.draft, + draftPullRequest: (_p = pathConfig.draftPullRequest) !== null && _p !== void 0 ? _p : defaultConfig.draftPullRequest, + prerelease: (_q = pathConfig.prerelease) !== null && _q !== void 0 ? _q : defaultConfig.prerelease, + component: (_r = pathConfig.component) !== null && _r !== void 0 ? _r : defaultConfig.component, + packageName: (_s = pathConfig.packageName) !== null && _s !== void 0 ? _s : defaultConfig.packageName, + versionFile: (_t = pathConfig.versionFile) !== null && _t !== void 0 ? _t : defaultConfig.versionFile, + extraFiles: (_u = pathConfig.extraFiles) !== null && _u !== void 0 ? _u : defaultConfig.extraFiles, + includeComponentInTag: (_v = pathConfig.includeComponentInTag) !== null && _v !== void 0 ? _v : defaultConfig.includeComponentInTag, + includeVInTag: (_w = pathConfig.includeVInTag) !== null && _w !== void 0 ? _w : defaultConfig.includeVInTag, + tagSeparator: (_x = pathConfig.tagSeparator) !== null && _x !== void 0 ? _x : defaultConfig.tagSeparator, + pullRequestTitlePattern: (_y = pathConfig.pullRequestTitlePattern) !== null && _y !== void 0 ? 
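+ // (every `(_n = a) !== null && _n !== void 0 ? _n : b` in this compiled
+ // output is TypeScript's downleveled form of `a ?? b`)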
_y : defaultConfig.pullRequestTitlePattern, + pullRequestHeader: (_z = pathConfig.pullRequestHeader) !== null && _z !== void 0 ? _z : defaultConfig.pullRequestHeader, + pullRequestFooter: (_0 = pathConfig.pullRequestFooter) !== null && _0 !== void 0 ? _0 : defaultConfig.pullRequestFooter, + componentNoSpace: (_1 = pathConfig.componentNoSpace) !== null && _1 !== void 0 ? _1 : defaultConfig.componentNoSpace, + separatePullRequests: (_2 = pathConfig.separatePullRequests) !== null && _2 !== void 0 ? _2 : defaultConfig.separatePullRequests, + skipSnapshot: (_3 = pathConfig.skipSnapshot) !== null && _3 !== void 0 ? _3 : defaultConfig.skipSnapshot, + initialVersion: (_4 = pathConfig.initialVersion) !== null && _4 !== void 0 ? _4 : defaultConfig.initialVersion, + extraLabels: (_5 = pathConfig.extraLabels) !== null && _5 !== void 0 ? _5 : defaultConfig.extraLabels, + excludePaths: (_6 = pathConfig.excludePaths) !== null && _6 !== void 0 ? _6 : defaultConfig.excludePaths, + }; +} +/** + * Helper to compare if a list of labels fully contains another list of labels + * @param {string[]} expected List of labels expected to be contained + * @param {string[]} existing List of existing labels to consider + */ +function hasAllLabels(expected, existing) { + const existingSet = new Set(existing); + for (const label of expected) { + if (!existingSet.has(label)) { + return false; + } + } + return true; +} +function commitsAfterSha(commits, lastReleaseSha) { + if (!commits) { + return []; + } + const index = commits.findIndex(commit => commit.sha === lastReleaseSha); + if (index === -1) { + return commits; + } + return commits.slice(0, index); +} +/** + * Returns true if the release tag matches the configured component. Returns + * true if `includeComponentInTag` is false and there is no component in the + * tag, OR if the tag's component matches the release component. + */ +function tagMatchesConfig(tag, branchComponent, includeComponentInTag) { + return ((includeComponentInTag && tag.component === branchComponent) || + (!includeComponentInTag && !tag.component)); +} +//# sourceMappingURL=manifest.js.map + +/***/ }), + +/***/ 96926: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ManifestPlugin = void 0; +const logger_1 = __nccwpck_require__(18792); +/** + * A plugin runs after a repository manifest has built candidate + * pull requests and can make updates that span across multiple + * components. A plugin *might* choose to merge pull requests or add + * or update existing files. + */ +class ManifestPlugin { + constructor(github, targetBranch, repositoryConfig, logger = logger_1.logger) { + this.github = github; + this.targetBranch = targetBranch; + this.repositoryConfig = repositoryConfig; + this.logger = logger; + } + /** + * Perform post-processing on commits, e.g, sentence casing them. 
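+ * The base implementation is a no-op; plugins override this to rewrite
+ * commits before release notes are generated.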
+ * @param {Commit[]} commits The set of commits that will feed into release pull request. + * @returns {Commit[]} The modified commit objects. + */ + processCommits(commits) { + return commits; + } + /** + * Post-process candidate pull requests. + * @param {CandidateReleasePullRequest[]} pullRequests Candidate pull requests + * @returns {CandidateReleasePullRequest[]} Updated pull requests + */ + async run(pullRequests) { + return pullRequests; + } + /** + * Pre-configure strategies. + * @param {Record} strategiesByPath Strategies indexed by path + * @returns {Record} Updated strategies indexed by path + */ + async preconfigure(strategiesByPath, _commitsByPath, _releasesByPath) { + return strategiesByPath; + } +} +exports.ManifestPlugin = ManifestPlugin; +//# sourceMappingURL=plugin.js.map + +/***/ }), + +/***/ 43073: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.CargoWorkspace = void 0; +const manifest_1 = __nccwpck_require__(24026); +const workspace_1 = __nccwpck_require__(29574); +const common_1 = __nccwpck_require__(66815); +const version_1 = __nccwpck_require__(25112); +const cargo_toml_1 = __nccwpck_require__(39089); +const raw_content_1 = __nccwpck_require__(44068); +const changelog_1 = __nccwpck_require__(11128); +const pull_request_title_1 = __nccwpck_require__(28866); +const pull_request_body_1 = __nccwpck_require__(11941); +const branch_name_1 = __nccwpck_require__(94148); +const versioning_strategy_1 = __nccwpck_require__(55237); +const cargo_lock_1 = __nccwpck_require__(84670); +const errors_1 = __nccwpck_require__(10818); +/** + * The plugin analyzed a cargo workspace and will bump dependencies + * of managed packages if those dependencies are being updated. + * + * If multiple rust packages are being updated, it will merge them + * into a single rust package. + */ +class CargoWorkspace extends workspace_1.WorkspacePlugin { + constructor() { + super(...arguments); + this.strategiesByPath = {}; + this.releasesByPath = {}; + } + async buildAllPackages(candidates) { + var _a, _b, _c, _d; + const cargoManifestContent = await this.github.getFileContentsOnBranch('Cargo.toml', this.targetBranch); + const cargoManifest = (0, common_1.parseCargoManifest)(cargoManifestContent.parsedContent); + if (!((_a = cargoManifest.workspace) === null || _a === void 0 ? 
void 0 : _a.members)) { + this.logger.warn("cargo-workspace plugin used, but top-level Cargo.toml isn't a cargo workspace"); + return { allPackages: [], candidatesByPackage: {} }; + } + const allCrates = []; + const candidatesByPackage = {}; + const members = (await Promise.all(cargoManifest.workspace.members.map(member => this.github.findFilesByGlobAndRef(member, this.targetBranch)))).flat(); + members.push(manifest_1.ROOT_PROJECT_PATH); + for (const path of members) { + const manifestPath = (0, workspace_1.addPath)(path, 'Cargo.toml'); + this.logger.info(`looking for candidate with path: ${path}`); + const candidate = candidates.find(c => c.path === path); + // get original content of the crate + const manifestContent = ((_b = candidate === null || candidate === void 0 ? void 0 : candidate.pullRequest.updates.find(update => update.path === manifestPath)) === null || _b === void 0 ? void 0 : _b.cachedFileContents) || + (await this.github.getFileContentsOnBranch(manifestPath, this.targetBranch)); + const manifest = (0, common_1.parseCargoManifest)(manifestContent.parsedContent); + const packageName = (_c = manifest.package) === null || _c === void 0 ? void 0 : _c.name; + if (!packageName) { + this.logger.warn(`package manifest at ${manifestPath} is missing [package.name]`); + continue; + } + if (candidate) { + candidatesByPackage[packageName] = candidate; + } + const version = (_d = manifest.package) === null || _d === void 0 ? void 0 : _d.version; + if (!version) { + throw new errors_1.ConfigurationError(`package manifest at ${manifestPath} is missing [package.version]`, 'cargo-workspace', `${this.github.repository.owner}/${this.github.repository.repo}`); + } + else if (typeof version !== 'string') { + throw new errors_1.ConfigurationError(`package manifest at ${manifestPath} has an invalid [package.version]`, 'cargo-workspace', `${this.github.repository.owner}/${this.github.repository.repo}`); + } + allCrates.push({ + path, + name: packageName, + version, + manifest, + manifestContent: manifestContent.parsedContent, + manifestPath, + }); + } + return { + allPackages: allCrates, + candidatesByPackage, + }; + } + bumpVersion(pkg) { + const version = version_1.Version.parse(pkg.version); + return new versioning_strategy_1.PatchVersionUpdate().bump(version); + } + updateCandidate(existingCandidate, pkg, updatedVersions) { + const version = updatedVersions.get(pkg.name); + if (!version) { + throw new Error(`Didn't find updated version for ${pkg.name}`); + } + const updater = new cargo_toml_1.CargoToml({ + version, + versionsMap: updatedVersions, + }); + const updatedContent = updater.updateContent(pkg.manifestContent); + const originalManifest = (0, common_1.parseCargoManifest)(pkg.manifestContent); + const updatedManifest = (0, common_1.parseCargoManifest)(updatedContent); + const dependencyNotes = getChangelogDepsNotes(originalManifest, updatedManifest); + existingCandidate.pullRequest.updates = + existingCandidate.pullRequest.updates.map(update => { + if (update.path === (0, workspace_1.addPath)(existingCandidate.path, 'Cargo.toml')) { + update.updater = new raw_content_1.RawContent(updatedContent); + } + else if (update.updater instanceof changelog_1.Changelog && dependencyNotes) { + update.updater.changelogEntry = (0, workspace_1.appendDependenciesSectionToChangelog)(update.updater.changelogEntry, dependencyNotes, this.logger); + } + else if (update.path === (0, workspace_1.addPath)(existingCandidate.path, 'Cargo.lock')) { + update.updater = new cargo_lock_1.CargoLock(updatedVersions); + 
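+ // the crate's lockfile is rewritten from the same updated-versions map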
} + return update; + }); + // append dependency notes + if (dependencyNotes) { + if (existingCandidate.pullRequest.body.releaseData.length > 0) { + existingCandidate.pullRequest.body.releaseData[0].notes = + (0, workspace_1.appendDependenciesSectionToChangelog)(existingCandidate.pullRequest.body.releaseData[0].notes, dependencyNotes, this.logger); + } + else { + existingCandidate.pullRequest.body.releaseData.push({ + component: pkg.name, + version: existingCandidate.pullRequest.version, + notes: (0, workspace_1.appendDependenciesSectionToChangelog)('', dependencyNotes, this.logger), + }); + } + } + return existingCandidate; + } + async newCandidate(pkg, updatedVersions) { + const version = updatedVersions.get(pkg.name); + if (!version) { + throw new Error(`Didn't find updated version for ${pkg.name}`); + } + const updater = new cargo_toml_1.CargoToml({ + version, + versionsMap: updatedVersions, + }); + const updatedContent = updater.updateContent(pkg.manifestContent); + const originalManifest = (0, common_1.parseCargoManifest)(pkg.manifestContent); + const updatedManifest = (0, common_1.parseCargoManifest)(updatedContent); + const dependencyNotes = getChangelogDepsNotes(originalManifest, updatedManifest); + const updatedPackage = { + ...pkg, + version: version.toString(), + }; + const strategy = this.strategiesByPath[updatedPackage.path]; + const latestRelease = this.releasesByPath[updatedPackage.path]; + const basePullRequest = strategy + ? await strategy.buildReleasePullRequest([], latestRelease, false, [], { + newVersion: version, + }) + : undefined; + if (basePullRequest) { + return this.updateCandidate({ + path: pkg.path, + pullRequest: basePullRequest, + config: { + releaseType: 'rust', + }, + }, pkg, updatedVersions); + } + const pullRequest = { + title: pull_request_title_1.PullRequestTitle.ofTargetBranch(this.targetBranch), + body: new pull_request_body_1.PullRequestBody([ + { + component: pkg.name, + version, + notes: (0, workspace_1.appendDependenciesSectionToChangelog)('', dependencyNotes, this.logger), + }, + ]), + updates: [ + { + path: (0, workspace_1.addPath)(pkg.path, 'Cargo.toml'), + createIfMissing: false, + updater: new raw_content_1.RawContent(updatedContent), + }, + { + path: (0, workspace_1.addPath)(pkg.path, 'CHANGELOG.md'), + createIfMissing: false, + updater: new changelog_1.Changelog({ + version, + changelogEntry: dependencyNotes, + }), + }, + ], + labels: [], + headRefName: branch_name_1.BranchName.ofTargetBranch(this.targetBranch).toString(), + version, + draft: false, + }; + return { + path: pkg.path, + pullRequest, + config: { + releaseType: 'rust', + }, + }; + } + postProcessCandidates(candidates, updatedVersions) { + let rootCandidate = candidates.find(c => c.path === manifest_1.ROOT_PROJECT_PATH); + if (!rootCandidate) { + this.logger.warn('Unable to find root candidate pull request'); + rootCandidate = candidates.find(c => c.config.releaseType === 'rust'); + } + if (!rootCandidate) { + this.logger.warn('Unable to find a rust candidate pull request'); + return candidates; + } + // Update the root Cargo.lock if it exists + rootCandidate.pullRequest.updates.push({ + path: 'Cargo.lock', + createIfMissing: false, + updater: new cargo_lock_1.CargoLock(updatedVersions), + }); + return candidates; + } + async buildGraph(allPackages) { + var _a, _b, _c, _d, _e, _f; + const workspaceCrateNames = new Set(allPackages.map(crateInfo => crateInfo.name)); + const graph = new Map(); + for (const crateInfo of allPackages) { + const allDeps = Object.keys({ + ...((_a = 
crateInfo.manifest.dependencies) !== null && _a !== void 0 ? _a : {}), + ...((_b = crateInfo.manifest['dev-dependencies']) !== null && _b !== void 0 ? _b : {}), + ...((_c = crateInfo.manifest['build-dependencies']) !== null && _c !== void 0 ? _c : {}), + }); + const targets = crateInfo.manifest.target; + if (targets) { + for (const targetName in targets) { + const target = targets[targetName]; + allDeps.push(...Object.keys({ + ...((_d = target.dependencies) !== null && _d !== void 0 ? _d : {}), + ...((_e = target['dev-dependencies']) !== null && _e !== void 0 ? _e : {}), + ...((_f = target['build-dependencies']) !== null && _f !== void 0 ? _f : {}), + })); + } + } + const workspaceDeps = allDeps.filter(dep => workspaceCrateNames.has(dep)); + graph.set(crateInfo.name, { + deps: workspaceDeps, + value: crateInfo, + }); + } + return graph; + } + inScope(candidate) { + return candidate.config.releaseType === 'rust'; + } + packageNameFromPackage(pkg) { + return pkg.name; + } + pathFromPackage(pkg) { + return pkg.path; + } + async preconfigure(strategiesByPath, _commitsByPath, _releasesByPath) { + // Using preconfigure to siphon releases and strategies. + this.strategiesByPath = strategiesByPath; + this.releasesByPath = _releasesByPath; + return strategiesByPath; + } +} +exports.CargoWorkspace = CargoWorkspace; +function getChangelogDepsNotes(originalManifest, updatedManifest) { + let depUpdateNotes = ''; + const depTypes = [ + 'dependencies', + 'dev-dependencies', + 'build-dependencies', + ]; + const depVer = (s) => { + if (s === undefined) { + return undefined; + } + if (typeof s === 'string') { + return s; + } + else { + return s.version; + } + }; + const getDepMap = (cargoDeps) => { + const result = {}; + for (const [key, val] of Object.entries(cargoDeps)) { + const ver = depVer(val); + if (ver) { + result[key] = ver; + } + } + return result; + }; + const populateUpdates = (originalScope, updatedScope, updates) => { + var _a; + for (const depType of depTypes) { + const depUpdates = []; + const pkgDepTypes = updatedScope[depType]; + if (pkgDepTypes === undefined) { + continue; + } + for (const [depName, currentDepVer] of Object.entries(getDepMap(pkgDepTypes))) { + const origDepVer = depVer((_a = originalScope[depType]) === null || _a === void 0 ? void 0 : _a[depName]); + if (currentDepVer !== origDepVer) { + depUpdates.push(`\n * ${depName} bumped from ${origDepVer} to ${currentDepVer}`); + } + } + if (depUpdates.length > 0) { + const updatesForType = updates.get(depType) || new Set(); + depUpdates.forEach(update => updatesForType.add(update)); + updates.set(depType, updatesForType); + } + } + }; + const updates = new Map(); + populateUpdates(originalManifest, updatedManifest, updates); + if (updatedManifest.target && originalManifest.target) { + for (const targetName in updatedManifest.target) { + populateUpdates(originalManifest.target[targetName], updatedManifest.target[targetName], updates); + } + } + for (const [dt, notes] of updates) { + depUpdateNotes += `\n * ${dt}`; + for (const note of notes) { + depUpdateNotes += note; + } + } + if (depUpdateNotes) { + return `* The following workspace dependencies were updated${depUpdateNotes}`; + } + return ''; +} +//# sourceMappingURL=cargo-workspace.js.map + +/***/ }), + +/***/ 24055: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GroupPriority = void 0; +const plugin_1 = __nccwpck_require__(96926); +/** + * This plugin allows configuring a priority of release groups. For example, you could + * prioritize Java snapshot pull requests over other releases. + */ +class GroupPriority extends plugin_1.ManifestPlugin { + /** + * Instantiate a new GroupPriority plugin. + * + * @param {GitHub} github GitHub client + * @param {string} targetBranch Release branch + * @param {RepositoryConfig} repositoryConfig Parsed configuration for the entire + * repository. This allows plugins to know how components interact. + * @param {string[]} groups List of group names ordered with highest priority first + */ + constructor(github, targetBranch, repositoryConfig, groups) { + super(github, targetBranch, repositoryConfig); + this.groups = groups; + } + /** + * Group candidate release PRs by grouping and check our list of preferred + * groups in order. If a preferred group is found, only return pull requests for + * that group. + * @param {CandidateReleasePullRequest[]} pullRequests Candidate pull requests + * @returns {CandidateReleasePullRequest[]} Possibly a subset of the candidate + * pull requests if a preferred group is found. + */ + async run(pullRequests) { + this.logger.debug(`Group priority plugin running with groups: ${this.groups}`); + const groupedCandidates = groupCandidatesByType(pullRequests); + for (const group of this.groups) { + this.logger.debug(`Considering group: ${group}`); + const groupCandidates = groupedCandidates.get(group); + if (groupCandidates) { + this.logger.debug(`Found preferred group: ${group} with ${groupCandidates.length} candidate pull requests`); + return groupCandidates; + } + } + // fallback to returning all candidates + this.logger.debug('No preferred group found, returning full set.'); + return pullRequests; + } +} +exports.GroupPriority = GroupPriority; +/** + * Helper to group candidates by their `type` field. + * @param {CandidateReleasePullRequest[]} inScopeCandidates The candidates to group. + * @returns {Map} The grouped + * pull requests. + */ +function groupCandidatesByType(inScopeCandidates) { + const groupedCandidates = new Map(); + for (const candidatePullRequest of inScopeCandidates) { + const candidates = groupedCandidates.get(candidatePullRequest.pullRequest.group); + if (candidates) { + candidates.push(candidatePullRequest); + } + else { + groupedCandidates.set(candidatePullRequest.pullRequest.group, [ + candidatePullRequest, + ]); + } + } + return groupedCandidates; +} +//# sourceMappingURL=group-priority.js.map + +/***/ }), + +/***/ 78087: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.LinkedVersions = void 0; +const plugin_1 = __nccwpck_require__(96926); +const commit_1 = __nccwpck_require__(50726); +const factory_1 = __nccwpck_require__(53693); +const merge_1 = __nccwpck_require__(51886); +const branch_name_1 = __nccwpck_require__(94148); +/** + * This plugin reconfigures strategies by linking multiple components + * together. + * + * Release notes are broken up using ``/`
` blocks. + */ +class LinkedVersions extends plugin_1.ManifestPlugin { + constructor(github, targetBranch, repositoryConfig, groupName, components, options = {}) { + var _a; + super(github, targetBranch, repositoryConfig, options.logger); + this.groupName = groupName; + this.components = new Set(components); + this.merge = (_a = options.merge) !== null && _a !== void 0 ? _a : true; + } + /** + * Pre-configure strategies. + * @param {Record} strategiesByPath Strategies indexed by path + * @returns {Record} Updated strategies indexed by path + */ + async preconfigure(strategiesByPath, commitsByPath, releasesByPath) { + // Find all strategies in the group + const groupStrategies = {}; + for (const path in strategiesByPath) { + const strategy = strategiesByPath[path]; + const component = await strategy.getComponent(); + if (!component) { + continue; + } + if (this.components.has(component)) { + groupStrategies[path] = strategy; + } + } + this.logger.info(`Found ${Object.keys(groupStrategies).length} group components for ${this.groupName}`); + const groupVersions = {}; + const missingReleasePaths = new Set(); + for (const path in groupStrategies) { + const strategy = groupStrategies[path]; + const latestRelease = releasesByPath[path]; + const releasePullRequest = await strategy.buildReleasePullRequest((0, commit_1.parseConventionalCommits)(commitsByPath[path], this.logger), latestRelease); + if (releasePullRequest === null || releasePullRequest === void 0 ? void 0 : releasePullRequest.version) { + groupVersions[path] = releasePullRequest.version; + } + else { + missingReleasePaths.add(path); + } + } + const versions = Object.values(groupVersions); + if (versions.length === 0) { + return strategiesByPath; + } + const primaryVersion = versions.reduce((collector, version) => collector.compare(version) > 0 ? collector : version, versions[0]); + const newStrategies = {}; + for (const path in strategiesByPath) { + if (path in groupStrategies) { + const component = await strategiesByPath[path].getComponent(); + this.logger.info(`Replacing strategy for path ${path} with forced version: ${primaryVersion}`); + newStrategies[path] = await (0, factory_1.buildStrategy)({ + ...this.repositoryConfig[path], + github: this.github, + path, + targetBranch: this.targetBranch, + releaseAs: primaryVersion.toString(), + }); + if (missingReleasePaths.has(path)) { + this.logger.debug(`Appending fake commit for path: ${path}`); + commitsByPath[path].push({ + sha: '', + message: `chore(${component}): Synchronize ${this.groupName} versions\n\nRelease-As: ${primaryVersion.toString()}`, + }); + } + } + else { + newStrategies[path] = strategiesByPath[path]; + } + } + return newStrategies; + } + /** + * Post-process candidate pull requests. 
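+ * Splits the candidates into the linked group and everything else, then
+ * merges the grouped candidates into one pull request via the Merge plugin.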
+ * @param {CandidateReleasePullRequest[]} pullRequests Candidate pull requests + * @returns {CandidateReleasePullRequest[]} Updated pull requests + */ + async run(candidates) { + if (!this.merge) { + return candidates; + } + const [inScopeCandidates, outOfScopeCandidates] = candidates.reduce((collection, candidate) => { + if (!candidate.pullRequest.version) { + this.logger.warn('pull request missing version', candidate); + collection[1].push(candidate); + return collection; + } + if (this.components.has(candidate.config.component || '')) { + collection[0].push(candidate); + } + else { + collection[1].push(candidate); + } + return collection; + }, [[], []]); + this.logger.info(`found ${inScopeCandidates.length} linked-versions candidates`); + // delegate to the merge plugin and add merged pull request + if (inScopeCandidates.length > 0) { + const merge = new merge_1.Merge(this.github, this.targetBranch, this.repositoryConfig, { + pullRequestTitlePattern: `chore\${scope}: release ${this.groupName} libraries`, + forceMerge: true, + headBranchName: branch_name_1.BranchName.ofGroupTargetBranch(this.groupName, this.targetBranch).toString(), + }); + const merged = await merge.run(inScopeCandidates); + outOfScopeCandidates.push(...merged); + } + return outOfScopeCandidates; + } +} +exports.LinkedVersions = LinkedVersions; +//# sourceMappingURL=linked-versions.js.map + +/***/ }), + +/***/ 77153: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
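+// The MavenWorkspace plugin below parses every pom.xml on the target branch
+// into a dependency graph keyed by "groupId:artifactId" and bumps dependent
+// artifacts whose dependencies are being released.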
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.MavenWorkspace = void 0; +const workspace_1 = __nccwpck_require__(29574); +const version_1 = __nccwpck_require__(25112); +const dom = __nccwpck_require__(49213); +const xpath = __nccwpck_require__(65319); +const path_1 = __nccwpck_require__(71017); +const pom_xml_1 = __nccwpck_require__(55023); +const changelog_1 = __nccwpck_require__(11128); +const pull_request_title_1 = __nccwpck_require__(28866); +const pull_request_body_1 = __nccwpck_require__(11941); +const branch_name_1 = __nccwpck_require__(94148); +const logger_1 = __nccwpck_require__(18792); +const java_snapshot_1 = __nccwpck_require__(81189); +const always_bump_patch_1 = __nccwpck_require__(42320); +const composite_1 = __nccwpck_require__(93373); +const JAVA_RELEASE_TYPES = new Set([ + 'java', + 'java-bom', + 'java-yoshi', + 'java-yoshi-mono-repo', + 'maven', +]); +const XPATH_PROJECT_GROUP = '/*[local-name()="project"]/*[local-name()="groupId"]'; +const XPATH_PROJECT_ARTIFACT = '/*[local-name()="project"]/*[local-name()="artifactId"]'; +const XPATH_PROJECT_VERSION = '/*[local-name()="project"]/*[local-name()="version"]'; +const XPATH_PROJECT_DEPENDENCIES = '/*[local-name()="project"]/*[local-name()="dependencies"]/*[local-name()="dependency"]'; +const XPATH_PROJECT_DEPENDENCY_MANAGEMENT_DEPENDENCIES = '/*[local-name()="project"]/*[local-name()="dependencyManagement"]/*[local-name()="dependencies"]/*[local-name()="dependency"]'; +class MavenWorkspace extends workspace_1.WorkspacePlugin { + constructor(github, targetBranch, repositoryConfig, options = {}) { + var _a; + super(github, targetBranch, repositoryConfig, options); + this.considerAllArtifacts = (_a = options.considerAllArtifacts) !== null && _a !== void 0 ? _a : true; + } + async fetchPom(path) { + const content = await this.github.getFileContentsOnBranch(path, this.targetBranch); + return parseMavenArtifact(content.parsedContent, path, this.logger); + } + async buildAllPackages(candidates) { + const allPackages = []; + const candidatesByPackage = {}; + // find all pom.xml files and build a dependency graph + const pomFiles = await this.github.findFilesByFilenameAndRef('pom.xml', this.targetBranch); + for (const pomFile of pomFiles) { + const path = (0, path_1.dirname)(pomFile); + const config = this.repositoryConfig[path]; + if (!config) { + if (!this.considerAllArtifacts) { + this.logger.info(`path '${path}' not configured, ignoring '${pomFile}'`); + continue; + } + this.logger.info(`path '${path}' not configured, but 'considerAllArtifacts' option enabled`); + } + const mavenArtifact = await this.fetchPom(pomFile); + if (!mavenArtifact) { + continue; + } + allPackages.push(mavenArtifact); + const candidate = candidates.find(candidate => candidate.path === path); + if (candidate) { + candidatesByPackage[this.packageNameFromPackage(mavenArtifact)] = + candidate; + } + else { + this.logger.warn(`found ${pomFile} in path ${path}, but did not find an associated candidate PR`); + } + } + return { + allPackages, + candidatesByPackage, + }; + } + /** + * Our maven components can have multiple artifacts if using + * `considerAllArtifacts`. Find the candidate release for the component + * that contains that maven artifact. + * @param {MavenArtifact} pkg The artifact to search for + * @param {Record pkg.path.startsWith(`${candidate.path}/`)); + } + /** + * Helper to determine which packages we will use to base our search + * for touched packages upon. 
These are usually the packages that + * have candidate pull requests open. + * + * If you configure `updateAllPackages`, we fill force update all + * packages as if they had a release. + * @param {DependencyGraph} graph All the packages in the repository + * @param {Record candidate.path); + // Find artifacts that are in an existing candidate release + return Array.from(graph.values()) + .filter(({ value }) => candidatePaths.find(path => value.path === path || value.path.startsWith(`${path}/`))) + .map(({ value }) => this.packageNameFromPackage(value)); + } + return super.packageNamesToUpdate(graph, candidatesByPackage); + } + /** + * Helper to build up all the versions we are modifying in this + * repository. + * @param {DependencyGraph} graph All the packages in the repository + * @param {T[]} orderedPackages A list of packages that are currently + * updated by the existing candidate pull requests + * @param {Record Version) and a + * map of all updated versions (component path => Version). + */ + async buildUpdatedVersions(_graph, orderedPackages, candidatesByPackage) { + const updatedVersions = new Map(); + const updatedPathVersions = new Map(); + // Look for updated pom.xml files + for (const [_, candidate] of Object.entries(candidatesByPackage)) { + const pomUpdates = candidate.pullRequest.updates.filter(update => update.path.endsWith('pom.xml')); + for (const pomUpdate of pomUpdates) { + if (!pomUpdate.cachedFileContents) { + pomUpdate.cachedFileContents = + await this.github.getFileContentsOnBranch(pomUpdate.path, this.targetBranch); + } + if (pomUpdate.cachedFileContents) { + // pre-run the version updater on this artifact and extract the + // new version + const updatedArtifact = parseMavenArtifact(pomUpdate.updater.updateContent(pomUpdate.cachedFileContents.parsedContent), pomUpdate.path, this.logger); + if (updatedArtifact) { + this.logger.debug(`updating ${updatedArtifact.name} to ${updatedArtifact.version}`); + updatedVersions.set(updatedArtifact.name, version_1.Version.parse(updatedArtifact.version)); + } + } + else { + this.logger.warn(`${pomUpdate.path} does not have cached contents`); + } + } + if (candidate.pullRequest.version && + this.isReleaseVersion(candidate.pullRequest.version)) { + updatedPathVersions.set(candidate.path, candidate.pullRequest.version); + } + } + for (const pkg of orderedPackages) { + const packageName = this.packageNameFromPackage(pkg); + this.logger.debug(`Looking for next version for: ${packageName}`); + const existingCandidate = candidatesByPackage[packageName]; + if (existingCandidate) { + const version = existingCandidate.pullRequest.version; + this.logger.debug(`version: ${version} from release-please`); + updatedVersions.set(packageName, version); + } + else { + const version = this.bumpVersion(pkg); + if (updatedVersions.get(packageName)) { + this.logger.debug('version already set'); + } + else { + this.logger.debug(`version: ${version} forced bump`); + updatedVersions.set(packageName, version); + if (this.isReleaseVersion(version)) { + updatedPathVersions.set(this.pathFromPackage(pkg), version); + } + } + } + } + return { + updatedVersions, + updatedPathVersions, + }; + } + async buildGraph(allPackages) { + this.logger.trace('building graph', allPackages); + const artifactsByName = allPackages.reduce((collection, mavenArtifact) => { + collection[mavenArtifact.name] = mavenArtifact; + return collection; + }, {}); + this.logger.trace('artifacts by name', artifactsByName); + const graph = new Map(); + for (const mavenArtifact of allPackages) 
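+ // node shape: artifact name -> { deps: workspace dep names, value: artifact }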
{ + const allDeps = [ + ...mavenArtifact.dependencies, + ...mavenArtifact.testDependencies, + ...mavenArtifact.managedDependencies, + ]; + const workspaceDeps = allDeps.filter(dep => artifactsByName[packageNameFromGav(dep)]); + graph.set(mavenArtifact.name, { + deps: workspaceDeps.map(dep => packageNameFromGav(dep)), + value: mavenArtifact, + }); + } + return graph; + } + /** + * Given a release version, determine if we should bump the manifest + * version as well. For maven artifacts, SNAPSHOT versions are not + * considered releases. + * @param {Version} version The release version + */ + isReleaseVersion(version) { + var _a; + return !((_a = version.preRelease) === null || _a === void 0 ? void 0 : _a.includes('SNAPSHOT')); + } + bumpVersion(artifact) { + const strategy = new java_snapshot_1.JavaSnapshot(new always_bump_patch_1.AlwaysBumpPatch()); + return strategy.bump(version_1.Version.parse(artifact.version), [FAKE_COMMIT]); + } + updateCandidate(existingCandidate, artifact, updatedVersions) { + const version = updatedVersions.get(artifact.name); + if (!version) { + throw new Error(`Didn't find updated version for ${artifact.name}`); + } + const updater = new pom_xml_1.PomXml(version, updatedVersions); + const dependencyNotes = getChangelogDepsNotes(artifact, updater, updatedVersions, this.logger); + existingCandidate.pullRequest.updates = + existingCandidate.pullRequest.updates.map(update => { + if (update.path === (0, workspace_1.addPath)(existingCandidate.path, 'pom.xml')) { + update.updater = new composite_1.CompositeUpdater(update.updater, updater); + } + else if (update.updater instanceof changelog_1.Changelog) { + if (dependencyNotes) { + update.updater.changelogEntry = + (0, workspace_1.appendDependenciesSectionToChangelog)(update.updater.changelogEntry, dependencyNotes, this.logger); + } + } + return update; + }); + // append dependency notes + if (dependencyNotes) { + if (existingCandidate.pullRequest.body.releaseData.length > 0) { + existingCandidate.pullRequest.body.releaseData[0].notes = + (0, workspace_1.appendDependenciesSectionToChangelog)(existingCandidate.pullRequest.body.releaseData[0].notes, dependencyNotes, this.logger); + } + else { + existingCandidate.pullRequest.body.releaseData.push({ + component: artifact.name, + version: existingCandidate.pullRequest.version, + notes: (0, workspace_1.appendDependenciesSectionToChangelog)('', dependencyNotes, this.logger), + }); + } + } + return existingCandidate; + } + async newCandidate(artifact, updatedVersions) { + const version = updatedVersions.get(artifact.name); + if (!version) { + throw new Error(`Didn't find updated version for ${artifact.name}`); + } + const updater = new pom_xml_1.PomXml(version, updatedVersions); + const dependencyNotes = getChangelogDepsNotes(artifact, updater, updatedVersions, this.logger); + const pullRequest = { + title: pull_request_title_1.PullRequestTitle.ofTargetBranch(this.targetBranch), + body: new pull_request_body_1.PullRequestBody([ + { + component: artifact.name, + version, + notes: (0, workspace_1.appendDependenciesSectionToChangelog)('', dependencyNotes, this.logger), + }, + ]), + updates: [ + { + path: (0, workspace_1.addPath)(artifact.path, 'pom.xml'), + createIfMissing: false, + updater, + }, + { + path: (0, workspace_1.addPath)(artifact.path, 'CHANGELOG.md'), + createIfMissing: false, + updater: new changelog_1.Changelog({ + version, + changelogEntry: dependencyNotes, + }), + }, + ], + labels: [], + headRefName: 
branch_name_1.BranchName.ofTargetBranch(this.targetBranch).toString(), + version, + draft: false, + }; + return { + path: artifact.path, + pullRequest, + config: { + releaseType: 'maven', + }, + }; + } + inScope(candidate) { + return JAVA_RELEASE_TYPES.has(candidate.config.releaseType); + } + packageNameFromPackage(artifact) { + return artifact.name; + } + pathFromPackage(artifact) { + return artifact.path; + } + postProcessCandidates(candidates, _updatedVersions) { + // NOP for maven workspaces + return candidates; + } +} +exports.MavenWorkspace = MavenWorkspace; +function packageNameFromGav(gav) { + return `${gav.groupId}:${gav.artifactId}`; +} +function getChangelogDepsNotes(artifact, updater, updatedVersions, logger = logger_1.logger) { + const document = new dom.DOMParser().parseFromString(artifact.pomContent); + const dependencyUpdates = updater.dependencyUpdates(document, updatedVersions); + const depUpdateNotes = []; + for (const dependencyUpdate of dependencyUpdates) { + depUpdateNotes.push(`\n * ${dependencyUpdate.name} bumped to ${dependencyUpdate.version}`); + logger.info(`bumped ${dependencyUpdate.name} to ${dependencyUpdate.version}`); + } + if (depUpdateNotes.length > 0) { + return `* The following workspace dependencies were updated${depUpdateNotes.join()}`; + } + return ''; +} +/** + * Helper to parse a pom.xml file and extract important fields + * @param {string} pomContent The XML contents as a string + * @param {string} path The path to the file in the repository including the filename. + * @param {Logger} logger Context logger + * @returns {MavenArtifact | undefined} Returns undefined if we are missing key + * attributes. We log a warning in these cases. + */ +function parseMavenArtifact(pomContent, path, logger) { + const document = new dom.DOMParser().parseFromString(pomContent); + const groupNodes = xpath.select(XPATH_PROJECT_GROUP, document); + if (groupNodes.length === 0) { + logger.warn(`Missing project.groupId in ${path}`); + return; + } + const artifactNodes = xpath.select(XPATH_PROJECT_ARTIFACT, document); + if (artifactNodes.length === 0) { + logger.warn(`Missing project.artifactId in ${path}`); + return; + } + const versionNodes = xpath.select(XPATH_PROJECT_VERSION, document); + if (versionNodes.length === 0) { + logger.warn(`Missing project.version in ${path}`); + return; + } + const dependencies = []; + const testDependencies = []; + for (const dependencyNode of xpath.select(XPATH_PROJECT_DEPENDENCIES, document)) { + const parsedNode = (0, pom_xml_1.parseDependencyNode)(dependencyNode); + if (!parsedNode.version) { + continue; + } + if (parsedNode.scope === 'test') { + testDependencies.push({ + groupId: parsedNode.groupId, + artifactId: parsedNode.artifactId, + version: parsedNode.version, + }); + } + else { + dependencies.push({ + groupId: parsedNode.groupId, + artifactId: parsedNode.artifactId, + version: parsedNode.version, + }); + } + } + const managedDependencies = []; + for (const dependencyNode of xpath.select(XPATH_PROJECT_DEPENDENCY_MANAGEMENT_DEPENDENCIES, document)) { + const parsedNode = (0, pom_xml_1.parseDependencyNode)(dependencyNode); + if (!parsedNode.version) { + continue; + } + managedDependencies.push({ + groupId: parsedNode.groupId, + artifactId: parsedNode.artifactId, + version: parsedNode.version, + }); + } + const groupId = groupNodes[0].firstChild.textContent; + const artifactId = artifactNodes[0].firstChild.textContent; + return { + path: (0, path_1.dirname)(path), + groupId, + artifactId, + name: `${groupId}:${artifactId}`, + 
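+ // this "groupId:artifactId" name doubles as the dependency-graph key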
+        version: versionNodes[0].firstChild.textContent,
+        dependencies,
+        testDependencies,
+        managedDependencies,
+        pomContent,
+    };
+}
+// We use a fake commit to leverage the Java versioning strategy
+// (it should be a patch version bump and potentially remove the
+// -SNAPSHOT portion of the version)
+const FAKE_COMMIT = {
+    message: 'fix: fake fix',
+    type: 'fix',
+    scope: null,
+    notes: [],
+    references: [],
+    bareMessage: 'fake fix',
+    breaking: false,
+    sha: 'abc123',
+    files: [],
+};
+//# sourceMappingURL=maven-workspace.js.map
+
+/***/ }),
+
+/***/ 51886:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.Merge = void 0;
+const plugin_1 = __nccwpck_require__(96926);
+const manifest_1 = __nccwpck_require__(24026);
+const pull_request_title_1 = __nccwpck_require__(28866);
+const pull_request_body_1 = __nccwpck_require__(11941);
+const branch_name_1 = __nccwpck_require__(94148);
+const composite_1 = __nccwpck_require__(93373);
+/**
+ * This plugin merges multiple pull requests into a single
+ * release pull request.
+ *
+ * Release notes are broken up using `<summary>`/`<details>` blocks.
+ */
+class Merge extends plugin_1.ManifestPlugin {
+    constructor(github, targetBranch, repositoryConfig, options = {}) {
+        var _a, _b;
+        super(github, targetBranch, repositoryConfig);
+        this.pullRequestTitlePattern =
+            (_a = options.pullRequestTitlePattern) !== null && _a !== void 0 ? _a : manifest_1.MANIFEST_PULL_REQUEST_TITLE_PATTERN;
+        this.pullRequestHeader = options.pullRequestHeader;
+        this.pullRequestFooter = options.pullRequestFooter;
+        this.componentNoSpace = options.componentNoSpace;
+        this.headBranchName = options.headBranchName;
+        this.forceMerge = (_b = options.forceMerge) !== null && _b !== void 0 ? _b : false;
+    }
+    async run(candidates) {
+        var _a;
+        if (candidates.length < 1) {
+            return candidates;
+        }
+        this.logger.info(`Merging ${candidates.length} pull requests`);
+        const [inScopeCandidates, outOfScopeCandidates] = candidates.reduce((collection, candidate) => {
+            if (candidate.config.separatePullRequests && !this.forceMerge) {
+                collection[1].push(candidate);
+            }
+            else {
+                collection[0].push(candidate);
+            }
+            return collection;
+        }, [[], []]);
+        const releaseData = [];
+        const labels = new Set();
+        let rawUpdates = [];
+        let rootRelease = null;
+        for (const candidate of inScopeCandidates) {
+            const pullRequest = candidate.pullRequest;
+            rawUpdates = rawUpdates.concat(...pullRequest.updates);
+            for (const label of pullRequest.labels) {
+                labels.add(label);
+            }
+            releaseData.push(...pullRequest.body.releaseData);
+            if (candidate.path === '.') {
+                rootRelease = candidate;
+            }
+        }
+        const updates = (0, composite_1.mergeUpdates)(rawUpdates);
+        const pullRequest = {
+            title: pull_request_title_1.PullRequestTitle.ofComponentTargetBranchVersion(rootRelease === null || rootRelease === void 0 ? void 0 : rootRelease.pullRequest.title.component, this.targetBranch, rootRelease === null || rootRelease === void 0 ? void 0 : rootRelease.pullRequest.title.version, this.pullRequestTitlePattern, this.componentNoSpace),
+            body: new pull_request_body_1.PullRequestBody(releaseData, {
+                useComponents: true,
+                header: this.pullRequestHeader,
+                footer: this.pullRequestFooter,
+            }),
+            updates,
+            labels: Array.from(labels),
+            headRefName: (_a = this.headBranchName) !== null && _a !== void 0 ? _a : branch_name_1.BranchName.ofTargetBranch(this.targetBranch).toString(),
+            draft: !candidates.some(candidate => !candidate.pullRequest.draft),
+        };
+        const releaseTypes = new Set(candidates.map(candidate => candidate.config.releaseType));
+        const releaseType = releaseTypes.size === 1 ? releaseTypes.values().next().value : 'simple';
+        return [
+            {
+                path: manifest_1.ROOT_PROJECT_PATH,
+                pullRequest,
+                config: {
+                    releaseType,
+                },
+            },
+            ...outOfScopeCandidates,
+        ];
+    }
+}
+exports.Merge = Merge;
+//# sourceMappingURL=merge.js.map
+
+/***/ }),
+
+/***/ 27400:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
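+// A sketch of what the node-workspace plugin below does to a managed
+// package (names and versions are illustrative, not taken from this repo):
+//
+//   before, packages/b/package.json:
+//     { "name": "@acme/b", "version": "1.1.0",
+//       "dependencies": { "@acme/a": "^1.0.0" } }
+//   after @acme/a is bumped to 1.1.0 (and @acme/b patch-bumped to 1.1.1):
+//     { "name": "@acme/b", "version": "1.1.1",
+//       "dependencies": { "@acme/a": "^1.1.0" } }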
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.NodeWorkspace = void 0;
+const package_lock_json_1 = __nccwpck_require__(55554);
+const version_1 = __nccwpck_require__(25112);
+const pull_request_title_1 = __nccwpck_require__(28866);
+const pull_request_body_1 = __nccwpck_require__(11941);
+const branch_name_1 = __nccwpck_require__(94148);
+const changelog_1 = __nccwpck_require__(11128);
+const workspace_1 = __nccwpck_require__(29574);
+const composite_1 = __nccwpck_require__(93373);
+const package_json_1 = __nccwpck_require__(15189);
+const versioning_strategy_1 = __nccwpck_require__(55237);
+/**
+ * The plugin analyzes a node workspace and will bump dependencies
+ * of managed packages if those dependencies are being updated.
+ *
+ * If multiple node packages are being updated, it will merge them
+ * into a single release pull request.
+ */
+class NodeWorkspace extends workspace_1.WorkspacePlugin {
+    constructor(github, targetBranch, repositoryConfig, options = {}) {
+        super(github, targetBranch, repositoryConfig, options);
+        this.strategiesByPath = {};
+        this.releasesByPath = {};
+        this.alwaysLinkLocal = options.alwaysLinkLocal === false ? false : true;
+        this.updatePeerDependencies = options.updatePeerDependencies === true;
+    }
+    async buildAllPackages(candidates) {
+        var _a;
+        const candidatesByPath = new Map();
+        for (const candidate of candidates) {
+            candidatesByPath.set(candidate.path, candidate);
+        }
+        const candidatesByPackage = {};
+        const packagesByPath = new Map();
+        for (const path in this.repositoryConfig) {
+            const config = this.repositoryConfig[path];
+            if (config.releaseType !== 'node') {
+                continue;
+            }
+            const candidate = candidatesByPath.get(path);
+            if (candidate) {
+                this.logger.debug(`Found candidate pull request for path: ${candidate.path}`);
+                const packagePath = (0, workspace_1.addPath)(candidate.path, 'package.json');
+                const packageUpdate = candidate.pullRequest.updates.find(update => update.path === packagePath);
+                const contents = (_a = packageUpdate === null || packageUpdate === void 0 ? void 0 : packageUpdate.cachedFileContents) !== null && _a !== void 0 ?
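+                // prefer the contents already staged in the candidate's pending
+                // package.json update; otherwise fall back to reading the file
+                // from the target branch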
_a : (await this.github.getFileContentsOnBranch(packagePath, this.targetBranch)); + const packageJson = JSON.parse(contents.parsedContent); + const pkg = { + name: packageJson.name, + path, + version: packageJson.version, + dependencies: packageJson.dependencies || {}, + devDependencies: packageJson.devDependencies || {}, + peerDependencies: packageJson.peerDependencies || {}, + optionalDependencies: packageJson.optionalDependencies || {}, + jsonContent: contents.parsedContent, + }; + packagesByPath.set(candidate.path, pkg); + candidatesByPackage[pkg.name] = candidate; + // } + } + else { + const packagePath = (0, workspace_1.addPath)(path, 'package.json'); + this.logger.debug(`No candidate pull request for path: ${path} - inspect package from ${packagePath}`); + const contents = await this.github.getFileContentsOnBranch(packagePath, this.targetBranch); + const packageJson = JSON.parse(contents.parsedContent); + const pkg = { + name: packageJson.name, + path, + version: packageJson.version, + dependencies: packageJson.dependencies || {}, + devDependencies: packageJson.devDependencies || {}, + peerDependencies: packageJson.peerDependencies || {}, + optionalDependencies: packageJson.optionalDependencies || {}, + jsonContent: contents.parsedContent, + }; + packagesByPath.set(path, pkg); + } + } + const allPackages = Array.from(packagesByPath.values()); + return { + allPackages, + candidatesByPackage, + }; + } + bumpVersion(pkg) { + const version = version_1.Version.parse(pkg.version); + const strategy = this.strategiesByPath[pkg.path]; + if (strategy) + return strategy.versioningStrategy.bump(version, []); + return new versioning_strategy_1.PatchVersionUpdate().bump(version); + } + updateCandidate(existingCandidate, pkg, updatedVersions) { + // Update version of the package + const newVersion = updatedVersions.get(pkg.name); + if (!newVersion) { + throw new Error(`Didn't find updated version for ${pkg.name}`); + } + const updatedPackage = { + ...pkg, + version: newVersion.toString(), + }; + const updater = new package_json_1.PackageJson({ + version: newVersion, + versionsMap: updatedVersions, + updatePeerDependencies: this.updatePeerDependencies, + }); + const dependencyNotes = getChangelogDepsNotes(pkg, updatedPackage, updatedVersions, this.logger); + existingCandidate.pullRequest.updates = + existingCandidate.pullRequest.updates.map(update => { + if (update.path === (0, workspace_1.addPath)(existingCandidate.path, 'package.json')) { + update.updater = new composite_1.CompositeUpdater(update.updater, updater); + } + else if (update.path === (0, workspace_1.addPath)(existingCandidate.path, 'package-lock.json')) { + update.updater = new package_lock_json_1.PackageLockJson({ + version: newVersion, + versionsMap: updatedVersions, + }); + } + else if (update.updater instanceof changelog_1.Changelog) { + if (dependencyNotes) { + update.updater.changelogEntry = + (0, workspace_1.appendDependenciesSectionToChangelog)(update.updater.changelogEntry, dependencyNotes, this.logger); + } + } + return update; + }); + // append dependency notes + if (dependencyNotes) { + if (existingCandidate.pullRequest.body.releaseData.length > 0) { + existingCandidate.pullRequest.body.releaseData[0].notes = + (0, workspace_1.appendDependenciesSectionToChangelog)(existingCandidate.pullRequest.body.releaseData[0].notes, dependencyNotes, this.logger); + } + else { + existingCandidate.pullRequest.body.releaseData.push({ + component: updatedPackage.name, + version: existingCandidate.pullRequest.version, + notes: (0, 
workspace_1.appendDependenciesSectionToChangelog)('', dependencyNotes, this.logger),
+                });
+            }
+        }
+        return existingCandidate;
+    }
+    async newCandidate(pkg, updatedVersions) {
+        // Update version of the package
+        const newVersion = updatedVersions.get(pkg.name);
+        if (!newVersion) {
+            throw new Error(`Didn't find updated version for ${pkg.name}`);
+        }
+        const updatedPackage = {
+            ...pkg,
+            version: newVersion.toString(),
+        };
+        const dependencyNotes = getChangelogDepsNotes(pkg, updatedPackage, updatedVersions, this.logger);
+        const strategy = this.strategiesByPath[updatedPackage.path];
+        const latestRelease = this.releasesByPath[updatedPackage.path];
+        const basePullRequest = strategy
+            ? await strategy.buildReleasePullRequest([], latestRelease, false, [], {
+                newVersion,
+            })
+            : undefined;
+        if (basePullRequest) {
+            return this.updateCandidate({
+                path: pkg.path,
+                pullRequest: basePullRequest,
+                config: {
+                    releaseType: 'node',
+                },
+            }, pkg, updatedVersions);
+        }
+        const pullRequest = {
+            title: pull_request_title_1.PullRequestTitle.ofTargetBranch(this.targetBranch),
+            body: new pull_request_body_1.PullRequestBody([
+                {
+                    component: updatedPackage.name,
+                    version: newVersion,
+                    notes: (0, workspace_1.appendDependenciesSectionToChangelog)('', dependencyNotes, this.logger),
+                },
+            ]),
+            updates: [
+                {
+                    path: (0, workspace_1.addPath)(updatedPackage.path, 'package.json'),
+                    createIfMissing: false,
+                    updater: new package_json_1.PackageJson({
+                        version: newVersion,
+                        versionsMap: updatedVersions,
+                        updatePeerDependencies: this.updatePeerDependencies,
+                    }),
+                },
+                {
+                    path: (0, workspace_1.addPath)(updatedPackage.path, 'package-lock.json'),
+                    createIfMissing: false,
+                    // use the lock-file updater here, matching updateCandidate above
+                    updater: new package_lock_json_1.PackageLockJson({
+                        version: newVersion,
+                        versionsMap: updatedVersions,
+                    }),
+                },
+                {
+                    path: (0, workspace_1.addPath)(updatedPackage.path, 'CHANGELOG.md'),
+                    createIfMissing: false,
+                    updater: new changelog_1.Changelog({
+                        version: newVersion,
+                        changelogEntry: (0, workspace_1.appendDependenciesSectionToChangelog)('', dependencyNotes, this.logger),
+                    }),
+                },
+            ],
+            labels: [],
+            headRefName: branch_name_1.BranchName.ofTargetBranch(this.targetBranch).toString(),
+            version: newVersion,
+            draft: false,
+        };
+        return {
+            path: updatedPackage.path,
+            pullRequest,
+            config: {
+                releaseType: 'node',
+            },
+        };
+    }
+    postProcessCandidates(candidates, _updatedVersions) {
+        if (candidates.length === 0) {
+            return candidates;
+        }
+        const [candidate] = candidates;
+        // check for root lock file in pull request
+        let hasRootLockFile;
+        for (let i = 0; i < candidate.pullRequest.updates.length; i++) {
+            if (candidate.pullRequest.updates[i].path === '.package-lock.json' ||
+                candidate.pullRequest.updates[i].path === './package-lock.json' ||
+                candidate.pullRequest.updates[i].path === 'package-lock.json' ||
+                candidate.pullRequest.updates[i].path === '/package-lock.json') {
+                hasRootLockFile = true;
+                break;
+            }
+        }
+        // if there is a root lock file, then there is no additional pull request update necessary.
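+        // e.g. a PR that only touches "packages/foo/package-lock.json" matches
+        // none of the root spellings above, so a root package-lock.json update
+        // is appended below to keep the root lock file in sync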
+ if (hasRootLockFile) { + return candidates; + } + candidate.pullRequest.updates.push({ + path: 'package-lock.json', + createIfMissing: false, + updater: new package_lock_json_1.PackageLockJson({ + versionsMap: _updatedVersions, + }), + }); + return candidates; + } + async buildGraph(allPackages) { + const graph = new Map(); + const workspacePackageNames = new Set(allPackages.map(packageJson => packageJson.name)); + for (const packageJson of allPackages) { + const allDeps = Object.keys(this.combineDeps(packageJson)); + const workspaceDeps = allDeps.filter(dep => workspacePackageNames.has(dep)); + graph.set(packageJson.name, { + deps: workspaceDeps, + value: packageJson, + }); + } + return graph; + } + inScope(candidate) { + return candidate.config.releaseType === 'node'; + } + packageNameFromPackage(pkg) { + return pkg.name; + } + pathFromPackage(pkg) { + return pkg.path; + } + combineDeps(packageJson) { + var _a, _b, _c, _d; + return { + ...((_a = packageJson.dependencies) !== null && _a !== void 0 ? _a : {}), + ...((_b = packageJson.devDependencies) !== null && _b !== void 0 ? _b : {}), + ...((_c = packageJson.optionalDependencies) !== null && _c !== void 0 ? _c : {}), + ...(this.updatePeerDependencies + ? (_d = packageJson.peerDependencies) !== null && _d !== void 0 ? _d : {} + : {}), + }; + } + async preconfigure(strategiesByPath, _commitsByPath, _releasesByPath) { + // Using preconfigure to siphon releases and strategies. + this.strategiesByPath = strategiesByPath; + this.releasesByPath = _releasesByPath; + return strategiesByPath; + } +} +exports.NodeWorkspace = NodeWorkspace; +function getChangelogDepsNotes(original, updated, updateVersions, logger) { + var _a; + let depUpdateNotes = ''; + const depTypes = [ + 'dependencies', + 'devDependencies', + 'peerDependencies', + 'optionalDependencies', + ]; + const updates = new Map(); + for (const depType of depTypes) { + const depUpdates = []; + const pkgDepTypes = updated[depType]; + if (pkgDepTypes === undefined) { + continue; + } + for (const [depName, currentDepVer] of Object.entries(pkgDepTypes)) { + const newVersion = updateVersions.get(depName); + if (!newVersion) { + logger.debug(`${depName} was not bumped, ignoring`); + continue; + } + const origDepVer = (_a = original[depType]) === null || _a === void 0 ? void 0 : _a[depName]; + const newVersionString = (0, package_json_1.newVersionWithRange)(origDepVer, newVersion); + if (currentDepVer.startsWith('workspace:')) { + depUpdates.push(`\n * ${depName} bumped to ${newVersionString}`); + } + else if (newVersionString !== origDepVer) { + depUpdates.push(`\n * ${depName} bumped from ${origDepVer} to ${newVersionString}`); + //handle case when "workspace:" version is used + } + } + if (depUpdates.length > 0) { + updates.set(depType, depUpdates); + } + } + for (const [dt, notes] of updates) { + depUpdateNotes += `\n * ${dt}`; + for (const note of notes) { + depUpdateNotes += note; + } + } + if (depUpdateNotes) { + return `* The following workspace dependencies were updated${depUpdateNotes}`; + } + return ''; +} +//# sourceMappingURL=node-workspace.js.map + +/***/ }), + +/***/ 33483: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.SentenceCase = void 0; +const plugin_1 = __nccwpck_require__(96926); +// A list of words that should not be converted to uppercase: +const SPECIAL_WORDS = ['gRPC', 'npm']; +/** + * This plugin converts commit messages to sentence case, for the benefit + * of the generated CHANGELOG. + */ +class SentenceCase extends plugin_1.ManifestPlugin { + constructor(github, targetBranch, repositoryConfig, specialWords) { + super(github, targetBranch, repositoryConfig); + this.specialWords = new Set(specialWords ? [...specialWords] : SPECIAL_WORDS); + } + /** + * Perform post-processing on commits, e.g, sentence casing them. + * @param {Commit[]} commits The set of commits that will feed into release pull request. + * @returns {Commit[]} The modified commit objects. + */ + processCommits(commits) { + this.logger.info(`SentenceCase processing ${commits.length} commits`); + for (const commit of commits) { + // The parsed conventional commit message, without the type: + console.info(commit.bareMessage); + commit.bareMessage = this.toUpperCase(commit.bareMessage); + // Check whether commit is in conventional commit format, if it is + // we'll split the string by type and description: + if (commit.message.includes(':')) { + const splitMessage = commit.message.split(':'); + let prefix = splitMessage[0]; + prefix += ': '; + let suffix = splitMessage.slice(1).join(':').trim(); + // Extract the first word from the rest of the string: + const match = /\s|$/.exec(suffix); + if (match) { + const endFirstWord = match.index; + const firstWord = suffix.slice(0, endFirstWord); + suffix = suffix.slice(endFirstWord); + // Put the string back together again: + commit.message = `${prefix}${this.toUpperCase(firstWord)}${suffix}`; + } + } + } + return commits; + } + /* + * Convert a string to upper case, taking into account a dictionary of + * common lowercase words, e.g., gRPC, npm. + * + * @param {string} word The original word. + * @returns {string} The word, now upper case. + */ + toUpperCase(word) { + if (this.specialWords.has(word)) { + return word; + } + if (word.match(/^[a-z]/)) { + return word.charAt(0).toUpperCase() + word.slice(1); + } + else { + return word; + } + } +} +exports.SentenceCase = SentenceCase; +//# sourceMappingURL=sentence-case.js.map + +/***/ }), + +/***/ 29574: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
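+// Rough flow of the workspace plugin defined below (method names are from
+// the class that follows): buildAllPackages -> buildGraph ->
+// packageNamesToUpdate -> buildGraphOrder -> buildUpdatedVersions ->
+// updateCandidate/newCandidate -> optional Merge -> postProcessCandidates.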
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.addPath = exports.appendDependenciesSectionToChangelog = exports.WorkspacePlugin = void 0; +const plugin_1 = __nccwpck_require__(96926); +const manifest_1 = __nccwpck_require__(24026); +const logger_1 = __nccwpck_require__(18792); +const merge_1 = __nccwpck_require__(51886); +const release_please_manifest_1 = __nccwpck_require__(90094); +/** + * The plugin generalizes the logic for handling a workspace and + * will bump dependencies of managed packages if those dependencies + * are being updated. + * + * If multiple in-scope packages are being updated, it will merge them + * into a single package. + * + * This class is templatized with `T` which should be information about + * the package including the name and current version. + */ +class WorkspacePlugin extends plugin_1.ManifestPlugin { + constructor(github, targetBranch, repositoryConfig, options = {}) { + var _a, _b, _c; + super(github, targetBranch, repositoryConfig, options.logger); + this.manifestPath = (_a = options.manifestPath) !== null && _a !== void 0 ? _a : manifest_1.DEFAULT_RELEASE_PLEASE_MANIFEST; + this.updateAllPackages = (_b = options.updateAllPackages) !== null && _b !== void 0 ? _b : false; + this.merge = (_c = options.merge) !== null && _c !== void 0 ? _c : true; + } + async run(candidates) { + this.logger.info('Running workspace plugin'); + const [inScopeCandidates, outOfScopeCandidates] = candidates.reduce((collection, candidate) => { + if (!candidate.pullRequest.version) { + this.logger.warn('pull request missing version', candidate); + return collection; + } + if (this.inScope(candidate)) { + collection[0].push(candidate); + } + else { + collection[1].push(candidate); + } + return collection; + }, [[], []]); + this.logger.info(`Found ${inScopeCandidates.length} in-scope releases`); + if (inScopeCandidates.length === 0) { + return outOfScopeCandidates; + } + this.logger.info('Building list of all packages'); + const { allPackages, candidatesByPackage } = await this.buildAllPackages(inScopeCandidates); + this.logger.info(`Building dependency graph for ${allPackages.length} packages`); + const graph = await this.buildGraph(allPackages); + const packageNamesToUpdate = this.packageNamesToUpdate(graph, candidatesByPackage); + const orderedPackages = this.buildGraphOrder(graph, packageNamesToUpdate); + this.logger.info(`Updating ${orderedPackages.length} packages`); + const { updatedVersions, updatedPathVersions } = await this.buildUpdatedVersions(graph, orderedPackages, candidatesByPackage); + let newCandidates = []; + // In some cases, there are multiple packages within a single candidate. We + // only want to process each candidate package once. 
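+        // e.g. two workspace packages that share one candidate path must
+        // yield a single updated pull request, not one per package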
+        const newCandidatePaths = new Set();
+        for (const pkg of orderedPackages) {
+            const existingCandidate = this.findCandidateForPackage(pkg, candidatesByPackage);
+            if (existingCandidate) {
+                // if the package already has a pull request, update the changelog and updates
+                this.logger.info(`Updating existing candidate pull request for ${this.packageNameFromPackage(pkg)}, path: ${existingCandidate.path}`);
+                if (newCandidatePaths.has(existingCandidate.path)) {
+                    this.logger.info(`Already updated candidate for path: ${existingCandidate.path}`);
+                }
+                else {
+                    const newCandidate = this.updateCandidate(existingCandidate, pkg, updatedVersions);
+                    newCandidatePaths.add(newCandidate.path);
+                    newCandidates.push(newCandidate);
+                }
+            }
+            else {
+                // otherwise, build a new pull request with changelog and entry update
+                this.logger.info(`Creating new candidate pull request for ${this.packageNameFromPackage(pkg)}`);
+                const newCandidate = await this.newCandidate(pkg, updatedVersions);
+                if (newCandidatePaths.has(newCandidate.path)) {
+                    this.logger.info(`Already created new candidate for path: ${newCandidate.path}`);
+                }
+                else {
+                    newCandidatePaths.add(newCandidate.path);
+                    newCandidates.push(newCandidate);
+                }
+            }
+        }
+        if (this.merge) {
+            this.logger.info(`Merging ${newCandidates.length} in-scope candidates`);
+            const mergePlugin = new merge_1.Merge(this.github, this.targetBranch, this.repositoryConfig);
+            newCandidates = await mergePlugin.run(newCandidates);
+        }
+        const newUpdates = newCandidates[0].pullRequest.updates;
+        newUpdates.push({
+            path: this.manifestPath,
+            createIfMissing: false,
+            updater: new release_please_manifest_1.ReleasePleaseManifest({
+                version: newCandidates[0].pullRequest.version,
+                versionsMap: updatedPathVersions,
+            }),
+        });
+        this.logger.info(`Post-processing ${newCandidates.length} in-scope candidates`);
+        newCandidates = this.postProcessCandidates(newCandidates, updatedVersions);
+        return [...outOfScopeCandidates, ...newCandidates];
+    }
+    /**
+     * Helper for finding a candidate release based on the package name.
+     * By default, we assume that the package name matches the release
+     * component.
+     * @param {T} pkg The package being released
+     * @param {Record<string, CandidateReleasePullRequest>} candidatesByPackage
+     *   All candidate pull requests indexed by package name
+     * @returns {CandidateReleasePullRequest | undefined} The candidate
+     *   pull request for the package, if one exists
+     */
+    findCandidateForPackage(pkg, candidatesByPackage) {
+        const packageName = this.packageNameFromPackage(pkg);
+        return candidatesByPackage[packageName];
+    }
+    /**
+     * Helper to determine which packages should be updated.
+     * @param {DependencyGraph<T>} graph All the packages in the repository
+     * @param {Record<string, CandidateReleasePullRequest>} candidatesByPackage
+     *   All candidate pull requests indexed by package name
+     * @returns {string[]} The names of the packages to update
+     */
+    packageNamesToUpdate(graph, candidatesByPackage) {
+        if (this.updateAllPackages) {
+            return Array.from(graph.values()).map(({ value }) => this.packageNameFromPackage(value));
+        }
+        return Object.keys(candidatesByPackage);
+    }
+    /**
+     * Helper to build up all the versions we are modifying in this
+     * repository.
+     * @param {DependencyGraph<T>} _graph All the packages in the repository
+     * @param {T[]} orderedPackages A list of packages that are currently
+     *   updated by the existing candidate pull requests
+     * @param {Record<string, CandidateReleasePullRequest>} candidatesByPackage
+     *   All candidate pull requests indexed by package name
+     * @returns a map of all updated versions (package name => Version) and a
+     *   map of all updated versions (component path => Version).
+ */ + async buildUpdatedVersions(_graph, orderedPackages, candidatesByPackage) { + const updatedVersions = new Map(); + const updatedPathVersions = new Map(); + for (const pkg of orderedPackages) { + const packageName = this.packageNameFromPackage(pkg); + this.logger.debug(`package: ${packageName}`); + const existingCandidate = candidatesByPackage[packageName]; + if (existingCandidate) { + const version = existingCandidate.pullRequest.version; + this.logger.debug(`version: ${version} from release-please`); + updatedVersions.set(packageName, version); + } + else { + const version = this.bumpVersion(pkg); + this.logger.debug(`version: ${version} forced bump`); + updatedVersions.set(packageName, version); + if (this.isReleaseVersion(version)) { + updatedPathVersions.set(this.pathFromPackage(pkg), version); + } + } + } + return { + updatedVersions, + updatedPathVersions, + }; + } + /** + * Given a release version, determine if we should bump the manifest + * version as well. + * @param {Version} _version The release version + */ + isReleaseVersion(_version) { + return true; + } + /** + * Helper to invert the graph from package => packages that it depends on + * to package => packages that depend on it. + * @param {DependencyGraph} graph + * @returns {DependencyGraph} + */ + invertGraph(graph) { + const dependentGraph = new Map(); + for (const [packageName, node] of graph) { + dependentGraph.set(packageName, { + deps: [], + value: node.value, + }); + } + for (const [packageName, node] of graph) { + for (const depName of node.deps) { + if (dependentGraph.has(depName)) { + dependentGraph.get(depName).deps.push(packageName); + } + } + } + return dependentGraph; + } + /** + * Determine all the packages which need to be updated and sort them. + * @param {DependencyGraph} graph The graph of package => packages it depends on + * @param {string} packageNamesToUpdate Names of the packages which are already + * being updated. + */ + buildGraphOrder(graph, packageNamesToUpdate) { + this.logger.info(`building graph order, existing package names: ${packageNamesToUpdate}`); + // invert the graph so it's dependency name => packages that depend on it + const dependentGraph = this.invertGraph(graph); + const visited = new Set(); + // we're iterating the `Map` in insertion order (as per ECMA262), but + // that does not reflect any particular traversal of the graph, so we + // visit all nodes, opportunistically short-circuiting leafs when we've + // already visited them. 
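+        // Sketch: with packages a -> b (a depends on b) and only ['b']
+        // updated, the inverted graph maps b to its dependent a, so the
+        // post-order visit pulls a into the update set as well.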
+        for (const name of packageNamesToUpdate) {
+            this.visitPostOrder(dependentGraph, name, visited, []);
+        }
+        return Array.from(visited).sort((a, b) => this.packageNameFromPackage(a).localeCompare(this.packageNameFromPackage(b)));
+    }
+    visitPostOrder(graph, name, visited, path) {
+        this.logger.debug(`visiting ${name}, path: ${path}`);
+        if (path.indexOf(name) !== -1) {
+            throw new Error(`found cycle in dependency graph: ${path.join(' -> ')} -> ${name}`);
+        }
+        const node = graph.get(name);
+        if (!node) {
+            this.logger.warn(`Didn't find node: ${name} in graph`);
+            return;
+        }
+        const nextPath = [...path, name];
+        for (const depName of node.deps) {
+            const dep = graph.get(depName);
+            if (!dep) {
+                this.logger.warn(`dependency not found in graph: ${depName}`);
+                return;
+            }
+            this.logger.info(`visiting ${depName} next`);
+            this.visitPostOrder(graph, depName, visited, nextPath);
+        }
+        if (!visited.has(node.value)) {
+            this.logger.debug(`marking ${name} as visited and adding ${this.packageNameFromPackage(node.value)} to order`);
+            visited.add(node.value);
+        }
+        else {
+            this.logger.debug(`${node.value} already visited`);
+        }
+    }
+}
+exports.WorkspacePlugin = WorkspacePlugin;
+const DEPENDENCY_HEADER = new RegExp('### Dependencies');
+function appendDependenciesSectionToChangelog(changelog, notes, logger = logger_1.logger) {
+    if (!changelog) {
+        return `### Dependencies\n\n${notes}`;
+    }
+    logger.info('appending dependency notes to changelog');
+    const newLines = [];
+    let seenDependenciesSection = false;
+    let seenDependencySectionSpacer = false;
+    let injected = false;
+    for (const line of changelog.split('\n')) {
+        if (seenDependenciesSection) {
+            const trimmedLine = line.trim();
+            if (seenDependencySectionSpacer &&
+                !injected &&
+                !trimmedLine.startsWith('*')) {
+                // inject the new notes at the end of the existing bullet run
+                newLines.push(notes);
+                injected = true;
+            }
+            if (trimmedLine === '') {
+                seenDependencySectionSpacer = true;
+            }
+        }
+        if (line.match(DEPENDENCY_HEADER)) {
+            seenDependenciesSection = true;
+        }
+        newLines.push(line);
+    }
+    if (injected) {
+        return newLines.join('\n');
+    }
+    if (seenDependenciesSection) {
+        return `${changelog}\n${notes}`;
+    }
+    return `${changelog}\n\n\n### Dependencies\n\n${notes}`;
+}
+exports.appendDependenciesSectionToChangelog = appendDependenciesSectionToChangelog;
+function addPath(path, file) {
+    return path === manifest_1.ROOT_PROJECT_PATH ? file : `${path}/${file}`;
+}
+exports.addPath = addPath;
+//# sourceMappingURL=workspace.js.map
+
+/***/ }),
+
+/***/ 25606:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
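+// How appendDependenciesSectionToChangelog above behaves, sketched on
+// hypothetical inputs:
+//   - no existing changelog      -> "### Dependencies\n\n" + notes
+//   - "### Dependencies" header  -> notes injected after its bullet run,
+//     followed by more content      before the next non-bullet line
+//   - header present, nothing    -> notes appended after the changelog
+//     injected during the loop
+//   - no header at all           -> a fresh "### Dependencies" section is
+//                                   appended at the end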
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.BaseStrategy = void 0; +const manifest_1 = __nccwpck_require__(24026); +const default_1 = __nccwpck_require__(77033); +const default_2 = __nccwpck_require__(65577); +const version_1 = __nccwpck_require__(25112); +const tag_name_1 = __nccwpck_require__(91203); +const logger_1 = __nccwpck_require__(18792); +const pull_request_title_1 = __nccwpck_require__(28866); +const branch_name_1 = __nccwpck_require__(94148); +const pull_request_body_1 = __nccwpck_require__(11941); +const composite_1 = __nccwpck_require__(93373); +const generic_1 = __nccwpck_require__(62963); +const generic_json_1 = __nccwpck_require__(33700); +const generic_xml_1 = __nccwpck_require__(71636); +const pom_xml_1 = __nccwpck_require__(55023); +const generic_yaml_1 = __nccwpck_require__(31861); +const generic_toml_1 = __nccwpck_require__(75074); +const DEFAULT_CHANGELOG_PATH = 'CHANGELOG.md'; +/** + * A strategy is responsible for determining which files are + * necessary to update in a release pull request. + */ +class BaseStrategy { + constructor(options) { + var _a, _b, _c; + this.logger = (_a = options.logger) !== null && _a !== void 0 ? _a : logger_1.logger; + this.path = options.path || manifest_1.ROOT_PROJECT_PATH; + this.github = options.github; + this.packageName = options.packageName; + this.component = + options.component || this.normalizeComponent(this.packageName); + this.versioningStrategy = + options.versioningStrategy || + new default_1.DefaultVersioningStrategy({ logger: this.logger }); + this.targetBranch = options.targetBranch; + this.repository = options.github.repository; + this.changelogPath = options.changelogPath || DEFAULT_CHANGELOG_PATH; + this.changelogHost = options.changelogHost; + this.changelogSections = options.changelogSections; + this.tagSeparator = options.tagSeparator; + this.skipGitHubRelease = options.skipGitHubRelease || false; + this.releaseAs = options.releaseAs; + this.changelogNotes = + options.changelogNotes || new default_2.DefaultChangelogNotes(options); + this.includeComponentInTag = (_b = options.includeComponentInTag) !== null && _b !== void 0 ? _b : true; + this.includeVInTag = (_c = options.includeVInTag) !== null && _c !== void 0 ? _c : true; + this.pullRequestTitlePattern = options.pullRequestTitlePattern; + this.pullRequestHeader = options.pullRequestHeader; + this.pullRequestFooter = options.pullRequestFooter; + this.componentNoSpace = options.componentNoSpace; + this.extraFiles = options.extraFiles || []; + this.initialVersion = options.initialVersion; + this.extraLabels = options.extraLabels || []; + } + /** + * Return the component for this strategy. This may be a computed field. + * @returns {string} + */ + async getComponent() { + if (!this.includeComponentInTag) { + return ''; + } + return this.component || (await this.getDefaultComponent()); + } + async getDefaultComponent() { + var _a; + return this.normalizeComponent((_a = this.packageName) !== null && _a !== void 0 ? _a : (await this.getDefaultPackageName())); + } + async getBranchComponent() { + return this.component || (await this.getDefaultComponent()); + } + async getPackageName() { + var _a; + return (_a = this.packageName) !== null && _a !== void 0 ? _a : (await this.getDefaultPackageName()); + } + async getDefaultPackageName() { + var _a; + return (_a = this.packageName) !== null && _a !== void 0 ? 
_a : ''; + } + normalizeComponent(component) { + if (!component) { + return ''; + } + return component; + } + /** + * Override this method to post process commits + * @param {ConventionalCommit[]} commits parsed commits + * @returns {ConventionalCommit[]} modified commits + */ + async postProcessCommits(commits) { + return commits; + } + async buildReleaseNotes(conventionalCommits, newVersion, newVersionTag, latestRelease, commits) { + var _a; + return await this.changelogNotes.buildNotes(conventionalCommits, { + host: this.changelogHost, + owner: this.repository.owner, + repository: this.repository.repo, + version: newVersion.toString(), + previousTag: (_a = latestRelease === null || latestRelease === void 0 ? void 0 : latestRelease.tag) === null || _a === void 0 ? void 0 : _a.toString(), + currentTag: newVersionTag.toString(), + targetBranch: this.targetBranch, + changelogSections: this.changelogSections, + commits: commits, + }); + } + async buildPullRequestBody(component, newVersion, releaseNotesBody, _conventionalCommits, _latestRelease, pullRequestHeader, pullRequestFooter) { + return new pull_request_body_1.PullRequestBody([ + { + component, + version: newVersion, + notes: releaseNotesBody, + }, + ], { + header: pullRequestHeader, + footer: pullRequestFooter, + }); + } + /** + * Builds a candidate release pull request + * @param {Commit[]} commits Raw commits to consider for this release. + * @param {Release} latestRelease Optional. The last release for this + * component if available. + * @param {boolean} draft Optional. Whether or not to create the pull + * request as a draft. Defaults to `false`. + * @returns {ReleasePullRequest | undefined} The release pull request to + * open for this path/component. Returns undefined if we should not + * open a pull request. + */ + async buildReleasePullRequest(commits, latestRelease, draft, labels = [], bumpOnlyOptions) { + var _a; + const conventionalCommits = await this.postProcessCommits(commits); + this.logger.info(`Considering: ${conventionalCommits.length} commits`); + if (!bumpOnlyOptions && conventionalCommits.length === 0) { + this.logger.info(`No commits for path: ${this.path}, skipping`); + return undefined; + } + const newVersion = (_a = bumpOnlyOptions === null || bumpOnlyOptions === void 0 ? void 0 : bumpOnlyOptions.newVersion) !== null && _a !== void 0 ? _a : (await this.buildNewVersion(conventionalCommits, latestRelease)); + const versionsMap = await this.updateVersionsMap(await this.buildVersionsMap(conventionalCommits), conventionalCommits, newVersion); + const component = await this.getComponent(); + this.logger.debug('component:', component); + const newVersionTag = new tag_name_1.TagName(newVersion, this.includeComponentInTag ? component : undefined, this.tagSeparator, this.includeVInTag); + this.logger.debug('pull request title pattern:', this.pullRequestTitlePattern); + this.logger.debug('componentNoSpace:', this.componentNoSpace); + const pullRequestTitle = pull_request_title_1.PullRequestTitle.ofComponentTargetBranchVersion(component || '', this.targetBranch, newVersion, this.pullRequestTitlePattern, this.componentNoSpace); + const branchComponent = await this.getBranchComponent(); + const branchName = branchComponent + ? 
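+            // e.g. a branch like "release-please--branches--main--components--my-lib"
+            // (illustrative; the exact format comes from BranchName)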
branch_name_1.BranchName.ofComponentTargetBranch(branchComponent, this.targetBranch) + : branch_name_1.BranchName.ofTargetBranch(this.targetBranch); + const releaseNotesBody = await this.buildReleaseNotes(conventionalCommits, newVersion, newVersionTag, latestRelease, commits); + if (!bumpOnlyOptions && this.changelogEmpty(releaseNotesBody)) { + this.logger.info(`No user facing commits found since ${latestRelease ? latestRelease.sha : 'beginning of time'} - skipping`); + return undefined; + } + const updates = await this.buildUpdates({ + changelogEntry: releaseNotesBody, + newVersion, + versionsMap, + latestVersion: latestRelease === null || latestRelease === void 0 ? void 0 : latestRelease.tag.version, + commits: conventionalCommits, + }); + const updatesWithExtras = (0, composite_1.mergeUpdates)(updates.concat(...(await this.extraFileUpdates(newVersion, versionsMap)))); + const pullRequestBody = await this.buildPullRequestBody(component, newVersion, releaseNotesBody, conventionalCommits, latestRelease, this.pullRequestHeader, this.pullRequestFooter); + return { + title: pullRequestTitle, + body: pullRequestBody, + updates: updatesWithExtras, + labels: [...labels, ...this.extraLabels], + headRefName: branchName.toString(), + version: newVersion, + draft: draft !== null && draft !== void 0 ? draft : false, + }; + } + // Helper to convert extra files with globs to the file paths to add + async extraFilePaths(extraFile) { + if (typeof extraFile !== 'object') { + return [extraFile]; + } + if (!extraFile.glob) { + return [extraFile.path]; + } + if (extraFile.path.startsWith('/')) { + // glob is relative to root, strip the leading `/` for glob matching + // and re-add the leading `/` to make the file relative to the root + return (await this.github.findFilesByGlobAndRef(extraFile.path.slice(1), this.targetBranch)).map(file => `/${file}`); + } + else if (this.path === manifest_1.ROOT_PROJECT_PATH) { + // root component, ignore path prefix + return this.github.findFilesByGlobAndRef(extraFile.path, this.targetBranch); + } + else { + // glob is relative to current path + return this.github.findFilesByGlobAndRef(extraFile.path, this.targetBranch, this.path); + } + } + async extraFileUpdates(version, versionsMap) { + const extraFileUpdates = []; + for (const extraFile of this.extraFiles) { + if (typeof extraFile === 'object') { + const paths = await this.extraFilePaths(extraFile); + for (const path of paths) { + switch (extraFile.type) { + case 'generic': + extraFileUpdates.push({ + path: this.addPath(path), + createIfMissing: false, + updater: new generic_1.Generic({ version, versionsMap }), + }); + break; + case 'json': + extraFileUpdates.push({ + path: this.addPath(path), + createIfMissing: false, + updater: new generic_json_1.GenericJson(extraFile.jsonpath, version), + }); + break; + case 'yaml': + extraFileUpdates.push({ + path: this.addPath(path), + createIfMissing: false, + updater: new generic_yaml_1.GenericYaml(extraFile.jsonpath, version), + }); + break; + case 'toml': + extraFileUpdates.push({ + path: this.addPath(path), + createIfMissing: false, + updater: new generic_toml_1.GenericToml(extraFile.jsonpath, version), + }); + break; + case 'xml': + extraFileUpdates.push({ + path: this.addPath(path), + createIfMissing: false, + updater: new generic_xml_1.GenericXml(extraFile.xpath, version), + }); + break; + case 'pom': + extraFileUpdates.push({ + path: this.addPath(path), + createIfMissing: false, + updater: new pom_xml_1.PomXml(version), + }); + break; + default: + throw new 
Error(`unsupported extraFile type: ${extraFile.type}`); + } + } + } + else if (extraFile.endsWith('.json')) { + extraFileUpdates.push({ + path: this.addPath(extraFile), + createIfMissing: false, + updater: new composite_1.CompositeUpdater(new generic_json_1.GenericJson('$.version', version), new generic_1.Generic({ version, versionsMap })), + }); + } + else if (extraFile.endsWith('.yaml') || extraFile.endsWith('.yml')) { + extraFileUpdates.push({ + path: this.addPath(extraFile), + createIfMissing: false, + updater: new composite_1.CompositeUpdater(new generic_yaml_1.GenericYaml('$.version', version), new generic_1.Generic({ version, versionsMap })), + }); + } + else if (extraFile.endsWith('.toml')) { + extraFileUpdates.push({ + path: this.addPath(extraFile), + createIfMissing: false, + updater: new composite_1.CompositeUpdater(new generic_toml_1.GenericToml('$.version', version), new generic_1.Generic({ version, versionsMap })), + }); + } + else if (extraFile.endsWith('.xml')) { + extraFileUpdates.push({ + path: this.addPath(extraFile), + createIfMissing: false, + updater: new composite_1.CompositeUpdater( + // Updates "version" element that is a child of the root element. + new generic_xml_1.GenericXml('/*/version', version), new generic_1.Generic({ version, versionsMap })), + }); + } + else { + extraFileUpdates.push({ + path: this.addPath(extraFile), + createIfMissing: false, + updater: new generic_1.Generic({ version, versionsMap }), + }); + } + } + return extraFileUpdates; + } + changelogEmpty(changelogEntry) { + return changelogEntry.split('\n').length <= 1; + } + async updateVersionsMap(versionsMap, conventionalCommits, _newVersion) { + for (const [component, version] of versionsMap.entries()) { + versionsMap.set(component, await this.versioningStrategy.bump(version, conventionalCommits)); + } + return versionsMap; + } + async buildNewVersion(conventionalCommits, latestRelease) { + if (this.releaseAs) { + this.logger.warn(`Setting version for ${this.path} from release-as configuration`); + return version_1.Version.parse(this.releaseAs); + } + const releaseAsCommit = conventionalCommits.find(conventionalCommit => conventionalCommit.notes.find(note => note.title === 'RELEASE AS')); + if (releaseAsCommit) { + const note = releaseAsCommit.notes.find(note => note.title === 'RELEASE AS'); + if (note) { + return version_1.Version.parse(note.text); + } + } + if (latestRelease) { + return await this.versioningStrategy.bump(latestRelease.tag.version, conventionalCommits); + } + return this.initialReleaseVersion(); + } + async buildVersionsMap(_conventionalCommits) { + return new Map(); + } + async parsePullRequestBody(pullRequestBody) { + return pull_request_body_1.PullRequestBody.parse(pullRequestBody, this.logger); + } + /** + * Given a merged pull request, build the candidate release. + * @param {PullRequest} mergedPullRequest The merged release pull request. + * @returns {Release} The candidate release. + * @deprecated Use buildReleases() instead. + */ + async buildRelease(mergedPullRequest, options) { + var _a; + if (this.skipGitHubRelease) { + this.logger.info('Release skipped from strategy config'); + return; + } + if (!mergedPullRequest.sha) { + this.logger.error('Pull request should have been merged'); + return; + } + const mergedTitlePattern = (_a = options === null || options === void 0 ? void 0 : options.groupPullRequestTitlePattern) !== null && _a !== void 0 ? 
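+        // fall back to the manifest-wide grouped pull request title pattern
+        // when the caller did not supply one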
_a : manifest_1.MANIFEST_PULL_REQUEST_TITLE_PATTERN; + const pullRequestTitle = pull_request_title_1.PullRequestTitle.parse(mergedPullRequest.title, this.pullRequestTitlePattern, this.componentNoSpace, this.logger) || + pull_request_title_1.PullRequestTitle.parse(mergedPullRequest.title, mergedTitlePattern, this.componentNoSpace, this.logger); + if (!pullRequestTitle) { + this.logger.error(`Bad pull request title: '${mergedPullRequest.title}'`); + return; + } + const branchName = branch_name_1.BranchName.parse(mergedPullRequest.headBranchName, this.logger); + if (!branchName) { + this.logger.error(`Bad branch name: ${mergedPullRequest.headBranchName}`); + return; + } + const pullRequestBody = await this.parsePullRequestBody(mergedPullRequest.body); + if (!pullRequestBody) { + this.logger.error('Could not parse pull request body as a release PR'); + return; + } + const component = await this.getComponent(); + let releaseData; + if (pullRequestBody.releaseData.length === 1 && + !pullRequestBody.releaseData[0].component) { + const branchComponent = await this.getBranchComponent(); + // standalone release PR, ensure the components match + if (this.normalizeComponent(branchName.component) !== + this.normalizeComponent(branchComponent)) { + this.logger.warn(`PR component: ${branchName.component} does not match configured component: ${branchComponent}`); + return; + } + releaseData = pullRequestBody.releaseData[0]; + } + else { + // manifest release with multiple components - find the release notes + // for the component to see if it was included in this release (parsed + // from the release pull request body) + releaseData = pullRequestBody.releaseData.find(datum => { + return (this.normalizeComponent(datum.component) === + this.normalizeComponent(component)); + }); + if (!releaseData && pullRequestBody.releaseData.length > 0) { + this.logger.info(`Pull request contains releases, but not for component: ${component}`); + return; + } + } + const notes = releaseData === null || releaseData === void 0 ? void 0 : releaseData.notes; + if (notes === undefined) { + this.logger.warn('Failed to find release notes'); + } + let version = pullRequestTitle.getVersion(); + if (!version || + (pullRequestBody.releaseData.length > 1 && (releaseData === null || releaseData === void 0 ? void 0 : releaseData.version))) { + // prioritize pull-request body version for multi-component releases + version = releaseData === null || releaseData === void 0 ? void 0 : releaseData.version; + } + if (!version) { + this.logger.error('Pull request should have included version'); + return; + } + if (!this.isPublishedVersion(version)) { + this.logger.warn(`Skipping non-published version: ${version.toString()}`); + return; + } + const tag = new tag_name_1.TagName(version, this.includeComponentInTag ? component : undefined, this.tagSeparator, this.includeVInTag); + const releaseName = component && this.includeComponentInTag + ? `${component}: v${version.toString()}` + : `v${version.toString()}`; + return { + name: releaseName, + tag, + notes: notes || '', + sha: mergedPullRequest.sha, + }; + } + /** + * Given a merged pull request, build the candidate releases. + * @param {PullRequest} mergedPullRequest The merged release pull request. + * @returns {Release} The candidate release. 
+ */ + async buildReleases(mergedPullRequest, options) { + const release = await this.buildRelease(mergedPullRequest, options); + if (release) { + return [release]; + } + return []; + } + isPublishedVersion(_version) { + return true; + } + /** + * Override this to handle the initial version of a new library. + */ + initialReleaseVersion() { + if (this.initialVersion) { + return version_1.Version.parse(this.initialVersion); + } + return version_1.Version.parse('1.0.0'); + } + /** + * Adds a given file path to the strategy path. + * @param {string} file Desired file path. + * @returns {string} The file relative to the strategy. + * @throws {Error} If the file path contains relative pathing characters, i.e. ../, ~/ + */ + addPath(file) { + // There is no strategy path to join, the strategy is at the root, or the + // file is at the root (denoted by a leading slash or tilde) + if (!this.path || this.path === manifest_1.ROOT_PROJECT_PATH || file.startsWith('/')) { + file = file.replace(/^\/+/, ''); + } + // Otherwise, the file is relative to the strategy path + else { + file = `${this.path.replace(/\/+$/, '')}/${file}`; + } + // Ensure the file path does not escape the workspace + if (/((^|\/)\.{1,2}|^~|^\/*)+\//.test(file)) { + throw new Error(`illegal pathing characters in path: ${file}`); + } + // Strip any trailing slashes and return + return file.replace(/\/+$/, ''); + } +} +exports.BaseStrategy = BaseStrategy; +//# sourceMappingURL=base.js.map + +/***/ }), + +/***/ 8275: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Bazel = void 0; +const module_bazel_1 = __nccwpck_require__(93440); +const changelog_1 = __nccwpck_require__(11128); +const base_1 = __nccwpck_require__(25606); +class Bazel extends base_1.BaseStrategy { + constructor(options) { + var _a; + super(options); + this.versionFile = (_a = options.versionFile) !== null && _a !== void 0 ? _a : 'MODULE.bazel'; + } + async buildUpdates(options) { + const updates = []; + const version = options.newVersion; + updates.push({ + path: this.addPath(this.changelogPath), + createIfMissing: true, + updater: new changelog_1.Changelog({ + version, + changelogEntry: options.changelogEntry, + }), + }); + updates.push({ + path: this.addPath(this.versionFile), + createIfMissing: false, + updater: new module_bazel_1.ModuleBazel({ + version, + }), + }); + return updates; + } +} +exports.Bazel = Bazel; +//# sourceMappingURL=bazel.js.map + +/***/ }), + +/***/ 33247: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Dart = void 0; +// Generic +const changelog_1 = __nccwpck_require__(11128); +const yaml = __nccwpck_require__(21917); +// pubspec +const pubspec_yaml_1 = __nccwpck_require__(12492); +const base_1 = __nccwpck_require__(25606); +const errors_1 = __nccwpck_require__(10818); +class Dart extends base_1.BaseStrategy { + async buildUpdates(options) { + const updates = []; + const version = options.newVersion; + updates.push({ + path: this.addPath(this.changelogPath), + createIfMissing: true, + updater: new changelog_1.Changelog({ + version, + changelogEntry: options.changelogEntry, + }), + }); + updates.push({ + path: this.addPath('pubspec.yaml'), + createIfMissing: false, + cachedFileContents: this.pubspecYmlContents, + updater: new pubspec_yaml_1.PubspecYaml({ + version, + }), + }); + return updates; + } + async getDefaultPackageName() { + const pubspecYmlContents = await this.getPubspecYmlContents(); + const pubspec = yaml.load(pubspecYmlContents.parsedContent, { json: true }); + if (typeof pubspec === 'object') { + return pubspec.name; + } + else { + return undefined; + } + } + async getPubspecYmlContents() { + if (!this.pubspecYmlContents) { + try { + this.pubspecYmlContents = await this.github.getFileContentsOnBranch(this.addPath('pubspec.yaml'), this.targetBranch); + } + catch (e) { + if (e instanceof errors_1.FileNotFoundError) { + throw new errors_1.MissingRequiredFileError(this.addPath('pubspec.yaml'), Dart.name, `${this.repository.owner}/${this.repository.repo}`); + } + throw e; + } + } + return this.pubspecYmlContents; + } +} +exports.Dart = Dart; +//# sourceMappingURL=dart.js.map + +/***/ }), + +/***/ 5764: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
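+// The Dart strategy above reads the package name out of pubspec.yaml; for a
+// hypothetical file containing "name: my_package", getDefaultPackageName()
+// resolves to 'my_package', and the PubspecYaml updater rewrites its
+// "version:" field.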
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.DotnetYoshi = void 0; +const base_1 = __nccwpck_require__(25606); +const changelog_1 = __nccwpck_require__(11128); +const apis_1 = __nccwpck_require__(73292); +const errors_1 = __nccwpck_require__(10818); +const CHANGELOG_SECTIONS = [ + { type: 'feat', section: 'New features' }, + { type: 'fix', section: 'Bug fixes' }, + { type: 'perf', section: 'Performance improvements' }, + { type: 'revert', section: 'Reverts' }, + { type: 'chore', section: 'Miscellaneous chores', hidden: true }, + { type: 'docs', section: 'Documentation improvements' }, + { type: 'style', section: 'Styles', hidden: true }, + { type: 'refactor', section: 'Code Refactoring', hidden: true }, + { type: 'test', section: 'Tests', hidden: true }, + { type: 'build', section: 'Build System', hidden: true }, + { type: 'ci', section: 'Continuous Integration', hidden: true }, +]; +const DEFAULT_CHANGELOG_PATH = 'docs/history.md'; +const DEFAULT_PULL_REQUEST_TITLE_PATTERN = 'Release${component} version ${version}'; +const DEFAULT_PULL_REQUEST_HEADER = ':robot: I have created a release *beep* *boop*'; +const DEFAULT_PULL_REQUEST_FOOTER = 'This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please).'; +const RELEASE_NOTES_HEADER_PATTERN = /#{2,3} \[?(\d+\.\d+\.\d+-?[^\]]*)\]?.* \((\d{4}-\d{2}-\d{2})\)/; +class DotnetYoshi extends base_1.BaseStrategy { + constructor(options) { + var _a, _b, _c, _d, _e, _f; + options.changelogSections = (_a = options.changelogSections) !== null && _a !== void 0 ? _a : CHANGELOG_SECTIONS; + options.changelogPath = (_b = options.changelogPath) !== null && _b !== void 0 ? _b : DEFAULT_CHANGELOG_PATH; + options.pullRequestTitlePattern = + (_c = options.pullRequestTitlePattern) !== null && _c !== void 0 ? _c : DEFAULT_PULL_REQUEST_TITLE_PATTERN; + options.pullRequestHeader = + (_d = options.pullRequestHeader) !== null && _d !== void 0 ? _d : DEFAULT_PULL_REQUEST_HEADER; + options.pullRequestFooter = + (_e = options.pullRequestFooter) !== null && _e !== void 0 ? _e : DEFAULT_PULL_REQUEST_FOOTER; + options.includeVInTag = (_f = options.includeVInTag) !== null && _f !== void 0 ? _f : false; + super(options); + } + async buildReleaseNotes(conventionalCommits, newVersion, newVersionTag, latestRelease) { + const notes = await super.buildReleaseNotes(conventionalCommits, newVersion, newVersionTag, latestRelease); + return notes.replace(RELEASE_NOTES_HEADER_PATTERN, '## Version $1, released $2'); + } + async getApi() { + try { + const contents = await this.github.getFileContentsOnBranch('apis/apis.json', this.targetBranch); + const apis = JSON.parse(contents.parsedContent); + const component = await this.getComponent(); + return apis.apis.find(api => api.id === component); + } + catch (e) { + if (e instanceof errors_1.FileNotFoundError) { + throw new errors_1.MissingRequiredFileError('apis/apis.json', DotnetYoshi.name, `${this.repository.owner}/${this.repository.repo}`); + } + throw e; + } + } + async getDefaultComponent() { + // default component is based on the path + const pathParts = this.path.split('/'); + return pathParts[pathParts.length - 1]; + } + async buildUpdates(options) { + const updates = []; + const version = options.newVersion; + const component = await this.getComponent(); + const api = await this.getApi(); + if (api === null || api === void 0 ? 
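+        // an entry in apis/apis.json may set "noVersionHistory" to suppress
+        // the changelog update for that API id (the `?.` optional chaining is
+        // compiled out below)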
void 0 : api.noVersionHistory) { + this.logger.info(`Skipping changelog for ${component} via noVersionHistory configuration`); + } + else { + updates.push({ + path: this.addPath(this.changelogPath), + createIfMissing: true, + updater: new changelog_1.Changelog({ + version, + changelogEntry: options.changelogEntry, + versionHeaderRegex: '\n## Version [0-9[]+', + }), + }); + } + if (!component) { + this.logger.warn('Dotnet strategy expects to use components, could not update all files'); + return updates; + } + updates.push({ + path: 'apis/apis.json', + createIfMissing: false, + updater: new apis_1.Apis(component, version), + }); + return updates; + } +} +exports.DotnetYoshi = DotnetYoshi; +//# sourceMappingURL=dotnet-yoshi.js.map + +/***/ }), + +/***/ 25267: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Elixir = void 0; +// Generic +const changelog_1 = __nccwpck_require__(11128); +// mix.exs support +const elixir_mix_exs_1 = __nccwpck_require__(99827); +const base_1 = __nccwpck_require__(25606); +class Elixir extends base_1.BaseStrategy { + async buildUpdates(options) { + const updates = []; + const version = options.newVersion; + updates.push({ + path: this.addPath(this.changelogPath), + createIfMissing: true, + updater: new changelog_1.Changelog({ + version, + changelogEntry: options.changelogEntry, + }), + }); + updates.push({ + path: this.addPath('mix.exs'), + createIfMissing: false, + updater: new elixir_mix_exs_1.ElixirMixExs({ + version, + }), + }); + return updates; + } +} +exports.Elixir = Elixir; +//# sourceMappingURL=elixir.js.map + +/***/ }), + +/***/ 49813: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Expo = void 0; +const node_1 = __nccwpck_require__(97754); +const app_json_1 = __nccwpck_require__(897); +const version_1 = __nccwpck_require__(25112); +/** + * Strategy for building Expo based React Native projects. This strategy extends + * the Node strategy to additionally update the `app.json` file of a project. 
+ */ +class Expo extends node_1.Node { + async buildUpdates(options) { + const version = options.newVersion; + const updates = await super.buildUpdates(options); + const expoSDKVersion = await this.getExpoSDKVersion(); + updates.push({ + path: this.addPath('app.json'), + createIfMissing: false, + updater: new app_json_1.AppJson({ version, expoSDKVersion }), + }); + return updates; + } + /** + * Determine the Expo SDK version by parsing the package.json dependencies. + */ + async getExpoSDKVersion() { + var _a, _b, _c, _d; + const pkgJsonContents = await this.getPkgJsonContents(); + const pkg = JSON.parse(pkgJsonContents.parsedContent); + return version_1.Version.parse(((_a = pkg.dependencies) === null || _a === void 0 ? void 0 : _a.expo) || + ((_b = pkg.devDependencies) === null || _b === void 0 ? void 0 : _b.expo) || + ((_c = pkg.peerDependencies) === null || _c === void 0 ? void 0 : _c.expo) || + ((_d = pkg.optionalDependencies) === null || _d === void 0 ? void 0 : _d.expo) || + '0.0.0'); + } +} +exports.Expo = Expo; +//# sourceMappingURL=expo.js.map + +/***/ }), + +/***/ 70958: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GoYoshi = void 0; +const base_1 = __nccwpck_require__(25606); +const changelog_1 = __nccwpck_require__(11128); +const version_1 = __nccwpck_require__(25112); +const version_go_1 = __nccwpck_require__(32735); +const path_1 = __nccwpck_require__(71017); +const CHANGELOG_SECTIONS = [ + { type: 'feat', section: 'Features' }, + { type: 'fix', section: 'Bug Fixes' }, + { type: 'perf', section: 'Performance Improvements' }, + { type: 'revert', section: 'Reverts' }, + { type: 'docs', section: 'Documentation' }, + { type: 'style', section: 'Styles', hidden: true }, + { type: 'chore', section: 'Miscellaneous Chores', hidden: true }, + { type: 'refactor', section: 'Code Refactoring', hidden: true }, + { type: 'test', section: 'Tests', hidden: true }, + { type: 'build', section: 'Build System', hidden: true }, + { type: 'ci', section: 'Continuous Integration', hidden: true }, +]; +const REGEN_PR_REGEX = /.*auto-regenerate.*/; +const REGEN_ISSUE_REGEX = /(?<prefix>.*)\(#(?<pr>.*)\)(\n|$)/; +class GoYoshi extends base_1.BaseStrategy { + constructor(options) { + var _a; + options.changelogPath = (_a = options.changelogPath) !== null && _a !== void 0 ?
_a : 'CHANGES.md'; + super({ + ...options, + changelogSections: CHANGELOG_SECTIONS, + }); + } + async buildUpdates(options) { + const updates = []; + const version = options.newVersion; + updates.push({ + path: this.addPath(this.changelogPath), + createIfMissing: true, + updater: new changelog_1.Changelog({ + version, + changelogEntry: options.changelogEntry, + }), + }); + updates.push({ + path: this.addPath('internal/version.go'), + createIfMissing: false, + updater: new version_go_1.VersionGo({ + version, + }), + }); + return updates; + } + async postProcessCommits(commits) { + let regenCommit; + const component = await this.getComponent(); + this.logger.debug('Filtering commits'); + const ignoredSubmodules = await this.getIgnoredSubModules(); + return commits.filter(commit => { + var _a, _b; + // Only have a single entry of the nightly regen listed in the changelog. + // If there are more than one of these commits, append associated PR. + if (this.repository.owner === 'googleapis' && + this.repository.repo === 'google-api-go-client' && + REGEN_PR_REGEX.test(commit.message)) { + if (regenCommit) { + const match = commit.message.match(REGEN_ISSUE_REGEX); + if ((_a = match === null || match === void 0 ? void 0 : match.groups) === null || _a === void 0 ? void 0 : _a.pr) { + regenCommit.references.push({ + action: 'refs', + issue: match.groups.pr, + prefix: '#', + }); + } + return false; + } + else { + commit.sha = ''; + regenCommit = commit; + const match = commit.bareMessage.match(REGEN_ISSUE_REGEX); + if ((_b = match === null || match === void 0 ? void 0 : match.groups) === null || _b === void 0 ? void 0 : _b.pr) { + regenCommit.references.push({ + action: 'refs', + issue: match.groups.pr, + prefix: '#', + }); + regenCommit.bareMessage = match.groups.prefix.trim(); + } + } + } + // For google-cloud-go, filter into 2 cases, a subset of modules + // released independently, and the remainder + if (this.repository.owner === 'googleapis' && + this.repository.repo === 'google-cloud-go') { + // Skip commits that don't have a scope as we don't know where to + // put them + if (!commit.scope) { + this.logger.debug(`Skipping commit without scope: ${commit.message}`); + return false; + } + // Skip commits related to sub-modules as they are not part of + // the parent module. 
+ if (this.includeComponentInTag) { + // This is a submodule release, so only include commits in this + // scope + if (!commitMatchesScope(commit.scope, component)) { + this.logger.debug(`Skipping commit scope: ${commit.scope} != ${component}`); + return false; + } + } + else { + // This is the main module release, so ignore sub modules that + // are released independently + for (const submodule of ignoredSubmodules) { + if (commitMatchesScope(commit.scope, submodule)) { + this.logger.debug(`Skipping ignored commit scope: ${commit.scope}`); + return false; + } + } + } + } + return true; + }); + } + async getIgnoredSubModules() { + // ignored submodules only applies to the root component of + // googleapis/google-cloud-go + if (this.repository.owner !== 'googleapis' || + this.repository.repo !== 'google-cloud-go' || + this.includeComponentInTag) { + return new Set(); + } + this.logger.info('Looking for go.mod files'); + const paths = (await this.github.findFilesByFilenameAndRef('go.mod', this.targetBranch)) + .filter(path => !path.includes('internal') && path !== 'go.mod') + .map(path => (0, path_1.dirname)(path)); + this.logger.info(`Found ${paths.length} submodules`); + this.logger.debug(JSON.stringify(paths)); + return new Set(paths); + } + // "closes" is a little presumptuous, let's just indicate that the + // PR references these other commits: + async buildReleaseNotes(conventionalCommits, newVersion, newVersionTag, latestRelease, commits) { + const releaseNotes = await super.buildReleaseNotes(conventionalCommits, newVersion, newVersionTag, latestRelease, commits); + return releaseNotes.replace(/, closes /g, ', refs '); + } + initialReleaseVersion() { + return version_1.Version.parse('0.1.0'); + } +} +exports.GoYoshi = GoYoshi; +function commitMatchesScope(commitScope, scope) { + return commitScope === scope || commitScope.startsWith(`${scope}/`); +} +//# sourceMappingURL=go-yoshi.js.map + +/***/ }), + +/***/ 22494: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Go = void 0; +// Generic +const changelog_1 = __nccwpck_require__(11128); +const base_1 = __nccwpck_require__(25606); +class Go extends base_1.BaseStrategy { + async buildUpdates(options) { + const updates = []; + const version = options.newVersion; + updates.push({ + path: this.addPath(this.changelogPath), + createIfMissing: true, + updater: new changelog_1.Changelog({ + version, + changelogEntry: options.changelogEntry, + }), + }); + return updates; + } +} +exports.Go = Go; +//# sourceMappingURL=go.js.map + +/***/ }), + +/***/ 78249: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Helm = void 0; +// Generic +const changelog_1 = __nccwpck_require__(11128); +const yaml = __nccwpck_require__(21917); +// helm +const chart_yaml_1 = __nccwpck_require__(57947); +const base_1 = __nccwpck_require__(25606); +const errors_1 = __nccwpck_require__(10818); +class Helm extends base_1.BaseStrategy { + async buildUpdates(options) { + const updates = []; + const version = options.newVersion; + updates.push({ + path: this.addPath(this.changelogPath), + createIfMissing: true, + updater: new changelog_1.Changelog({ + version, + changelogEntry: options.changelogEntry, + }), + }); + updates.push({ + path: this.addPath('Chart.yaml'), + createIfMissing: false, + cachedFileContents: this.chartYmlContents, + updater: new chart_yaml_1.ChartYaml({ + version, + }), + }); + return updates; + } + async getDefaultPackageName() { + const chartYmlContents = await this.getChartYmlContents(); + const chart = yaml.load(chartYmlContents.parsedContent, { json: true }); + if (typeof chart === 'object') { + return chart.name; + } + else { + return undefined; + } + } + async getChartYmlContents() { + if (!this.chartYmlContents) { + try { + this.chartYmlContents = await this.github.getFileContents(this.addPath('Chart.yaml')); + } + catch (e) { + if (e instanceof errors_1.FileNotFoundError) { + throw new errors_1.MissingRequiredFileError(this.addPath('Chart.yaml'), Helm.name, `${this.repository.owner}/${this.repository.repo}`); + } + throw e; + } + } + return this.chartYmlContents; + } +} +exports.Helm = Helm; +//# sourceMappingURL=helm.js.map + +/***/ }), + +/***/ 39129: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.JavaYoshiMonoRepo = void 0; +const versions_manifest_1 = __nccwpck_require__(45153); +const version_1 = __nccwpck_require__(25112); +const changelog_1 = __nccwpck_require__(11128); +const changelog_json_1 = __nccwpck_require__(34540); +const commit_split_1 = __nccwpck_require__(95086); +const composite_1 = __nccwpck_require__(93373); +const errors_1 = __nccwpck_require__(10818); +const java_1 = __nccwpck_require__(71922); +const java_update_1 = __nccwpck_require__(3972); +const filter_commits_1 = __nccwpck_require__(49526); +class JavaYoshiMonoRepo extends java_1.Java { + /** + * Override this method to post process commits + * @param {ConventionalCommit[]} commits parsed commits + * @returns {ConventionalCommit[]} modified commits + */ + async postProcessCommits(commits) { + if (commits.length === 0) { + // For Java commits, push a fake commit so we force a + // SNAPSHOT release + commits.push({ + type: 'fake', + bareMessage: 'fake commit', + message: 'fake commit', + breaking: false, + scope: null, + notes: [], + files: [], + references: [], + sha: 'fake', + }); + } + return commits; + } + async needsSnapshot() { + return versions_manifest_1.VersionsManifest.needsSnapshot((await this.getVersionsContent()).parsedContent); + } + async buildVersionsMap() { + this.versionsContent = await this.getVersionsContent(); + return versions_manifest_1.VersionsManifest.parseVersions(this.versionsContent.parsedContent); + } + async getVersionsContent() { + if (!this.versionsContent) { + try { + this.versionsContent = await this.github.getFileContentsOnBranch(this.addPath('versions.txt'), this.targetBranch); + } + catch (err) { + if (err instanceof errors_1.GitHubAPIError) { + throw new errors_1.MissingRequiredFileError(this.addPath('versions.txt'), JavaYoshiMonoRepo.name, `${this.repository.owner}/${this.repository.repo}`); + } + throw err; + } + } + return this.versionsContent; + } + async buildUpdates(options) { + const updates = []; + const version = options.newVersion; + const versionsMap = options.versionsMap; + updates.push({ + path: this.addPath('versions.txt'), + createIfMissing: false, + cachedFileContents: this.versionsContent, + updater: new versions_manifest_1.VersionsManifest({ + version, + versionsMap, + }), + }); + const pomFilesSearch = this.github.findFilesByFilenameAndRef('pom.xml', this.targetBranch, this.path); + const buildFilesSearch = this.github.findFilesByFilenameAndRef('build.gradle', this.targetBranch, this.path); + const dependenciesSearch = this.github.findFilesByFilenameAndRef('dependencies.properties', this.targetBranch, this.path); + const readmeFilesSearch = this.github.findFilesByFilenameAndRef('README.md', this.targetBranch, this.path); + const pomFiles = await pomFilesSearch; + pomFiles.forEach(path => { + updates.push({ + path: this.addPath(path), + createIfMissing: false, + updater: new java_update_1.JavaUpdate({ + version, + versionsMap, + isSnapshot: options.isSnapshot, + }), + }); + }); + const buildFiles = await buildFilesSearch; + buildFiles.forEach(path => { + updates.push({ + path: this.addPath(path), + createIfMissing: false, + updater: new java_update_1.JavaUpdate({ + version, + versionsMap, + isSnapshot: options.isSnapshot, + }), + }); + }); + const dependenciesFiles = await dependenciesSearch; + dependenciesFiles.forEach(path => { + updates.push({ + path: this.addPath(path), + createIfMissing: false, + updater: new java_update_1.JavaUpdate({ + version, + versionsMap, + isSnapshot: 
options.isSnapshot, + }), + }); + }); + const readmeFiles = await readmeFilesSearch; + readmeFiles.forEach(path => { + updates.push({ + path: this.addPath(path), + createIfMissing: false, + updater: new java_update_1.JavaUpdate({ + version, + versionsMap, + isSnapshot: options.isSnapshot, + }), + }); + }); + this.extraFiles.forEach(extraFile => { + if (typeof extraFile === 'object') { + return; + } + updates.push({ + path: extraFile, + createIfMissing: false, + updater: new java_update_1.JavaUpdate({ + version, + versionsMap, + isSnapshot: options.isSnapshot, + }), + }); + }); + if (!options.isSnapshot) { + updates.push({ + path: this.addPath(this.changelogPath), + createIfMissing: true, + updater: new changelog_1.Changelog({ + version, + changelogEntry: options.changelogEntry, + }), + }); + // Bail early if the repository has no root changelog.json. + // This file is used to opt into machine readable commits. + const hasChangelogJson = await this.hasChangelogJson(); + if (hasChangelogJson && options.commits) { + const changelogUpdates = []; + const cs = new commit_split_1.CommitSplit({ + includeEmpty: false, + }); + const splitCommits = cs.split((0, filter_commits_1.filterCommits)(options.commits, this.changelogSections)); + for (const path of Object.keys(splitCommits)) { + const repoMetadata = await this.getRepoMetadata(path); + const artifactName = repoMetadata + ? repoMetadata['distribution_name'] + : null; + if (repoMetadata && artifactName) { + this.logger.info(`Found artifact ${artifactName} for ${path}`); + changelogUpdates.push(new changelog_json_1.ChangelogJson({ + artifactName, + version, + // We filter out "chore:" commits, to reduce noise in the upstream + // release notes. We will only show a product release note entry + // if there has been a substantial change, such as a fix or feature. + commits: splitCommits[path], + language: 'JAVA', + })); + } + } + updates.push({ + path: 'changelog.json', + createIfMissing: false, + updater: new composite_1.CompositeUpdater(...changelogUpdates), + }); + } + } + return updates; + } + async hasChangelogJson() { + try { + const content = await this.github.getFileContentsOnBranch('changelog.json', this.targetBranch); + return !!content; + } + catch (e) { + if (e instanceof errors_1.FileNotFoundError) + return false; + else + throw e; + } + } + async getRepoMetadata(path) { + try { + const content = await this.github.getFileContentsOnBranch(this.addPath(`${path}/.repo-metadata.json`), this.targetBranch); + return content ? 
JSON.parse(content.parsedContent) : null; + } + catch (e) { + if (e instanceof errors_1.FileNotFoundError) + return null; + else + throw e; + } + } + async updateVersionsMap(versionsMap, conventionalCommits) { + let isPromotion = false; + const modifiedCommits = []; + for (const commit of conventionalCommits) { + if (isPromotionCommit(commit)) { + isPromotion = true; + modifiedCommits.push({ + ...commit, + notes: commit.notes.filter(note => !isPromotionNote(note)), + }); + } + else { + modifiedCommits.push(commit); + } + } + for (const versionKey of versionsMap.keys()) { + const version = versionsMap.get(versionKey); + if (!version) { + this.logger.warn(`didn't find version for ${versionKey}`); + continue; + } + if (isPromotion && isStableArtifact(versionKey)) { + versionsMap.set(versionKey, version_1.Version.parse('1.0.0')); + } + else { + const newVersion = await this.versioningStrategy.bump(version, modifiedCommits); + versionsMap.set(versionKey, newVersion); + } + } + return versionsMap; + } + initialReleaseVersion() { + return version_1.Version.parse('0.1.0'); + } +} +exports.JavaYoshiMonoRepo = JavaYoshiMonoRepo; +const VERSIONED_ARTIFACT_REGEX = /^.*-(v\d+[^-]*)$/; +const VERSION_REGEX = /^v\d+(.*)$/; +/** + * Returns true if the artifact should be considered stable + * @param artifact name of the artifact to check + */ +function isStableArtifact(artifact) { + const match = artifact.match(VERSIONED_ARTIFACT_REGEX); + if (!match) { + // The artifact does not have a version qualifier at the end + return true; + } + const versionMatch = match[1].match(VERSION_REGEX); + if (versionMatch && versionMatch[1]) { + // The version is not stable (probably alpha/beta/rc) + return false; + } + return true; +} +function isPromotionCommit(commit) { + return commit.notes.some(isPromotionNote); +} +function isPromotionNote(note) { + return note.title === 'RELEASE AS' && note.text === '1.0.0'; +} +//# sourceMappingURL=java-yoshi-mono-repo.js.map + +/***/ }), + +/***/ 62492: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.JavaYoshi = void 0; +const versions_manifest_1 = __nccwpck_require__(45153); +const version_1 = __nccwpck_require__(25112); +const changelog_1 = __nccwpck_require__(11128); +const errors_1 = __nccwpck_require__(10818); +const java_1 = __nccwpck_require__(71922); +const java_update_1 = __nccwpck_require__(3972); +class JavaYoshi extends java_1.Java { + /** + * Override this method to post process commits + * @param {ConventionalCommit[]} commits parsed commits + * @returns {ConventionalCommit[]} modified commits + */ + async postProcessCommits(commits) { + if (commits.length === 0) { + // For Java commits, push a fake commit so we force a + // SNAPSHOT release + commits.push({ + type: 'fake', + bareMessage: 'fake commit', + message: 'fake commit', + breaking: false, + scope: null, + notes: [], + files: [], + references: [], + sha: 'fake', + }); + } + return commits; + } + async needsSnapshot() { + return versions_manifest_1.VersionsManifest.needsSnapshot((await this.getVersionsContent()).parsedContent); + } + async buildVersionsMap() { + this.versionsContent = await this.getVersionsContent(); + return versions_manifest_1.VersionsManifest.parseVersions(this.versionsContent.parsedContent); + } + async getVersionsContent() { + if (!this.versionsContent) { + try { + this.versionsContent = await this.github.getFileContentsOnBranch(this.addPath('versions.txt'), this.targetBranch); + } + catch (err) { + if (err instanceof errors_1.FileNotFoundError) { + throw new errors_1.MissingRequiredFileError(this.addPath('versions.txt'), JavaYoshi.name, `${this.repository.owner}/${this.repository.repo}`); + } + throw err; + } + } + return this.versionsContent; + } + async buildUpdates(options) { + const updates = []; + const version = options.newVersion; + const versionsMap = options.versionsMap; + updates.push({ + path: this.addPath('versions.txt'), + createIfMissing: false, + cachedFileContents: this.versionsContent, + updater: new versions_manifest_1.VersionsManifest({ + version, + versionsMap, + }), + }); + const pomFilesSearch = this.github.findFilesByFilenameAndRef('pom.xml', this.targetBranch, this.path); + const buildFilesSearch = this.github.findFilesByFilenameAndRef('build.gradle', this.targetBranch, this.path); + const dependenciesSearch = this.github.findFilesByFilenameAndRef('dependencies.properties', this.targetBranch, this.path); + const pomFiles = await pomFilesSearch; + pomFiles.forEach(path => { + updates.push({ + path: this.addPath(path), + createIfMissing: false, + updater: new java_update_1.JavaUpdate({ + version, + versionsMap, + isSnapshot: options.isSnapshot, + }), + }); + }); + const buildFiles = await buildFilesSearch; + buildFiles.forEach(path => { + updates.push({ + path: this.addPath(path), + createIfMissing: false, + updater: new java_update_1.JavaUpdate({ + version, + versionsMap, + isSnapshot: options.isSnapshot, + }), + }); + }); + const dependenciesFiles = await dependenciesSearch; + dependenciesFiles.forEach(path => { + updates.push({ + path: this.addPath(path), + createIfMissing: false, + updater: new java_update_1.JavaUpdate({ + version, + versionsMap, + isSnapshot: options.isSnapshot, + }), + }); + }); + this.extraFiles.forEach(extraFile => { + if (typeof extraFile === 'object') { + return; + } + updates.push({ + path: extraFile, + createIfMissing: false, + updater: new java_update_1.JavaUpdate({ + version, + versionsMap, + isSnapshot: options.isSnapshot, + }), + }); + }); + if 
(!options.isSnapshot) { + updates.push({ + path: this.addPath(this.changelogPath), + createIfMissing: true, + updater: new changelog_1.Changelog({ + version, + changelogEntry: options.changelogEntry, + }), + }); + } + return updates; + } + async updateVersionsMap(versionsMap, conventionalCommits) { + let isPromotion = false; + const modifiedCommits = []; + for (const commit of conventionalCommits) { + if (isPromotionCommit(commit)) { + isPromotion = true; + modifiedCommits.push({ + ...commit, + notes: commit.notes.filter(note => !isPromotionNote(note)), + }); + } + else { + modifiedCommits.push(commit); + } + } + for (const versionKey of versionsMap.keys()) { + const version = versionsMap.get(versionKey); + if (!version) { + this.logger.warn(`didn't find version for ${versionKey}`); + continue; + } + if (isPromotion && isStableArtifact(versionKey)) { + versionsMap.set(versionKey, version_1.Version.parse('1.0.0')); + } + else { + const newVersion = await this.versioningStrategy.bump(version, modifiedCommits); + versionsMap.set(versionKey, newVersion); + } + } + return versionsMap; + } + initialReleaseVersion() { + return version_1.Version.parse('0.1.0'); + } +} +exports.JavaYoshi = JavaYoshi; +const VERSIONED_ARTIFACT_REGEX = /^.*-(v\d+[^-]*)$/; +const VERSION_REGEX = /^v\d+(.*)$/; +/** + * Returns true if the artifact should be considered stable + * @param artifact name of the artifact to check + */ +function isStableArtifact(artifact) { + const match = artifact.match(VERSIONED_ARTIFACT_REGEX); + if (!match) { + // The artifact does not have a version qualifier at the end + return true; + } + const versionMatch = match[1].match(VERSION_REGEX); + if (versionMatch && versionMatch[1]) { + // The version is not stable (probably alpha/beta/rc) + return false; + } + return true; +} +function isPromotionCommit(commit) { + return commit.notes.some(isPromotionNote); +} +function isPromotionNote(note) { + return note.title === 'RELEASE AS' && note.text === '1.0.0'; +} +//# sourceMappingURL=java-yoshi.js.map + +/***/ }), + +/***/ 71922: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Java = void 0; +const base_1 = __nccwpck_require__(25606); +const changelog_1 = __nccwpck_require__(11128); +const java_snapshot_1 = __nccwpck_require__(81189); +const pull_request_title_1 = __nccwpck_require__(28866); +const branch_name_1 = __nccwpck_require__(94148); +const pull_request_body_1 = __nccwpck_require__(11941); +const default_1 = __nccwpck_require__(77033); +const java_add_snapshot_1 = __nccwpck_require__(3277); +const manifest_1 = __nccwpck_require__(24026); +const java_released_1 = __nccwpck_require__(24678); +const composite_1 = __nccwpck_require__(93373); +const logger_1 = __nccwpck_require__(18792); +const CHANGELOG_SECTIONS = [ + { type: 'feat', section: 'Features' }, + { type: 'fix', section: 'Bug Fixes' }, + { type: 'perf', section: 'Performance Improvements' }, + { type: 'deps', section: 'Dependencies' }, + { type: 'revert', section: 'Reverts' }, + { type: 'docs', section: 'Documentation' }, + { type: 'style', section: 'Styles', hidden: true }, + { type: 'chore', section: 'Miscellaneous Chores', hidden: true }, + { type: 'refactor', section: 'Code Refactoring', hidden: true }, + { type: 'test', section: 'Tests', hidden: true }, + { type: 'build', section: 'Build System', hidden: true }, + { type: 'ci', section: 'Continuous Integration', hidden: true }, +]; +/** + * A strategy that generates SNAPSHOT version after each release, which is standard especially in Maven projects. + * + * This is universal strategy that does not update any files on its own. Use maven strategy for Maven projects. + */ +class Java extends base_1.BaseStrategy { + constructor(options) { + var _a, _b, _c; + options.changelogSections = (_a = options.changelogSections) !== null && _a !== void 0 ? _a : CHANGELOG_SECTIONS; + // wrap the configured versioning strategy with snapshotting + const parentVersioningStrategy = options.versioningStrategy || + new default_1.DefaultVersioningStrategy({ logger: (_b = options.logger) !== null && _b !== void 0 ? _b : logger_1.logger }); + options.versioningStrategy = new java_snapshot_1.JavaSnapshot(parentVersioningStrategy); + super(options); + this.snapshotVersioning = new java_add_snapshot_1.JavaAddSnapshot(parentVersioningStrategy); + this.snapshotLabels = options.snapshotLabels || manifest_1.DEFAULT_SNAPSHOT_LABELS; + this.skipSnapshot = (_c = options.skipSnapshot) !== null && _c !== void 0 ? _c : false; + } + async buildReleasePullRequest(commits, latestRelease, draft, labels = [], _bumpOnlyOptions) { + if (await this.needsSnapshot(commits, latestRelease)) { + this.logger.info('Repository needs a snapshot bump.'); + return await this.buildSnapshotPullRequest(latestRelease, draft, this.snapshotLabels); + } + this.logger.info('No Java snapshot needed'); + return await super.buildReleasePullRequest(commits, latestRelease, draft, labels); + } + async buildSnapshotPullRequest(latestRelease, draft, labels = []) { + const component = await this.getComponent(); + const newVersion = latestRelease + ? await this.snapshotVersioning.bump(latestRelease.tag.version, []) + : this.initialReleaseVersion(); + const versionsMap = await this.buildVersionsMap([]); + for (const [component, version] of versionsMap.entries()) { + versionsMap.set(component, await this.snapshotVersioning.bump(version, [])); + } + const pullRequestTitle = pull_request_title_1.PullRequestTitle.ofComponentTargetBranchVersion(component || '', this.targetBranch, newVersion); + const branchName = component + ? 
branch_name_1.BranchName.ofComponentTargetBranch(component, this.targetBranch) + : branch_name_1.BranchName.ofTargetBranch(this.targetBranch); + const notes = '### Updating meta-information for bleeding-edge SNAPSHOT release.'; + // TODO use pullrequest header here? + const pullRequestBody = new pull_request_body_1.PullRequestBody([ + { + component, + version: newVersion, + notes, + }, + ]); + const updates = await this.buildUpdates({ + newVersion, + versionsMap, + changelogEntry: notes, + isSnapshot: true, + commits: [], + }); + const updatesWithExtras = (0, composite_1.mergeUpdates)(updates.concat(...(await this.extraFileUpdates(newVersion, versionsMap)))); + return { + title: pullRequestTitle, + body: pullRequestBody, + updates: updatesWithExtras, + labels: [...labels, ...this.extraLabels], + headRefName: branchName.toString(), + version: newVersion, + draft: draft !== null && draft !== void 0 ? draft : false, + group: 'snapshot', + }; + } + isPublishedVersion(version) { + return !version.preRelease || version.preRelease.indexOf('SNAPSHOT') < 0; + } + async needsSnapshot(commits, latestRelease) { + var _a; + if (this.skipSnapshot) { + return false; + } + const component = await this.getComponent(); + this.logger.debug('component:', component); + const version = (_a = latestRelease === null || latestRelease === void 0 ? void 0 : latestRelease.tag) === null || _a === void 0 ? void 0 : _a.version; + if (!version) { + // Don't bump snapshots for the first release ever + return false; + } + // Found snapshot as a release, this is unexpected, but use it + if (!this.isPublishedVersion(version)) { + return false; + } + // Search commits for snapshot bump + const pullRequests = commits + .map(commit => { + var _a; + return pull_request_title_1.PullRequestTitle.parse(((_a = commit.pullRequest) === null || _a === void 0 ? void 0 : _a.title) || commit.message, this.pullRequestTitlePattern, this.componentNoSpace, this.logger); + }) + .filter(pullRequest => pullRequest); + const snapshotCommits = pullRequests + .filter(pullRequest => ((pullRequest === null || pullRequest === void 0 ? void 0 : pullRequest.component) || '') === component) + .map(pullRequest => pullRequest === null || pullRequest === void 0 ? void 0 : pullRequest.getVersion()) + .filter(version => version && !this.isPublishedVersion(version)); + return snapshotCommits.length === 0; + } + async buildUpdates(options) { + const version = options.newVersion; + const versionsMap = options.versionsMap; + const updates = []; + if (!options.isSnapshot) { + // Append java-specific updater for extraFiles + this.extraFiles.forEach(extraFile => { + if (typeof extraFile === 'string') { + updates.push({ + path: this.addPath(extraFile), + createIfMissing: false, + updater: new java_released_1.JavaReleased({ version, versionsMap }), + }); + } + }); + // Update changelog + updates.push({ + path: this.addPath(this.changelogPath), + createIfMissing: true, + updater: new changelog_1.Changelog({ + version, + changelogEntry: options.changelogEntry, + }), + }); + } + return updates; + } +} +exports.Java = Java; +//# sourceMappingURL=java.js.map + +/***/ }), + +/***/ 51900: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.KRMBlueprint = void 0; +// Generic +const changelog_1 = __nccwpck_require__(11128); +// KRM specific. +const krm_blueprint_version_1 = __nccwpck_require__(75678); +const base_1 = __nccwpck_require__(25606); +const version_1 = __nccwpck_require__(25112); +const KRMBlueprintAttribAnnotation = 'cnrm.cloud.google.com/blueprint'; +const hasKRMBlueprintAttrib = (content) => content.includes(KRMBlueprintAttribAnnotation); +class KRMBlueprint extends base_1.BaseStrategy { + async buildUpdates(options) { + const updates = []; + const version = options.newVersion; + updates.push({ + path: this.addPath(this.changelogPath), + createIfMissing: true, + updater: new changelog_1.Changelog({ + version, + changelogEntry: options.changelogEntry, + }), + }); + const versionsMap = new Map(); + if (options.latestVersion) { + versionsMap.set('previousVersion', options.latestVersion); + } + // Update version in all yaml files with attribution annotation + const yamlPaths = await this.github.findFilesByExtensionAndRef('yaml', this.targetBranch, this.path); + for (const yamlPath of yamlPaths) { + const contents = await this.github.getFileContents(this.addPath(yamlPath)); + if (hasKRMBlueprintAttrib(contents.parsedContent)) { + updates.push({ + path: this.addPath(yamlPath), + createIfMissing: false, + cachedFileContents: contents, + updater: new krm_blueprint_version_1.KRMBlueprintVersion({ + version, + versionsMap, + }), + }); + } + } + return updates; + } + initialReleaseVersion() { + return version_1.Version.parse('0.1.0'); + } +} +exports.KRMBlueprint = KRMBlueprint; +//# sourceMappingURL=krm-blueprint.js.map + +/***/ }), + +/***/ 26609: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Maven = void 0; +const java_1 = __nccwpck_require__(71922); +const java_released_1 = __nccwpck_require__(24678); +const generic_1 = __nccwpck_require__(62963); +const pom_xml_1 = __nccwpck_require__(55023); +/** + * Strategy for Maven projects. It generates SNAPSHOT version after each release, and updates all found + * pom.xml files automatically. 
+ */ +class Maven extends java_1.Java { + async buildUpdates(options) { + const version = options.newVersion; + const versionsMap = options.versionsMap; + // Use generic Java updates + const updates = await super.buildUpdates(options); + // Update pom.xml files + const pomFiles = await this.github.findFilesByFilenameAndRef('pom.xml', this.targetBranch, this.path); + pomFiles.forEach(path => { + updates.push({ + path: this.addPath(path), + createIfMissing: false, + updater: new pom_xml_1.PomXml(version), + }); + if (!options.isSnapshot) { + updates.push({ + path: this.addPath(path), + createIfMissing: false, + updater: new java_released_1.JavaReleased({ version, versionsMap }), + }); + } + updates.push({ + path: this.addPath(path), + createIfMissing: false, + updater: new generic_1.Generic({ version, versionsMap }), + }); + }); + return updates; + } +} +exports.Maven = Maven; +//# sourceMappingURL=maven.js.map + +/***/ }), + +/***/ 97754: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Node = void 0; +const base_1 = __nccwpck_require__(25606); +const changelog_json_1 = __nccwpck_require__(34540); +const package_lock_json_1 = __nccwpck_require__(55554); +const samples_package_json_1 = __nccwpck_require__(56196); +const changelog_1 = __nccwpck_require__(11128); +const package_json_1 = __nccwpck_require__(15189); +const errors_1 = __nccwpck_require__(10818); +const filter_commits_1 = __nccwpck_require__(49526); +class Node extends base_1.BaseStrategy { + async buildUpdates(options) { + var _a; + const updates = []; + const version = options.newVersion; + const versionsMap = options.versionsMap; + const packageName = (_a = (await this.getPackageName())) !== null && _a !== void 0 ? 
_a : ''; + const lockFiles = ['package-lock.json', 'npm-shrinkwrap.json']; + lockFiles.forEach(lockFile => { + updates.push({ + path: this.addPath(lockFile), + createIfMissing: false, + updater: new package_lock_json_1.PackageLockJson({ + version, + versionsMap, + }), + }); + }); + updates.push({ + path: this.addPath('samples/package.json'), + createIfMissing: false, + updater: new samples_package_json_1.SamplesPackageJson({ + version, + packageName, + }), + }); + updates.push({ + path: this.addPath(this.changelogPath), + createIfMissing: true, + updater: new changelog_1.Changelog({ + version, + changelogEntry: options.changelogEntry, + }), + }); + updates.push({ + path: this.addPath('package.json'), + createIfMissing: false, + cachedFileContents: this.pkgJsonContents, + updater: new package_json_1.PackageJson({ + version, + }), + }); + // If a machine readable changelog.json exists update it: + if (options.commits && packageName) { + const commits = (0, filter_commits_1.filterCommits)(options.commits, this.changelogSections); + updates.push({ + path: 'changelog.json', + createIfMissing: false, + updater: new changelog_json_1.ChangelogJson({ + artifactName: packageName, + version, + commits, + language: 'JAVASCRIPT', + }), + }); + } + return updates; + } + async getDefaultPackageName() { + const pkgJsonContents = await this.getPkgJsonContents(); + const pkg = JSON.parse(pkgJsonContents.parsedContent); + return pkg.name; + } + normalizeComponent(component) { + if (!component) { + return ''; + } + return component.match(/^@[\w-]+\//) ? component.split('/')[1] : component; + } + async getPkgJsonContents() { + if (!this.pkgJsonContents) { + try { + this.pkgJsonContents = await this.github.getFileContentsOnBranch(this.addPath('package.json'), this.targetBranch); + } + catch (e) { + if (e instanceof errors_1.FileNotFoundError) { + throw new errors_1.MissingRequiredFileError(this.addPath('package.json'), 'node', `${this.repository.owner}/${this.repository.repo}`); + } + throw e; + } + } + return this.pkgJsonContents; + } +} +exports.Node = Node; +//# sourceMappingURL=node.js.map + +/***/ }), + +/***/ 29068: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.OCaml = void 0; +// Generic +const changelog_1 = __nccwpck_require__(11128); +// OCaml +const opam_1 = __nccwpck_require__(63454); +const esy_json_1 = __nccwpck_require__(93366); +const dune_project_1 = __nccwpck_require__(47944); +const base_1 = __nccwpck_require__(25606); +const notEsyLock = (path) => !path.startsWith('esy.lock'); +class OCaml extends base_1.BaseStrategy { + async buildUpdates(options) { + const updates = []; + const version = options.newVersion; + updates.push({ + path: this.addPath(this.changelogPath), + createIfMissing: true, + updater: new changelog_1.Changelog({ + version, + changelogEntry: options.changelogEntry, + }), + }); + const jsonPaths = await this.github.findFilesByExtension('json', this.path); + for (const path of jsonPaths) { + if (notEsyLock(path)) { + const contents = await this.github.getFileContents(this.addPath(path)); + const pkg = JSON.parse(contents.parsedContent); + if (pkg.version !== undefined) { + updates.push({ + path: this.addPath(path), + createIfMissing: false, + cachedFileContents: contents, + updater: new esy_json_1.EsyJson({ + version, + }), + }); + } + } + } + const opamPaths = await this.github.findFilesByExtension('opam', this.path); + opamPaths.filter(notEsyLock).forEach(path => { + updates.push({ + path: this.addPath(path), + createIfMissing: false, + updater: new opam_1.Opam({ + version, + }), + }); + }); + const opamLockedPaths = await this.github.findFilesByExtension('opam.locked', this.path); + opamLockedPaths.filter(notEsyLock).forEach(path => { + updates.push({ + path: this.addPath(path), + createIfMissing: false, + updater: new opam_1.Opam({ + version, + }), + }); + }); + updates.push({ + path: this.addPath('dune-project'), + createIfMissing: false, + updater: new dune_project_1.DuneProject({ + version, + }), + }); + return updates; + } +} +exports.OCaml = OCaml; +//# sourceMappingURL=ocaml.js.map + +/***/ }), + +/***/ 93584: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
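+// Illustrative example: the PHPYoshi strategy below honors per-directory
+// version overrides embedded in a pull request body (parsed by
+// parseVersionOverrides further down). A hypothetical override block:
+//
+//   BEGIN_VERSION_OVERRIDE
+//   Storage: 2.0.0
+//   PubSub: 1.4.1
+//   END_VERSION_OVERRIDE
+//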
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.PHPYoshi = void 0; +const base_1 = __nccwpck_require__(25606); +const changelog_1 = __nccwpck_require__(11128); +const root_composer_update_packages_1 = __nccwpck_require__(44955); +const php_client_version_1 = __nccwpck_require__(35752); +const version_1 = __nccwpck_require__(25112); +const commit_1 = __nccwpck_require__(50726); +const commit_split_1 = __nccwpck_require__(95086); +const default_1 = __nccwpck_require__(41143); +const tag_name_1 = __nccwpck_require__(91203); +const pull_request_title_1 = __nccwpck_require__(28866); +const branch_name_1 = __nccwpck_require__(94148); +const pull_request_body_1 = __nccwpck_require__(11941); +const errors_1 = __nccwpck_require__(10818); +const CHANGELOG_SECTIONS = [ + { type: 'feat', section: 'Features' }, + { type: 'fix', section: 'Bug Fixes' }, + { type: 'perf', section: 'Performance Improvements' }, + { type: 'revert', section: 'Reverts' }, + { type: 'docs', section: 'Documentation' }, + { type: 'misc', section: 'Miscellaneous' }, + { type: 'chore', section: 'Chores', hidden: true }, + { type: 'style', section: 'Styles', hidden: true }, + { type: 'refactor', section: 'Code Refactoring', hidden: true }, + { type: 'test', section: 'Tests', hidden: true }, + { type: 'build', section: 'Build System', hidden: true }, + { type: 'ci', section: 'Continuous Integration', hidden: true }, +]; +class PHPYoshi extends base_1.BaseStrategy { + constructor(options) { + super({ + ...options, + changelogSections: CHANGELOG_SECTIONS, + }); + } + async buildReleasePullRequest(commits, latestRelease, draft, labels = [], bumpOnlyOptions) { + var _a, _b, _c; + const conventionalCommits = await this.postProcessCommits((0, commit_1.parseConventionalCommits)(commits, this.logger)); + if (!bumpOnlyOptions && conventionalCommits.length === 0) { + this.logger.info(`No commits for path: ${this.path}, skipping`); + return undefined; + } + const versionOverrides = {}; + commits.forEach(commit => { + var _a; + Object.entries(parseVersionOverrides(((_a = commit.pullRequest) === null || _a === void 0 ? void 0 : _a.body) || '')).forEach(([directory, version]) => { + versionOverrides[directory] = version; + }); + }); + const newVersion = latestRelease + ? await this.versioningStrategy.bump(latestRelease.tag.version, conventionalCommits) + : this.initialReleaseVersion(); + const cs = new commit_split_1.CommitSplit(); + const splitCommits = cs.split(conventionalCommits); + const topLevelDirectories = Object.keys(splitCommits).sort(); + const versionsMap = new Map(); + const directoryVersionContents = {}; + const component = await this.getComponent(); + const newVersionTag = new tag_name_1.TagName(newVersion, component, this.tagSeparator, this.includeVInTag); + let releaseNotesBody = `## ${newVersion.toString()}`; + for (const directory of topLevelDirectories) { + try { + const contents = await this.github.getFileContentsOnBranch(this.addPath(`${directory}/VERSION`), this.targetBranch); + const composer = await this.github.getFileJson(this.addPath(`${directory}/composer.json`), this.targetBranch); + directoryVersionContents[directory] = { + versionContents: contents, + composer, + }; + const newVersion = versionOverrides[directory] + ? 
version_1.Version.parse(versionOverrides[directory]) + : await this.versioningStrategy.bump(version_1.Version.parse(contents.parsedContent), splitCommits[directory]); + versionsMap.set(composer.name, newVersion); + const partialReleaseNotes = await this.changelogNotes.buildNotes(splitCommits[directory], { + host: this.changelogHost, + owner: this.repository.owner, + repository: this.repository.repo, + version: newVersion.toString(), + previousTag: (_a = latestRelease === null || latestRelease === void 0 ? void 0 : latestRelease.tag) === null || _a === void 0 ? void 0 : _a.toString(), + currentTag: newVersionTag.toString(), + targetBranch: this.targetBranch, + changelogSections: this.changelogSections, + }); + releaseNotesBody = updatePHPChangelogEntry(`${composer.name} ${newVersion.toString()}`, releaseNotesBody, partialReleaseNotes); + } + catch (err) { + if (err instanceof errors_1.FileNotFoundError) { + // if the updated path has no VERSION, assume this isn't a + // module that needs updating. + continue; + } + else { + throw err; + } + } + } + const pullRequestTitle = pull_request_title_1.PullRequestTitle.ofComponentTargetBranchVersion(component || '', this.targetBranch, newVersion); + const branchName = component + ? branch_name_1.BranchName.ofComponentTargetBranch(component, this.targetBranch) + : branch_name_1.BranchName.ofTargetBranch(this.targetBranch); + const updates = await this.buildUpdates({ + changelogEntry: releaseNotesBody, + newVersion, + versionsMap, + latestVersion: latestRelease === null || latestRelease === void 0 ? void 0 : latestRelease.tag.version, + commits: conventionalCommits, // TODO(@bcoe): these commits will need to be divided into multiple changelog.json updates. + }); + for (const directory in directoryVersionContents) { + const componentInfo = directoryVersionContents[directory]; + const version = versionsMap.get(componentInfo.composer.name); + if (!version) { + this.logger.warn(`No version found for ${componentInfo.composer.name}`); + continue; + } + updates.push({ + path: this.addPath(`${directory}/VERSION`), + createIfMissing: false, + cachedFileContents: componentInfo.versionContents, + updater: new default_1.DefaultUpdater({ + version, + }), + }); + updates.push({ + path: this.addPath(`${directory}/composer.json`), + createIfMissing: false, + updater: new root_composer_update_packages_1.RootComposerUpdatePackages({ + version, + }), + }); + if ((_c = (_b = componentInfo.composer.extra) === null || _b === void 0 ? void 0 : _b.component) === null || _c === void 0 ? void 0 : _c.entry) { + updates.push({ + path: this.addPath(`${directory}/${componentInfo.composer.extra.component.entry}`), + createIfMissing: false, + updater: new php_client_version_1.PHPClientVersion({ + version, + }), + }); + } + } + // TODO use pullrequest header here? + const pullRequestBody = new pull_request_body_1.PullRequestBody([ + { + component, + version: newVersion, + notes: releaseNotesBody, + }, + ]); + return { + title: pullRequestTitle, + body: pullRequestBody, + updates, + labels: [...labels, ...this.extraLabels], + headRefName: branchName.toString(), + version: newVersion, + draft: draft !== null && draft !== void 0 ? draft : false, + }; + } + async parsePullRequestBody(pullRequestBody) { + const body = pull_request_body_1.PullRequestBody.parse(pullRequestBody, this.logger); + if (!body) { + return undefined; + } + const component = await this.getComponent(); + const notes = body.releaseData + .map(release => { + var _a; + return `
<details><summary>${release.component}: ${(_a = release.version) === null || _a === void 0 ? void 0 : _a.toString()}</summary>\n\n${release.notes}\n</details>
`; + }) + .join('\n\n'); + return new pull_request_body_1.PullRequestBody([{ component, notes }], { + footer: body.footer, + header: body.header, + }); + } + async buildUpdates(options) { + const updates = []; + const version = options.newVersion; + const versionsMap = options.versionsMap; + updates.push({ + path: this.addPath(this.changelogPath), + createIfMissing: true, + updater: new changelog_1.Changelog({ + version, + changelogEntry: options.changelogEntry, + }), + }); + // update VERSION file + updates.push({ + path: this.addPath('VERSION'), + createIfMissing: false, + updater: new default_1.DefaultUpdater({ + version, + }), + }); + // update the aggregate package information in the root composer.json + updates.push({ + path: this.addPath('composer.json'), + createIfMissing: false, + updater: new root_composer_update_packages_1.RootComposerUpdatePackages({ + version, + versionsMap, + }), + }); + return updates; + } +} +exports.PHPYoshi = PHPYoshi; +function parseVersionOverrides(body) { + // look for 'BEGIN_VERSION_OVERRIDE' section of pull request body + const versionOverrides = {}; + if (body) { + const overrideMessage = (body.split('BEGIN_VERSION_OVERRIDE')[1] || '') + .split('END_VERSION_OVERRIDE')[0] + .trim(); + if (overrideMessage) { + overrideMessage.split('\n').forEach(line => { + const [directory, version] = line.split(':'); + versionOverrides[directory.trim()] = version.trim(); + }); + } + } + return versionOverrides; +} +function updatePHPChangelogEntry(pkgKey, changelogEntry, entryUpdate) { + // Remove the first line of the entry, in favor of <summary>. + // This also allows us to use the same regex for extracting release + // notes (since the string "## v0.0.0" doesn't show up multiple times). + const entryUpdateSplit = entryUpdate.split(/\r?\n/); + entryUpdateSplit.shift(); + entryUpdate = entryUpdateSplit.join('\n'); + return `${changelogEntry} + +
${pkgKey} + +${entryUpdate} + +
`; +} +//# sourceMappingURL=php-yoshi.js.map + +/***/ }), + +/***/ 48526: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.PHP = void 0; +// Generic +const changelog_1 = __nccwpck_require__(11128); +// PHP Specific. +const root_composer_update_packages_1 = __nccwpck_require__(44955); +const base_1 = __nccwpck_require__(25606); +const default_1 = __nccwpck_require__(41143); +const CHANGELOG_SECTIONS = [ + { type: 'feat', section: 'Features' }, + { type: 'fix', section: 'Bug Fixes' }, + { type: 'perf', section: 'Performance Improvements' }, + { type: 'revert', section: 'Reverts' }, + { type: 'chore', section: 'Miscellaneous Chores' }, + { type: 'docs', section: 'Documentation', hidden: true }, + { type: 'style', section: 'Styles', hidden: true }, + { type: 'refactor', section: 'Code Refactoring', hidden: true }, + { type: 'test', section: 'Tests', hidden: true }, + { type: 'build', section: 'Build System', hidden: true }, + { type: 'ci', section: 'Continuous Integration', hidden: true }, +]; +class PHP extends base_1.BaseStrategy { + constructor(options) { + var _a; + options.changelogSections = (_a = options.changelogSections) !== null && _a !== void 0 ? _a : CHANGELOG_SECTIONS; + super(options); + } + async buildUpdates(options) { + const updates = []; + const version = options.newVersion; + const versionsMap = new Map(); + updates.push({ + path: this.addPath(this.changelogPath), + createIfMissing: true, + updater: new changelog_1.Changelog({ + version, + changelogEntry: options.changelogEntry, + }), + }); + // update composer.json + updates.push({ + path: this.addPath('composer.json'), + createIfMissing: false, + updater: new root_composer_update_packages_1.RootComposerUpdatePackages({ + version, + versionsMap, + }), + }); + // update VERSION file + updates.push({ + path: this.addPath('VERSION'), + createIfMissing: false, + updater: new default_1.DefaultUpdater({ + version, + }), + }); + return updates; + } +} +exports.PHP = PHP; +//# sourceMappingURL=php.js.map + +/***/ }), + +/***/ 47434: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
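// A minimal sketch (not part of the generated bundle) of the pull request
// body format that the parseVersionOverrides helper above expects: each line
// between the BEGIN_VERSION_OVERRIDE and END_VERSION_OVERRIDE markers is a
// "directory: version" pair. The directory names below are hypothetical.
const exampleOverrideBody = [
  'BEGIN_VERSION_OVERRIDE',
  'AccessApproval: 1.2.3',
  'Asset: 2.0.0-RC1',
  'END_VERSION_OVERRIDE',
].join('\n');
// parseVersionOverrides(exampleOverrideBody)
// => { AccessApproval: '1.2.3', Asset: '2.0.0-RC1' }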
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Python = void 0; +const base_1 = __nccwpck_require__(25606); +const changelog_1 = __nccwpck_require__(11128); +const changelog_json_1 = __nccwpck_require__(34540); +const version_1 = __nccwpck_require__(25112); +const setup_cfg_1 = __nccwpck_require__(32650); +const setup_py_1 = __nccwpck_require__(14874); +const pyproject_toml_1 = __nccwpck_require__(30952); +const python_file_with_version_1 = __nccwpck_require__(4064); +const errors_1 = __nccwpck_require__(10818); +const filter_commits_1 = __nccwpck_require__(49526); +const CHANGELOG_SECTIONS = [ + { type: 'feat', section: 'Features' }, + { type: 'fix', section: 'Bug Fixes' }, + { type: 'perf', section: 'Performance Improvements' }, + { type: 'deps', section: 'Dependencies' }, + { type: 'revert', section: 'Reverts' }, + { type: 'docs', section: 'Documentation' }, + { type: 'style', section: 'Styles', hidden: true }, + { type: 'chore', section: 'Miscellaneous Chores', hidden: true }, + { type: 'refactor', section: 'Code Refactoring', hidden: true }, + { type: 'test', section: 'Tests', hidden: true }, + { type: 'build', section: 'Build System', hidden: true }, + { type: 'ci', section: 'Continuous Integration', hidden: true }, +]; +class Python extends base_1.BaseStrategy { + constructor(options) { + var _a; + options.changelogSections = (_a = options.changelogSections) !== null && _a !== void 0 ? _a : CHANGELOG_SECTIONS; + super(options); + } + async buildUpdates(options) { + var _a; + const updates = []; + const version = options.newVersion; + updates.push({ + path: this.addPath(this.changelogPath), + createIfMissing: true, + updater: new changelog_1.Changelog({ + version, + changelogEntry: options.changelogEntry, + }), + }); + updates.push({ + path: this.addPath('setup.cfg'), + createIfMissing: false, + updater: new setup_cfg_1.SetupCfg({ + version, + }), + }); + updates.push({ + path: this.addPath('setup.py'), + createIfMissing: false, + updater: new setup_py_1.SetupPy({ + version, + }), + }); + const parsedPyProject = await this.getPyProject(this.addPath('pyproject.toml')); + const pyProject = (parsedPyProject === null || parsedPyProject === void 0 ? void 0 : parsedPyProject.project) || ((_a = parsedPyProject === null || parsedPyProject === void 0 ? void 0 : parsedPyProject.tool) === null || _a === void 0 ? void 0 : _a.poetry); + let projectName = this.component; + if (pyProject) { + updates.push({ + path: this.addPath('pyproject.toml'), + createIfMissing: false, + updater: new pyproject_toml_1.PyProjectToml({ + version, + }), + }); + projectName = pyProject.name; + } + else { + this.logger.warn(parsedPyProject + ? 
'invalid pyproject.toml'
+ : `file ${this.addPath('pyproject.toml')} did not exist`);
+ }
+ if (!projectName) {
+ this.logger.warn('No project/component found.');
+ }
+ else {
+ [projectName, projectName.replace(/-/g, '_')]
+ .flatMap(packageName => [
+ `${packageName}/__init__.py`,
+ `src/${packageName}/__init__.py`,
+ ])
+ .forEach(packagePath => updates.push({
+ path: this.addPath(packagePath),
+ createIfMissing: false,
+ updater: new python_file_with_version_1.PythonFileWithVersion({ version }),
+ }));
+ }
+ // There should be only one version.py, but iterate over all matches in case that is incorrect
+ const versionPyFilesSearch = this.github.findFilesByFilenameAndRef('version.py', this.targetBranch, this.path);
+ const versionPyFiles = await versionPyFilesSearch;
+ versionPyFiles.forEach(path => {
+ updates.push({
+ path: this.addPath(path),
+ createIfMissing: false,
+ updater: new python_file_with_version_1.PythonFileWithVersion({
+ version,
+ }),
+ });
+ });
+ // If a machine-readable changelog.json exists, update it:
+ const artifactName = projectName !== null && projectName !== void 0 ? projectName : (await this.getNameFromSetupPy());
+ if (options.commits && artifactName) {
+ const commits = (0, filter_commits_1.filterCommits)(options.commits, this.changelogSections);
+ updates.push({
+ path: 'changelog.json',
+ createIfMissing: false,
+ updater: new changelog_json_1.ChangelogJson({
+ artifactName,
+ version,
+ commits,
+ language: 'PYTHON',
+ }),
+ });
+ }
+ return updates;
+ }
+ async getPyProject(path) {
+ try {
+ const content = await this.github.getFileContentsOnBranch(path, this.targetBranch);
+ return (0, pyproject_toml_1.parsePyProject)(content.parsedContent);
+ }
+ catch (e) {
+ return null;
+ }
+ }
+ async getNameFromSetupPy() {
+ var _a;
+ const ARTIFACT_NAME_REGEX = /name *= *['"](?<name>.*)['"](\r|\n|$)/;
+ const setupPyContents = await this.getSetupPyContents();
+ if (setupPyContents) {
+ const match = setupPyContents.match(ARTIFACT_NAME_REGEX);
+ if (match && ((_a = match === null || match === void 0 ? void 0 : match.groups) === null || _a === void 0 ? void 0 : _a.name)) {
+ return match.groups.name;
+ }
+ }
+ return null;
+ }
+ async getSetupPyContents() {
+ try {
+ return (await this.github.getFileContentsOnBranch(this.addPath('setup.py'), this.targetBranch)).parsedContent;
+ }
+ catch (e) {
+ if (e instanceof errors_1.FileNotFoundError) {
+ return null;
+ }
+ else {
+ throw e;
+ }
+ }
+ }
+ initialReleaseVersion() {
+ return version_1.Version.parse('0.1.0');
+ }
+}
+exports.Python = Python;
+//# sourceMappingURL=python.js.map
+
+/***/ }),
+
+/***/ 74720:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+// Copyright 2019 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
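// A minimal sketch (not part of the generated bundle) of the name extraction
// performed by getNameFromSetupPy above, using the restored named capture
// group in ARTIFACT_NAME_REGEX. The setup.py contents are hypothetical.
const exampleSetupPy = 'name = "google-cloud-foo"\nversion = "1.0.0"\n';
const nameMatch = exampleSetupPy.match(/name *= *['"](?<name>.*)['"](\r|\n|$)/);
// nameMatch.groups.name === 'google-cloud-foo'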
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.RubyYoshi = void 0; +const indent_commit_1 = __nccwpck_require__(81133); +// Generic +const changelog_1 = __nccwpck_require__(11128); +// RubyYoshi +const version_rb_1 = __nccwpck_require__(43499); +const base_1 = __nccwpck_require__(25606); +const fs_1 = __nccwpck_require__(57147); +const path_1 = __nccwpck_require__(71017); +const CHANGELOG_SECTIONS = [ + { type: 'feat', section: 'Features' }, + { type: 'fix', section: 'Bug Fixes' }, + { type: 'perf', section: 'Performance Improvements' }, + { type: 'revert', section: 'Reverts' }, + { type: 'docs', section: 'Documentation' }, + { type: 'style', section: 'Styles', hidden: true }, + { type: 'chore', section: 'Miscellaneous Chores', hidden: true }, + { type: 'refactor', section: 'Code Refactoring', hidden: true }, + { type: 'test', section: 'Tests', hidden: true }, + { type: 'build', section: 'Build System', hidden: true }, + { type: 'ci', section: 'Continuous Integration', hidden: true }, +]; +class RubyYoshi extends base_1.BaseStrategy { + constructor(options) { + var _a; + super({ + ...options, + changelogSections: CHANGELOG_SECTIONS, + commitPartial: (0, fs_1.readFileSync)((0, path_1.resolve)(__dirname, '../../../templates/commit.hbs'), 'utf8'), + headerPartial: (0, fs_1.readFileSync)((0, path_1.resolve)(__dirname, '../../../templates/header.hbs'), 'utf8'), + mainTemplate: (0, fs_1.readFileSync)((0, path_1.resolve)(__dirname, '../../../templates/template.hbs'), 'utf8'), + tagSeparator: '/', + }); + this.versionFile = (_a = options.versionFile) !== null && _a !== void 0 ? _a : ''; + } + async buildUpdates(options) { + const updates = []; + const version = options.newVersion; + updates.push({ + path: this.addPath(this.changelogPath), + createIfMissing: true, + updater: new changelog_1.Changelog({ + version, + changelogEntry: options.changelogEntry, + }), + }); + const versionFile = this.versionFile + ? this.versionFile + : `lib/${(this.component || '').replace(/-/g, '/')}/version.rb`; + updates.push({ + path: this.addPath(versionFile), + createIfMissing: false, + updater: new version_rb_1.VersionRB({ + version, + }), + }); + return updates; + } + async postProcessCommits(commits) { + commits.forEach(commit => { + commit.message = (0, indent_commit_1.indentCommit)(commit); + }); + return commits; + } + async buildReleaseNotes(conventionalCommits, newVersion, newVersionTag, latestRelease, commits) { + const releaseNotes = await super.buildReleaseNotes(conventionalCommits, newVersion, newVersionTag, latestRelease, commits); + return (releaseNotes + // Remove links in version title line and standardize on h3 + .replace(/^###? \[([\d.]+)\]\([^)]*\)/gm, '### $1') + // Remove bolded scope from change lines + .replace(/^\* \*\*[\w-]+:\*\* /gm, '* ') + // Remove PR and commit links from pull request title suffixes + .replace(/(\(\[(\w+)\]\(https:\/\/github\.com\/[^)]*\)\))+\s*$/gm, '') + // Standardize on h4 for change type subheaders + .replace(/^### (Features|Bug Fixes|Documentation)$/gm, '#### $1') + // Collapse 2 or more blank lines + .replace(/\n{3,}/g, '\n\n')); + } +} +exports.RubyYoshi = RubyYoshi; +//# sourceMappingURL=ruby-yoshi.js.map + +/***/ }), + +/***/ 8534: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Ruby = void 0; +const indent_commit_1 = __nccwpck_require__(81133); +// Generic +const changelog_1 = __nccwpck_require__(11128); +// Ruby +const version_rb_1 = __nccwpck_require__(43499); +const gemfile_lock_1 = __nccwpck_require__(21147); +const base_1 = __nccwpck_require__(25606); +class Ruby extends base_1.BaseStrategy { + constructor(options) { + var _a; + super(options); + this.versionFile = (_a = options.versionFile) !== null && _a !== void 0 ? _a : ''; + this.tagSeparator = '/'; + } + async buildUpdates(options) { + const updates = []; + const version = options.newVersion; + updates.push({ + path: this.addPath(this.changelogPath), + createIfMissing: true, + updater: new changelog_1.Changelog({ + version, + changelogEntry: options.changelogEntry, + }), + }); + const versionFile = this.versionFile + ? this.versionFile + : `lib/${(this.component || '').replace(/-/g, '/')}/version.rb`; + updates.push({ + path: this.addPath(versionFile), + createIfMissing: false, + updater: new version_rb_1.VersionRB({ + version, + }), + }); + updates.push({ + path: this.addPath('Gemfile.lock'), + createIfMissing: false, + updater: new gemfile_lock_1.GemfileLock({ + version, + gemName: this.component || '', + }), + }); + return updates; + } + async postProcessCommits(commits) { + commits.forEach(commit => { + commit.message = (0, indent_commit_1.indentCommit)(commit); + }); + return commits; + } +} +exports.Ruby = Ruby; +//# sourceMappingURL=ruby.js.map + +/***/ }), + +/***/ 5861: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
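// A minimal sketch (not part of the generated bundle) of how the Ruby
// strategies above derive the default version.rb path from a component name
// when no versionFile option is configured; the component is hypothetical.
const exampleComponent = 'google-cloud-storage';
const exampleVersionFile = `lib/${exampleComponent.replace(/-/g, '/')}/version.rb`;
// => 'lib/google/cloud/storage/version.rb'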
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Rust = void 0; +// Generic +const changelog_1 = __nccwpck_require__(11128); +// Cargo.toml support +const cargo_toml_1 = __nccwpck_require__(39089); +const cargo_lock_1 = __nccwpck_require__(84670); +const common_1 = __nccwpck_require__(66815); +const base_1 = __nccwpck_require__(25606); +const version_1 = __nccwpck_require__(25112); +class Rust extends base_1.BaseStrategy { + async buildUpdates(options) { + var _a, _b, _c; + const updates = []; + const version = options.newVersion; + updates.push({ + path: this.addPath(this.changelogPath), + createIfMissing: true, + updater: new changelog_1.Changelog({ + version, + changelogEntry: options.changelogEntry, + }), + }); + const workspaceManifest = await this.getPackageManifest(); + const versionsMap = new Map(); + if ((_a = workspaceManifest === null || workspaceManifest === void 0 ? void 0 : workspaceManifest.workspace) === null || _a === void 0 ? void 0 : _a.members) { + const members = workspaceManifest.workspace.members; + if ((_b = workspaceManifest.package) === null || _b === void 0 ? void 0 : _b.name) { + versionsMap.set(workspaceManifest.package.name, version); + } + else { + this.logger.warn('No workspace manifest package name found'); + } + this.logger.info(`found workspace with ${members.length} members, upgrading all`); + // Collect submodule names to update + const manifestsByPath = new Map(); + for (const member of members) { + const manifestPath = `${member}/Cargo.toml`; + const manifestContent = await this.getContent(manifestPath); + if (!manifestContent) { + this.logger.warn(`member ${member} declared but did not find Cargo.toml`); + continue; + } + const manifest = (0, common_1.parseCargoManifest)(manifestContent.parsedContent); + manifestsByPath.set(manifestPath, manifestContent); + if (!((_c = manifest.package) === null || _c === void 0 ? void 0 : _c.name)) { + this.logger.warn(`member ${member} has no package name`); + continue; + } + versionsMap.set(manifest.package.name, version); + } + this.logger.info(`updating ${manifestsByPath.size} submodules`); + this.logger.debug('versions map:', versionsMap); + for (const [manifestPath, manifestContent] of manifestsByPath) { + updates.push({ + path: this.addPath(manifestPath), + createIfMissing: false, + cachedFileContents: manifestContent, + updater: new cargo_toml_1.CargoToml({ + version, + versionsMap, + }), + }); + } + // Update root Cargo.toml + updates.push({ + path: this.addPath('Cargo.toml'), + createIfMissing: false, + updater: new cargo_toml_1.CargoToml({ + version, + versionsMap, + }), + }); + } + else { + this.logger.info('single crate found, updating Cargo.toml'); + const packageName = await this.getDefaultPackageName(); + if (packageName) { + versionsMap.set(packageName, version); + } + else { + this.logger.warn('No crate package name found'); + } + updates.push({ + path: this.addPath('Cargo.toml'), + createIfMissing: false, + updater: new cargo_toml_1.CargoToml({ + version, + versionsMap, + }), + }); + } + updates.push({ + path: this.addPath('Cargo.lock'), + createIfMissing: false, + updater: new cargo_lock_1.CargoLock(versionsMap), + }); + return updates; + } + initialReleaseVersion() { + return version_1.Version.parse('0.1.0'); + } + async getDefaultPackageName() { + var _a; + const packageManifest = await this.getPackageManifest(); + if (packageManifest) { + return (_a = packageManifest.package) === null || _a === void 0 ? 
void 0 : _a.name; + } + return undefined; + } + /** + * @returns the package's manifest, ie. `crates/foobar/Cargo.toml` + */ + async getPackageManifest() { + if (this.packageManifest === undefined) { + this.packageManifest = await this.getManifest('Cargo.toml'); + } + return this.packageManifest; + } + async getContent(path) { + try { + return await this.github.getFileContentsOnBranch(this.addPath(path), this.targetBranch); + } + catch (e) { + return null; + } + } + async getManifest(path) { + const content = await this.getContent(path); + return content ? (0, common_1.parseCargoManifest)(content.parsedContent) : null; + } +} +exports.Rust = Rust; +//# sourceMappingURL=rust.js.map + +/***/ }), + +/***/ 49658: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Sfdx = void 0; +const base_1 = __nccwpck_require__(25606); +const changelog_1 = __nccwpck_require__(11128); +const errors_1 = __nccwpck_require__(10818); +const sfdx_project_json_1 = __nccwpck_require__(15475); +const sfdxProjectJsonFileName = 'sfdx-project.json'; +class Sfdx extends base_1.BaseStrategy { + async buildUpdates(options) { + const updates = []; + const version = options.newVersion; + updates.push({ + path: this.addPath(this.changelogPath), + createIfMissing: true, + updater: new changelog_1.Changelog({ + version, + changelogEntry: options.changelogEntry, + }), + }); + updates.push({ + path: this.addPath(sfdxProjectJsonFileName), + createIfMissing: false, + cachedFileContents: this.sfdxProjectJsonContents, + updater: new sfdx_project_json_1.SfdxProjectJson({ + version, + }), + }); + return updates; + } + async getDefaultPackageName() { + const pkgJsonContents = await this.getSfdxProjectJsonContents(); + const pkg = JSON.parse(pkgJsonContents.parsedContent); + return pkg.name; + } + async getSfdxProjectJsonContents() { + if (!this.sfdxProjectJsonContents) { + try { + this.sfdxProjectJsonContents = + await this.github.getFileContentsOnBranch(this.addPath(sfdxProjectJsonFileName), this.targetBranch); + } + catch (e) { + if (e instanceof errors_1.FileNotFoundError) { + throw new errors_1.MissingRequiredFileError(this.addPath(sfdxProjectJsonFileName), 'sfdx', `${this.repository.owner}/${this.repository.repo}`); + } + throw e; + } + } + return this.sfdxProjectJsonContents; + } +} +exports.Sfdx = Sfdx; +//# sourceMappingURL=sfdx.js.map + +/***/ }), + +/***/ 13671: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Simple = void 0; +// Generic +const changelog_1 = __nccwpck_require__(11128); +// version.txt support +const base_1 = __nccwpck_require__(25606); +const default_1 = __nccwpck_require__(41143); +class Simple extends base_1.BaseStrategy { + constructor(options) { + var _a; + super(options); + this.versionFile = (_a = options.versionFile) !== null && _a !== void 0 ? _a : 'version.txt'; + } + async buildUpdates(options) { + const updates = []; + const version = options.newVersion; + updates.push({ + path: this.addPath(this.changelogPath), + createIfMissing: true, + updater: new changelog_1.Changelog({ + version, + changelogEntry: options.changelogEntry, + }), + }); + updates.push({ + path: this.addPath(this.versionFile), + createIfMissing: false, + updater: new default_1.DefaultUpdater({ + version, + }), + }); + return updates; + } +} +exports.Simple = Simple; +//# sourceMappingURL=simple.js.map + +/***/ }), + +/***/ 95720: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.TerraformModule = void 0; +// Generic +const changelog_1 = __nccwpck_require__(11128); +// Terraform specific. +const readme_1 = __nccwpck_require__(57277); +const module_version_1 = __nccwpck_require__(61495); +const metadata_version_1 = __nccwpck_require__(68225); +const base_1 = __nccwpck_require__(25606); +const version_1 = __nccwpck_require__(25112); +class TerraformModule extends base_1.BaseStrategy { + async buildUpdates(options) { + const updates = []; + const version = options.newVersion; + updates.push({ + path: this.addPath(this.changelogPath), + createIfMissing: true, + updater: new changelog_1.Changelog({ + version, + changelogEntry: options.changelogEntry, + }), + }); + // Update version in README to current candidate version. + // A module may have submodules, so find all submodules. + const readmeFiles = await Promise.all([ + this.github.findFilesByFilenameAndRef('readme.md', this.targetBranch, this.path), + this.github.findFilesByFilenameAndRef('README.md', this.targetBranch, this.path), + ]).then(([v, vt]) => { + return v.concat(vt); + }); + readmeFiles.forEach(path => { + updates.push({ + path: this.addPath(path), + createIfMissing: false, + updater: new readme_1.ReadMe({ + version, + }), + }); + }); + // Update versions.tf to current candidate version. 
+ // A module may have submodules, so find all versions.tf and versions.tf.tmpl to update.
+ const versionFiles = await Promise.all([
+ this.github.findFilesByFilenameAndRef('versions.tf', this.targetBranch, this.path),
+ this.github.findFilesByFilenameAndRef('versions.tf.tmpl', this.targetBranch, this.path),
+ ]).then(([v, vt]) => {
+ return v.concat(vt);
+ });
+ versionFiles.forEach(path => {
+ updates.push({
+ path: this.addPath(path),
+ createIfMissing: false,
+ updater: new module_version_1.ModuleVersion({
+ version,
+ }),
+ });
+ });
+ // Update metadata.yaml to current candidate version.
+ const metadataFiles = await this.github.findFilesByFilenameAndRef('metadata.yaml', this.targetBranch, this.path);
+ metadataFiles.forEach(path => {
+ updates.push({
+ path: this.addPath(path),
+ createIfMissing: false,
+ updater: new metadata_version_1.MetadataVersion({
+ version,
+ }),
+ });
+ });
+ return updates;
+ }
+ initialReleaseVersion() {
+ return version_1.Version.parse('0.1.0');
+ }
+}
+exports.TerraformModule = TerraformModule;
+//# sourceMappingURL=terraform-module.js.map
+
+/***/ }),
+
+/***/ 77490:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+// Copyright 2022 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.BaseXml = void 0;
+const dom = __nccwpck_require__(49213);
+/**
+ * Base class for all updaters working with XML files.
+ */
+class BaseXml {
+ /**
+ * Given initial file contents, return updated contents.
+ * @param {string} content The initial content
+ * @returns {string} The updated content
+ */
+ updateContent(content) {
+ const document = new dom.DOMParser().parseFromString(content);
+ const updated = this.updateDocument(document);
+ if (updated) {
+ const newContent = new dom.XMLSerializer().serializeToString(document);
+ if (content.endsWith('\n') && !newContent.endsWith('\n')) {
+ return `${newContent}\n`;
+ }
+ return newContent;
+ }
+ else {
+ return content;
+ }
+ }
+}
+exports.BaseXml = BaseXml;
+//# sourceMappingURL=base-xml.js.map
+
+/***/ }),
+
+/***/ 93440:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+// Copyright 2024 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.ModuleBazel = void 0;
+const default_1 = __nccwpck_require__(41143);
+/**
+ * Updates a Bazel Module file.
+ */
+class ModuleBazel extends default_1.DefaultUpdater {
+ updateContent(content) {
+ const match = content.match(/module[\s\S]*?\([\s\S]*?version\s*=\s*(['"])(.*?)\1/m);
+ if (!match) {
+ return content;
+ }
+ const [fullMatch, , version] = match;
+ const module = fullMatch.replace(version, this.version.toString());
+ return content.replace(fullMatch, module);
+ }
+}
+exports.ModuleBazel = ModuleBazel;
+//# sourceMappingURL=module-bazel.js.map
+
+/***/ }),
+
+/***/ 34540:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.ChangelogJson = void 0;
+const logger_1 = __nccwpck_require__(18792);
+const default_1 = __nccwpck_require__(41143);
+const crypto_1 = __nccwpck_require__(6113);
+const BREAKING_CHANGE_TITLE = 'BREAKING CHANGE';
+const COMMIT_PREFIX = /^[^:]+: ?/;
+const PR_SUFFIX_REGEX = / ?\(#(?<pr>[0-9]+)\)$/;
+/**
+ * Maintains a machine-readable CHANGELOG in changelog.json.
+ * See: https://gist.github.com/bcoe/50ef0a0024bbf107cd5bc0adbdc04758
+ */
+class ChangelogJson extends default_1.DefaultUpdater {
+ /**
+ * Instantiate a new ChangelogJson updater
+ * @param options
+ */
+ constructor(options) {
+ super(options);
+ this.language = options.language;
+ this.artifactName = options.artifactName;
+ this.commits = options.commits;
+ }
+ /**
+ * Given initial file contents, return updated contents.
+ * @param {string} content The initial content
+ * @returns {string} The updated content
+ */
+ updateContent(content, logger = logger_1.logger) {
+ var _a;
+ const parsed = JSON.parse(content);
+ logger.info(`adding release ${this.version} for ${this.artifactName}`);
+ const changes = [];
+ for (const commit of this.commits) {
+ const issues = new Set();
+ // The commit.message field contains the type/scope prefix.
+ let message = commit.message.replace(COMMIT_PREFIX, '');
+ // When squashing commits, GitHub adds a suffix referencing
+ // the # of the PR, e.g., chore(main): release 15.5.1 (#1838)
+ // this logic removes this suffix and prepends it to the
+ // issues array.
+ const match = message.match(PR_SUFFIX_REGEX);
+ if (match && ((_a = match.groups) === null || _a === void 0 ? void 0 : _a.pr)) {
+ message = message.replace(match[0], '');
+ issues.add(match.groups.pr);
+ }
+ // Array.from(someSet) will maintain elements in insertion
+ // order; given this, we add references after the pr suffix.
+ for (const ref of commit.references) {
+ issues.add(ref.issue);
+ }
+ const change = {
+ type: commit.type,
+ sha: commit.sha,
+ message: message,
+ issues: Array.from(issues),
+ };
+ if (commit.scope)
+ change.scope = commit.scope;
+ for (const note of commit.notes) {
+ if (note.title === BREAKING_CHANGE_TITLE) {
+ change.breakingChangeNote = note.text;
+ }
+ }
+ changes.push(change);
+ }
+ // If all commits were ignored, simply return the original changelog.json.
+ if (changes.length === 0) { + return content; + } + const time = new Date().toISOString(); + const release = { + changes, + version: this.version.toString(), + language: this.language, + artifactName: this.artifactName, + id: (0, crypto_1.randomUUID)(), + createTime: time, + }; + parsed.entries.unshift(release); + parsed.updateTime = time; + return JSON.stringify(parsed, null, 2); + } +} +exports.ChangelogJson = ChangelogJson; +//# sourceMappingURL=changelog-json.js.map + +/***/ }), + +/***/ 11128: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Changelog = void 0; +const default_1 = __nccwpck_require__(41143); +const DEFAULT_VERSION_HEADER_REGEX = '\n###? v?[0-9[]'; +class Changelog extends default_1.DefaultUpdater { + constructor(options) { + var _a; + super(options); + this.changelogEntry = options.changelogEntry; + this.versionHeaderRegex = new RegExp((_a = options.versionHeaderRegex) !== null && _a !== void 0 ? _a : DEFAULT_VERSION_HEADER_REGEX, 's'); + } + updateContent(content) { + content = content || ''; + // Handle both H2 (features/BREAKING CHANGES) and H3 (fixes). + const lastEntryIndex = content.search(this.versionHeaderRegex); + if (lastEntryIndex === -1) { + if (content) { + return `${this.header()}\n${this.changelogEntry}\n\n${adjustHeaders(content).trim()}\n`; + } + else { + return `${this.header()}\n${this.changelogEntry}\n`; + } + } + else { + const before = content.slice(0, lastEntryIndex); + const after = content.slice(lastEntryIndex); + return `${before}\n${this.changelogEntry}\n${after}`.trim() + '\n'; + } + } + header() { + return `\ +# Changelog +`; + } +} +exports.Changelog = Changelog; +// Helper to increase markdown H1 headers to H2 +function adjustHeaders(content) { + return content.replace(/^#(\s)/gm, '##$1'); +} +//# sourceMappingURL=changelog.js.map + +/***/ }), + +/***/ 93373: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.mergeUpdates = exports.CompositeUpdater = void 0; +/** + * The CompositeUpdater chains 0...n updaters and updates + * the content in order. 
+ */ +class CompositeUpdater { + /** + * Instantiate a new CompositeUpdater + * @param {Updater[]} updaters The updaters to chain together + */ + constructor(...updaters) { + this.updaters = updaters; + } + /** + * Given initial file contents, return updated contents. + * @param {string} content The initial content + * @returns {string} The updated content + */ + updateContent(content) { + for (const updater of this.updaters) { + content = updater.updateContent(content); + } + return content || ''; + } +} +exports.CompositeUpdater = CompositeUpdater; +function mergeUpdates(updates) { + const updatesByPath = {}; + for (const update of updates) { + if (updatesByPath[update.path]) { + updatesByPath[update.path].push(update); + } + else { + updatesByPath[update.path] = [update]; + } + } + const newUpdates = []; + for (const path in updatesByPath) { + const update = updatesByPath[path]; + const updaters = update.map(u => u.updater); + newUpdates.push({ + path, + createIfMissing: update[0].createIfMissing, + updater: updaters.length === 1 ? updaters[0] : new CompositeUpdater(...updaters), + }); + } + return newUpdates; +} +exports.mergeUpdates = mergeUpdates; +//# sourceMappingURL=composite.js.map + +/***/ }), + +/***/ 12492: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.PubspecYaml = void 0; +const logger_1 = __nccwpck_require__(18792); +const default_1 = __nccwpck_require__(41143); +/** + * Updates a Dart pubspec.yaml file. + */ +class PubspecYaml extends default_1.DefaultUpdater { + /** + * Given initial file contents, return updated contents. + * @param {string} content The initial content + * @returns {string} The updated content + */ + updateContent(content, logger = logger_1.logger) { + const oldVersion = content.match(/^version: ([0-9.]+)\+?(.*$)/m); + let buildNumber = ''; + if (oldVersion) { + buildNumber = oldVersion[2]; + const parsedBuild = parseInt(buildNumber); + if (!isNaN(parsedBuild)) { + buildNumber = `+${parsedBuild + 1}`; + logger.info(`updating from ${oldVersion[1]}+${oldVersion[2]} to ${this.version}${buildNumber}`); + } + else if (buildNumber.length > 0) { + buildNumber = `+${buildNumber}`; + logger.info(`updating from ${oldVersion[1]}+${oldVersion[2]} to ${this.version}${buildNumber}`); + } + else { + logger.info(`updating from ${oldVersion[1]} to ${this.version}`); + } + } + return content.replace(/^version: .*$/m, `version: ${this.version}${buildNumber}`); + } +} +exports.PubspecYaml = PubspecYaml; +//# sourceMappingURL=pubspec-yaml.js.map + +/***/ }), + +/***/ 41143: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.DefaultUpdater = void 0; +/** + * This updater writes a plain file with the version string as the + * only content. + */ +class DefaultUpdater { + constructor(options) { + this.version = options.version; + this.versionsMap = options.versionsMap; + } + /** + * Given initial file contents, return updated contents. + * @param {string} content The initial content + * @returns {string} The updated content + */ + updateContent(_content) { + return this.version + '\n'; + } +} +exports.DefaultUpdater = DefaultUpdater; +//# sourceMappingURL=default.js.map + +/***/ }), + +/***/ 73292: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Apis = void 0; +const logger_1 = __nccwpck_require__(18792); +const json_stringify_1 = __nccwpck_require__(61480); +/** + * Updates the apis.json format. See + * https://github.com/googleapis/google-cloud-dotnet/blob/main/apis/README.md. + */ +class Apis { + constructor(component, version) { + this.component = component; + this.version = version; + } + /** + * Given initial file contents, return updated contents. + * @param {string} content The initial content + * @returns {string} The updated content + */ + updateContent(content, logger = logger_1.logger) { + const data = JSON.parse(content); + const api = data.apis.find(api => api.id === this.component); + if (!api) { + logger.warn(`Failed to find component: ${this.component} in apis.json`); + return content; + } + api.version = this.version.toString(); + return (0, json_stringify_1.jsonStringify)(data, content); + } +} +exports.Apis = Apis; +//# sourceMappingURL=apis.js.map + +/***/ }), + +/***/ 99827: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
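// A minimal sketch (not part of the generated bundle) of mergeUpdates from
// the composite module above: updates that target the same path collapse
// into one entry whose CompositeUpdater applies each updater in order. The
// two stand-in updaters here are hypothetical.
const appendA = { updateContent: content => `${content}A` };
const appendB = { updateContent: content => `${content}B` };
const merged = mergeUpdates([
  { path: 'CHANGELOG.md', createIfMissing: true, updater: appendA },
  { path: 'CHANGELOG.md', createIfMissing: true, updater: appendB },
]);
// merged.length === 1 and merged[0].updater.updateContent('x') === 'xAB'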
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.ElixirMixExs = void 0;
+const logger_1 = __nccwpck_require__(18792);
+const default_1 = __nccwpck_require__(41143);
+/**
+ * Updates an Elixir mix.exs file and looks for a version string.
+ */
+class ElixirMixExs extends default_1.DefaultUpdater {
+ /**
+ * Given initial file contents, return updated contents.
+ * @param {string} content The initial content
+ * @returns {string} The updated content
+ */
+ updateContent(content, logger = logger_1.logger) {
+ const oldModuleAttributeVersion = content.match(/@version "([A-Za-z0-9_\-+.~]+)"/);
+ if (oldModuleAttributeVersion) {
+ logger.info(`updating module attribute version from ${oldModuleAttributeVersion[1]} to ${this.version}`);
+ return content.replace(/@version "[A-Za-z0-9_\-+.~]+"/, `@version "${this.version}"`);
+ }
+ const oldInlineVersion = content.match(/version: "([A-Za-z0-9_\-+.~]+)"/);
+ if (oldInlineVersion) {
+ logger.info(`updating inline version from ${oldInlineVersion[1]} to ${this.version}`);
+ }
+ return content.replace(/version: "[A-Za-z0-9_\-+.~]+",/, `version: "${this.version}",`);
+ }
+}
+exports.ElixirMixExs = ElixirMixExs;
+//# sourceMappingURL=elixir-mix-exs.js.map
+
+/***/ }),
+
+/***/ 897:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+// Copyright 2022 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.AppJson = void 0;
+const json_stringify_1 = __nccwpck_require__(61480);
+const logger_1 = __nccwpck_require__(18792);
+const default_1 = __nccwpck_require__(41143);
+/**
+ * This updates a React Native Expo project app.json file's main, ios and android
+ * versions. All values except the `android.versionCode` are standard semver
+ * version numbers. For the `android.versionCode`, the semver number is used as
+ * the basis for the `versionCode`.
+ */
+class AppJson extends default_1.DefaultUpdater {
+ constructor(options) {
+ super(options);
+ this.expoSDKVersion = options.expoSDKVersion;
+ }
+ /**
+ * Given initial file contents, return updated contents.
+ */
+ updateContent(content, logger = logger_1.logger) {
+ var _a, _b;
+ const parsed = JSON.parse(content);
+ logger.info(`updating Expo version from ${parsed.expo.version} to ${this.version}`);
+ parsed.expo.version = this.version.toString();
+ if ((_a = parsed.expo.ios) === null || _a === void 0 ? void 0 : _a.buildNumber) {
+ logger.info(`updating iOS version from ${parsed.expo.ios.buildNumber} to ${this.version}`);
+ parsed.expo.ios.buildNumber = this.version.toString();
+ }
+ if ((_b = parsed.expo.android) === null || _b === void 0 ? void 0 : _b.versionCode) {
+ // Android versionCode
+ // https://developer.android.com/studio/publish/versioning#appversioning
+ let expoMajorVersion = 0;
+ try {
+ expoMajorVersion = this.expoSDKVersion.major;
+ }
+ catch (e) {
+ // Rethrow with a nice error message.
+ throw new Error('Unable to determine the Expo SDK version for this project. Make sure that the expo package is installed for your project.');
+ }
+ // Implements the `versionCode` strategy described by Maxi Rosson
+ // @see https://medium.com/@maxirosson/versioning-android-apps-d6ec171cfd82
+ const versionCode = expoMajorVersion * 10000000 +
+ this.version.major * 10000 +
+ this.version.minor * 100 +
+ this.version.patch;
+ logger.info(`updating Android version from ${parsed.expo.android.versionCode} to ${versionCode}`);
+ parsed.expo.android.versionCode = versionCode;
+ }
+ return (0, json_stringify_1.jsonStringify)(parsed, content);
+ }
+}
+exports.AppJson = AppJson;
+//# sourceMappingURL=app-json.js.map
+
+/***/ }),
+
+/***/ 33700:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+// Copyright 2022 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.GenericJson = void 0;
+const jsonpath_plus_1 = __nccwpck_require__(2452);
+const json_stringify_1 = __nccwpck_require__(61480);
+const logger_1 = __nccwpck_require__(18792);
+const VERSION_REGEX = /(?<major>\d+)\.(?<minor>\d+)\.(?<patch>\d+)(-(?<preRelease>[\w.]+))?(\+(?<build>[-\w.]+))?/;
+class GenericJson {
+ constructor(jsonpath, version) {
+ this.jsonpath = jsonpath;
+ this.version = version;
+ }
+ /**
+ * Given initial file contents, return updated contents.
+ * @param {string} content The initial content
+ * @returns {string} The updated content
+ */
+ updateContent(content, logger = logger_1.logger) {
+ const data = JSON.parse(content);
+ (0, jsonpath_plus_1.JSONPath)({
+ resultType: 'all',
+ path: this.jsonpath,
+ json: data,
+ callback: (payload, _payloadType, _fullPayload) => {
+ if (typeof payload.value !== 'string') {
+ logger.warn(`No string in ${this.jsonpath}. Skipping.`);
+ return payload;
+ }
+ if (!payload.value.match(VERSION_REGEX)) {
+ logger.warn(`No version found in ${this.jsonpath}. Skipping.`);
+ return payload;
+ }
+ payload.parent[payload.parentProperty] = payload.parent[payload.parentProperty].replace(VERSION_REGEX, this.version.toString());
+ return payload;
+ },
+ });
+ return (0, json_stringify_1.jsonStringify)(data, content);
+ }
+}
+exports.GenericJson = GenericJson;
+//# sourceMappingURL=generic-json.js.map
+
+/***/ }),
+
+/***/ 75074:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
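// A worked example (not part of the generated bundle) of the Android
// versionCode arithmetic used by AppJson above, for a hypothetical Expo SDK
// major of 44 and app version 2.13.5:
const exampleVersionCode = 44 * 10000000 + 2 * 10000 + 13 * 100 + 5;
// => 440021305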
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GenericToml = void 0; +const jsonpath_plus_1 = __nccwpck_require__(2452); +const toml_edit_1 = __nccwpck_require__(42237); +const logger_1 = __nccwpck_require__(18792); +/** + * Updates TOML document according to given JSONPath. + * + * Note that used parser does reformat the document and removes all comments, + * and converts everything to pure TOML. + * If you want to retain formatting, use generic updater with comment hints. + */ +class GenericToml { + constructor(jsonpath, version) { + this.jsonpath = jsonpath; + this.version = version; + } + /** + * Given initial file contents, return updated contents. + * @param {string} content The initial content + * @returns {string} The updated content + */ + updateContent(content, logger = logger_1.logger) { + let data; + try { + data = (0, toml_edit_1.parseWith)(content); + } + catch (e) { + logger.warn('Invalid toml, cannot be parsed', e); + return content; + } + const pointers = (0, jsonpath_plus_1.JSONPath)({ + path: this.jsonpath, + json: data, + resultType: 'pointer', + }); + const paths = pointers.map(pointer => pointer.split('/').filter(Boolean)); + if (!paths || paths.length === 0) { + logger.warn(`No entries modified in ${this.jsonpath}`); + return content; + } + let processed = content; + paths.forEach(path => { + if (path[0] === '$') + path = path.slice(1); + processed = (0, toml_edit_1.replaceTomlValue)(processed, path, this.version.toString()); + }); + return processed; + } +} +exports.GenericToml = GenericToml; +//# sourceMappingURL=generic-toml.js.map + +/***/ }), + +/***/ 71636: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GenericXml = void 0; +const base_xml_1 = __nccwpck_require__(77490); +const xpath = __nccwpck_require__(65319); +class GenericXml extends base_xml_1.BaseXml { + constructor(xpath, version) { + super(); + this.xpath = xpath; + this.version = version; + } + updateDocument(document) { + const version = this.version.toString(); + let updated = false; + for (const node of xpath.select(this.xpath, document)) { + if (node.textContent !== version) { + node.textContent = version; + updated = true; + } + } + return updated; + } +} +exports.GenericXml = GenericXml; +//# sourceMappingURL=generic-xml.js.map + +/***/ }), + +/***/ 31861: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GenericYaml = void 0; +const jsonpath_plus_1 = __nccwpck_require__(2452); +const yaml = __nccwpck_require__(21917); +const logger_1 = __nccwpck_require__(18792); +const DOCUMENT_SEPARATOR = '---\n'; +/** + * Updates YAML document according to given JSONPath. + * + * Note that used parser does reformat the document and removes all comments, + * and converts everything to pure YAML (even JSON source). + * If you want to retain formatting, use generic updater with comment hints. + * + * When applied on multi-document file, it updates all documents. + */ +class GenericYaml { + constructor(jsonpath, version) { + this.jsonpath = jsonpath; + this.version = version; + } + /** + * Given initial file contents, return updated contents. + * @param {string} content The initial content + * @returns {string} The updated content + */ + updateContent(content, logger = logger_1.logger) { + // Parse possibly multi-document file + let docs; + try { + docs = yaml.loadAll(content, null, { json: true }); + } + catch (e) { + logger.warn('Invalid yaml, cannot be parsed', e); + return content; + } + // Update each document + let modified = false; + docs.forEach(data => { + (0, jsonpath_plus_1.JSONPath)({ + resultType: 'all', + path: this.jsonpath, + json: data, + callback: (payload, _payloadType, _fullPayload) => { + if (typeof payload.value !== 'string') { + logger.warn(`No string in ${this.jsonpath}. Skipping.`); + return payload; + } + modified = true; + payload.parent[payload.parentProperty] = this.version.toString(); + return payload; + }, + }); + }); + // If nothing was modified, return original content + if (!modified) { + logger.warn(`No entries modified in ${this.jsonpath}`); + return content; + } + // Stringify documents + if (docs.length === 1) { + // Single doc + return yaml.dump(docs[0]); + } + else { + // Multi-document, each document starts with separator + return docs.map(data => DOCUMENT_SEPARATOR + yaml.dump(data)).join(''); + } + } +} +exports.GenericYaml = GenericYaml; +//# sourceMappingURL=generic-yaml.js.map + +/***/ }), + +/***/ 62963: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
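// A minimal sketch (not part of the generated bundle) of GenericYaml above
// applied to a single-document file. The JSONPath and version are
// hypothetical; since updateContent only calls this.version.toString(), a
// stand-in version object is enough here. Note js-yaml re-dumps the
// document, so comments and original formatting are dropped.
const yamlUpdater = new GenericYaml('$.image.tag', { toString: () => '2.0.0' });
const updatedYaml = yamlUpdater.updateContent('image:\n  tag: 1.2.3\n');
// => 'image:\n  tag: 2.0.0\n'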
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.Generic = void 0;
+const default_1 = __nccwpck_require__(41143);
+const logger_1 = __nccwpck_require__(18792);
+const VERSION_REGEX = /(?<major>\d+)\.(?<minor>\d+)\.(?<patch>\d+)(-(?<preRelease>[\w.]+))?(\+(?<build>[-\w.]+))?/;
+const SINGLE_VERSION_REGEX = /\b\d+\b/;
+const INLINE_UPDATE_REGEX = /x-release-please-(?<scope>major|minor|patch|version)/;
+const BLOCK_START_REGEX = /x-release-please-start-(?<scope>major|minor|patch|version)/;
+const BLOCK_END_REGEX = /x-release-please-end/;
+/**
+ * The Generic updater looks for well known patterns and replaces
+ * content. The well known patterns are:
+ *
+ * 1. `x-release-please-version` if this string is found on the line,
+ *    then replace a semver-looking string on that line with the next
+ *    version
+ * 2. `x-release-please-major` if this string is found on the line,
+ *    then replace an integer looking value with the next version's
+ *    major
+ * 3. `x-release-please-minor` if this string is found on the line,
+ *    then replace an integer looking value with the next version's
+ *    minor
+ * 4. `x-release-please-patch` if this string is found on the line,
+ *    then replace an integer looking value with the next version's
+ *    patch
+ *
+ * You can also use a block-based replacement. Content between the
+ * opening `x-release-please-start-version` and `x-release-please-end` will
+ * be considered for version replacement. You can also open these blocks
+ * with `x-release-please-start-<major|minor|patch>` to replace single
+ * numbers
+ */
+class Generic extends default_1.DefaultUpdater {
+    constructor(options) {
+        var _a, _b, _c;
+        super(options);
+        this.inlineUpdateRegex = (_a = options.inlineUpdateRegex) !== null && _a !== void 0 ? _a : INLINE_UPDATE_REGEX;
+        this.blockStartRegex = (_b = options.blockStartRegex) !== null && _b !== void 0 ? _b : BLOCK_START_REGEX;
+        this.blockEndRegex = (_c = options.blockEndRegex) !== null && _c !== void 0 ? _c : BLOCK_END_REGEX;
+    }
+    /**
+     * Given initial file contents, return updated contents.
+     * @param {string} content The initial content
+     * @returns {string} The updated content
+     */
+    updateContent(content, logger = logger_1.logger) {
+        if (!content) {
+            return '';
+        }
+        const newLines = [];
+        let blockScope;
+        function replaceVersion(line, scope, version) {
+            switch (scope) {
+                case 'major':
+                    newLines.push(line.replace(SINGLE_VERSION_REGEX, `${version.major}`));
+                    return;
+                case 'minor':
+                    newLines.push(line.replace(SINGLE_VERSION_REGEX, `${version.minor}`));
+                    return;
+                case 'patch':
+                    newLines.push(line.replace(SINGLE_VERSION_REGEX, `${version.patch}`));
+                    return;
+                case 'version':
+                    newLines.push(line.replace(VERSION_REGEX, version.toString()));
+                    return;
+                default:
+                    logger.warn(`unknown block scope: ${scope}`);
+                    newLines.push(line);
+            }
+        }
+        content.split(/\r?\n/).forEach(line => {
+            var _a, _b;
+            let match = line.match(this.inlineUpdateRegex);
+            if (match) {
+                // replace inline versions
+                replaceVersion(line, (((_a = match.groups) === null || _a === void 0 ? void 0 : _a.scope) || 'version'), this.version);
+            }
+            else if (blockScope) {
+                // in a block, so try to replace versions
+                replaceVersion(line, blockScope, this.version);
+                if (line.match(this.blockEndRegex)) {
+                    blockScope = undefined;
+                }
+            }
+            else {
+                // look for block start line
+                match = line.match(this.blockStartRegex);
+                if (match) {
+                    if ((_b = match.groups) === null || _b === void 0 ? void 0 : _b.scope) {
+                        blockScope = match.groups.scope;
+                    }
+                    else {
+                        blockScope = 'version';
+                    }
+                }
+                newLines.push(line);
+            }
+        });
+        return newLines.join('\n');
+    }
+}
+exports.Generic = Generic;
+//# sourceMappingURL=generic.js.map
+
+/***/ }),
+
+/***/ 32735:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.VersionGo = void 0;
+const default_1 = __nccwpck_require__(41143);
+class VersionGo extends default_1.DefaultUpdater {
+    updateContent(content) {
+        return content.replace(/const Version = "[0-9]+\.[0-9]+\.[0-9]+(-\w+)?"/, `const Version = "${this.version.toString()}"`);
+    }
+}
+exports.VersionGo = VersionGo;
+//# sourceMappingURL=version-go.js.map
+
+/***/ }),
+
+/***/ 57947:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.ChartYaml = void 0;
+const yaml = __nccwpck_require__(44083);
+const logger_1 = __nccwpck_require__(18792);
+const default_1 = __nccwpck_require__(41143);
+/**
+ * Updates a Helm chart.yaml file.
+ */
+class ChartYaml extends default_1.DefaultUpdater {
+    /**
+     * Given initial file contents, return updated contents.
+     * @param {string} content The initial content
+     * @returns {string} The updated content
+     */
+    updateContent(content, logger = logger_1.logger) {
+        const chart = yaml.parseDocument(content);
+        if (chart === null || chart === undefined) {
+            return '';
+        }
+        const oldVersion = chart.get('version');
+        logger.info(`updating from ${oldVersion} to ${this.version}`);
+        chart.set('version', this.version.toString());
+        return chart.toString();
+    }
+}
+exports.ChartYaml = ChartYaml;
+//# sourceMappingURL=chart-yaml.js.map
+
+/***/ }),
+
+/***/ 24678:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+// Copyright 2022 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.JavaReleased = void 0;
+const generic_1 = __nccwpck_require__(62963);
+const INLINE_UPDATE_REGEX = /x-release-please-released-(?<scope>major|minor|patch|version)/;
+const BLOCK_START_REGEX = /x-release-please-released-start-(?<scope>major|minor|patch|version)/;
+const BLOCK_END_REGEX = /x-release-please-released-end/;
+const REGEX_OPTIONS = {
+    inlineUpdateRegex: INLINE_UPDATE_REGEX,
+    blockStartRegex: BLOCK_START_REGEX,
+    blockEndRegex: BLOCK_END_REGEX,
+};
+/**
+ * The JavaReleased updater is used only when updating to stable (not SNAPSHOT)
+ * versions. It looks for well known patterns and replaces content.
+ * The well known patterns are:
+ *
+ * 1. `x-release-please-released-version` if this string is found on the line,
+ *    then replace a semver-looking string on that line with the next
+ *    version
+ * 2. `x-release-please-released-major` if this string is found on the line,
+ *    then replace an integer looking value with the next version's
+ *    major
+ * 3. `x-release-please-released-minor` if this string is found on the line,
+ *    then replace an integer looking value with the next version's
+ *    minor
+ * 4. `x-release-please-released-patch` if this string is found on the line,
+ *    then replace an integer looking value with the next version's
+ *    patch
+ *
+ * You can also use a block-based replacement. Content between the
+ * opening `x-release-please-released-start-version` and `x-release-please-released-end` will
+ * be considered for version replacement. You can also open these blocks
+ * with `x-release-please-released-start-<major|minor|patch>` to replace single
+ * numbers
+ */
+class JavaReleased extends generic_1.Generic {
+    constructor(options) {
+        super({
+            ...REGEX_OPTIONS,
+            ...options,
+        });
+    }
+}
+exports.JavaReleased = JavaReleased;
+//# sourceMappingURL=java-released.js.map
+
+/***/ }),
+
+/***/ 3972:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+// Copyright 2019 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.JavaUpdate = void 0;
+const default_1 = __nccwpck_require__(41143);
+const logger_1 = __nccwpck_require__(18792);
+const INLINE_UPDATE_REGEX = /{x-version-update:([\w\-_]+):(current|released)}/;
+const BLOCK_START_REGEX = /{x-version-update-start:([\w\-_]+):(current|released)}/;
+const BLOCK_END_REGEX = /{x-version-update-end}/;
+const VERSION_REGEX = /\d+\.\d+\.\d+(-\w+(\.\d+)?)?(-SNAPSHOT)?/;
+/**
+ * Updates a file annotated with region markers. These region markers are
+ * either denoted inline with `{x-version-update:<component>:current|released}`
+ * or with a `{x-version-update-start:<component>:current|released}` and `{x-version-update-end}`.
+ */
+class JavaUpdate extends default_1.DefaultUpdater {
+    constructor(options) {
+        super(options);
+        this.isSnapshot = !!options.isSnapshot;
+    }
+    /**
+     * Given initial file contents, return updated contents.
+     * @param {string} content The initial content
+     * @returns {string} The updated content
+     */
+    updateContent(content, logger = logger_1.logger) {
+        if (!this.versionsMap) {
+            logger.warn('missing versions map');
+            return content;
+        }
+        const newLines = [];
+        let blockPackageName = null;
+        content.split(/\r?\n/).forEach(line => {
+            let match = line.match(INLINE_UPDATE_REGEX);
+            if (match && (!this.isSnapshot || match[2] === 'current')) {
+                const newVersion = this.versionsMap.get(match[1]);
+                if (newVersion) {
+                    newLines.push(line.replace(VERSION_REGEX, newVersion.toString()));
+                }
+                else {
+                    newLines.push(line);
+                }
+            }
+            else if (blockPackageName) {
+                const newVersion = this.versionsMap.get(blockPackageName);
+                if (newVersion) {
+                    newLines.push(line.replace(VERSION_REGEX, newVersion.toString()));
+                }
+                else {
+                    newLines.push(line);
+                }
+                if (line.match(BLOCK_END_REGEX)) {
+                    blockPackageName = null;
+                }
+            }
+            else {
+                match = line.match(BLOCK_START_REGEX);
+                if (match && (!this.isSnapshot || match[2] === 'current')) {
+                    blockPackageName = match[1];
+                }
+                newLines.push(line);
+            }
+        });
+        return newLines.join('\n');
+    }
+}
+exports.JavaUpdate = JavaUpdate;
+//# sourceMappingURL=java-update.js.map
+
+/***/ }),
+
+/***/ 55023:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+// Copyright 2022 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.parseDependencyNode = exports.PomXml = void 0;
+const base_xml_1 = __nccwpck_require__(77490);
+const xpath = __nccwpck_require__(65319);
+const XPATH_PROJECT_VERSION = '/*[local-name()="project"]/*[local-name()="version"]';
+const XPATH_PROJECT_PARENT_VERSION = '/*[local-name()="project"]/*[local-name()="parent"]/*[local-name()="version"]';
+const XPATH_PROJECT_DEPENDENCIES = '/*[local-name()="project"]/*[local-name()="dependencies"]/*[local-name()="dependency"]';
+const XPATH_PROJECT_DEPENDENCY_MANAGEMENT_DEPENDENCIES = '/*[local-name()="project"]/*[local-name()="dependencyManagement"]/*[local-name()="dependencies"]/*[local-name()="dependency"]';
+/**
+ * Updates versions in pom.xml files.
+ *
+ * If present, it updates the project.version element.
+ * If project.version is not present, it updates project.parent.version.
+ */
+class PomXml extends base_xml_1.BaseXml {
+    constructor(version, dependencyVersions) {
+        super();
+        this.version = version;
+        this.dependencyVersions = dependencyVersions;
+    }
+    updateDocument(document) {
+        // NOTE: this intentionally ignores namespaces - let Maven decide what's valid and what's not
+        const updates = [];
+        // Update project.version
+        const projectVersionNodes = xpath.select(XPATH_PROJECT_VERSION, document);
+        if (projectVersionNodes.length) {
+            // If found, queue an update; actual changes are detected when the nodes are updated
+            updates.push({
+                nodes: projectVersionNodes,
+                version: this.version,
+            });
+        }
+        else {
+            // Try updating project.parent.version
+            const parentVersionNodes = xpath.select(XPATH_PROJECT_PARENT_VERSION, document);
+            updates.push({
+                nodes: parentVersionNodes,
+                version: this.version,
+            });
+        }
+        if (this.dependencyVersions) {
+            updates.push(...this.dependencyUpdates(document, this.dependencyVersions));
+        }
+        let updated = false;
+        for (const { nodes, version } of updates) {
+            updated = PomXml.updateNodes(nodes, version.toString()) || updated;
+        }
+        return updated;
+    }
+    dependencyUpdates(document, updatedVersions) {
+        const updates = [];
+        const dependencyNodes = xpath.select(XPATH_PROJECT_DEPENDENCIES, document);
+        const dependencyManagementNodes = xpath.select(XPATH_PROJECT_DEPENDENCY_MANAGEMENT_DEPENDENCIES, document);
+        // try to update dependency versions
+        for (const [name, version] of updatedVersions.entries()) {
+            // look under:
+            // - project/dependencies
+            // - project/dependencyManagement/dependencies
+            const [groupId, artifactId] = name.split(':');
+            for (const nodeGroup of [dependencyNodes, dependencyManagementNodes]) {
+                const nodes = nodeGroup.reduce((collection, node) => {
+                    const dependencyNode = parseDependencyNode(node);
+                    if (dependencyNode.groupId === groupId &&
+                        dependencyNode.artifactId === artifactId &&
+                        dependencyNode.version !== version.toString() &&
+                        dependencyNode.versionNode) {
+                        collection.push(dependencyNode.versionNode);
+                    }
+                    return collection;
+                }, []);
+                if (nodes.length) {
+                    updates.push({
+                        name,
+                        nodes,
+                        version,
+                    });
+                }
+            }
+        }
+        return updates;
+    }
+    static updateNodes(nodes, value) {
+        const toUpdate = nodes.filter(node => node.textContent !== value);
+        toUpdate.forEach(node => (node.textContent = value));
+        return toUpdate.length > 0;
+    }
+}
+exports.PomXml = PomXml;
+function parseDependencyNode(node) {
+    var _a, _b, _c, _d;
+    let groupId = '';
+    let artifactId = '';
+    let scope;
+    let version;
+    let versionNode;
+    for (let i = 0; i < node.childNodes.length; i++) {
+        const childNode = node.childNodes.item(i);
+        if (childNode.nodeName === 'groupId') {
+            groupId = ((_a = childNode.firstChild) === null || _a === void 0 ? void 0 : _a.textContent) || '';
+        }
+        else if (childNode.nodeName === 'artifactId') {
+            artifactId = ((_b = childNode.firstChild) === null || _b === void 0 ? void 0 : _b.textContent) || '';
+        }
+        else if (childNode.nodeName === 'scope') {
+            scope = ((_c = childNode.firstChild) === null || _c === void 0 ? void 0 : _c.textContent) || '';
+        }
+        else if (childNode.nodeName === 'version') {
+            version = ((_d = childNode.firstChild) === null || _d === void 0 ?
void 0 : _d.textContent) || ''; + versionNode = childNode; + } + } + return { + groupId, + artifactId, + scope, + version, + versionNode, + }; +} +exports.parseDependencyNode = parseDependencyNode; +//# sourceMappingURL=pom-xml.js.map + +/***/ }), + +/***/ 45153: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.VersionsManifest = void 0; +const java_update_1 = __nccwpck_require__(3972); +const version_1 = __nccwpck_require__(25112); +const logger_1 = __nccwpck_require__(18792); +/** + * Updates a versions.txt file which contains current versions of + * components within a Java repo. + * @see https://github.com/googleapis/java-asset/blob/main/versions.txt + */ +class VersionsManifest extends java_update_1.JavaUpdate { + /** + * Given initial file contents, return updated contents. + * @param {string} content The initial content + * @returns {string} The updated content + */ + updateContent(content, logger = logger_1.logger) { + if (!this.versionsMap) { + logger.warn('missing versions map'); + return content; + } + let newContent = content; + this.versionsMap.forEach((version, packageName) => { + newContent = this.updateSingleVersion(newContent, packageName, version.toString()); + }); + return newContent; + } + updateSingleVersion(content, packageName, version) { + const newLines = []; + content.split(/\r?\n/).forEach(line => { + if (version.includes('SNAPSHOT')) { + newLines.push(line.replace(new RegExp(`^${packageName}:(.*):(.*)`, 'g'), `${packageName}:$1:${version}`)); + } + else { + newLines.push(line.replace(new RegExp(`^${packageName}:(.*):(.*)`, 'g'), `${packageName}:${version}:${version}`)); + } + }); + return newLines.join('\n'); + } + static parseVersions(content) { + const versions = new Map(); + content.split(/\r?\n/).forEach(line => { + const match = line.match(/^([\w\-_]+):([^:]+):([^:]+)/); + if (match) { + versions.set(match[1], version_1.Version.parse(match[2])); + } + }); + return versions; + } + static needsSnapshot(content) { + return !content.split(/\r?\n/).some(line => { + return !!line.match(/^[\w\-_]+:.+:.+-SNAPSHOT/); + }); + } +} +exports.VersionsManifest = VersionsManifest; +//# sourceMappingURL=versions-manifest.js.map + +/***/ }), + +/***/ 75678: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and
+// limitations under the License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.KRMBlueprintVersion = void 0;
+const logger_1 = __nccwpck_require__(18792);
+const default_1 = __nccwpck_require__(41143);
+/**
+ * Updates a KRM blueprint yaml file.
+ */
+class KRMBlueprintVersion extends default_1.DefaultUpdater {
+    /**
+     * Given initial file contents, return updated contents.
+     * @param {string} content The initial content
+     * @returns {string} The updated content
+     */
+    updateContent(content, logger = logger_1.logger) {
+        var _a;
+        // js-yaml (and the kpt TS SDK) does not preserve comments, hence the regex match.
+        // Match strings starting with cnrm/ and ending with a semver, to prevent wrong updates like a pinned config.kubernetes.io/function
+        let matchRegex = '(cnrm/.*/)(v[0-9]+.[0-9]+.[0-9]+)+(-\\w+)?';
+        // if an explicit previous version is given, match only that version
+        if ((_a = this.versionsMap) === null || _a === void 0 ? void 0 : _a.has('previousVersion')) {
+            matchRegex = `(cnrm/.*/)(v${this.versionsMap.get('previousVersion')})+(-\\w+)?`;
+        }
+        const oldVersion = content.match(new RegExp(matchRegex));
+        if (oldVersion) {
+            logger.info(`updating from ${oldVersion[2]} to v${this.version}`);
+        }
+        const newVersion = content.replace(new RegExp(matchRegex, 'g'), `$1v${this.version}`);
+        return newVersion;
+    }
+}
+exports.KRMBlueprintVersion = KRMBlueprintVersion;
+//# sourceMappingURL=krm-blueprint-version.js.map
+
+/***/ }),
+
+/***/ 15189:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+// Copyright 2019 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.updateDependencies = exports.NPM_PROTOCOL_REGEXP = exports.newVersionWithRange = exports.PackageJson = void 0;
+const json_stringify_1 = __nccwpck_require__(61480);
+const logger_1 = __nccwpck_require__(18792);
+const default_1 = __nccwpck_require__(41143);
+/**
+ * This updates a Node.js package.json file's main version.
+ */
+class PackageJson extends default_1.DefaultUpdater {
+    constructor(options) {
+        super(options);
+        this.updatePeerDependencies = false;
+        this.updatePeerDependencies = options.updatePeerDependencies || false;
+    }
+    /**
+     * Given initial file contents, return updated contents.
+     * @param {string} content The initial content
+     * @param logger
+     * @returns {string} The updated content
+     */
+    updateContent(content, logger = logger_1.logger) {
+        const parsed = JSON.parse(content);
+        logger.info(`updating from ${parsed.version} to ${this.version}`);
+        parsed.version = this.version.toString();
+        // If additional dependency versions are specified, update those dependency
+        // versions while preserving any valid version range prefixes.
+        if (this.versionsMap) {
+            if (parsed.dependencies) {
+                updateDependencies(parsed.dependencies, this.versionsMap);
+            }
+            if (parsed.devDependencies) {
+                updateDependencies(parsed.devDependencies, this.versionsMap);
+            }
+            if (parsed.peerDependencies && this.updatePeerDependencies) {
+                updateDependencies(parsed.peerDependencies, this.versionsMap);
+            }
+            if (parsed.optionalDependencies) {
+                updateDependencies(parsed.optionalDependencies, this.versionsMap);
+            }
+        }
+        return (0, json_stringify_1.jsonStringify)(parsed, content);
+    }
+}
+exports.PackageJson = PackageJson;
+var SUPPORTED_RANGE_PREFIXES;
+(function (SUPPORTED_RANGE_PREFIXES) {
+    SUPPORTED_RANGE_PREFIXES["CARET"] = "^";
+    SUPPORTED_RANGE_PREFIXES["TILDE"] = "~";
+    SUPPORTED_RANGE_PREFIXES["EQUAL_OR_GREATER_THAN"] = ">=";
+    SUPPORTED_RANGE_PREFIXES["EQUAL_OR_LESS_THAN"] = "<=";
+    SUPPORTED_RANGE_PREFIXES["GREATER_THAN"] = ">";
+    SUPPORTED_RANGE_PREFIXES["LESS_THAN"] = "<";
+})(SUPPORTED_RANGE_PREFIXES || (SUPPORTED_RANGE_PREFIXES = {}));
+function detectRangePrefix(version) {
+    return (Object.values(SUPPORTED_RANGE_PREFIXES).find(supportedRangePrefix => version.startsWith(supportedRangePrefix)) || '');
+}
+/**
+ * Helper to coerce a new version value into a version range that preserves the
+ * version range prefix of the original version.
+ * @param {string} oldVersion Old semver with range
+ * @param {Version} newVersion The new version to update with
+ */
+function newVersionWithRange(oldVersion, newVersion) {
+    const prefix = detectRangePrefix(oldVersion);
+    if (prefix) {
+        return `${prefix}${newVersion}`;
+    }
+    return newVersion.toString();
+}
+exports.newVersionWithRange = newVersionWithRange;
+exports.NPM_PROTOCOL_REGEXP = /^[a-z]+:/;
+/**
+ * Helper function to update dependency versions for all new versions specified
+ * in the updated versions map. Note that this mutates the existing input.
+ * @param {Record<string, string>} dependencies Entries in package.json dependencies
+ * where the key is the dependency name and the value is the dependency range
+ * @param {VersionsMap} updatedVersions Map of new versions (without dependency range prefixes)
+ */
+function updateDependencies(dependencies, updatedVersions) {
+    for (const depName of Object.keys(dependencies)) {
+        const oldVersion = dependencies[depName];
+        if (exports.NPM_PROTOCOL_REGEXP.test(oldVersion)) {
+            continue;
+        }
+        const newVersion = updatedVersions.get(depName);
+        if (newVersion) {
+            dependencies[depName] = newVersionWithRange(oldVersion, newVersion);
+        }
+    }
+}
+exports.updateDependencies = updateDependencies;
+//# sourceMappingURL=package-json.js.map
+
+/***/ }),
+
+/***/ 55554:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
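+// Illustrative sketch of the range-preserving helpers above (hypothetical
+// values; Version.parse is the version helper used elsewhere in this bundle):
+//
+//   newVersionWithRange('^1.2.3', Version.parse('2.0.0'));  // => '^2.0.0'
+//   newVersionWithRange('>=1.0.0', Version.parse('2.0.0')); // => '>=2.0.0'
+//   const deps = { left: '~1.0.0', right: 'workspace:*' };
+//   updateDependencies(deps, new Map([['left', Version.parse('1.1.0')]]));
+//   // deps.left => '~1.1.0'; deps.right is left alone because its
+//   // 'workspace:' protocol prefix matches NPM_PROTOCOL_REGEXP.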
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.PackageLockJson = void 0;
+const json_stringify_1 = __nccwpck_require__(61480);
+const logger_1 = __nccwpck_require__(18792);
+const package_json_1 = __nccwpck_require__(15189);
+/**
+ * Updates a Node.js package-lock.json file's version and '' package
+ * version (for a v2 lock file).
+ */
+class PackageLockJson {
+    constructor(options) {
+        this.version = options.version;
+        this.versionsMap = options.versionsMap;
+    }
+    updateContent(content, logger = logger_1.logger) {
+        const parsed = JSON.parse(content);
+        if (this.version) {
+            logger.info(`updating from ${parsed.version} to ${this.version}`);
+            parsed.version = this.version.toString();
+        }
+        if (parsed.lockfileVersion === 2 || parsed.lockfileVersion === 3) {
+            if (this.version) {
+                parsed.packages[''].version = this.version.toString();
+            }
+            if (this.versionsMap) {
+                this.versionsMap.forEach((version, name) => {
+                    let pkg = parsed.packages['node_modules/' + name];
+                    if (!pkg) {
+                        return;
+                    }
+                    // @see https://docs.npmjs.com/cli/v10/configuring-npm/package-lock-json#packages
+                    if (pkg.link && pkg.resolved) {
+                        pkg = parsed.packages[pkg.resolved];
+                        if (!pkg) {
+                            return;
+                        }
+                    }
+                    pkg.version = version.toString();
+                    if (pkg.dependencies) {
+                        (0, package_json_1.updateDependencies)(pkg.dependencies, this.versionsMap);
+                    }
+                    if (pkg.devDependencies) {
+                        (0, package_json_1.updateDependencies)(pkg.devDependencies, this.versionsMap);
+                    }
+                    if (pkg.peerDependencies) {
+                        (0, package_json_1.updateDependencies)(pkg.peerDependencies, this.versionsMap);
+                    }
+                    if (pkg.optionalDependencies) {
+                        (0, package_json_1.updateDependencies)(pkg.optionalDependencies, this.versionsMap);
+                    }
+                });
+            }
+        }
+        if (this.versionsMap) {
+            for (const [, obj] of Object.entries(parsed.packages)) {
+                if (!obj.name) {
+                    continue;
+                }
+                const ver = this.versionsMap.get(obj.name);
+                if (ver) {
+                    obj.version = ver.toString();
+                }
+            }
+        }
+        return (0, json_stringify_1.jsonStringify)(parsed, content);
+    }
+}
+exports.PackageLockJson = PackageLockJson;
+//# sourceMappingURL=package-lock-json.js.map
+
+/***/ }),
+
+/***/ 56196:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+// Copyright 2019 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.SamplesPackageJson = void 0;
+const logger_1 = __nccwpck_require__(18792);
+const json_stringify_1 = __nccwpck_require__(61480);
+const default_1 = __nccwpck_require__(41143);
+/**
+ * Updates a Node.js package.json file with the library in the
+ * dependencies section.
+ */
+class SamplesPackageJson extends default_1.DefaultUpdater {
+    /**
+     * Instantiate a new SamplesPackageJson updater
+     * @param options
+     */
+    constructor(options) {
+        super(options);
+        this.packageName = options.packageName;
+    }
+    /**
+     * Given initial file contents, return updated contents.
+ * @param {string} content The initial content + * @returns {string} The updated content + */ + updateContent(content, logger = logger_1.logger) { + const parsed = JSON.parse(content); + if (!parsed.dependencies || !parsed.dependencies[this.packageName]) { + return content; + } + logger.info(`updating ${this.packageName} dependency from ${parsed.dependencies[this.packageName]} to ^${this.version}`); + parsed.dependencies[this.packageName] = `^${this.version}`; + return (0, json_stringify_1.jsonStringify)(parsed, content); + } +} +exports.SamplesPackageJson = SamplesPackageJson; +//# sourceMappingURL=samples-package-json.js.map + +/***/ }), + +/***/ 47944: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.DuneProject = void 0; +const logger_1 = __nccwpck_require__(18792); +const default_1 = __nccwpck_require__(41143); +/** + * Updates an OCaml dune-project file. + */ +class DuneProject extends default_1.DefaultUpdater { + /** + * Given initial file contents, return updated contents. + * @param {string} content The initial content + * @returns {string} The updated content + */ + updateContent(content, logger = logger_1.logger) { + const oldVersion = content.match(/^\(version ([A-Za-z0-9_\-+.~]+)\)$/m); + if (oldVersion) { + logger.info(`updating from ${oldVersion[1]} to ${this.version}`); + } + return content.replace(/^\(version ([A-Za-z0-9_\-+.~]+)\)$/m, `(version ${this.version})`); + } +} +exports.DuneProject = DuneProject; +//# sourceMappingURL=dune-project.js.map + +/***/ }), + +/***/ 93366: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.EsyJson = void 0; +const logger_1 = __nccwpck_require__(18792); +const json_stringify_1 = __nccwpck_require__(61480); +const default_1 = __nccwpck_require__(41143); +/** + * Updates an OCaml esy.json file. + */ +class EsyJson extends default_1.DefaultUpdater { + /** + * Given initial file contents, return updated contents. 
+ * @param {string} content The initial content + * @returns {string} The updated content + */ + updateContent(content, logger = logger_1.logger) { + const parsed = JSON.parse(content); + logger.info(`updating from ${parsed.version} to ${this.version}`); + parsed.version = this.version.toString(); + return (0, json_stringify_1.jsonStringify)(parsed, content); + } +} +exports.EsyJson = EsyJson; +//# sourceMappingURL=esy-json.js.map + +/***/ }), + +/***/ 63454: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Opam = void 0; +const logger_1 = __nccwpck_require__(18792); +const default_1 = __nccwpck_require__(41143); +/** + * Updates an OCaml .opam file + */ +class Opam extends default_1.DefaultUpdater { + /** + * Given initial file contents, return updated contents. + * @param {string} content The initial content + * @returns {string} The updated content + */ + updateContent(content, logger = logger_1.logger) { + const oldVersion = content.match(/^version: "([A-Za-z0-9_\-+.~]+)"$/m); + if (oldVersion) { + logger.info(`updating from ${oldVersion[1]} to ${this.version}`); + } + return content.replace(/^version: "[A-Za-z0-9_\-+.~]+"$/m, `version: "${this.version}"`); + } +} +exports.Opam = Opam; +//# sourceMappingURL=opam.js.map + +/***/ }), + +/***/ 35752: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.PHPClientVersion = void 0; +const default_1 = __nccwpck_require__(41143); +/** + * Updates a php file that has a constant VERSION defined. + */ +class PHPClientVersion extends default_1.DefaultUpdater { + /** + * Given initial file contents, return updated contents. 
+ * @param {string} content The initial content + * @returns {string} The updated content + */ + updateContent(content) { + return content.replace(/const VERSION = '[0-9]+\.[0-9]+\.[0-9]+'/, `const VERSION = '${this.version}'`); + } +} +exports.PHPClientVersion = PHPClientVersion; +//# sourceMappingURL=php-client-version.js.map + +/***/ }), + +/***/ 44955: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.RootComposerUpdatePackages = void 0; +const logger_1 = __nccwpck_require__(18792); +const json_stringify_1 = __nccwpck_require__(61480); +const default_1 = __nccwpck_require__(41143); +/** + * Updates a root composer.json + */ +class RootComposerUpdatePackages extends default_1.DefaultUpdater { + /** + * Given initial file contents, return updated contents. + * @param {string} content The initial content + * @returns {string} The updated content + */ + updateContent(content, logger = logger_1.logger) { + if (!this.version && (!this.versionsMap || this.versionsMap.size === 0)) { + logger.info('no updates necessary'); + return content; + } + const parsed = JSON.parse(content); + if (parsed['version']) { + const fromVersion = parsed['version']; + const toVersion = this.version.toString() || '1.0.0'; + parsed['version'] = toVersion; + logger.info(`updating "version" from ${fromVersion} to ${toVersion}`); + } + if (this.versionsMap) { + for (const [key, version] of this.versionsMap.entries()) { + const toVersion = version.toString() || '1.0.0'; + let fromVersion; + if (parsed.replace) { + fromVersion = parsed.replace[key]; + parsed.replace[key] = toVersion; + } + if (parsed[key]) { + fromVersion !== null && fromVersion !== void 0 ? fromVersion : (fromVersion = parsed[key]); + parsed[key] = toVersion; + } + logger.info(`updating ${key} from ${fromVersion} to ${toVersion}`); + } + } + return (0, json_stringify_1.jsonStringify)(parsed, content); + } +} +exports.RootComposerUpdatePackages = RootComposerUpdatePackages; +//# sourceMappingURL=root-composer-update-packages.js.map + +/***/ }), + +/***/ 30952: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
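+// Illustrative sketch of the composer updater above (hypothetical input;
+// Version.parse is the version helper used elsewhere in this bundle):
+//
+//   const updater = new RootComposerUpdatePackages({ version: Version.parse('1.1.0') });
+//   updater.updateContent('{"name":"acme/widget","version":"1.0.0"}');
+//   // => the "version" field becomes "1.1.0"; jsonStringify aims to
+//   // preserve the formatting of the original document.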
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.PyProjectToml = exports.parsePyProject = void 0;
+const TOML = __nccwpck_require__(62901);
+const logger_1 = __nccwpck_require__(18792);
+const toml_edit_1 = __nccwpck_require__(42237);
+const default_1 = __nccwpck_require__(41143);
+function parsePyProject(content) {
+    return TOML.parse(content);
+}
+exports.parsePyProject = parsePyProject;
+/**
+ * Updates a pyproject.toml file
+ */
+class PyProjectToml extends default_1.DefaultUpdater {
+    /**
+     * Given initial file contents, return updated contents.
+     * @param {string} content The initial content
+     * @returns {string} The updated content
+     */
+    updateContent(content, logger = logger_1.logger) {
+        var _a;
+        const parsed = parsePyProject(content);
+        const project = parsed.project || ((_a = parsed.tool) === null || _a === void 0 ? void 0 : _a.poetry);
+        if (!(project === null || project === void 0 ? void 0 : project.version)) {
+            // Warn and skip the update if the version is dynamically generated.
+            if ((project === null || project === void 0 ? void 0 : project.dynamic) && project.dynamic.includes('version')) {
+                const msg = "dynamic version found in 'pyproject.toml'. Skipping update.";
+                logger.warn(msg);
+                return content;
+            }
+            const msg = 'invalid file';
+            logger.error(msg);
+            throw new Error(msg);
+        }
+        return (0, toml_edit_1.replaceTomlValue)(content, (parsed.project ? ['project'] : ['tool', 'poetry']).concat('version'), this.version.toString());
+    }
+}
+exports.PyProjectToml = PyProjectToml;
+//# sourceMappingURL=pyproject-toml.js.map
+
+/***/ }),
+
+/***/ 4064:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+// Copyright 2019 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.PythonFileWithVersion = void 0;
+const default_1 = __nccwpck_require__(41143);
+/**
+ * Updates a Python file with a __version__ attribute.
+ */
+class PythonFileWithVersion extends default_1.DefaultUpdater {
+    /**
+     * Given initial file contents, return updated contents.
+     * @param {string} content The initial content
+     * @returns {string} The updated content
+     */
+    updateContent(content) {
+        return content.replace(/(__version__ ?= ?["'])[0-9]+\.[0-9]+\.[0-9]+(?:-\w+)?(["'])/, `$1${this.version}$2`);
+    }
+}
+exports.PythonFileWithVersion = PythonFileWithVersion;
+//# sourceMappingURL=python-file-with-version.js.map
+
+/***/ }),
+
+/***/ 32650:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+// Copyright 2019 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.SetupCfg = void 0; +const default_1 = __nccwpck_require__(41143); +/** + * Updates a setup.cfg file + */ +class SetupCfg extends default_1.DefaultUpdater { + /** + * Given initial file contents, return updated contents. + * @param {string} content The initial content + * @returns {string} The updated content + */ + updateContent(content) { + return content.replace(/(version ?= ?)[0-9]+\.[0-9]+\.[0-9]+(?:-\w+)?/, `$1${this.version}`); + } +} +exports.SetupCfg = SetupCfg; +//# sourceMappingURL=setup-cfg.js.map + +/***/ }), + +/***/ 14874: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.SetupPy = void 0; +const default_1 = __nccwpck_require__(41143); +/** + * Updates a setup.py file. + */ +class SetupPy extends default_1.DefaultUpdater { + /** + * Given initial file contents, return updated contents. + * @param {string} content The initial content + * @returns {string} The updated content + */ + updateContent(content) { + return content.replace(/(version ?= ?["'])[0-9]+\.[0-9]+\.[0-9]+(?:-\w+)?(["'])/, `$1${this.version}$2`); + } +} +exports.SetupPy = SetupPy; +//# sourceMappingURL=setup-py.js.map + +/***/ }), + +/***/ 44068: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.RawContent = void 0; +/** + * This updater ignores previous content and writes the provided + * content verbatim. + */ +class RawContent { + /** + * Create a new RawContent instance + * @param {string} rawContent The raw content to set as the contents. + */ + constructor(rawContent) { + this.rawContent = rawContent; + } + /** + * Given initial file contents, return updated contents. 
+     * @param {string} content The initial content
+     * @returns {string} The updated content
+     */
+    updateContent(_content) {
+        return this.rawContent;
+    }
+}
+exports.RawContent = RawContent;
+//# sourceMappingURL=raw-content.js.map
+
+/***/ }),
+
+/***/ 90094:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.ReleasePleaseManifest = void 0;
+const json_stringify_1 = __nccwpck_require__(61480);
+const default_1 = __nccwpck_require__(41143);
+class ReleasePleaseManifest extends default_1.DefaultUpdater {
+    updateContent(content) {
+        const parsed = content ? JSON.parse(content) : {};
+        for (const [path, version] of this.versionsMap) {
+            parsed[path] = version.toString();
+        }
+        if (content) {
+            return (0, json_stringify_1.jsonStringify)(parsed, content);
+        }
+        else {
+            return JSON.stringify(parsed, null, 2);
+        }
+    }
+}
+exports.ReleasePleaseManifest = ReleasePleaseManifest;
+//# sourceMappingURL=release-please-manifest.js.map
+
+/***/ }),
+
+/***/ 95138:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+// Copyright 2022 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.resolveRubyGemfileLockVersion = exports.stringifyRubyVersion = exports.RUBY_VERSION_REGEX = void 0;
+// Ruby gem semver strings use a `.` separator for prereleases rather than `-`.
+// See https://guides.rubygems.org/patterns/
+exports.RUBY_VERSION_REGEX = /((\d+).(\d)+.(\d+)(.\w+.*)?)/g;
+/**
+ * Stringify a version to a ruby compatible version string
+ *
+ * @param version The version to stringify
+ * @param useDotPrePreleaseSeperator Use a `.` separator for prereleases rather than `-`
+ * @returns a ruby compatible version string
+ */
+function stringifyRubyVersion(version, useDotPrePreleaseSeperator = false) {
+    if (!useDotPrePreleaseSeperator) {
+        return version.toString();
+    }
+    return `${version.major}.${version.minor}.${version.patch}${version.preRelease ? `.${version.preRelease}` : ''}`;
+}
+exports.stringifyRubyVersion = stringifyRubyVersion;
+/**
+ * This function mimics Gem::Version parsing of version semver strings
+ *
+ * @param versionString The version string to resolve
+ * @returns A Gem::Version compatible version string
+ */
+function resolveRubyGemfileLockVersion(versionString) {
+    // Replace `-` with `.pre.` as per ruby gem parsing
+    // See https://github.com/rubygems/rubygems/blob/master/lib/rubygems/version.rb#L229
+    return versionString.replace(/-/g, '.pre.');
+}
+exports.resolveRubyGemfileLockVersion = resolveRubyGemfileLockVersion;
+//# sourceMappingURL=common.js.map
+
+/***/ }),
+
+/***/ 21147:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+// Copyright 2022 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.GemfileLock = exports.buildGemfileLockVersionRegex = void 0;
+const default_1 = __nccwpck_require__(41143);
+const common_1 = __nccwpck_require__(95138);
+/**
+ * Builds a regex matching a gem version in a Gemfile.lock file.
+ * @example
+ * rails (7.0.1)
+ * rails (7.0.1.alpha1)
+ */
+function buildGemfileLockVersionRegex(gemName) {
+    return new RegExp(`\\s*${gemName} \\(${common_1.RUBY_VERSION_REGEX.source}\\)`);
+}
+exports.buildGemfileLockVersionRegex = buildGemfileLockVersionRegex;
+/**
+ * Updates a Gemfile.lock file, which is expected to have a local path version string.
+ */
+class GemfileLock extends default_1.DefaultUpdater {
+    constructor(options) {
+        super(options);
+        this.gemName = options.gemName;
+    }
+    /**
+     * Given initial file contents, return updated contents.
+     * @param {string} content The initial content
+     * @returns {string} The updated content
+     */
+    updateContent(content) {
+        if (!this.gemName) {
+            return content;
+        }
+        // Bundler will convert 1.0.0-alpha1 to 1.0.0.pre.alpha1, so we need to
+        // do the same here.
+        const versionString = (0, common_1.resolveRubyGemfileLockVersion)(this.version.toString());
+        return content.replace(buildGemfileLockVersionRegex(this.gemName), `${this.gemName} (${versionString})`);
+    }
+}
+exports.GemfileLock = GemfileLock;
+//# sourceMappingURL=gemfile-lock.js.map
+
+/***/ }),
+
+/***/ 43499:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+// Copyright 2019 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
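+// Illustrative sketch of the Ruby version helpers above (hypothetical values;
+// Version.parse is the version helper used elsewhere in this bundle):
+//
+//   resolveRubyGemfileLockVersion('1.0.0-alpha1'); // => '1.0.0.pre.alpha1'
+//   stringifyRubyVersion(Version.parse('1.0.0-alpha1'), true); // => '1.0.0.alpha1'
+//   // GemfileLock then rewrites e.g. 'rails (7.0.1)' to the resolved version,
+//   // matching the gem name via buildGemfileLockVersionRegex('rails').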
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.VersionRB = void 0;
+const default_1 = __nccwpck_require__(41143);
+const common_1 = __nccwpck_require__(95138);
+const RUBY_VERSION_RB_REGEX = new RegExp(`(["'])(${common_1.RUBY_VERSION_REGEX.source})(["'])`);
+/**
+ * Updates a versions.rb file which is expected to have a version string.
+ */
+class VersionRB extends default_1.DefaultUpdater {
+    /**
+     * Given initial file contents, return updated contents.
+     * @param {string} content The initial content
+     * @returns {string} The updated content
+     */
+    updateContent(content) {
+        return content.replace(RUBY_VERSION_RB_REGEX, `$1${(0, common_1.stringifyRubyVersion)(this.version)}$1`);
+    }
+}
+exports.VersionRB = VersionRB;
+//# sourceMappingURL=version-rb.js.map
+
+/***/ }),
+
+/***/ 84670:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.CargoLock = void 0;
+const toml_edit_1 = __nccwpck_require__(42237);
+const common_1 = __nccwpck_require__(66815);
+const logger_1 = __nccwpck_require__(18792);
+/**
+ * Updates `Cargo.lock` lockfiles, preserving formatting and comments.
+ */
+class CargoLock {
+    constructor(versionsMap) {
+        this.versionsMap = versionsMap;
+    }
+    /**
+     * Given initial file contents, return updated contents.
+     * @param {string} content The initial content
+     * @returns {string} The updated content
+     */
+    updateContent(content, logger = logger_1.logger) {
+        let payload = content;
+        const parsed = (0, common_1.parseCargoLockfile)(payload);
+        if (!parsed.package) {
+            logger.error('is not a Cargo lockfile');
+            throw new Error('is not a Cargo lockfile');
+        }
+        // N.B. for `replaceTomlValue`, we need to keep track of the index
+        // (position) of the package we're considering.
+        for (let i = 0; i < parsed.package.length; i++) {
+            const pkg = parsed.package[i];
+            if (!pkg.name) {
+                // all `[[package]]` entries should have a name,
+                // but if they don't, ignore them silently.
+                continue; // to next package
+            }
+            const nextVersion = this.versionsMap.get(pkg.name);
+            if (!nextVersion) {
+                // this package is not upgraded.
+                continue; // to next package
+            }
+            // note: in ECMAScript, using strings to index arrays is perfectly valid,
+            // which is lucky because `replaceTomlValue` expects "all strings" in its
+            // `path` argument.
+ const packageIndex = i.toString(); + logger.info(`updating ${pkg.name} in`); + payload = (0, toml_edit_1.replaceTomlValue)(payload, ['package', packageIndex, 'version'], nextVersion.toString()); + } + return payload; + } +} +exports.CargoLock = CargoLock; +//# sourceMappingURL=cargo-lock.js.map + +/***/ }), + +/***/ 39089: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.CargoToml = void 0; +const toml_edit_1 = __nccwpck_require__(42237); +const common_1 = __nccwpck_require__(66815); +const logger_1 = __nccwpck_require__(18792); +const default_1 = __nccwpck_require__(41143); +/** + * Updates `Cargo.toml` manifests, preserving formatting and comments. + */ +class CargoToml extends default_1.DefaultUpdater { + /** + * Given initial file contents, return updated contents. + * @param {string} content The initial content + * @returns {string} The updated content + */ + updateContent(content, logger = logger_1.logger) { + let payload = content; + if (!this.versionsMap) { + throw new Error('updateContent called with no versions'); + } + const parsed = (0, common_1.parseCargoManifest)(payload); + if (!parsed.package) { + const msg = 'is not a package manifest (might be a cargo workspace)'; + logger.error(msg); + throw new Error(msg); + } + payload = (0, toml_edit_1.replaceTomlValue)(payload, ['package', 'version'], this.version.toString()); + for (const [pkgName, pkgVersion] of this.versionsMap) { + for (const depKind of common_1.DEP_KINDS) { + const deps = parsed[depKind]; + if (!deps) { + continue; // to next depKind + } + if (!deps[pkgName]) { + continue; // to next depKind + } + const dep = deps[pkgName]; + if (typeof dep === 'string' || typeof dep.path === 'undefined') { + logger.info(`skipping ${depKind}.${pkgName} (no path set)`); + continue; // to next depKind + } + if (typeof dep.version === 'undefined') { + logger.info(`skipping ${depKind}.${pkgName} (no version set)`); + continue; // to next depKind + } + logger.info(`updating ${depKind}.${pkgName} from ${dep.version} to ${pkgVersion}`); + payload = (0, toml_edit_1.replaceTomlValue)(payload, [depKind, pkgName, 'version'], pkgVersion.toString()); + } + // Update platform-specific dependencies + if (parsed.target) { + for (const targetName of Object.keys(parsed.target)) { + for (const depKind of common_1.DEP_KINDS) { + const deps = parsed.target[targetName][depKind]; + if (!deps) { + continue; // to next depKind + } + if (!deps[pkgName]) { + continue; // to next depKind + } + const dep = deps[pkgName]; + if (typeof dep === 'string' || typeof dep.path === 'undefined') { + logger.info(`skipping target.${targetName}.${depKind}.${pkgName} in`); + continue; // to next depKind + } + logger.info(`updating target.${targetName}.${depKind}.${pkgName} from ${dep.version} to ${pkgVersion}`); + payload = (0, toml_edit_1.replaceTomlValue)(payload, 
['target', targetName, depKind, pkgName, 'version'], pkgVersion.toString()); + } + } + } + } + return payload; + } +} +exports.CargoToml = CargoToml; +//# sourceMappingURL=cargo-toml.js.map + +/***/ }), + +/***/ 66815: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.parseCargoLockfile = exports.parseCargoManifest = exports.DEP_KINDS = void 0; +const TOML = __nccwpck_require__(62901); +/** + * All possible dependency kinds for `CargoManifest`, + * typed properly. + */ +exports.DEP_KINDS = [ + 'dependencies', + 'dev-dependencies', + 'build-dependencies', +]; +function parseCargoManifest(content) { + return TOML.parse(content); +} +exports.parseCargoManifest = parseCargoManifest; +function parseCargoLockfile(content) { + return TOML.parse(content); +} +exports.parseCargoLockfile = parseCargoLockfile; +//# sourceMappingURL=common.js.map + +/***/ }), + +/***/ 15475: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.SfdxProjectJson = void 0; +const json_stringify_1 = __nccwpck_require__(61480); +const logger_1 = __nccwpck_require__(18792); +const default_1 = __nccwpck_require__(41143); +/** + * This updates a sfdx sfdx-project.json file's main version. + */ +class SfdxProjectJson extends default_1.DefaultUpdater { + /** + * Given initial file contents, return updated contents. 
+ * @param {string} content The initial content + * @returns {string} The updated content + */ + updateContent(content, logger = logger_1.logger) { + const parsed = JSON.parse(content); + for (const packDir of parsed.packageDirectories) { + if (packDir.default) { + logger.info(`updating from ${packDir.versionNumber} to ${this.version}`); + packDir.versionNumber = `${this.version.toString()}.NEXT`; + } + } + return (0, json_stringify_1.jsonStringify)(parsed, content); + } +} +exports.SfdxProjectJson = SfdxProjectJson; +//# sourceMappingURL=sfdx-project-json.js.map + +/***/ }), + +/***/ 68225: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.MetadataVersion = void 0; +const logger_1 = __nccwpck_require__(18792); +const default_1 = __nccwpck_require__(41143); +/** + * Updates a Terraform metadata.yaml or metadata.display.yaml file(s). + */ +class MetadataVersion extends default_1.DefaultUpdater { + /** + * Given initial file contents, return updated contents. + * @param {string} content The initial content + * @returns {string} The updated content + */ + updateContent(content, logger = logger_1.logger) { + const oldVersion = content.match(/version: [0-9]+\.[0-9]+\.[0-9]+(-\w+)?/); + if (oldVersion) { + logger.info(`updating from ${oldVersion} to v${this.version}`); + } + return content.replace(/version: [0-9]+\.[0-9]+\.[0-9]+(-\w+)?/g, `version: ${this.version}`); + } +} +exports.MetadataVersion = MetadataVersion; +//# sourceMappingURL=metadata-version.js.map + +/***/ }), + +/***/ 61495: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ModuleVersion = void 0; +const logger_1 = __nccwpck_require__(18792); +const default_1 = __nccwpck_require__(41143); +/** + * Updates a Terraform Module versions.tf file. + */ +class ModuleVersion extends default_1.DefaultUpdater { + /** + * Given initial file contents, return updated contents. 
+ * @param {string} content The initial content + * @returns {string} The updated content + */ + updateContent(content, logger = logger_1.logger) { + const oldVersion = content.match(/v[0-9]+\.[0-9]+\.[0-9]+(-\w+)?/); + if (oldVersion) { + logger.info(`updating from ${oldVersion} to v${this.version}`); + } + return content.replace(/v[0-9]+\.[0-9]+\.[0-9]+(-\w+)?/g, `v${this.version}`); + } +} +exports.ModuleVersion = ModuleVersion; +//# sourceMappingURL=module-version.js.map + +/***/ }), + +/***/ 57277: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ReadMe = void 0; +const default_1 = __nccwpck_require__(41143); +/** + * Updates a Terraform module's README. + */ +class ReadMe extends default_1.DefaultUpdater { + /** + * Given initial file contents, return updated contents. + * @param {string} content The initial content + * @returns {string} The updated content + */ + updateContent(content) { + return content.replace(/version = "~> [\d]+.[\d]+"/, `version = "~> ${this.version.major}.${this.version.minor}"`); + } +} +exports.ReadMe = ReadMe; +//# sourceMappingURL=readme.js.map + +/***/ }), + +/***/ 94148: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.BranchName = void 0; +const version_1 = __nccwpck_require__(25112); +const logger_1 = __nccwpck_require__(18792); +// cannot import from '..' - transpiled code references to RELEASE_PLEASE +// at the script level are undefined, they are only defined inside function +// or instance methods/properties. 
+// import {RELEASE_PLEASE} from '../constants';
+const RELEASE_PLEASE = 'release-please';
+function getAllResourceNames() {
+ return [
+ AutoreleaseBranchName,
+ ComponentBranchName,
+ GroupBranchName,
+ DefaultBranchName,
+ V12ComponentBranchName,
+ V12DefaultBranchName,
+ ];
+}
+class BranchName {
+ static parse(branchName, logger = logger_1.logger) {
+ try {
+ const branchNameClass = getAllResourceNames().find(clazz => {
+ return clazz.matches(branchName);
+ });
+ if (!branchNameClass) {
+ return undefined;
+ }
+ return new branchNameClass(branchName);
+ }
+ catch (e) {
+ logger.warn(`Error parsing branch name: ${branchName}`, e);
+ return undefined;
+ }
+ }
+ static ofComponentVersion(branchPrefix, version) {
+ return new AutoreleaseBranchName(`release-${branchPrefix}-v${version}`);
+ }
+ static ofVersion(version) {
+ return new AutoreleaseBranchName(`release-v${version}`);
+ }
+ static ofTargetBranch(targetBranch) {
+ return new DefaultBranchName(`${RELEASE_PLEASE}--branches--${targetBranch}`);
+ }
+ static ofComponentTargetBranch(component, targetBranch) {
+ return new ComponentBranchName(`${RELEASE_PLEASE}--branches--${targetBranch}--components--${component}`);
+ }
+ static ofGroupTargetBranch(group, targetBranch) {
+ return new GroupBranchName(`${RELEASE_PLEASE}--branches--${targetBranch}--groups--${safeBranchName(group)}`);
+ }
+ constructor(_branchName) { }
+ static matches(_branchName) {
+ return false;
+ }
+ getTargetBranch() {
+ return this.targetBranch;
+ }
+ getComponent() {
+ return this.component;
+ }
+ getVersion() {
+ return this.version;
+ }
+ toString() {
+ return '';
+ }
+}
+exports.BranchName = BranchName;
+/**
+ * This is the legacy branch pattern used by releasetool
+ *
+ * @see https://github.com/googleapis/releasetool
+ */
+const AUTORELEASE_PATTERN = /^release-?(?<component>[\w-.]*)?-v(?<version>[0-9].*)$/;
+const RELEASE_PLEASE_BRANCH_PREFIX = 'release-please--branches';
+class AutoreleaseBranchName extends BranchName {
+ static matches(branchName) {
+ if (branchName.startsWith(RELEASE_PLEASE_BRANCH_PREFIX)) {
+ return false;
+ }
+ return !!branchName.match(AUTORELEASE_PATTERN);
+ }
+ constructor(branchName) {
+ super(branchName);
+ const match = branchName.match(AUTORELEASE_PATTERN);
+ if (match === null || match === void 0 ? void 0 : match.groups) {
+ this.component = match.groups['component'];
+ this.version = version_1.Version.parse(match.groups['version']);
+ }
+ }
+ toString() {
+ var _a, _b;
+ if (this.component) {
+ return `release-${this.component}-v${(_a = this.version) === null || _a === void 0 ? void 0 : _a.toString()}`;
+ }
+ return `release-v${(_b = this.version) === null || _b === void 0 ? void 0 : _b.toString()}`;
+ }
+}
+/**
+ * This is a parsable branch pattern used by release-please v12.
+ * It has potential issues due to git treating `/` like directories.
+ * This should be removed at some point in the future.
+ *
+ * @see https://github.com/googleapis/release-please/issues/1024
+ */
+const V12_DEFAULT_PATTERN = `^${RELEASE_PLEASE}/branches/(?<branch>[^/]+)$`;
+class V12DefaultBranchName extends BranchName {
+ static matches(branchName) {
+ return !!branchName.match(V12_DEFAULT_PATTERN);
+ }
+ constructor(branchName) {
+ super(branchName);
+ const match = branchName.match(V12_DEFAULT_PATTERN);
+ if (match === null || match === void 0 ? void 0 : match.groups) {
+ this.targetBranch = match.groups['branch'];
+ }
+ }
+ toString() {
+ return `${RELEASE_PLEASE}/branches/${this.targetBranch}`;
+ }
+}
+/**
+ * This is a parsable branch pattern used by release-please v12.
+ * It has potential issues due to git treating `/` like directories.
+ * This should be removed at some point in the future.
+ *
+ * @see https://github.com/googleapis/release-please/issues/1024
+ */
+const V12_COMPONENT_PATTERN = `^${RELEASE_PLEASE}/branches/(?<branch>[^/]+)/components/(?<component>.+)$`;
+class V12ComponentBranchName extends BranchName {
+ static matches(branchName) {
+ return !!branchName.match(V12_COMPONENT_PATTERN);
+ }
+ constructor(branchName) {
+ super(branchName);
+ const match = branchName.match(V12_COMPONENT_PATTERN);
+ if (match === null || match === void 0 ? void 0 : match.groups) {
+ this.targetBranch = match.groups['branch'];
+ this.component = match.groups['component'];
+ }
+ }
+ toString() {
+ return `${RELEASE_PLEASE}/branches/${this.targetBranch}/components/${this.component}`;
+ }
+}
+const DEFAULT_PATTERN = `^${RELEASE_PLEASE}--branches--(?<branch>.+)$`;
+class DefaultBranchName extends BranchName {
+ static matches(branchName) {
+ return !!branchName.match(DEFAULT_PATTERN);
+ }
+ constructor(branchName) {
+ super(branchName);
+ const match = branchName.match(DEFAULT_PATTERN);
+ if (match === null || match === void 0 ? void 0 : match.groups) {
+ this.targetBranch = match.groups['branch'];
+ }
+ }
+ toString() {
+ return `${RELEASE_PLEASE}--branches--${this.targetBranch}`;
+ }
+}
+const COMPONENT_PATTERN = `^${RELEASE_PLEASE}--branches--(?<branch>.+)--components--(?<component>.+)$`;
+class ComponentBranchName extends BranchName {
+ static matches(branchName) {
+ return !!branchName.match(COMPONENT_PATTERN);
+ }
+ constructor(branchName) {
+ super(branchName);
+ const match = branchName.match(COMPONENT_PATTERN);
+ if (match === null || match === void 0 ? void 0 : match.groups) {
+ this.targetBranch = match.groups['branch'];
+ this.component = match.groups['component'];
+ }
+ }
+ toString() {
+ return `${RELEASE_PLEASE}--branches--${this.targetBranch}--components--${this.component}`;
+ }
+}
+const GROUP_PATTERN = `^${RELEASE_PLEASE}--branches--(?<branch>.+)--groups--(?<group>.+)$`;
+class GroupBranchName extends BranchName {
+ static matches(branchName) {
+ return !!branchName.match(GROUP_PATTERN);
+ }
+ constructor(branchName) {
+ super(branchName);
+ const match = branchName.match(GROUP_PATTERN);
+ if (match === null || match === void 0 ? void 0 : match.groups) {
+ this.targetBranch = match.groups['branch'];
+ this.component = match.groups['group'];
+ }
+ }
+ toString() {
+ return `${RELEASE_PLEASE}--branches--${this.targetBranch}--groups--${this.component}`;
+ }
+}
+function safeBranchName(branchName) {
+ // convert disallowed characters in branch names, replacing them with '-'.
+ // replace multiple consecutive '-' with a single '-' to avoid interfering with
+ // our regexes for parsing the branch names
+ return branchName.replace(/[^\w\d]/g, '-').replace(/-+/g, '-');
+}
+//# sourceMappingURL=branch-name.js.map
+
+/***/ }),
+
+/***/ 88719:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
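+// Example for the branch-name module (94148) above (illustrative, with
+// hypothetical branch names): BranchName.parse walks getAllResourceNames()
+// in order and returns the first subclass whose pattern matches, so
+// v16-style '--branches--' names and legacy releasetool-style names both
+// round-trip:
+//
+//   const {BranchName} = __nccwpck_require__(94148);
+//   const name = BranchName.parse('release-please--branches--main--components--storage');
+//   name.getTargetBranch(); // => 'main'
+//   name.getComponent();    // => 'storage'
+//   name.toString();        // => 'release-please--branches--main--components--storage'
+//   BranchName.parse('release-storage-v1.2.3').getVersion().toString(); // => '1.2.3'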
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.CommitExclude = void 0; +const manifest_1 = __nccwpck_require__(24026); +const commit_utils_1 = __nccwpck_require__(81318); +class CommitExclude { + constructor(config) { + this.excludePaths = {}; + Object.entries(config).forEach(([path, releaseConfig]) => { + if (releaseConfig.excludePaths) { + this.excludePaths[path] = (0, commit_utils_1.normalizePaths)(releaseConfig.excludePaths); + } + }); + } + excludeCommits(commitsPerPath) { + const filteredCommitsPerPath = {}; + Object.entries(commitsPerPath).forEach(([path, commits]) => { + if (this.excludePaths[path]) { + commits = commits.filter(commit => this.shouldInclude(commit, this.excludePaths[path], path)); + } + filteredCommitsPerPath[path] = commits; + }); + return filteredCommitsPerPath; + } + shouldInclude(commit, excludePaths, packagePath) { + return (!commit.files || + !commit.files + .filter(file => this.isRelevant(file, packagePath)) + .every(file => excludePaths.some(path => this.isRelevant(file, path)))); + } + isRelevant(file, path) { + return path === manifest_1.ROOT_PROJECT_PATH || file.indexOf(`${path}/`) === 0; + } +} +exports.CommitExclude = CommitExclude; +//# sourceMappingURL=commit-exclude.js.map + +/***/ }), + +/***/ 95086: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.CommitSplit = void 0; +const manifest_1 = __nccwpck_require__(24026); +const commit_utils_1 = __nccwpck_require__(81318); +/** + * Helper class for splitting commits by component path. If `packagePaths` + * is configured, then only consider the provided paths. If `includeEmpty` + * is configured, then commits without any touched files apply to all + * configured component paths. + */ +class CommitSplit { + constructor(opts) { + opts = opts || {}; + this.includeEmpty = !!opts.includeEmpty; + if (opts.packagePaths) { + const paths = (0, commit_utils_1.normalizePaths)(opts.packagePaths); + this.packagePaths = paths + .filter(path => { + // The special "." path, representing the root of the module, should be + // ignored by commit-split as it is assigned all commits in manifest.ts + return path !== manifest_1.ROOT_PROJECT_PATH; + }) + .sort((a, b) => b.length - a.length); // sort by longest paths first + } + } + /** + * Split commits by component path. If the commit splitter is configured + * with a set of tracked package paths, then only consider paths for + * configured components. If `includeEmpty` is configured, then a commit + * that does not touch any files will be applied to all components' + * commits. 
+ * @param {Commit[]} commits The commits to split + * @returns {Record} Commits indexed by component path + */ + split(commits) { + const splitCommits = {}; + commits.forEach(commit => { + if (commit.files === undefined) { + throw new Error(`Commit ${commit.sha} is missing files. Did you set "backfillFiles" to "true"?`); + } + const dedupe = new Set(); + for (let i = 0; i < commit.files.length; i++) { + const file = commit.files[i]; + // NOTE: GitHub API always returns paths using the `/` separator, + // regardless of what platform the client code is running on + const splitPath = file.split('/'); + // indicates that we have a top-level file and not a folder + // in this edge-case we should not attempt to update the path. + if (splitPath.length === 1) + continue; + let pkgName; + if (this.packagePaths) { + // only track paths under this.packagePaths + pkgName = this.packagePaths.find(p => file.indexOf(`${p}/`) === 0); + } + else { + // track paths by top level folder + pkgName = splitPath[0]; + } + if (!pkgName || dedupe.has(pkgName)) + continue; + else + dedupe.add(pkgName); + if (!splitCommits[pkgName]) + splitCommits[pkgName] = []; + splitCommits[pkgName].push(commit); + } + if (commit.files.length === 0 && this.includeEmpty) { + if (this.packagePaths) { + for (const pkgName of this.packagePaths) { + splitCommits[pkgName] = splitCommits[pkgName] || []; + splitCommits[pkgName].push(commit); + } + } + else { + for (const pkgName in splitCommits) { + splitCommits[pkgName].push(commit); + } + } + } + }); + return splitCommits; + } +} +exports.CommitSplit = CommitSplit; +//# sourceMappingURL=commit-split.js.map + +/***/ }), + +/***/ 81318: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.normalizePaths = void 0; +const normalizePaths = (paths) => { + return paths.map(path => { + // normalize so that all paths have leading and trailing slashes for + // non-overlap validation. + // NOTE: GitHub API always returns paths using the `/` separator, + // regardless of what platform the client code is running on + let newPath = path.replace(/\/$/, ''); + newPath = newPath.replace(/^\//, ''); + newPath = newPath.replace(/$/, '/'); + newPath = newPath.replace(/^/, '/'); + // store them with leading and trailing slashes removed. + newPath = newPath.replace(/\/$/, ''); + newPath = newPath.replace(/^\//, ''); + return newPath; + }); +}; +exports.normalizePaths = normalizePaths; +//# sourceMappingURL=commit-utils.js.map + +/***/ }), + +/***/ 49526: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.filterCommits = void 0; +const BREAKING_CHANGE_NOTE = 'BREAKING CHANGE'; +const DEFAULT_CHANGELOG_SECTIONS = [ + { type: 'feat', section: 'Features' }, + { type: 'fix', section: 'Bug Fixes' }, + { type: 'perf', section: 'Performance Improvements' }, + { type: 'revert', section: 'Reverts' }, + { type: 'chore', section: 'Miscellaneous Chores', hidden: true }, + { type: 'docs', section: 'Documentation', hidden: true }, + { type: 'style', section: 'Styles', hidden: true }, + { type: 'refactor', section: 'Code Refactoring', hidden: true }, + { type: 'test', section: 'Tests', hidden: true }, + { type: 'build', section: 'Build System', hidden: true }, + { type: 'ci', section: 'Continuous Integration', hidden: true }, +]; +/** + * Given a set of conventional commits and the configured + * changelog sections provided by the user, return the set + * of commits that should be displayed: + * + * @param commits + * @param changelogSections + * @returns ConventionalCommit[] + */ +function filterCommits(commits, changelogSections) { + changelogSections = changelogSections !== null && changelogSections !== void 0 ? changelogSections : DEFAULT_CHANGELOG_SECTIONS; + const hiddenSections = []; + const visibleSections = []; + for (const section of changelogSections) { + if (!section.hidden) + visibleSections.push(section.type); + else + hiddenSections.push(section.type); + } + return commits.filter(commit => { + const isBreaking = commit.notes.find(note => { + return note.title === BREAKING_CHANGE_NOTE; + }); + return (visibleSections.includes(commit.type) || + (isBreaking && hiddenSections.includes(commit.type))); + }); +} +exports.filterCommits = filterCommits; +//# sourceMappingURL=filter-commits.js.map + +/***/ }), + +/***/ 81133: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
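+// Example for the filter-commits module (49526) above (illustrative, with
+// hypothetical minimal commit objects): with the default sections only
+// 'feat', 'fix', 'perf' and 'revert' commits are visible, but a hidden type
+// such as 'chore' still surfaces when it carries a BREAKING CHANGE note:
+//
+//   const {filterCommits} = __nccwpck_require__(49526);
+//   filterCommits([
+//     {type: 'feat', notes: []},
+//     {type: 'docs', notes: []},
+//     {type: 'chore', notes: [{title: 'BREAKING CHANGE', text: 'renamed API'}]},
+//   ]).map(c => c.type); // => ['feat', 'chore']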
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.indentCommit = void 0; +function indentCommit(commit) { + const reduced = []; + let inList = false; + commit.message.split(/\r?\n/).forEach((line, i) => { + if (i !== 0) + line = ` ${line}`; + else + reduced.push(line); + if (/^\s*\*/.test(line)) { + inList = true; + reduced.push(line); + } + else if (/^ +[\w]/.test(line) && inList) { + reduced[reduced.length - 1] = `${reduced[reduced.length - 1]}\n${line}`; + } + else { + inList = false; + } + }); + return reduced.join('\n'); +} +exports.indentCommit = indentCommit; +//# sourceMappingURL=indent-commit.js.map + +/***/ }), + +/***/ 61480: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.jsonStringify = void 0; +const detectIndent = __nccwpck_require__(83084); +function jsonStringify(parsed, content, replacer) { + return `${content.slice(0, content.indexOf('{'))}${JSON.stringify(parsed, replacer, detectIndent(content.trim()).indent)}${content.slice(content.lastIndexOf('}') + 1)}`; +} +exports.jsonStringify = jsonStringify; +//# sourceMappingURL=json-stringify.js.map + +/***/ }), + +/***/ 18792: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.setLogger = exports.logger = exports.CheckpointLogger = void 0; +const chalk = __nccwpck_require__(78818); +const figures = __nccwpck_require__(57099); +const errorPrefix = chalk.red(figures.cross); +const warnPrefix = chalk.yellow(figures.warning); +const infoPrefix = chalk.green(figures.tick); +const debugPrefix = chalk.gray(figures.pointer); +const tracePrefix = chalk.dim.gray(figures.pointerSmall); +class CheckpointLogger { + constructor(includeDebug = false, includeTrace = false) { + this.error = (...args) => { + console.error(`${errorPrefix}`, ...args); + }; + this.warn = (...args) => { + console.warn(`${warnPrefix}`, ...args); + }; + this.info = (...args) => { + console.info(`${infoPrefix}`, ...args); + }; + this.debug = (...args) => { + if (this.includeDebug) + console.debug(`${debugPrefix}`, ...args); + }; + this.trace = (...args) => { + if (this.includeTrace) + console.debug(`${tracePrefix}`, ...args); + }; + this.includeDebug = includeDebug; + this.includeTrace = includeTrace; + } +} +exports.CheckpointLogger = CheckpointLogger; +/* eslint-enable @typescript-eslint/no-explicit-any */ +exports.logger = new CheckpointLogger(true); +function setLogger(userLogger) { + exports.logger = userLogger; +} +exports.setLogger = setLogger; +//# sourceMappingURL=logger.js.map + +/***/ }), + +/***/ 11941: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.PullRequestBody = void 0; +const logger_1 = __nccwpck_require__(18792); +const node_html_parser_1 = __nccwpck_require__(14363); +const version_1 = __nccwpck_require__(25112); +const DEFAULT_HEADER = ':robot: I have created a release *beep* *boop*'; +const DEFAULT_FOOTER = 'This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please).'; +const NOTES_DELIMITER = '---'; +class PullRequestBody { + constructor(releaseData, options) { + var _a; + this.header = (options === null || options === void 0 ? void 0 : options.header) || DEFAULT_HEADER; + this.footer = (options === null || options === void 0 ? void 0 : options.footer) || DEFAULT_FOOTER; + this.extra = options === null || options === void 0 ? void 0 : options.extra; + this.releaseData = releaseData; + this.useComponents = (_a = options === null || options === void 0 ? void 0 : options.useComponents) !== null && _a !== void 0 ? 
_a : this.releaseData.length > 1; + } + static parse(body, logger = logger_1.logger) { + const parts = splitBody(body); + if (!parts) { + logger.error('Pull request body did not match'); + return undefined; + } + let data = extractMultipleReleases(parts.content, logger); + let useComponents = true; + if (data.length === 0) { + data = extractSingleRelease(parts.content, logger); + useComponents = false; + if (data.length === 0) { + logger.warn('Failed to parse releases.'); + } + } + return new PullRequestBody(data, { + header: parts.header, + footer: parts.footer, + useComponents, + }); + } + notes() { + if (this.useComponents) { + return this.releaseData + .map(release => { + var _a; + return `
<details><summary>${release.component ? `${release.component}: ` : ''}${(_a = release.version) === null || _a === void 0 ? void 0 : _a.toString()}</summary>\n\n${release.notes}\n</details>
`;
+ })
+ .join('\n\n');
+ }
+ return this.releaseData.map(release => release.notes).join('\n\n');
+ }
+ toString() {
+ const notes = this.notes();
+ return `${this.header}
+${NOTES_DELIMITER}
+
+
+${notes}
+
+${NOTES_DELIMITER}${this.extra ? `\n\n${this.extra}\n` : ''}
+${this.footer}`;
+ }
+}
+exports.PullRequestBody = PullRequestBody;
+function splitBody(body) {
+ const lines = body.trim().replace(/\r\n/g, '\n').split('\n');
+ const index = lines.indexOf(NOTES_DELIMITER);
+ if (index === -1) {
+ return undefined;
+ }
+ let lastIndex = lines.lastIndexOf(NOTES_DELIMITER);
+ if (lastIndex === index) {
+ lastIndex = lines.length - 1;
+ }
+ const header = lines.slice(0, index).join('\n').trim();
+ const content = lines.slice(index + 1, lastIndex).join('\n');
+ const footer = lines.slice(lastIndex + 1).join('\n');
+ return {
+ header,
+ footer,
+ content,
+ };
+}
+const SUMMARY_PATTERN = /^(?<component>.*[^:]):? (?<version>\d+\.\d+\.\d+.*)$/;
+const COMPONENTLESS_SUMMARY_PATTERN = /^(?<version>\d+\.\d+\.\d+.*)$/;
+function extractMultipleReleases(notes, logger) {
+ const data = [];
+ const root = (0, node_html_parser_1.parse)(notes);
+ for (const detail of root.getElementsByTagName('details')) {
+ const summaryNode = detail.getElementsByTagName('summary')[0];
+ const summary = summaryNode === null || summaryNode === void 0 ? void 0 : summaryNode.textContent;
+ const match = summary.match(SUMMARY_PATTERN);
+ if (match === null || match === void 0 ? void 0 : match.groups) {
+ detail.removeChild(summaryNode);
+ const notes = detail.textContent.trim();
+ data.push({
+ component: match.groups.component,
+ version: version_1.Version.parse(match.groups.version),
+ notes,
+ });
+ }
+ else {
+ const componentlessMatch = summary.match(COMPONENTLESS_SUMMARY_PATTERN);
+ if (!(componentlessMatch === null || componentlessMatch === void 0 ? void 0 : componentlessMatch.groups)) {
+ logger.warn(`Summary: ${summary} did not match the expected pattern`);
+ continue;
+ }
+ detail.removeChild(summaryNode);
+ const notes = detail.textContent.trim();
+ data.push({
+ version: version_1.Version.parse(componentlessMatch.groups.version),
+ notes,
+ });
+ }
+ }
+ return data;
+}
+const COMPARE_REGEX = /^#{2,} \[?(?<version>\d+\.\d+\.\d+[^\]]*)\]?/;
+function extractSingleRelease(body, logger) {
+ var _a;
+ body = body.trim();
+ const match = body.match(COMPARE_REGEX);
+ const versionString = (_a = match === null || match === void 0 ? void 0 : match.groups) === null || _a === void 0 ? void 0 : _a.version;
+ if (!versionString) {
+ logger.warn('Failed to find version in release notes');
+ return [];
+ }
+ return [
+ {
+ version: version_1.Version.parse(versionString),
+ notes: body,
+ },
+ ];
+}
+//# sourceMappingURL=pull-request-body.js.map
+
+/***/ }),
+
+/***/ 58010:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+// Copyright 2022 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
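+// Example for the pull-request-body module (11941) above (illustrative, with
+// hypothetical release data): when there is more than one release, notes()
+// renders a <details><summary>component: version</summary> block per release
+// between the '---' delimiters, and parse() recovers the same data:
+//
+//   const {PullRequestBody} = __nccwpck_require__(11941);
+//   const {Version} = __nccwpck_require__(25112);
+//   const body = new PullRequestBody([
+//     {component: 'storage', version: Version.parse('1.2.3'), notes: 'storage notes'},
+//     {component: 'pubsub', version: Version.parse('2.0.0'), notes: 'pubsub notes'},
+//   ]).toString();
+//   PullRequestBody.parse(body).releaseData.map(r => r.component); // => ['storage', 'pubsub']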
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.FilePullRequestOverflowHandler = void 0;
+const pull_request_body_1 = __nccwpck_require__(11941);
+const logger_1 = __nccwpck_require__(18792);
+const url_1 = __nccwpck_require__(57310);
+const MAX_ISSUE_BODY_SIZE = 65536;
+const OVERFLOW_MESSAGE = 'This release is too large to preview in the pull request body. View the full release notes here:';
+const OVERFLOW_MESSAGE_REGEX = new RegExp(`${OVERFLOW_MESSAGE} (?<url>.*)`);
+const RELEASE_NOTES_FILENAME = 'release-notes.md';
+const FILE_PATH_REGEX = new RegExp(`blob/(?<branchName>.*)/${RELEASE_NOTES_FILENAME}`);
+/**
+ * This implementation of PullRequestOverflowHandler stores the full release
+ * notes on a new git branch. The branch name is derived from the head branch
+ * name of the release pull request.
+ */
+class FilePullRequestOverflowHandler {
+ constructor(github, logger = logger_1.logger) {
+ this.github = github;
+ this.logger = logger;
+ }
+ /**
+ * Optionally store the full release notes into a `release-notes.md` file
+ * on a new branch if they do not fit into the body of a pull request.
+ *
+ * The new release notes will have a link to the GitHub UI for that file
+ * which should render the release notes nicely.
+ * @param {ReleasePullRequest} pullRequest The candidate release pull request
+ * @returns {string} The new pull request body which contains a link to
+ * the full content.
+ */
+ async handleOverflow(pullRequest, maxSize = MAX_ISSUE_BODY_SIZE) {
+ const notes = pullRequest.body.toString();
+ if (notes.length > maxSize) {
+ const notesBranchName = `${pullRequest.headRefName}--release-notes`;
+ const url = await this.github.createFileOnNewBranch(RELEASE_NOTES_FILENAME, notes, notesBranchName, this.github.repository.defaultBranch);
+ return `${OVERFLOW_MESSAGE} ${url}`;
+ }
+ return notes;
+ }
+ /**
+ * Given a pull request, retrieve the full release notes from the stored
+ * file if the body was too big to store in the pull request body.
+ * @param {PullRequest} pullRequest The pull request from GitHub
+ * @return {PullRequestBody} The parsed pull request body
+ */
+ async parseOverflow(pullRequest) {
+ var _a, _b;
+ const match = pullRequest.body.match(OVERFLOW_MESSAGE_REGEX);
+ if ((_a = match === null || match === void 0 ? void 0 : match.groups) === null || _a === void 0 ? void 0 : _a.url) {
+ this.logger.info(`Pull request body overflows, parsing full body from: ${match.groups.url}`);
+ const url = new url_1.URL(match.groups.url);
+ const pathMatch = url.pathname.match(FILE_PATH_REGEX);
+ if ((_b = pathMatch === null || pathMatch === void 0 ? void 0 : pathMatch.groups) === null || _b === void 0 ?
void 0 : _b.branchName) {
+ const fileContents = await this.github.getFileContentsOnBranch(RELEASE_NOTES_FILENAME, pathMatch.groups.branchName);
+ return pull_request_body_1.PullRequestBody.parse(fileContents.parsedContent);
+ }
+ this.logger.warn(`Could not parse branch from ${match.groups.url}`);
+ return pull_request_body_1.PullRequestBody.parse(pullRequest.body, this.logger);
+ }
+ return pull_request_body_1.PullRequestBody.parse(pullRequest.body, this.logger);
+ }
+}
+exports.FilePullRequestOverflowHandler = FilePullRequestOverflowHandler;
+//# sourceMappingURL=pull-request-overflow-handler.js.map
+
+/***/ }),
+
+/***/ 28866:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.PullRequestTitle = exports.generateMatchPattern = void 0;
+const logger_1 = __nccwpck_require__(18792);
+const version_1 = __nccwpck_require__(25112);
+// cannot import from '..' - transpiled code references to RELEASE_PLEASE
+// at the script level are undefined, they are only defined inside function
+// or instance methods/properties.
+const DEFAULT_PR_TITLE_PATTERN = 'chore${scope}: release${component} ${version}';
+const COMPONENT_NO_SPACE = false;
+function generateMatchPattern(pullRequestTitlePattern, componentNoSpace, logger = logger_1.logger) {
+ if (pullRequestTitlePattern &&
+ pullRequestTitlePattern.search(/\$\{scope\}/) === -1)
+ logger.warn("pullRequestTitlePattern is missing the '${scope}' part");
+ if (pullRequestTitlePattern &&
+ pullRequestTitlePattern.search(/\$\{component\}/) === -1)
+ logger.warn("pullRequestTitlePattern is missing the '${component}' part");
+ if (pullRequestTitlePattern &&
+ pullRequestTitlePattern.search(/\$\{version\}/) === -1)
+ logger.warn("pullRequestTitlePattern is missing the '${version}' part");
+ return new RegExp(`^${(pullRequestTitlePattern || DEFAULT_PR_TITLE_PATTERN)
+ .replace('[', '\\[') // TODO: handle all regex escaping
+ .replace(']', '\\]')
+ .replace('(', '\\(')
+ .replace(')', '\\)')
+ .replace('${scope}', '(\\((?<branch>[\\w-./]+)\\))?')
+ .replace('${component}', componentNoSpace === true
+ ? '?(?<component>@?[\\w-./]*)?'
+ : ' ?(?<component>@?[\\w-./]*)?')
+ .replace('${version}', 'v?(?<version>[0-9].*)')
+ .replace('${branch}', '(?<branch>[\\w-./]+)?')}$`);
+}
+exports.generateMatchPattern = generateMatchPattern;
+class PullRequestTitle {
+ constructor(opts) {
+ this.version = opts.version;
+ this.component = opts.component;
+ this.targetBranch = opts.targetBranch;
+ this.pullRequestTitlePattern =
+ opts.pullRequestTitlePattern || DEFAULT_PR_TITLE_PATTERN;
+ this.componentNoSpace = opts.componentNoSpace || COMPONENT_NO_SPACE;
+ this.matchPattern = generateMatchPattern(this.pullRequestTitlePattern, this.componentNoSpace, opts.logger);
+ }
+ static parse(title, pullRequestTitlePattern, componentNoSpace, logger = logger_1.logger) {
+ const matchPattern = generateMatchPattern(pullRequestTitlePattern, componentNoSpace, logger);
+ const match = title.match(matchPattern);
+ if (match === null || match === void 0 ? void 0 : match.groups) {
+ return new PullRequestTitle({
+ version: match.groups['version']
+ ? version_1.Version.parse(match.groups['version'])
+ : undefined,
+ component: match.groups['component'],
+ targetBranch: match.groups['branch'],
+ pullRequestTitlePattern,
+ componentNoSpace,
+ logger,
+ });
+ }
+ return undefined;
+ }
+ static ofComponentVersion(component, version, pullRequestTitlePattern, componentNoSpace) {
+ return new PullRequestTitle({
+ version,
+ component,
+ pullRequestTitlePattern,
+ componentNoSpace,
+ });
+ }
+ static ofVersion(version, pullRequestTitlePattern, componentNoSpace) {
+ return new PullRequestTitle({
+ version,
+ pullRequestTitlePattern,
+ componentNoSpace,
+ });
+ }
+ static ofTargetBranchVersion(targetBranch, version, pullRequestTitlePattern, componentNoSpace) {
+ return new PullRequestTitle({
+ version,
+ targetBranch,
+ pullRequestTitlePattern,
+ componentNoSpace,
+ });
+ }
+ static ofComponentTargetBranchVersion(component, targetBranch, version, pullRequestTitlePattern, componentNoSpace) {
+ return new PullRequestTitle({
+ version,
+ component,
+ targetBranch,
+ pullRequestTitlePattern,
+ componentNoSpace,
+ });
+ }
+ static ofTargetBranch(targetBranch, pullRequestTitlePattern, componentNoSpace) {
+ return new PullRequestTitle({
+ targetBranch,
+ pullRequestTitlePattern,
+ componentNoSpace,
+ });
+ }
+ getTargetBranch() {
+ return this.targetBranch;
+ }
+ getComponent() {
+ return this.component;
+ }
+ getVersion() {
+ return this.version;
+ }
+ toString() {
+ var _a;
+ const scope = this.targetBranch ? `(${this.targetBranch})` : '';
+ const component = this.componentNoSpace === true
+ ? this.component
+ ? `${this.component}`
+ : ''
+ : this.component
+ ? ` ${this.component}`
+ : '';
+ const version = (_a = this.version) !== null && _a !== void 0 ? _a : '';
+ if (this.componentNoSpace === true && !component) {
+ console.log('`component` is empty. Removing component from title pattern..');
+ this.pullRequestTitlePattern = this.pullRequestTitlePattern.replace('${component} ', '');
+ }
+ return this.pullRequestTitlePattern
+ .replace('${scope}', scope)
+ .replace('${component}', component)
+ .replace('${version}', version.toString())
+ .replace('${branch}', this.targetBranch || '')
+ .trim();
+ }
+}
+exports.PullRequestTitle = PullRequestTitle;
+//# sourceMappingURL=pull-request-title.js.map
+
+/***/ }),
+
+/***/ 74069:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
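+// Example for the pull-request-title module (28866) above (illustrative,
+// hypothetical title): with the default pattern
+// 'chore${scope}: release${component} ${version}', parse() recovers the
+// branch, component and version:
+//
+//   const {PullRequestTitle} = __nccwpck_require__(28866);
+//   const title = PullRequestTitle.parse('chore(main): release storage 1.2.3');
+//   title.getTargetBranch();       // => 'main'
+//   title.getComponent();          // => 'storage'
+//   title.getVersion().toString(); // => '1.2.3'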
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.signoffCommitMessage = void 0;
+// cannot import from '..' - transpiled code references to RELEASE_PLEASE
+// at the script level are undefined, they are only defined inside function
+// or instance methods/properties.
+function isValidSignoffUser(signoffUser) {
+ // Parse the name and email address from a string in the following format
+ // Display Name <email@example.com>
+ const pattern = /^([^<]+)\s*<([^>]+)>$/i;
+ // Check we have a match
+ const isMatch = new RegExp(pattern).test(signoffUser);
+ return isMatch;
+}
+function signoffCommitMessage(commitMessage, signoffUser) {
+ if (!isValidSignoffUser(signoffUser)) {
+ throw new Error(`The format of '${signoffUser}' is not a valid email address with display name`);
+ }
+ return commitMessage + `\n\nSigned-off-by: ${signoffUser}`;
+}
+exports.signoffCommitMessage = signoffCommitMessage;
+//# sourceMappingURL=signoff-commit-message.js.map
+
+/***/ }),
+
+/***/ 91203:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.TagName = void 0;
+const version_1 = __nccwpck_require__(25112);
+const TAG_PATTERN = /^((?<component>.*)(?<separator>[^a-zA-Z0-9]))?(?<v>v)?(?<version>\d+\.\d+\.\d+.*)$/;
+const DEFAULT_SEPARATOR = '-';
+class TagName {
+ constructor(version, component, separator = DEFAULT_SEPARATOR, includeV = true) {
+ this.version = version;
+ this.component = component;
+ this.separator = separator;
+ this.includeV = includeV;
+ }
+ static parse(tagName) {
+ const match = tagName.match(TAG_PATTERN);
+ if (match === null || match === void 0 ? void 0 : match.groups) {
+ return new TagName(version_1.Version.parse(match.groups.version), match.groups.component, match.groups.separator, !!match.groups.v);
+ }
+ return;
+ }
+ toString() {
+ if (this.component) {
+ return `${this.component}${this.separator}${this.includeV ? 'v' : ''}${this.version.toString()}`;
+ }
+ return `${this.includeV ? 'v' : ''}${this.version.toString()}`;
+ }
+}
+exports.TagName = TagName;
+//# sourceMappingURL=tag-name.js.map
+
+/***/ }),
+
+/***/ 42237:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
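+// Example for the tag-name module (91203) above (illustrative, hypothetical
+// tags): TAG_PATTERN splits a tag into component, separator, an optional 'v'
+// and the version, and toString() reassembles the same form:
+//
+//   const {TagName} = __nccwpck_require__(91203);
+//   const tag = TagName.parse('storage-v1.2.3');
+//   tag.component;          // => 'storage'
+//   tag.version.toString(); // => '1.2.3'
+//   tag.toString();         // => 'storage-v1.2.3'
+//   TagName.parse('v2.0.0').toString(); // => 'v2.0.0' (componentless)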
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.replaceTomlValue = exports.parseWith = void 0; +const TOMLParser = __nccwpck_require__(68784); +const taggedValueMarker = Symbol('__TAGGED_VALUE'); +/** + * A custom variant of `TOMLParser` that replaces all values with a tagged + * variant that includes their start and end positions, allowing them to be + * replaced. + */ +class TaggedTOMLParser extends TOMLParser { + parseValue() { + // Remember the start position of the value. + // + // Off-by-one correctness: by this point, `this.pos` points one character + // *after* the first character of the value, which is in `this.char` + this.state.__TAGGED_START = this.pos - 1; + return super.parseValue(); + } + next(fn) { + const prevState = this.state; + super.next(fn); // `next` returns void + // Carry over the start position. If it wasn't set, (say, if we were parsing + // something other than a value), we're just assigning `undefined` here. + this.state.__TAGGED_START = prevState.__TAGGED_START; + } + return(value) { + const prevState = this.state; + super.return(value); // `return` returns void + if (prevState.__TAGGED_START && typeof this.state.returned !== 'object') { + // If the parser we just returned from remembered a start position, + // tag the returned value with "start" and "end". + // Note that we don't tag objects to avoid encountering multiple tagged + // values when replacing later on. + const taggedValue = { + [taggedValueMarker]: true, + start: prevState.__TAGGED_START, + end: this.pos, + value: this.state.returned, + }; + this.state.returned = taggedValue; + } + } +} +/** + * Parses input as TOML with the given parser + * @param input A string + * @param parserType The TOML parser to use (might be custom) + */ +function parseWith(input, parserType = TaggedTOMLParser) { + const parser = new parserType(); + parser.parse(input); + return parser.finish(); +} +exports.parseWith = parseWith; +function isTaggedValue(x) { + if (!x) { + return false; + } + if (typeof x !== 'object') { + return false; + } + const ts = x; + return ts[taggedValueMarker] === true; +} +/** + * Given TOML input and a path to a value, attempt to replace + * that value without modifying the formatting. + * @param input A string that's valid TOML + * @param path Path to a value to replace. When replacing 'deps.tokio.version', pass ['deps', 'tokio', 'version']. The value must already exist. + * @param newValue The value to replace the value at `path` with. Is passed through `JSON.stringify()` when replacing: strings will end up being double-quoted strings, properly escaped. Numbers will be numbers. + */ +function replaceTomlValue(input, path, newValue) { + // our pointer into the object "tree", initially points to the root. + let current = parseWith(input, TaggedTOMLParser); + // navigate down the object tree, following the path, expecting only objects. + // Note that tagged strings (generated by `TaggedTOMLParser`) are also objects. 
+ for (let i = 0; i < path.length; i++) {
+ const key = path[i];
+ // // We may encounter tagged values when descending through the object tree
+ // if (isTaggedValue(current)) {
+ // if (!current.value || typeof current.value !== 'object') {
+ // const msg = `partial path does not lead to table: ${path
+ // .slice(0, i)
+ // .join('.')}`;
+ // throw new Error(msg);
+ // }
+ // current = current.value as Record;
+ // }
+ const next = current[key];
+ if (typeof next !== 'object') {
+ const msg = `path not found in object: ${path.slice(0, i + 1).join('.')}`;
+ throw new Error(msg);
+ }
+ current = next;
+ }
+ if (!isTaggedValue(current)) {
+ const msg = `value at path ${path.join('.')} is not tagged`;
+ throw new Error(msg);
+ }
+ const before = input.slice(0, current.start);
+ const after = input.slice(current.end);
+ const output = before + JSON.stringify(newValue) + after;
+ try {
+ parseWith(output, TOMLParser);
+ }
+ catch (e) {
+ throw new Error(`After replacing value, result is not valid TOML: ${e}`);
+ }
+ return output;
+}
+exports.replaceTomlValue = replaceTomlValue;
+//# sourceMappingURL=toml-edit.js.map
+
+/***/ }),
+
+/***/ 25112:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.Version = void 0;
+const semver = __nccwpck_require__(11383);
+const VERSION_REGEX = /(?<major>\d+)\.(?<minor>\d+)\.(?<patch>\d+)(-(?<preRelease>[^+]+))?(\+(?<build>.*))?/;
+/**
+ * This data class is used to represent a SemVer version.
+ */
+class Version {
+ constructor(major, minor, patch, preRelease, build) {
+ this.major = major;
+ this.minor = minor;
+ this.patch = patch;
+ this.preRelease = preRelease;
+ this.build = build;
+ }
+ /**
+ * Parse a version string into a data class.
+ *
+ * @param {string} versionString the input version string
+ * @returns {Version} the parsed version
+ * @throws {Error} if the version string cannot be parsed
+ */
+ static parse(versionString) {
+ const match = versionString.match(VERSION_REGEX);
+ if (!(match === null || match === void 0 ? void 0 : match.groups)) {
+ throw Error(`unable to parse version string: ${versionString}`);
+ }
+ const major = Number(match.groups.major);
+ const minor = Number(match.groups.minor);
+ const patch = Number(match.groups.patch);
+ const preRelease = match.groups.preRelease;
+ const build = match.groups.build;
+ return new Version(major, minor, patch, preRelease, build);
+ }
+ /**
+ * Comparator to other Versions to be used in sorting.
+ *
+ * @param {Version} other The other version to compare to
+ * @returns {number} -1 if this version is earlier, 0 if the versions
+ * are the same, or 1 otherwise.
+ */
+ compare(other) {
+ return semver.compare(this.toString(), other.toString());
+ }
+ /**
+ * Returns a normalized string version of this version.
+ *
+ * @returns {string}
+ */
+ toString() {
+ const preReleasePart = this.preRelease ?
`-${this.preRelease}` : ''; + const buildPart = this.build ? `+${this.build}` : ''; + return `${this.major}.${this.minor}.${this.patch}${preReleasePart}${buildPart}`; + } + get isPreMajor() { + return this.major < 1; + } +} +exports.Version = Version; +//# sourceMappingURL=version.js.map + +/***/ }), + +/***/ 17161: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AlwaysBumpMajor = void 0; +const default_1 = __nccwpck_require__(77033); +const versioning_strategy_1 = __nccwpck_require__(55237); +/** + * This VersioningStrategy always bumps the major version. + */ +class AlwaysBumpMajor extends default_1.DefaultVersioningStrategy { + determineReleaseType(_version, _commits) { + return new versioning_strategy_1.MajorVersionUpdate(); + } +} +exports.AlwaysBumpMajor = AlwaysBumpMajor; +//# sourceMappingURL=always-bump-major.js.map + +/***/ }), + +/***/ 39864: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AlwaysBumpMinor = void 0; +const default_1 = __nccwpck_require__(77033); +const versioning_strategy_1 = __nccwpck_require__(55237); +/** + * This VersioningStrategy always bumps the minor version. + */ +class AlwaysBumpMinor extends default_1.DefaultVersioningStrategy { + determineReleaseType(_version, _commits) { + return new versioning_strategy_1.MinorVersionUpdate(); + } +} +exports.AlwaysBumpMinor = AlwaysBumpMinor; +//# sourceMappingURL=always-bump-minor.js.map + +/***/ }), + +/***/ 42320: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
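+// Example for the version module (25112) above (illustrative, hypothetical
+// version strings): VERSION_REGEX keeps pre-release and build metadata, and
+// compare() delegates to semver ordering:
+//
+//   const {Version} = __nccwpck_require__(25112);
+//   const v = Version.parse('1.2.3-beta.1+build.5');
+//   v.preRelease;  // => 'beta.1'
+//   v.build;       // => 'build.5'
+//   v.toString();  // => '1.2.3-beta.1+build.5'
+//   v.compare(Version.parse('1.2.3')); // => -1 (pre-release sorts first)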
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AlwaysBumpPatch = void 0; +const default_1 = __nccwpck_require__(77033); +const versioning_strategy_1 = __nccwpck_require__(55237); +/** + * This VersioningStrategy always bumps the patch version. This + * strategy is useful for backport branches. + */ +class AlwaysBumpPatch extends default_1.DefaultVersioningStrategy { + determineReleaseType(_version, _commits) { + return new versioning_strategy_1.PatchVersionUpdate(); + } +} +exports.AlwaysBumpPatch = AlwaysBumpPatch; +//# sourceMappingURL=always-bump-patch.js.map + +/***/ }), + +/***/ 77033: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.DefaultVersioningStrategy = void 0; +const versioning_strategy_1 = __nccwpck_require__(55237); +const version_1 = __nccwpck_require__(25112); +const logger_1 = __nccwpck_require__(18792); +/** + * This is the default VersioningStrategy for release-please. Breaking + * changes should bump the major, features should bump the minor, and other + * significant changes should bump the patch version. + */ +class DefaultVersioningStrategy { + /** + * Create a new DefaultVersioningStrategy + * @param {DefaultVersioningStrategyOptions} options Configuration options + * @param {boolean} options.bumpMinorPreMajor If the current version is less than 1.0.0, + * then bump the minor version for breaking changes + * @param {boolean} options.bumpPatchForMinorPreMajor If the current version is less than + * 1.0.0, then bump the patch version for features + */ + constructor(options = {}) { + var _a; + this.bumpMinorPreMajor = options.bumpMinorPreMajor === true; + this.bumpPatchForMinorPreMajor = options.bumpPatchForMinorPreMajor === true; + this.logger = (_a = options.logger) !== null && _a !== void 0 ? _a : logger_1.logger; + } + /** + * Given the current version of an artifact and a list of commits, + * return a VersionUpdater that knows how to bump the version. + * + * This is useful for chaining together versioning strategies. + * + * @param {Version} version The current version + * @param {ConventionalCommit[]} commits The list of commits to consider + * @returns {VersionUpdater} Updater for bumping the next version. 
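+     *
+     * @example
+     * // Illustrative sketch: one non-breaking 'feat' commit and no breaking
+     * // changes on 1.2.3 yields a MinorVersionUpdate, so
+     * // strategy.bump(Version.parse('1.2.3'), commits) -> 1.3.0.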
+ */ + determineReleaseType(version, commits) { + // iterate through list of commits and find biggest commit type + let breaking = 0; + let features = 0; + for (const commit of commits) { + const releaseAs = commit.notes.find(note => note.title === 'RELEASE AS'); + if (releaseAs) { + // commits are handled newest to oldest, so take the first one (newest) found + this.logger.debug(`found Release-As: ${releaseAs.text}, forcing version`); + return new versioning_strategy_1.CustomVersionUpdate(version_1.Version.parse(releaseAs.text).toString()); + } + if (commit.breaking) { + breaking++; + } + else if (commit.type === 'feat' || commit.type === 'feature') { + features++; + } + } + if (breaking > 0) { + if (version.isPreMajor && this.bumpMinorPreMajor) { + return new versioning_strategy_1.MinorVersionUpdate(); + } + else { + return new versioning_strategy_1.MajorVersionUpdate(); + } + } + else if (features > 0) { + if (version.isPreMajor && this.bumpPatchForMinorPreMajor) { + return new versioning_strategy_1.PatchVersionUpdate(); + } + else { + return new versioning_strategy_1.MinorVersionUpdate(); + } + } + return new versioning_strategy_1.PatchVersionUpdate(); + } + /** + * Given the current version of an artifact and a list of commits, + * return the next version. + * + * @param {Version} version The current version + * @param {ConventionalCommit[]} commits The list of commits to consider + * @returns {Version} The next version + */ + bump(version, commits) { + return this.determineReleaseType(version, commits).bump(version); + } +} +exports.DefaultVersioningStrategy = DefaultVersioningStrategy; +//# sourceMappingURL=default.js.map + +/***/ }), + +/***/ 94636: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.DependencyManifest = void 0; +const version_1 = __nccwpck_require__(25112); +const semver = __nccwpck_require__(11383); +const default_1 = __nccwpck_require__(77033); +const versioning_strategy_1 = __nccwpck_require__(55237); +const DEPENDENCY_UPDATE_REGEX = /^deps: update dependency (.*) to (v[^\s]*)(\s\(#\d+\))?$/m; +/** + * This VersioningStrategy looks at `deps` type commits and tries to + * mirror the semantic version bump for that dependency update. For + * example, an update to v2, would be treated as a major version bump. + * + * It also respects the default commit types and will pick the + * greatest version bump. 
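+ *
+ * @example
+ * // Illustrative sketch (hypothetical packages foo and bar): a commit titled
+ * //   deps: update dependency foo to v2
+ * // is treated as a major dependency bump, while
+ * //   deps: update dependency bar to v1.3.0
+ * // counts as a feature-level (minor) bump.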
+ */ +class DependencyManifest extends default_1.DefaultVersioningStrategy { + determineReleaseType(version, commits) { + const regularBump = super.determineReleaseType(version, commits); + const dependencyUpdates = buildDependencyUpdates(commits); + let breaking = 0; + let features = 0; + for (const dep in dependencyUpdates) { + const version = dependencyUpdates[dep]; + if (version.patch === 0) { + if (version.minor === 0) { + breaking++; + } + else { + features++; + } + } + } + let dependencyBump; + if (breaking > 0) { + if (version.isPreMajor && this.bumpMinorPreMajor) { + dependencyBump = new versioning_strategy_1.MinorVersionUpdate(); + } + else { + dependencyBump = new versioning_strategy_1.MajorVersionUpdate(); + } + } + else if (features > 0) { + if (version.isPreMajor && this.bumpPatchForMinorPreMajor) { + dependencyBump = new versioning_strategy_1.PatchVersionUpdate(); + } + else { + dependencyBump = new versioning_strategy_1.MinorVersionUpdate(); + } + } + else { + dependencyBump = new versioning_strategy_1.PatchVersionUpdate(); + } + if (semver.lte(dependencyBump.bump(version).toString(), regularBump.bump(version).toString())) { + return regularBump; + } + else { + return dependencyBump; + } + } +} +exports.DependencyManifest = DependencyManifest; +function buildDependencyUpdates(commits) { + const versionsMap = {}; + for (const commit of commits) { + const match = commit.message.match(DEPENDENCY_UPDATE_REGEX); + if (!match) + continue; + const versionString = match[2]; + let version; + try { + version = version_1.Version.parse(versionString); + } + catch (_a) { + version = version_1.Version.parse(`${versionString}.0.0`); + } + // commits are sorted by latest first, so if there is a collision, + // then we've already recorded the latest version + if (versionsMap[match[1]]) + continue; + versionsMap[match[1]] = version; + } + return versionsMap; +} +//# sourceMappingURL=dependency-manifest.js.map + +/***/ }), + +/***/ 3277: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.JavaAddSnapshot = void 0; +const version_1 = __nccwpck_require__(25112); +const fakeCommit = { + message: 'fix: fake fix', + type: 'fix', + scope: null, + notes: [], + references: [], + bareMessage: 'fake fix', + breaking: false, + sha: 'abc123', + files: [], +}; +class AddSnapshotVersionUpdate { + constructor(strategy) { + this.strategy = strategy; + } + bump(version) { + const nextPatch = this.strategy.bump(version, [fakeCommit]); + return new version_1.Version(nextPatch.major, nextPatch.minor, nextPatch.patch, nextPatch.preRelease ? `${nextPatch.preRelease}-SNAPSHOT` : 'SNAPSHOT', nextPatch.build); + } +} +/** + * This VersioningStrategy is used by Java releases to bump + * to the next snapshot version. 
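+ *
+ * @example
+ * // Illustrative sketch: wrapping the default strategy, a released 1.2.3
+ * // becomes 1.2.4-SNAPSHOT; the patch bump comes from the synthetic 'fix'
+ * // commit defined above.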
+ */ +class JavaAddSnapshot { + constructor(strategy) { + this.strategy = strategy; + } + determineReleaseType(_version, _commits) { + return new AddSnapshotVersionUpdate(this.strategy); + } + bump(version, commits) { + return this.determineReleaseType(version, commits).bump(version); + } +} +exports.JavaAddSnapshot = JavaAddSnapshot; +//# sourceMappingURL=java-add-snapshot.js.map + +/***/ }), + +/***/ 81189: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.JavaSnapshot = void 0; +const version_1 = __nccwpck_require__(25112); +const fakeCommit = { + message: 'fix: fake fix', + type: 'fix', + scope: null, + notes: [], + references: [], + bareMessage: 'fake fix', + breaking: false, + sha: 'abc123', + files: [], +}; +class RemoveSnapshotVersionUpdate { + constructor(parent) { + this.parent = parent; + } + bump(version) { + if (this.parent) { + version = this.parent.bump(version); + } + return new version_1.Version(version.major, version.minor, version.patch, version.preRelease + ? version.preRelease.replace(/-?SNAPSHOT/, '') + : undefined, version.build); + } +} +/** + * This VersioningStrategy is used by Java releases to bump + * to the next non-snapshot version. + */ +class JavaSnapshot { + constructor(strategy) { + this.strategy = strategy; + } + determineReleaseType(version, commits) { + var _a; + const parentBump = this.strategy.determineReleaseType(version, commits); + if ((_a = version.preRelease) === null || _a === void 0 ? void 0 : _a.match(/-?SNAPSHOT/)) { + const patchBumpVersion = this.strategy + .determineReleaseType(version, [fakeCommit]) + .bump(version); + const parentBumpVersion = parentBump.bump(version); + if (patchBumpVersion.toString() === parentBumpVersion.toString()) { + return new RemoveSnapshotVersionUpdate(); + } + return new RemoveSnapshotVersionUpdate(parentBump); + } + return parentBump; + } + bump(version, commits) { + return this.determineReleaseType(version, commits).bump(version); + } +} +exports.JavaSnapshot = JavaSnapshot; +//# sourceMappingURL=java-snapshot.js.map + +/***/ }), + +/***/ 21475: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
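+// Illustrative sketch (assumes the update classes defined below): the
+// prerelease counter keeps its zero padding when bumped, and `.1` is
+// appended when no trailing number exists:
+//
+//   1.2.3-beta09 -> 1.2.3-beta10
+//   1.2.3-beta   -> 1.2.3-beta.1
+//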
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.PrereleaseVersioningStrategy = void 0;
+const default_1 = __nccwpck_require__(77033);
+const version_1 = __nccwpck_require__(25112);
+const versioning_strategy_1 = __nccwpck_require__(55237);
+/**
+ * Regex to match the last set of numbers in a string
+ * Example: 1.2.3-beta01-01 -> 01
+ */
+const PRERELEASE_NUMBER = /(?<number>\d+)(?=\D*$)/;
+class AbstractPrereleaseVersionUpdate {
+    constructor(prereleaseType) {
+        this.prereleaseType = prereleaseType;
+    }
+    /**
+     * Returns the new bumped prerelease version
+     *
+     * That is, if the current version is 1.2.3-beta01, the next prerelease version
+     * will be 1.2.3-beta02. If no number is found, a `.1` is appended, e.g.
+     * 1.2.3-beta -> 1.2.3-beta.1. If multiple numbers are found, the last set of
+     * numbers will be incremented, e.g. 1.2.3-beta01-01 -> 1.2.3-beta01-02.
+     *
+     * @param {string} prerelease The current prerelease string
+     * @returns {string} The bumped prerelease string
+     */
+    bumpPrerelease(prerelease) {
+        const match = prerelease.match(PRERELEASE_NUMBER);
+        let nextPrerelease = `${prerelease}.1`;
+        if (match === null || match === void 0 ? void 0 : match.groups) {
+            const numberLength = match.groups.number.length;
+            const nextPrereleaseNumber = Number(match.groups.number) + 1;
+            const paddedNextPrereleaseNumber = `${nextPrereleaseNumber}`.padStart(numberLength, '0');
+            nextPrerelease = prerelease.replace(PRERELEASE_NUMBER, paddedNextPrereleaseNumber);
+        }
+        return nextPrerelease;
+    }
+}
+class PrereleasePatchVersionUpdate extends AbstractPrereleaseVersionUpdate {
+    /**
+     * Returns the new bumped version
+     *
+     * @param {Version} version The current version
+     * @returns {Version} The bumped version
+     */
+    bump(version) {
+        if (version.preRelease) {
+            const nextPrerelease = this.bumpPrerelease(version.preRelease);
+            return new version_1.Version(version.major, version.minor, version.patch, nextPrerelease, version.build);
+        }
+        return new version_1.Version(version.major, version.minor, version.patch + 1, this.prereleaseType, version.build);
+    }
+}
+class PrereleaseMinorVersionUpdate extends AbstractPrereleaseVersionUpdate {
+    /**
+     * Returns the new bumped version
+     *
+     * @param {Version} version The current version
+     * @returns {Version} The bumped version
+     */
+    bump(version) {
+        if (version.preRelease) {
+            if (version.patch === 0) {
+                const nextPrerelease = this.bumpPrerelease(version.preRelease);
+                return new version_1.Version(version.major, version.minor, version.patch, nextPrerelease, version.build);
+            }
+            return new versioning_strategy_1.MinorVersionUpdate().bump(version);
+        }
+        return new version_1.Version(version.major, version.minor + 1, 0, this.prereleaseType, version.build);
+    }
+}
+class PrereleaseMajorVersionUpdate extends AbstractPrereleaseVersionUpdate {
+    /**
+     * Returns the new bumped version
+     *
+     * @param {Version} version The current version
+     * @returns {Version} The bumped version
+     */
+    bump(version) {
+        if (version.preRelease) {
+            if (version.patch === 0 && version.minor === 0) {
+                const nextPrerelease = this.bumpPrerelease(version.preRelease);
+                return new version_1.Version(version.major, version.minor, version.patch, nextPrerelease, version.build);
+            }
+            return new versioning_strategy_1.MajorVersionUpdate().bump(version);
+        }
+        return new version_1.Version(version.major + 1, 0, 0, this.prereleaseType, version.build);
+    }
+}
+/**
+ * This versioning strategy will increment the pre-release number for patch
+ * bumps if there is a pre-release number (preserving any leading 0s).
+ * Example: 1.2.3-beta01 -> 1.2.3-beta02.
+ */
+class PrereleaseVersioningStrategy extends default_1.DefaultVersioningStrategy {
+    constructor(options = {}) {
+        super(options);
+        this.prereleaseType = options.prereleaseType;
+    }
+    determineReleaseType(version, commits) {
+        // iterate through list of commits and find biggest commit type
+        let breaking = 0;
+        let features = 0;
+        for (const commit of commits) {
+            const releaseAs = commit.notes.find(note => note.title === 'RELEASE AS');
+            if (releaseAs) {
+                // commits are handled newest to oldest, so take the first one (newest) found
+                this.logger.debug(`found Release-As: ${releaseAs.text}, forcing version`);
+                return new versioning_strategy_1.CustomVersionUpdate(version_1.Version.parse(releaseAs.text).toString());
+            }
+            if (commit.breaking) {
+                breaking++;
+            }
+            else if (commit.type === 'feat' || commit.type === 'feature') {
+                features++;
+            }
+        }
+        if (breaking > 0) {
+            if (version.isPreMajor && this.bumpMinorPreMajor) {
+                return new PrereleaseMinorVersionUpdate(this.prereleaseType);
+            }
+            else {
+                return new PrereleaseMajorVersionUpdate(this.prereleaseType);
+            }
+        }
+        else if (features > 0) {
+            if (version.isPreMajor && this.bumpPatchForMinorPreMajor) {
+                return new PrereleasePatchVersionUpdate(this.prereleaseType);
+            }
+            else {
+                return new PrereleaseMinorVersionUpdate(this.prereleaseType);
+            }
+        }
+        return new PrereleasePatchVersionUpdate(this.prereleaseType);
+    }
+}
+exports.PrereleaseVersioningStrategy = PrereleaseVersioningStrategy;
+//# sourceMappingURL=prerelease.js.map
+
+/***/ }),
+
+/***/ 23897:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.ServicePackVersioningStrategy = void 0;
+const version_1 = __nccwpck_require__(25112);
+const default_1 = __nccwpck_require__(77033);
+const SERVICE_PACK_PATTERN = /sp\.(\d+)/;
+/**
+ * This version updater knows how to bump from a non-service pack
+ * version to a service pack version and increment the service
+ * pack number in subsequent releases.
+ */
+class ServicePackVersionUpdate {
+    bump(version) {
+        var _a;
+        const match = (_a = version.preRelease) === null || _a === void 0 ? void 0 : _a.match(SERVICE_PACK_PATTERN);
+        if (match) {
+            const spNumber = Number(match[1]);
+            return new version_1.Version(version.major, version.minor, version.patch, `sp.${spNumber + 1}`, version.build);
+        }
+        return new version_1.Version(version.major, version.minor, version.patch, 'sp.1', version.build);
+    }
+}
+/**
+ * This VersioningStrategy is used for "service pack" versioning. In this
+ * strategy, we use the pre-release field with a pattern of `sp.\d+` where
+ * the number is an auto-incrementing integer starting with 1.
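+ *
+ * @example
+ * // Illustrative sketch: commits are ignored, and repeated releases walk the
+ * // service-pack counter: 1.2.3 -> 1.2.3-sp.1 -> 1.2.3-sp.2.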
+ */ +class ServicePackVersioningStrategy extends default_1.DefaultVersioningStrategy { + determineReleaseType(_version, _commits) { + return new ServicePackVersionUpdate(); + } +} +exports.ServicePackVersioningStrategy = ServicePackVersioningStrategy; +//# sourceMappingURL=service-pack.js.map + +/***/ }), + +/***/ 55237: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.CustomVersionUpdate = exports.PatchVersionUpdate = exports.MinorVersionUpdate = exports.MajorVersionUpdate = void 0; +const version_1 = __nccwpck_require__(25112); +/** + * This VersionUpdater performs a SemVer major version bump. + */ +class MajorVersionUpdate { + /** + * Returns the new bumped version + * + * @param {Version} version The current version + * @returns {Version} The bumped version + */ + bump(version) { + return new version_1.Version(version.major + 1, 0, 0, version.preRelease, version.build); + } +} +exports.MajorVersionUpdate = MajorVersionUpdate; +/** + * This VersionUpdater performs a SemVer minor version bump. + */ +class MinorVersionUpdate { + /** + * Returns the new bumped version + * + * @param {Version} version The current version + * @returns {Version} The bumped version + */ + bump(version) { + return new version_1.Version(version.major, version.minor + 1, 0, version.preRelease, version.build); + } +} +exports.MinorVersionUpdate = MinorVersionUpdate; +/** + * This VersionUpdater performs a SemVer patch version bump. + */ +class PatchVersionUpdate { + /** + * Returns the new bumped version + * + * @param {Version} version The current version + * @returns {Version} The bumped version + */ + bump(version) { + return new version_1.Version(version.major, version.minor, version.patch + 1, version.preRelease, version.build); + } +} +exports.PatchVersionUpdate = PatchVersionUpdate; +/** + * This VersionUpdater sets the version to a specific version. + */ +class CustomVersionUpdate { + constructor(versionString) { + this.versionString = versionString; + } + /** + * Returns the new bumped version. This version is specified + * at initialization. 
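+     * (Illustrative note: new CustomVersionUpdate('2.1.0').bump(v) returns
+     * 2.1.0 regardless of which version v is passed in.)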
+ * + * @param {Version} version The current version + * @returns {Version} The bumped version + */ + bump(_version) { + return version_1.Version.parse(this.versionString); + } +} +exports.CustomVersionUpdate = CustomVersionUpdate; +//# sourceMappingURL=versioning-strategy.js.map + /***/ }), /***/ 40334: @@ -16942,6 +32188,336 @@ module.exports = function(val) { }; +/***/ }), + +/***/ 41542: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +module.exports = __nccwpck_require__(80785); + + +/***/ }), + +/***/ 80785: +/***/ ((module) => { + +"use strict"; + + +var AsyncLock = function (opts) { + opts = opts || {}; + + this.Promise = opts.Promise || Promise; + + // format: {key : [fn, fn]} + // queues[key] = null indicates no job running for key + this.queues = Object.create(null); + + // lock is reentrant for same domain + this.domainReentrant = opts.domainReentrant || false; + if (this.domainReentrant) { + if (typeof process === 'undefined' || typeof process.domain === 'undefined') { + throw new Error( + 'Domain-reentrant locks require `process.domain` to exist. Please flip `opts.domainReentrant = false`, ' + + 'use a NodeJS version that still implements Domain, or install a browser polyfill.'); + } + // domain of current running func {key : fn} + this.domains = Object.create(null); + } + + this.timeout = opts.timeout || AsyncLock.DEFAULT_TIMEOUT; + this.maxOccupationTime = opts.maxOccupationTime || AsyncLock.DEFAULT_MAX_OCCUPATION_TIME; + this.maxExecutionTime = opts.maxExecutionTime || AsyncLock.DEFAULT_MAX_EXECUTION_TIME; + if (opts.maxPending === Infinity || (Number.isInteger(opts.maxPending) && opts.maxPending >= 0)) { + this.maxPending = opts.maxPending; + } else { + this.maxPending = AsyncLock.DEFAULT_MAX_PENDING; + } +}; + +AsyncLock.DEFAULT_TIMEOUT = 0; //Never +AsyncLock.DEFAULT_MAX_OCCUPATION_TIME = 0; //Never +AsyncLock.DEFAULT_MAX_EXECUTION_TIME = 0; //Never +AsyncLock.DEFAULT_MAX_PENDING = 1000; + +/** + * Acquire Locks + * + * @param {String|Array} key resource key or keys to lock + * @param {function} fn async function + * @param {function} cb callback function, otherwise will return a promise + * @param {Object} opts options + */ +AsyncLock.prototype.acquire = function (key, fn, cb, opts) { + if (Array.isArray(key)) { + return this._acquireBatch(key, fn, cb, opts); + } + + if (typeof (fn) !== 'function') { + throw new Error('You must pass a function to execute'); + } + + // faux-deferred promise using new Promise() (as Promise.defer is deprecated) + var deferredResolve = null; + var deferredReject = null; + var deferred = null; + + if (typeof (cb) !== 'function') { + opts = cb; + cb = null; + + // will return a promise + deferred = new this.Promise(function(resolve, reject) { + deferredResolve = resolve; + deferredReject = reject; + }); + } + + opts = opts || {}; + + var resolved = false; + var timer = null; + var occupationTimer = null; + var executionTimer = null; + var self = this; + + var done = function (locked, err, ret) { + + if (occupationTimer) { + clearTimeout(occupationTimer); + occupationTimer = null; + } + + if (executionTimer) { + clearTimeout(executionTimer); + executionTimer = null; + } + + if (locked) { + if (!!self.queues[key] && self.queues[key].length === 0) { + delete self.queues[key]; + } + if (self.domainReentrant) { + delete self.domains[key]; + } + } + + if (!resolved) { + if (!deferred) { + if (typeof (cb) === 'function') { + cb(err, ret); + } + } + else { + //promise mode + if (err) { + deferredReject(err); + 
} + else { + deferredResolve(ret); + } + } + resolved = true; + } + + if (locked) { + //run next func + if (!!self.queues[key] && self.queues[key].length > 0) { + self.queues[key].shift()(); + } + } + }; + + var exec = function (locked) { + if (resolved) { // may due to timed out + return done(locked); + } + + if (timer) { + clearTimeout(timer); + timer = null; + } + + if (self.domainReentrant && locked) { + self.domains[key] = process.domain; + } + + var maxExecutionTime = opts.maxExecutionTime || self.maxExecutionTime; + if (maxExecutionTime) { + executionTimer = setTimeout(function () { + if (!!self.queues[key]) { + done(locked, new Error('Maximum execution time is exceeded ' + key)); + } + }, maxExecutionTime); + } + + // Callback mode + if (fn.length === 1) { + var called = false; + try { + fn(function (err, ret) { + if (!called) { + called = true; + done(locked, err, ret); + } + }); + } catch (err) { + // catching error thrown in user function fn + if (!called) { + called = true; + done(locked, err); + } + } + } + else { + // Promise mode + self._promiseTry(function () { + return fn(); + }) + .then(function(ret){ + done(locked, undefined, ret); + }, function(error){ + done(locked, error); + }); + } + }; + + if (self.domainReentrant && !!process.domain) { + exec = process.domain.bind(exec); + } + + var maxPending = opts.maxPending || self.maxPending; + + if (!self.queues[key]) { + self.queues[key] = []; + exec(true); + } + else if (self.domainReentrant && !!process.domain && process.domain === self.domains[key]) { + // If code is in the same domain of current running task, run it directly + // Since lock is re-enterable + exec(false); + } + else if (self.queues[key].length >= maxPending) { + done(false, new Error('Too many pending tasks in queue ' + key)); + } + else { + var taskFn = function () { + exec(true); + }; + if (opts.skipQueue) { + self.queues[key].unshift(taskFn); + } else { + self.queues[key].push(taskFn); + } + + var timeout = opts.timeout || self.timeout; + if (timeout) { + timer = setTimeout(function () { + timer = null; + done(false, new Error('async-lock timed out in queue ' + key)); + }, timeout); + } + } + + var maxOccupationTime = opts.maxOccupationTime || self.maxOccupationTime; + if (maxOccupationTime) { + occupationTimer = setTimeout(function () { + if (!!self.queues[key]) { + done(false, new Error('Maximum occupation time is exceeded in queue ' + key)); + } + }, maxOccupationTime); + } + + if (deferred) { + return deferred; + } +}; + +/* + * Below is how this function works: + * + * Equivalent code: + * self.acquire(key1, function(cb){ + * self.acquire(key2, function(cb){ + * self.acquire(key3, fn, cb); + * }, cb); + * }, cb); + * + * Equivalent code: + * var fn3 = getFn(key3, fn); + * var fn2 = getFn(key2, fn3); + * var fn1 = getFn(key1, fn2); + * fn1(cb); + */ +AsyncLock.prototype._acquireBatch = function (keys, fn, cb, opts) { + if (typeof (cb) !== 'function') { + opts = cb; + cb = null; + } + + var self = this; + var getFn = function (key, fn) { + return function (cb) { + self.acquire(key, fn, cb, opts); + }; + }; + + var fnx = keys.reduceRight(function (prev, key) { + return getFn(key, prev); + }, fn); + + if (typeof (cb) === 'function') { + fnx(cb); + } + else { + return new this.Promise(function (resolve, reject) { + // check for promise mode in case keys is empty array + if (fnx.length === 1) { + fnx(function (err, ret) { + if (err) { + reject(err); + } + else { + resolve(ret); + } + }); + } else { + resolve(fnx()); + } + }); + } +}; + +/* + * Whether 
there is any running or pending asyncFunc + * + * @param {String} key + */ +AsyncLock.prototype.isBusy = function (key) { + if (!key) { + return Object.keys(this.queues).length > 0; + } + else { + return !!this.queues[key]; + } +}; + +/** + * Promise.try() implementation to become independent of Q-specific methods + */ +AsyncLock.prototype._promiseTry = function(fn) { + try { + return this.Promise.resolve(fn()); + } catch (e) { + return this.Promise.reject(e); + } +}; + +module.exports = AsyncLock; + + /***/ }), /***/ 33415: @@ -17703,1631 +33279,43 @@ module.exports = { /***/ }), -/***/ 49988: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 43268: +/***/ ((module) => { "use strict"; -// Copyright 2020 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getDiffString = exports.getChanges = exports.parseChanges = exports.getAllDiffs = exports.getGitFileData = exports.findRepoRoot = exports.resolvePath = void 0; -const child_process_1 = __nccwpck_require__(32081); -const types_1 = __nccwpck_require__(13984); -const logger_1 = __nccwpck_require__(58192); -const fs_1 = __nccwpck_require__(57147); -const path = __nccwpck_require__(71017); -class InstallationError extends Error { - constructor(message) { - super(message); - this.name = 'InstallationError'; - } + +function escapeRegExp(string) { + return string.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); // $& means the whole matched string } -/** - * Get the absolute path of a relative path - * @param {string} dir the wildcard directory containing git change, not necessarily the root git directory - * @returns {string} the absolute path relative to the path that the user executed the bash command in - */ -function resolvePath(dir) { - const absoluteDir = path.resolve(process.cwd(), dir); - return absoluteDir; + +function replaceAll(str, search, replacement) { + search = search instanceof RegExp ? search : new RegExp(escapeRegExp(search), 'g'); + + return str.replace(search, replacement); } -exports.resolvePath = resolvePath; -/** - * Get the git root directory. - * Errors if the directory provided is not a git directory. - * @param {string} dir an absolute directory - * @returns {string} the absolute path of the git directory root - */ -function findRepoRoot(dir) { - try { - return (0, child_process_1.execSync)('git rev-parse --show-toplevel', { cwd: dir }) - .toString() - .trimRight(); // remove the trailing \n + +var CleanGitRef = { + clean: function clean(value) { + if (typeof value !== 'string') { + throw new Error('Expected a string, received: ' + value); } - catch (err) { - logger_1.logger.error(`The directory provided is not a git directory: ${dir}`); - throw err; - } -} -exports.findRepoRoot = findRepoRoot; -/** - * Returns the git diff old/new mode, status, and path. Given a git diff. - * Errors if there is a parsing error - * @param {string} gitDiffPattern A single file diff. 
Renames and copies are broken up into separate diffs. See https://git-scm.com/docs/git-diff#Documentation/git-diff.txt-git-diff-filesltpatterngt82308203 for more details - * @returns indexable git diff fields: old/new mode, status, and path - */ -function parseGitDiff(gitDiffPattern) { - try { - const fields = gitDiffPattern.split(' '); - const newMode = fields[1]; - const oldMode = fields[0].substring(1); - const statusAndPath = fields[4].split('\t'); - const status = statusAndPath[0]; - const relativePath = statusAndPath[1]; - return { oldMode, newMode, status, relativePath }; - } - catch (err) { - logger_1.logger.warn(`\`git diff --raw\` may have changed formats: \n ${gitDiffPattern}`); - throw err; - } -} -/** - * Get the GitHub mode, file content, and relative path asynchronously - * Rejects if there is a git diff error, or if the file contents could not be loaded. - * @param {string} gitRootDir the root of the local GitHub repository - * @param {string} gitDiffPattern A single file diff. Renames and copies are broken up into separate diffs. See https://git-scm.com/docs/git-diff#Documentation/git-diff.txt-git-diff-filesltpatterngt82308203 for more details - * @returns {Promise} the current mode, the relative path of the file in the Git Repository, and the file status. - */ -function getGitFileData(gitRootDir, gitDiffPattern) { - return new Promise((resolve, reject) => { - try { - const { oldMode, newMode, status, relativePath } = parseGitDiff(gitDiffPattern); - // if file is deleted, do not attempt to read it - if (status === 'D') { - resolve({ path: relativePath, fileData: new types_1.FileData(null, oldMode) }); - } - else { - // else read the file - (0, fs_1.readFile)(gitRootDir + '/' + relativePath, { - encoding: 'utf-8', - }, (err, content) => { - if (err) { - logger_1.logger.error(`Error loading file ${relativePath} in git directory ${gitRootDir}`); - reject(err); - } - resolve({ - path: relativePath, - fileData: new types_1.FileData(content, newMode), - }); - }); - } - } - catch (err) { - reject(err); - } - }); -} -exports.getGitFileData = getGitFileData; -/** - * Get all the diffs using `git diff` of a git directory. - * Errors if the git directory provided is not a git directory. - * @param {string} gitRootDir a git directory - * @returns {string[]} a list of git diffs - */ -function getAllDiffs(gitRootDir) { - (0, child_process_1.execSync)('git add -A', { cwd: gitRootDir }); - const diffs = (0, child_process_1.execSync)('git diff --raw --staged --no-renames', { - cwd: gitRootDir, - }) - .toString() // strictly return buffer for mocking purposes. sinon ts doesn't infer {encoding: 'utf-8'} - .trimRight() // remove the trailing new line - .split('\n') - .filter(line => !!line.trim()); - (0, child_process_1.execSync)('git reset .', { cwd: gitRootDir }); - return diffs; -} -exports.getAllDiffs = getAllDiffs; -/** - * Get the git changes of the current project asynchronously. 
- * Rejects if any of the files fails to load (if not deleted), - * or if there is a git diff parse error - * @param {string[]} diffs the git diff raw output (which only shows relative paths) - * @param {string} gitDir the root of the local GitHub repository - * @returns {Promise} the changeset - */ -async function parseChanges(diffs, gitDir) { - try { - // get updated file contents - const changes = new Map(); - const changePromises = []; - for (let i = 0; i < diffs.length; i++) { - // TODO - handle memory constraint - changePromises.push(getGitFileData(gitDir, diffs[i])); - } - const gitFileDatas = await Promise.all(changePromises); - for (let i = 0; i < gitFileDatas.length; i++) { - changes.set(gitFileDatas[i].path, gitFileDatas[i].fileData); - } - return changes; - } - catch (err) { - logger_1.logger.error('Error parsing git changes'); - throw err; - } -} -exports.parseChanges = parseChanges; -/** - * Throws an error if git is not installed - * @returns {void} void if git is installed - */ -function validateGitInstalled() { - try { - (0, child_process_1.execSync)('git --version'); - } - catch (err) { - logger_1.logger.error('git not installed'); - throw new InstallationError('git command is not recognized. Make sure git is installed.'); - } -} -/** - * Load the change set asynchronously. - * @param dir the directory containing git changes - * @returns {Promise} the change set - */ -function getChanges(dir) { - try { - validateGitInstalled(); - const absoluteDir = resolvePath(dir); - const gitRootDir = findRepoRoot(absoluteDir); - const diffs = getAllDiffs(gitRootDir); - return parseChanges(diffs, gitRootDir); - } - catch (err) { - if (!(err instanceof InstallationError)) { - logger_1.logger.error('Error loadng git changes.'); - } - throw err; - } -} -exports.getChanges = getChanges; -/** - * Get the git changes of the current project asynchronously. - * Rejects if any of the files fails to load (if not deleted), - * or if there is a git diff parse error - * @param {string[]} diffs the git diff raw output (which only shows relative paths) - * @param {string} gitDir the root of the local GitHub repository - * @returns {string} the diff - */ -function getDiffString(dir) { - try { - validateGitInstalled(); - const absoluteDir = resolvePath(dir); - const gitRootDir = findRepoRoot(absoluteDir); - (0, child_process_1.execSync)('git add -A', { cwd: gitRootDir }); - const diff = (0, child_process_1.execSync)('git diff --staged --no-renames', { - cwd: gitRootDir, - }) - .toString() // strictly return buffer for mocking purposes. 
sinon ts doesn't infer {encoding: 'utf-8'} - .trimRight(); // remove the trailing new line - (0, child_process_1.execSync)('git reset .', { cwd: gitRootDir }); - return diff; - } - catch (err) { - if (!(err instanceof InstallationError)) { - logger_1.logger.error('Error loadng git changes.'); - } - throw err; - } -} -exports.getDiffString = getDiffString; -//# sourceMappingURL=handle-git-dir-change.js.map -/***/ }), + value = replaceAll(value, './', '/'); + value = replaceAll(value, '..', '.'); + value = replaceAll(value, ' ', '-'); + value = replaceAll(value, /^[~^:?*\\\-]/g, ''); + value = replaceAll(value, /[~^:?*\\]/g, '-'); + value = replaceAll(value, /[~^:?*\\\-]$/g, ''); + value = replaceAll(value, '@{', '-'); + value = replaceAll(value, /\.$/g, ''); + value = replaceAll(value, /\/$/g, ''); + value = replaceAll(value, /\.lock$/g, ''); + return value; + } +}; -/***/ 9558: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -// Copyright 2020 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.addReviewCommentsDefaults = exports.addPullRequestDefaults = void 0; -const DEFAULT_BRANCH_NAME = 'code-suggestions'; -const DEFAULT_PRIMARY_BRANCH = 'main'; -const DEFAULT_PAGE_SIZE = 100; -/** - * Add defaults to GitHub Pull Request options. - * Preserves the empty string. - * For ESCMAScript, null/undefined values are preserved for required fields. - * Recommended with an object validation function to check empty strings and incorrect types. - * @param {PullRequestUserOptions} options the user-provided github pull request options - * @returns {CreatePullRequest} git hub context with defaults applied - */ -function addPullRequestDefaults(options) { - const pullRequestSettings = { - upstreamOwner: options.upstreamOwner, - upstreamRepo: options.upstreamRepo, - description: options.description, - title: options.title, - message: options.message, - force: options.force || false, - branch: typeof options.branch === 'string' ? options.branch : DEFAULT_BRANCH_NAME, - primary: typeof options.primary === 'string' - ? options.primary - : DEFAULT_PRIMARY_BRANCH, - maintainersCanModify: options.maintainersCanModify === false ? false : true, - filesPerCommit: options.filesPerCommit, - }; - return pullRequestSettings; -} -exports.addPullRequestDefaults = addPullRequestDefaults; -/** - * Format user input for pull request review comments - * @param options The user's options input for review comments - * @returns the formatted version of user input for pull request review comments - */ -function addReviewCommentsDefaults(options) { - const createReviewComment = { - repo: options.repo, - owner: options.owner, - pullNumber: options.pullNumber, - // if zero set as 0 - pageSize: options.pageSize === null || options.pageSize === undefined - ? 
DEFAULT_PAGE_SIZE - : options.pageSize, - }; - return createReviewComment; -} -exports.addReviewCommentsDefaults = addReviewCommentsDefaults; -//# sourceMappingURL=default-options-handler.js.map - -/***/ }), - -/***/ 15313: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -// Copyright 2023 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.CommitError = void 0; -class CommitError extends Error { - constructor(message, cause) { - super(message); - this.cause = cause; - } -} -exports.CommitError = CommitError; -//# sourceMappingURL=errors.js.map - -/***/ }), - -/***/ 58021: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2020 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.branch = exports.createBranch = exports.existsBranchWithName = exports.getBranchHead = exports.createRef = void 0; -const logger_1 = __nccwpck_require__(58192); -const REF_PREFIX = 'refs/heads/'; -const DEFAULT_PRIMARY_BRANCH = 'main'; -/** - * Create a new branch reference with the ref prefix - * @param {string} branchName name of the branch - */ -function createRef(branchName) { - return REF_PREFIX + branchName; -} -exports.createRef = createRef; -/** - * get branch commit HEAD SHA of a repository - * Throws an error if the branch cannot be found - * @param {Octokit} octokit The authenticated octokit instance - * @param {RepoDomain} origin The domain information of the remote origin repository - * @param {string} branch the name of the branch - * @returns {Promise} branch commit HEAD SHA - */ -async function getBranchHead(octokit, origin, branch) { - const branchData = (await octokit.repos.getBranch({ - owner: origin.owner, - repo: origin.repo, - branch, - })).data; - logger_1.logger.info(`Successfully found branch HEAD sha "${branchData.commit.sha}".`); - return branchData.commit.sha; -} -exports.getBranchHead = getBranchHead; -/** - * Determine if there is a branch with the provided name in the remote GitHub repository - * @param {Octokit} octokit The authenticated octokit instance - * @param {RepoDomain} remote The domain information of the remote repository - * @param {string} name The branch name to create on the repository - * @returns {Promise} if there is a branch already existing in the remote GitHub repository - */ -async function existsBranchWithName(octokit, remote, name) { - try { - const data = (await octokit.git.getRef({ - owner: remote.owner, - repo: remote.repo, - ref: `heads/${name}`, - })).data; - return data.ref ? true : false; - } - catch (err) { - if (err.status === 404) - return false; - else - throw err; - } -} -exports.existsBranchWithName = existsBranchWithName; -/** - * Create a branch on the remote repository if there is not an existing branch - * @param {Octokit} octokit The authenticated octokit instance - * @param {RepoDomain} remote The domain information of the remote origin repository - * @param {string} name The branch name to create on the origin repository - * @param {string} baseSha the sha that the base of the reference points to - * @param {boolean} duplicate whether there is an existing branch or not - * @returns {Promise} - */ -async function createBranch(octokit, remote, name, baseSha, duplicate) { - if (!duplicate) { - const refData = (await octokit.git.createRef({ - owner: remote.owner, - repo: remote.repo, - ref: createRef(name), - sha: baseSha, - })).data; - logger_1.logger.info(`Successfully created branch at ${refData.url}`); - } - else { - logger_1.logger.info('Skipping branch creation step...'); - } -} -exports.createBranch = createBranch; -/** - * Create a GitHub branch given a remote origin. - * Throws an exception if octokit fails, or if the base branch is invalid - * @param {Octokit} octokit The authenticated octokit instance - * @param {RepoDomain} origin The domain information of the remote origin repository - * @param {RepoDomain} upstream The domain information of the remote upstream repository - * @param {string} name The branch name to create on the origin repository - * @param {string} baseBranch the name of the branch to base the new branch off of. 
Default is main - * @returns {Promise} the base SHA for subsequent commits to be based off for the origin branch - */ -async function branch(octokit, origin, upstream, name, baseBranch = DEFAULT_PRIMARY_BRANCH) { - // create branch from primary branch HEAD SHA - try { - const baseSha = await getBranchHead(octokit, upstream, baseBranch); - const duplicate = await existsBranchWithName(octokit, origin, name); - await createBranch(octokit, origin, name, baseSha, duplicate); - return baseSha; - } - catch (err) { - logger_1.logger.error('Error when creating branch'); - throw err; - } -} -exports.branch = branch; -//# sourceMappingURL=branch.js.map - -/***/ }), - -/***/ 4424: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2020 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.commitAndPush = exports.updateRef = exports.createTree = exports.generateTreeObjects = void 0; -const logger_1 = __nccwpck_require__(58192); -const create_commit_1 = __nccwpck_require__(93018); -const errors_1 = __nccwpck_require__(15313); -const DEFAULT_FILES_PER_COMMIT = 100; -/** - * Generate and return a GitHub tree object structure - * containing the target change data - * See https://developer.github.com/v3/git/trees/#tree-object - * @param {Changes} changes the set of repository changes - * @returns {TreeObject[]} The new GitHub changes - */ -function generateTreeObjects(changes) { - const tree = []; - changes.forEach((fileData, path) => { - if (fileData.content === null) { - // if no file content then file is deleted - tree.push({ - path, - mode: fileData.mode, - type: 'blob', - sha: null, - }); - } - else { - // update file with its content - tree.push({ - path, - mode: fileData.mode, - type: 'blob', - content: fileData.content, - }); - } - }); - return tree; -} -exports.generateTreeObjects = generateTreeObjects; -function* inGroupsOf(all, groupSize) { - for (let i = 0; i < all.length; i += groupSize) { - yield all.slice(i, i + groupSize); - } -} -/** - * Upload and create a remote GitHub tree - * and resolves with the new tree SHA. 
- * Rejects if GitHub V3 API fails with the GitHub error response - * @param {Octokit} octokit The authenticated octokit instance - * @param {RepoDomain} origin the the remote repository to push changes to - * @param {string} refHead the base of the new commit(s) - * @param {TreeObject[]} tree the set of GitHub changes to upload - * @returns {Promise} the GitHub tree SHA - * @throws {CommitError} - */ -async function createTree(octokit, origin, refHead, tree) { - const oldTreeSha = (await octokit.git.getCommit({ - owner: origin.owner, - repo: origin.repo, - commit_sha: refHead, - })).data.tree.sha; - logger_1.logger.info('Got the latest commit tree'); - try { - const treeSha = (await octokit.git.createTree({ - owner: origin.owner, - repo: origin.repo, - tree, - base_tree: oldTreeSha, - })).data.sha; - logger_1.logger.info(`Successfully created a tree with the desired changes with SHA ${treeSha}`); - return treeSha; - } - catch (e) { - throw new errors_1.CommitError(`Error adding to tree: ${refHead}`, e); - } -} -exports.createTree = createTree; -/** - * Update a reference to a SHA - * Rejects if GitHub V3 API fails with the GitHub error response - * @param {Octokit} octokit The authenticated octokit instance - * @param {BranchDomain} origin the the remote branch to push changes to - * @param {string} newSha the ref to update the commit HEAD to - * @param {boolean} force to force the commit changes given refHead - * @returns {Promise} - */ -async function updateRef(octokit, origin, newSha, force) { - logger_1.logger.info(`Updating reference heads/${origin.branch} to ${newSha}`); - try { - await octokit.git.updateRef({ - owner: origin.owner, - repo: origin.repo, - ref: `heads/${origin.branch}`, - sha: newSha, - force, - }); - logger_1.logger.info(`Successfully updated reference ${origin.branch} to ${newSha}`); - } - catch (e) { - throw new errors_1.CommitError(`Error updating ref heads/${origin.branch} to ${newSha}`, e); - } -} -exports.updateRef = updateRef; -/** - * Given a set of changes, apply the commit(s) on top of the given branch's head and upload it to GitHub - * Rejects if GitHub V3 API fails with the GitHub error response - * @param {Octokit} octokit The authenticated octokit instance - * @param {string} refHead the base of the new commit(s) - * @param {Changes} changes the set of repository changes - * @param {RepoDomain} origin the the remote repository to push changes to - * @param {string} originBranchName the remote branch that will contain the new changes - * @param {string} commitMessage the message of the new commit - * @param {boolean} force to force the commit changes given refHead - * @returns {Promise} - * @throws {CommitError} - */ -async function commitAndPush(octokit, refHead, changes, originBranch, commitMessage, force, options) { - var _a; - const filesPerCommit = (_a = options === null || options === void 0 ? void 0 : options.filesPerCommit) !== null && _a !== void 0 ? 
_a : DEFAULT_FILES_PER_COMMIT; - const tree = generateTreeObjects(changes); - for (const treeGroup of inGroupsOf(tree, filesPerCommit)) { - const treeSha = await createTree(octokit, originBranch, refHead, treeGroup); - refHead = await (0, create_commit_1.createCommit)(octokit, originBranch, refHead, treeSha, commitMessage, options); - } - await updateRef(octokit, originBranch, refHead, force); -} -exports.commitAndPush = commitAndPush; -//# sourceMappingURL=commit-and-push.js.map - -/***/ }), - -/***/ 93018: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.createCommit = void 0; -const logger_1 = __nccwpck_require__(58192); -const errors_1 = __nccwpck_require__(15313); -/** - * Create a commit with a repo snapshot SHA on top of the reference HEAD - * and resolves with the SHA of the commit. - * Rejects if GitHub V3 API fails with the GitHub error response - * @param {Octokit} octokit The authenticated octokit instance - * @param {RepoDomain} origin the the remote repository to push changes to - * @param {string} refHead the base of the new commit(s) - * @param {string} treeSha the tree SHA that this commit will point to - * @param {string} message the message of the new commit - * @returns {Promise} the new commit SHA - * @see https://docs.github.com/en/rest/git/commits?apiVersion=2022-11-28#create-a-commit - */ -async function createCommit(octokit, origin, refHead, treeSha, message, options = {}) { - try { - const signature = options.signer - ? await options.signer.generateSignature({ - message, - tree: treeSha, - parents: [refHead], - author: options.author, - committer: options.committer, - }) - : undefined; - const { data: { sha, url }, } = await octokit.git.createCommit({ - owner: origin.owner, - repo: origin.repo, - message, - tree: treeSha, - parents: [refHead], - signature, - author: options.author, - committer: options.committer, - }); - logger_1.logger.info(`Successfully created commit. See commit at ${url}`); - return sha; - } - catch (e) { - throw new errors_1.CommitError(`Error creating commit for: ${treeSha}`, e); - } -} -exports.createCommit = createCommit; -//# sourceMappingURL=create-commit.js.map - -/***/ }), - -/***/ 67386: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2020 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.fork = void 0; -const logger_1 = __nccwpck_require__(58192); -/** - * Fork the GitHub owner's repository. - * Returns the fork owner and fork repo when the fork creation request to GitHub succeeds. - * Otherwise throws error. - * - * If fork already exists no new fork is created, no error occurs, and the existing Fork data is returned - * with the `updated_at` + any historical repo changes. - * @param {Octokit} octokit The authenticated octokit instance - * @param {RepoDomain} upstream upstream repository information - * @returns {Promise} the forked repository name, as well as the owner of that fork - */ -async function fork(octokit, upstream) { - try { - const forkedRepo = (await octokit.repos.createFork({ - owner: upstream.owner, - repo: upstream.repo, - })).data; - const origin = { - repo: forkedRepo.name, - owner: forkedRepo.owner.login, - }; - logger_1.logger.info(`Create fork request was successful for ${origin.owner}/${origin.repo}`); - return origin; - } - catch (err) { - logger_1.logger.error('Error when forking'); - throw err; - } -} -exports.fork = fork; -//# sourceMappingURL=fork.js.map - -/***/ }), - -/***/ 8831: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.addLabels = void 0; -const logger_1 = __nccwpck_require__(58192); -/** - * Create a GitHub PR on the upstream organization's repo - * Throws an error if the GitHub API fails - * @param {Octokit} octokit The authenticated octokit instance - * @param {RepoDomain} upstream The upstream repository - * @param {BranchDomain} origin The remote origin information that contains the origin branch - * @param {number} issue_number The issue number to add labels to. Can also be a PR number - * @param {string[]} labels The list of labels to apply to the issue/pull request. Default is []. the funciton will no-op. 
- * @returns {Promise} The list of resulting labels after the addition of the given labels - */ -async function addLabels(octokit, upstream, origin, issue_number, labels) { - if (!labels || labels.length === 0) { - return []; - } - const labelsResponseData = (await octokit.issues.addLabels({ - owner: upstream.owner, - repo: origin.repo, - issue_number: issue_number, - labels: labels, - })).data; - logger_1.logger.info(`Successfully added labels ${labels} to issue: ${issue_number}`); - return labelsResponseData.map(l => l.name); -} -exports.addLabels = addLabels; -//# sourceMappingURL=labels.js.map - -/***/ }), - -/***/ 24461: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2020 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.openPullRequest = void 0; -const logger_1 = __nccwpck_require__(58192); -const DEFAULT_PRIMARY = 'main'; -/** - * Create a GitHub PR on the upstream organization's repo - * Throws an error if the GitHub API fails - * @param {Octokit} octokit The authenticated octokit instance - * @param {RepoDomain} upstream The upstream repository - * @param {BranchDomain} origin The remote origin information that contains the origin branch - * @param {Description} description The pull request title and detailed description - * @param {boolean} maintainersCanModify Whether or not maintainers can modify the pull request. Default is true - * @param {string} upstreamPrimary The upstream repository's primary branch. Default is main. - * @param draft Open a DRAFT pull request. Defaults to false. - * @returns {Promise} - */ -async function openPullRequest(octokit, upstream, origin, description, maintainersCanModify = true, upstreamPrimary = DEFAULT_PRIMARY, draft = false) { - const head = `${origin.owner}:${origin.branch}`; - const existingPullRequest = (await octokit.pulls.list({ - owner: upstream.owner, - repo: origin.repo, - head, - })).data.find(pr => pr.head.label === head); - if (existingPullRequest) { - logger_1.logger.info(`Found existing pull request for reference ${origin.owner}:${origin.branch}. 
Skipping creating a new pull request.`); - return existingPullRequest.number; - } - const pullResponseData = (await octokit.pulls.create({ - owner: upstream.owner, - repo: origin.repo, - title: description.title, - head: `${origin.owner}:${origin.branch}`, - base: upstreamPrimary, - body: description.body, - maintainer_can_modify: maintainersCanModify, - draft: draft, - })).data; - logger_1.logger.info(`Successfully opened pull request available at url: ${pullResponseData.url}.`); - return pullResponseData.number; -} -exports.openPullRequest = openPullRequest; -//# sourceMappingURL=open-pull-request.js.map - -/***/ }), - -/***/ 43102: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2020 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getPullRequestHunks = exports.getCurrentPullRequestPatches = exports.createPullRequestReview = exports.makeInlineSuggestions = exports.buildReviewComments = exports.buildSummaryComment = void 0; -const logger_1 = __nccwpck_require__(58192); -const diff_utils_1 = __nccwpck_require__(15276); -const hunk_utils_1 = __nccwpck_require__(26381); -function hunkErrorMessage(hunk) { - return ` * lines ${hunk.oldStart}-${hunk.oldEnd}`; -} -function fileErrorMessage(filename, hunks) { - return `* ${filename}\n` + hunks.map(hunkErrorMessage).join('\n'); -} -/** - * Build an error message based on invalid hunks. - * Returns an empty string if the provided hunks are empty. - * @param invalidHunks a map of filename to hunks that are not suggestable - */ -function buildSummaryComment(invalidHunks) { - if (invalidHunks.size === 0) { - return ''; - } - return ('Some suggestions could not be made:\n' + - Array.from(invalidHunks, ([filename, hunks]) => fileErrorMessage(filename, hunks)).join('\n')); -} -exports.buildSummaryComment = buildSummaryComment; -const COMFORT_PREVIEW_HEADER = 'application/vnd.github.comfort-fade-preview+json'; -/** - * Convert the patch suggestions into GitHub parameter objects. 
- * Use this to generate review comments - * For information see: - * https://developer.github.com/v3/pulls/comments/#create-a-review-comment-for-a-pull-request - * @param suggestions - */ -function buildReviewComments(suggestions) { - const fileComments = []; - suggestions.forEach((hunks, fileName) => { - hunks.forEach(hunk => { - const newContent = hunk.newContent.join('\n'); - if (hunk.oldStart === hunk.oldEnd) { - const singleComment = { - path: fileName, - body: `\`\`\`suggestion\n${newContent}\n\`\`\``, - line: hunk.oldEnd, - side: 'RIGHT', - }; - fileComments.push(singleComment); - } - else { - const comment = { - path: fileName, - body: `\`\`\`suggestion\n${newContent}\n\`\`\``, - start_line: hunk.oldStart, - line: hunk.oldEnd, - side: 'RIGHT', - start_side: 'RIGHT', - }; - fileComments.push(comment); - } - }); - }); - return fileComments; -} -exports.buildReviewComments = buildReviewComments; -/** - * Make a request to GitHub to make review comments - * @param octokit an authenticated octokit instance - * @param suggestions code suggestions patches - * @param remote the repository domain - * @param pullNumber the pull request number to make a review on - */ -async function makeInlineSuggestions(octokit, suggestions, outOfScopeSuggestions, remote, pullNumber) { - const comments = buildReviewComments(suggestions); - if (!comments.length) { - logger_1.logger.info('No valid suggestions to make'); - } - if (!comments.length && !outOfScopeSuggestions.size) { - logger_1.logger.info('No suggestions were generated. Exiting...'); - return null; - } - const summaryComment = buildSummaryComment(outOfScopeSuggestions); - if (summaryComment) { - logger_1.logger.warn('Some suggestions could not be made'); - } - // apply the suggestions to the latest sha - // the latest Pull Request hunk range includes - // all previous commit valid hunk ranges - const headSha = (await octokit.pulls.get({ - owner: remote.owner, - repo: remote.repo, - pull_number: pullNumber, - })).data.head.sha; - const reviewNumber = (await octokit.pulls.createReview({ - owner: remote.owner, - repo: remote.repo, - pull_number: pullNumber, - commit_id: headSha, - event: 'COMMENT', - body: summaryComment, - headers: { accept: COMFORT_PREVIEW_HEADER }, - // Octokit type definitions don't support multiline comments, but the GitHub API does - comments: comments, - })).data.id; - logger_1.logger.info(`Successfully created a review on pull request: ${pullNumber}.`); - return reviewNumber; -} -exports.makeInlineSuggestions = makeInlineSuggestions; -/** - * Comment on a Pull Request - * @param {Octokit} octokit authenticated octokit instance - * @param {RepoDomain} remote the Pull Request repository - * @param {number} pullNumber the Pull Request number - * @param {number} pageSize the number of files to comment on // TODO pagination - * @param {Map} diffContents the old and new contents of the files to suggest - * @returns the created review's id, or null if no review was made - */ -async function createPullRequestReview(octokit, remote, pullNumber, pageSize, diffContents) { - try { - // get the hunks from the pull request - const pullRequestHunks = await exports.getPullRequestHunks(octokit, remote, pullNumber, pageSize); - // get the hunks from the suggested change - const allSuggestedHunks = typeof diffContents === 'string' - ?
(0, diff_utils_1.parseAllHunks)(diffContents) - : (0, hunk_utils_1.getRawSuggestionHunks)(diffContents); - // split hunks by commentable and uncommentable - const { validHunks, invalidHunks } = (0, hunk_utils_1.partitionSuggestedHunksByScope)(pullRequestHunks, allSuggestedHunks); - // create pull request review - const reviewNumber = await exports.makeInlineSuggestions(octokit, validHunks, invalidHunks, remote, pullNumber); - return reviewNumber; - } - catch (err) { - logger_1.logger.error('Failed to suggest'); - throw err; - } -} -exports.createPullRequestReview = createPullRequestReview; -/** - * For a pull request, get each remote file's patch text asynchronously - * Also get the list of files whose patch data could not be returned - * @param {Octokit} octokit the authenticated octokit instance - * @param {RepoDomain} remote the remote repository domain information - * @param {number} pullNumber the pull request number - * @param {number} pageSize the number of results to return per page - * @returns {Promise<{patches: Map<string, string>, filesMissingPatch: string[]}>} the stringified patch data for each file and the list of files whose patch data could not be resolved - */ -async function getCurrentPullRequestPatches(octokit, remote, pullNumber, pageSize) { - // TODO: support pagination - const filesMissingPatch = []; - const files = (await octokit.pulls.listFiles({ - owner: remote.owner, - repo: remote.repo, - pull_number: pullNumber, - per_page: pageSize, - })).data; - const patches = new Map(); - if (files.length === 0) { - logger_1.logger.error(`0 file results have returned from list files query for Pull Request #${pullNumber}. Cannot make suggestions on an empty Pull Request`); - throw Error('Empty Pull Request'); - } - files.forEach(file => { - if (file.patch === undefined) { - // files whose patch is too large do not return the patch text by default - // TODO handle file patches that are too large - logger_1.logger.warn(`File ${file.filename} may have a patch that is too large to display patch object.`); - filesMissingPatch.push(file.filename); - } - else { - patches.set(file.filename, file.patch); - } - }); - if (patches.size === 0) { - logger_1.logger.warn('0 patches have been returned. This could be because the patch results were too large to return.'); - } - return { patches, filesMissingPatch }; -} -exports.getCurrentPullRequestPatches = getCurrentPullRequestPatches; -/** - * For a pull request, get each remote file's current patch range to identify the scope of each patch as a Map. - * @param {Octokit} octokit the authenticated octokit instance - * @param {RepoDomain} remote the remote repository domain information - * @param {number} pullNumber the pull request number - * @param {number} pageSize the number of files to return per pull request list files query - * @returns {Promise<Map<string, Hunk[]>>} the scope of each file in the pull request - */ -async function getPullRequestHunks(octokit, remote, pullNumber, pageSize) { - const files = (await octokit.pulls.listFiles({ - owner: remote.owner, - repo: remote.repo, - pull_number: pullNumber, - per_page: pageSize, - })).data; - const pullRequestHunks = new Map(); - if (files.length === 0) { - logger_1.logger.error(`0 file results have returned from list files query for Pull Request #${pullNumber}.
Cannot make suggestions on an empty Pull Request`); - throw Error('Empty Pull Request'); - } - files.forEach(file => { - if (file.patch === undefined) { - // files whose patch is too large do not return the patch text by default - // TODO handle file patches that are too large - logger_1.logger.warn(`File ${file.filename} may have a patch that is too large to display patch object.`); - } - else { - const hunks = (0, diff_utils_1.parsePatch)(file.patch); - pullRequestHunks.set(file.filename, hunks); - } - }); - if (pullRequestHunks.size === 0) { - logger_1.logger.warn('0 patches have been returned. This could be because the patch results were too large to return.'); - } - return pullRequestHunks; -} -exports.getPullRequestHunks = getPullRequestHunks; -//# sourceMappingURL=review-pull-request.js.map - -/***/ }), - -/***/ 77103: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2020 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.parseTextFiles = exports.createPullRequest = exports.reviewPullRequest = exports.CommitError = exports.getDiffString = exports.getChanges = void 0; -const types_1 = __nccwpck_require__(13984); -const logger_1 = __nccwpck_require__(58192); -const default_options_handler_1 = __nccwpck_require__(9558); -const retry = __nccwpck_require__(33415); -const review_pull_request_1 = __nccwpck_require__(43102); -const branch_1 = __nccwpck_require__(58021); -const fork_1 = __nccwpck_require__(67386); -const commit_and_push_1 = __nccwpck_require__(4424); -const open_pull_request_1 = __nccwpck_require__(24461); -const labels_1 = __nccwpck_require__(8831); -var handle_git_dir_change_1 = __nccwpck_require__(49988); -Object.defineProperty(exports, "getChanges", ({ enumerable: true, get: function () { return handle_git_dir_change_1.getChanges; } })); -Object.defineProperty(exports, "getDiffString", ({ enumerable: true, get: function () { return handle_git_dir_change_1.getDiffString; } })); -var errors_1 = __nccwpck_require__(15313); -Object.defineProperty(exports, "CommitError", ({ enumerable: true, get: function () { return errors_1.CommitError; } })); -/** - * Given a set of suggestions, make all the multiline inline review comments on a given pull request, provided - * that they are in scope of the pull request. Out-of-scope suggestions are not made. - * - * In-scope suggestions are specifically: the suggestion for a file must correspond to a file in the remote pull request - * and the diff hunk computed for a file's contents must produce a range that is a subset of the pull request's files hunks. - * - * If a file is too large to load in the review, it is skipped in the suggestion phase. - * - * If changes are empty then the workflow will not run. - * Rethrows an HttpError if the Octokit GitHub API returns an error. The HttpError redacts all sensitive information from the Octokit access_token and client_secret headers.
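 - * A minimal usage sketch (the values shown are illustrative assumptions, not from this patch):
 - *   const diff = new Map([['src/index.ts', {oldContent: 'a\n', newContent: 'b\n'}]]);
 - *   await reviewPullRequest(octokit, diff, {owner: 'o', repo: 'r', pullNumber: 1, pageSize: 100});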
- * @param octokit The authenticated octokit instance, instantiated with an access token having permission to create a fork on the target repository. - * @param diffContents A set of changes. The changes may be empty. - * @param options The configuration for interacting with GitHub provided by the user. - * @returns the created review's id number, or null if there are no changes to be made. - */ -async function reviewPullRequest(octokit, diffContents, options) { - (0, logger_1.setupLogger)(options.logger); - // if null, undefined, or the empty map, then no changes have been provided. - // Do not execute GitHub workflow - if (diffContents === null || - diffContents === undefined || - (typeof diffContents !== 'string' && diffContents.size === 0)) { - logger_1.logger.info('Empty changes provided. No suggestions to be made. Cancelling workflow.'); - return null; - } - const gitHubConfigs = (0, default_options_handler_1.addReviewCommentsDefaults)(options); - const remote = { - owner: gitHubConfigs.owner, - repo: gitHubConfigs.repo, - }; - const reviewNumber = await (0, review_pull_request_1.createPullRequestReview)(octokit, remote, gitHubConfigs.pullNumber, gitHubConfigs.pageSize, diffContents); - return reviewNumber; -} -exports.reviewPullRequest = reviewPullRequest; -/** - * Make a new GitHub Pull Request with a set of changes applied on top of primary branch HEAD. - * The changes are committed into a new branch based on the upstream repository options using the authenticated Octokit account. - * Then a Pull Request is made from that branch. - * - * Also throws an error if git data from the fork is not ready in 5 minutes. - * - * From the docs - * https://developer.github.com/v3/repos/forks/#create-a-fork - * """ - * Forking a Repository happens asynchronously. - * You may have to wait a short period of time before you can access the git objects. - * If this takes longer than 5 minutes, be sure to contact GitHub Support or GitHub Premium Support. - * """ - * - * If changes are empty then the workflow will not run. - * Rethrows an HttpError if the Octokit GitHub API returns an error. The HttpError redacts all sensitive information from the Octokit access_token and client_secret headers. - * @param {Octokit} octokit The authenticated octokit instance, instantiated with an access token having permission to create a fork on the target repository - * @param {Changes | null | undefined} changes A set of changes. The changes may be empty - * @param {CreatePullRequestUserOptions} options The configuration for interacting with GitHub provided by the user. - * @returns {Promise} the pull request number. Returns 0 if unsuccessful. - * @throws {CommitError} on failure during commit process - */ -async function createPullRequest(octokit, changes, options) { - (0, logger_1.setupLogger)(options.logger); - // if null, undefined, or the empty map, then no changes have been provided. - // Do not execute GitHub workflow - if (changes === null || changes === undefined || changes.size === 0) { - logger_1.logger.info('Empty change set provided. No changes need to be made. Cancelling workflow.'); - return 0; - } - const gitHubConfigs = (0, default_options_handler_1.addPullRequestDefaults)(options); - logger_1.logger.info('Starting GitHub PR workflow...'); - const upstream = { - owner: gitHubConfigs.upstreamOwner, - repo: gitHubConfigs.upstreamRepo, - }; - const origin = options.fork === false ?
upstream : await (0, fork_1.fork)(octokit, upstream); - if (options.fork) { - // try to sync the fork - await retry(async () => await octokit.repos.mergeUpstream({ - owner: origin.owner, - repo: origin.repo, - branch: gitHubConfigs.primary, - }), { - retries: options.retry, - factor: 2.8411, - minTimeout: 3000, - randomize: false, - onRetry: (e, attempt) => { - e.message = `Error syncing upstream: ${e.message}`; - logger_1.logger.error(e); - logger_1.logger.info(`Retry attempt #${attempt}...`); - }, - }); - } - const originBranch = { - ...origin, - branch: gitHubConfigs.branch, - }; - // The `retry` flag defaults to `5` to maintain compatibility - options.retry = options.retry === undefined ? 5 : options.retry; - const refHeadSha = await retry(async () => await (0, branch_1.branch)(octokit, origin, upstream, originBranch.branch, gitHubConfigs.primary), { - retries: options.retry, - factor: 2.8411, - minTimeout: 3000, - randomize: false, - onRetry: (e, attempt) => { - e.message = `Error creating Pull Request: ${e.message}`; - logger_1.logger.error(e); - logger_1.logger.info(`Retry attempt #${attempt}...`); - }, - }); - await (0, commit_and_push_1.commitAndPush)(octokit, refHeadSha, changes, originBranch, gitHubConfigs.message, gitHubConfigs.force, options); - const description = { - body: gitHubConfigs.description, - title: gitHubConfigs.title, - }; - const prNumber = await (0, open_pull_request_1.openPullRequest)(octokit, upstream, originBranch, description, gitHubConfigs.maintainersCanModify, gitHubConfigs.primary, options.draft); - logger_1.logger.info(`Successfully opened pull request: ${prNumber}.`); - // addLabels will no-op if options.labels is undefined or empty. - await (0, labels_1.addLabels)(octokit, upstream, originBranch, prNumber, options.labels); - return prNumber; -} -exports.createPullRequest = createPullRequest; -/** - * Convert a Map or a {[path: string]: string} object, where the key is the relative file path in the repository - * and the value is the text content, into a Map that also contains the file mode information '100644' - * @param {Object | Map} textFiles a map/object where the key is the relative file path and the value is the text file content - * @returns {Changes} Map of the file path to the string file content and the file mode '100644' - */ -function parseTextFiles(textFiles) { - const changes = new Map(); - if (textFiles instanceof Map) { - textFiles.forEach((content, path) => { - if (typeof path !== 'string' || - (content !== null && typeof content !== 'string')) { - throw TypeError('The file changeset provided must have a string key and a string/null value'); - } - changes.set(path, new types_1.FileData(content)); - }); - } - else { - for (const [path, content] of Object.entries(textFiles)) { - if (typeof path !== 'string' || - (content !== null && typeof content !== 'string')) { - throw TypeError('The file changeset provided must have a string key and a string/null value'); - } - changes.set(path, new types_1.FileData(content)); - } - } - return changes; -} -exports.parseTextFiles = parseTextFiles; -//# sourceMappingURL=index.js.map - -/***/ }), - -/***/ 58192: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -// Copyright 2020 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.setupLogger = exports.logger = void 0; -class NullLogger { - constructor() { - this.error = () => { }; - this.warn = () => { }; - this.info = () => { }; - this.debug = () => { }; - this.trace = () => { }; - } -} -let logger = new NullLogger(); -exports.logger = logger; -function setupLogger(userLogger) { - if (userLogger) { - exports.logger = logger = userLogger; - } - else { - exports.logger = logger = new NullLogger(); - } -} -exports.setupLogger = setupLogger; -//# sourceMappingURL=logger.js.map - -/***/ }), - -/***/ 13984: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -// Copyright 2020 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.PatchSyntaxError = exports.FileData = void 0; -/** - * The content and the mode of a file. - * Default file mode is a text file which has code '100644'. - * If `content` is not null, then `content` must be the entire file content. - * See https://developer.github.com/v3/git/trees/#tree-object for details on mode. - */ -class FileData { - constructor(content, mode = '100644') { - this.mode = mode; - this.content = content; - } -} -exports.FileData = FileData; -class PatchSyntaxError extends Error { - constructor(message) { - super(message); - this.name = 'PatchSyntaxError'; - } -} -exports.PatchSyntaxError = PatchSyntaxError; -//# sourceMappingURL=types.js.map - -/***/ }), - -/***/ 15276: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2020 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
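-// An illustrative sketch (an assumed example, not part of the original bundle): parsePatch() below
-// turns a GNU-style patch body into Hunk ranges, e.g.
-//   parsePatch('@@ -1,2 +1,2 @@\n-old\n+new\n context')
-//   => [{oldStart: 1, oldEnd: 1, newStart: 1, newEnd: 1, newContent: ['new'], nextLine: 'context'}]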
-Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getSuggestedHunks = exports.parseAllHunks = exports.parsePatch = void 0; -const parseDiff = __nccwpck_require__(94833); -const diff_1 = __nccwpck_require__(71672); -// This header is ignored for calculating patch ranges, but is necessary -// for parsing a diff -const _DIFF_HEADER = `diff --git a/file.ext b/file.ext -index cac8fbc..87f387c 100644 ---- a/file.ext -+++ b/file.ext -`; -/** - * Given a patch expressed in GNU diff format, return the range of lines - * from the original content that are changed. - * @param patch Patch expressed in GNU diff format. - * @returns Hunk[] - */ -function parsePatch(patch) { - return parseAllHunks(_DIFF_HEADER + patch).get('file.ext') || []; -} -exports.parsePatch = parsePatch; -/** - * Given a diff expressed in GNU diff format, return the range of lines - * from the original content that are changed. - * @param diff Diff expressed in GNU diff format. - * @returns Map - */ -function parseAllHunks(diff) { - const hunksByFile = new Map(); - parseDiff(diff).forEach(file => { - const filename = file.to ? file.to : file.from; - const chunks = file.chunks.map(chunk => { - let oldStart = chunk.oldStart; - let newStart = chunk.newStart; - let normalLines = 0; - let changeSeen = false; - const newLines = []; - let previousLine = null; - let nextLine = null; - chunk.changes.forEach(change => { - // strip off leading '+', '-', or ' ' and trailing carriage return - const content = change.content.substring(1).replace(/[\n\r]+$/g, ''); - if (change.type === 'normal') { - normalLines++; - if (changeSeen) { - if (nextLine === null) { - nextLine = content; - } - } - else { - previousLine = content; - } - } - else { - if (change.type === 'add') { - // strip off leading '+' and trailing carriage return - newLines.push(content); - } - if (!changeSeen) { - oldStart += normalLines; - newStart += normalLines; - changeSeen = true; - } - } - }); - const newEnd = newStart + chunk.newLines - normalLines - 1; - const oldEnd = oldStart + chunk.oldLines - normalLines - 1; - let hunk = { - oldStart: oldStart, - oldEnd: oldEnd, - newStart: newStart, - newEnd: newEnd, - newContent: newLines, - }; - if (previousLine) { - hunk = { ...hunk, previousLine: previousLine }; - } - if (nextLine) { - hunk = { ...hunk, nextLine: nextLine }; - } - return hunk; - }); - hunksByFile.set(filename, chunks); - }); - return hunksByFile; -} -exports.parseAllHunks = parseAllHunks; -/** - * Given two texts, return the range of lines that are changed. - * @param oldContent The original content. - * @param newContent The new content. - * @returns Hunk[] - */ -function getSuggestedHunks(oldContent, newContent) { - const diff = (0, diff_1.createPatch)('unused', oldContent, newContent); - return parseAllHunks(diff).get('unused') || []; -} -exports.getSuggestedHunks = getSuggestedHunks; -//# sourceMappingURL=diff-utils.js.map - -/***/ }), - -/***/ 26381: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2020 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.partitionSuggestedHunksByScope = exports.getRawSuggestionHunks = exports.adjustHunkDown = exports.adjustHunkUp = void 0; -const diff_utils_1 = __nccwpck_require__(15276); -const logger_1 = __nccwpck_require__(58192); -/** - * Shift a Hunk up one line so it starts one line earlier. - * @param {Hunk} hunk - * @returns {Hunk | null} the adjusted Hunk or null if there is no preceding line. - */ -function adjustHunkUp(hunk) { - if (!hunk.previousLine) { - return null; - } - return { - oldStart: hunk.oldStart - 1, - oldEnd: hunk.oldEnd, - newStart: hunk.newStart - 1, - newEnd: hunk.newEnd, - newContent: [hunk.previousLine, ...hunk.newContent], - }; -} -exports.adjustHunkUp = adjustHunkUp; -/** - * Shift a Hunk down one line so it ends one line later. - * @param {Hunk} hunk - * @returns {Hunk | null} the adjusted Hunk or null if there is no following line. - */ -function adjustHunkDown(hunk) { - if (!hunk.nextLine) { - return null; - } - return { - oldStart: hunk.oldStart, - oldEnd: hunk.oldEnd + 1, - newStart: hunk.newStart, - newEnd: hunk.newEnd + 1, - newContent: hunk.newContent.concat(hunk.nextLine), - }; -} -exports.adjustHunkDown = adjustHunkDown; -/** - * Given a map where the key is the file name and the value is the - * old content and new content of the file, - * compute the hunk for each file whose old and new contents differ. - * Do not compute the hunk if the old content is the same as the new content. - * The hunk list is sorted and each interval is disjoint. - * @param {Map} diffContents a map of the original file contents and the new file contents - * @returns the hunks for each file whose old and new contents differ - */ -function getRawSuggestionHunks(diffContents) { - const fileHunks = new Map(); - diffContents.forEach((fileDiffContent, fileName) => { - // if identical don't calculate the hunk and continue in the loop - if (fileDiffContent.oldContent === fileDiffContent.newContent) { - return; - } - const hunks = (0, diff_utils_1.getSuggestedHunks)(fileDiffContent.oldContent, fileDiffContent.newContent); - fileHunks.set(fileName, hunks); - }); - logger_1.logger.info('Parsed ranges of old and new patch'); - return fileHunks; -} -exports.getRawSuggestionHunks = getRawSuggestionHunks; -function hunkOverlaps(validHunk, suggestedHunk) { - return (suggestedHunk.oldStart >= validHunk.newStart && - suggestedHunk.oldEnd <= validHunk.newEnd); -} -function partitionFileHunks(pullRequestHunks, suggestedHunks) { - // check ranges: the entirety of the old range of the suggested - // hunk must fit inside the new range of the valid Hunks - let i = 0; - let candidateHunk = pullRequestHunks[i]; - const validFileHunks = []; - const invalidFileHunks = []; - suggestedHunks.forEach(suggestedHunk => { - while (candidateHunk && suggestedHunk.oldStart > candidateHunk.newEnd) { - i++; - candidateHunk = pullRequestHunks[i]; - } - if (!candidateHunk) { - invalidFileHunks.push(suggestedHunk); - return; - } - // if deletion only or addition only - if (suggestedHunk.newEnd < suggestedHunk.newStart || - suggestedHunk.oldEnd < suggestedHunk.oldStart) { - // try using previous line - let adjustedHunk = adjustHunkUp(suggestedHunk); - if (adjustedHunk && hunkOverlaps(candidateHunk, adjustedHunk)) { - validFileHunks.push(adjustedHunk); - return; - } - // try using next line - adjustedHunk = adjustHunkDown(suggestedHunk); - if
(adjustedHunk && hunkOverlaps(candidateHunk, adjustedHunk)) { - validFileHunks.push(adjustedHunk); - return; - } - } - else if (hunkOverlaps(candidateHunk, suggestedHunk)) { - validFileHunks.push(suggestedHunk); - return; - } - invalidFileHunks.push(suggestedHunk); - }); - return { validFileHunks, invalidFileHunks }; -} -/** - * Split suggested hunks into commentable and non-commentable hunks. Compares the new line ranges - * from pullRequestHunks against the old line ranges from allSuggestedHunks. - * @param pullRequestHunks {Map} The parsed hunks that represent the valid lines to comment on. - * @param allSuggestedHunks {Map} The hunks that represent suggested changes. - * @returns {PartitionedHunks} split hunks - */ -function partitionSuggestedHunksByScope(pullRequestHunks, allSuggestedHunks) { - const validHunks = new Map(); - const invalidHunks = new Map(); - allSuggestedHunks.forEach((suggestedHunks, filename) => { - const pullRequestFileHunks = pullRequestHunks.get(filename); - if (!pullRequestFileHunks) { - // file is not in the original PR - invalidHunks.set(filename, suggestedHunks); - return; - } - const { validFileHunks, invalidFileHunks } = partitionFileHunks(pullRequestFileHunks, suggestedHunks); - if (validFileHunks.length > 0) { - validHunks.set(filename, validFileHunks); - } - if (invalidFileHunks.length > 0) { - invalidHunks.set(filename, invalidFileHunks); - } - }); - return { validHunks, invalidHunks }; -} -exports.partitionSuggestedHunksByScope = partitionSuggestedHunksByScope; -//# sourceMappingURL=hunk-utils.js.map +module.exports = CleanGitRef; /***/ }), @@ -21551,6 +35539,120 @@ function conventionalCommitsFilter (commits) { module.exports = conventionalCommitsFilter +/***/ }), + +/***/ 83201: +/***/ ((__unused_webpack_module, exports) => { + +/*! crc32.js (C) 2014-present SheetJS -- http://sheetjs.com */ +/* vim: set ts=2: */ +/*exported CRC32 */ +var CRC32; +(function (factory) { + /*jshint ignore:start */ + /*eslint-disable */ + if(typeof DO_NOT_EXPORT_CRC === 'undefined') { + if(true) { + factory(exports); + } else {} + } else { + factory(CRC32 = {}); + } + /*eslint-enable */ + /*jshint ignore:end */ +}(function(CRC32) { +CRC32.version = '1.2.2'; +/*global Int32Array */ +function signed_crc_table() { + var c = 0, table = new Array(256); + + for(var n =0; n != 256; ++n){ + c = n; + c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1)); + c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1)); + c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1)); + c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1)); + c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1)); + c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1)); + c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1)); + c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1)); + table[n] = c; + } + + return typeof Int32Array !== 'undefined' ? new Int32Array(table) : table; +} + +var T0 = signed_crc_table(); +function slice_by_16_tables(T) { + var c = 0, v = 0, n = 0, table = typeof Int32Array !== 'undefined' ? new Int32Array(4096) : new Array(4096) ; + + for(n = 0; n != 256; ++n) table[n] = T[n]; + for(n = 0; n != 256; ++n) { + v = T[n]; + for(c = 256 + n; c < 4096; c += 256) v = table[c] = (v >>> 8) ^ T[v & 0xFF]; + } + var out = []; + for(n = 1; n != 16; ++n) out[n - 1] = typeof Int32Array !== 'undefined' ?
table.subarray(n * 256, n * 256 + 256) : table.slice(n * 256, n * 256 + 256); + return out; +} +var TT = slice_by_16_tables(T0); +var T1 = TT[0], T2 = TT[1], T3 = TT[2], T4 = TT[3], T5 = TT[4]; +var T6 = TT[5], T7 = TT[6], T8 = TT[7], T9 = TT[8], Ta = TT[9]; +var Tb = TT[10], Tc = TT[11], Td = TT[12], Te = TT[13], Tf = TT[14]; +function crc32_bstr(bstr, seed) { + var C = seed ^ -1; + for(var i = 0, L = bstr.length; i < L;) C = (C>>>8) ^ T0[(C^bstr.charCodeAt(i++))&0xFF]; + return ~C; +} + +function crc32_buf(B, seed) { + var C = seed ^ -1, L = B.length - 15, i = 0; + for(; i < L;) C = + Tf[B[i++] ^ (C & 255)] ^ + Te[B[i++] ^ ((C >> 8) & 255)] ^ + Td[B[i++] ^ ((C >> 16) & 255)] ^ + Tc[B[i++] ^ (C >>> 24)] ^ + Tb[B[i++]] ^ Ta[B[i++]] ^ T9[B[i++]] ^ T8[B[i++]] ^ + T7[B[i++]] ^ T6[B[i++]] ^ T5[B[i++]] ^ T4[B[i++]] ^ + T3[B[i++]] ^ T2[B[i++]] ^ T1[B[i++]] ^ T0[B[i++]]; + L += 15; + while(i < L) C = (C>>>8) ^ T0[(C^B[i++])&0xFF]; + return ~C; +} + +function crc32_str(str, seed) { + var C = seed ^ -1; + for(var i = 0, L = str.length, c = 0, d = 0; i < L;) { + c = str.charCodeAt(i++); + if(c < 0x80) { + C = (C>>>8) ^ T0[(C^c)&0xFF]; + } else if(c < 0x800) { + C = (C>>>8) ^ T0[(C ^ (192|((c>>6)&31)))&0xFF]; + C = (C>>>8) ^ T0[(C ^ (128|(c&63)))&0xFF]; + } else if(c >= 0xD800 && c < 0xE000) { + c = (c&1023)+64; d = str.charCodeAt(i++)&1023; + C = (C>>>8) ^ T0[(C ^ (240|((c>>8)&7)))&0xFF]; + C = (C>>>8) ^ T0[(C ^ (128|((c>>2)&63)))&0xFF]; + C = (C>>>8) ^ T0[(C ^ (128|((d>>6)&15)|((c&3)<<4)))&0xFF]; + C = (C>>>8) ^ T0[(C ^ (128|(d&63)))&0xFF]; + } else { + C = (C>>>8) ^ T0[(C ^ (224|((c>>12)&15)))&0xFF]; + C = (C>>>8) ^ T0[(C ^ (128|((c>>6)&63)))&0xFF]; + C = (C>>>8) ^ T0[(C ^ (128|(c&63)))&0xFF]; + } + } + return ~C; +} +CRC32.table = T0; +// $FlowIgnore +CRC32.bstr = crc32_bstr; +// $FlowIgnore +CRC32.buf = crc32_buf; +// $FlowIgnore +CRC32.str = crc32_str; +})); + + /***/ }), /***/ 36863: @@ -27430,6 +41532,469 @@ function generateOptions(options, defaults) { //# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJzb3VyY2VzIjpbIi4uLy4uL3NyYy91dGlsL3BhcmFtcy5qcyJdLCJuYW1lcyI6WyJnZW5lcmF0ZU9wdGlvbnMiLCJvcHRpb25zIiwiZGVmYXVsdHMiLCJjYWxsYmFjayIsIm5hbWUiLCJoYXNPd25Qcm9wZXJ0eSJdLCJtYXBwaW5ncyI6Ijs7Ozs7Ozs7O0FBQU8sU0FBU0EsZUFBVCxDQUF5QkMsT0FBekIsRUFBa0NDLFFBQWxDLEVBQTRDO0FBQ2pELE1BQUksT0FBT0QsT0FBUCxLQUFtQixVQUF2QixFQUFtQztBQUNqQ0MsSUFBQUEsUUFBUSxDQUFDQyxRQUFULEdBQW9CRixPQUFwQjtBQUNELEdBRkQsTUFFTyxJQUFJQSxPQUFKLEVBQWE7QUFDbEIsU0FBSyxJQUFJRyxJQUFULElBQWlCSCxPQUFqQixFQUEwQjtBQUN4QjtBQUNBLFVBQUlBLE9BQU8sQ0FBQ0ksY0FBUixDQUF1QkQsSUFBdkIsQ0FBSixFQUFrQztBQUNoQ0YsUUFBQUEsUUFBUSxDQUFDRSxJQUFELENBQVIsR0FBaUJILE9BQU8sQ0FBQ0csSUFBRCxDQUF4QjtBQUNEO0FBQ0Y7QUFDRjs7QUFDRCxTQUFPRixRQUFQO0FBQ0QiLCJzb3VyY2VzQ29udGVudCI6WyJleHBvcnQgZnVuY3Rpb24gZ2VuZXJhdGVPcHRpb25zKG9wdGlvbnMsIGRlZmF1bHRzKSB7XG4gIGlmICh0eXBlb2Ygb3B0aW9ucyA9PT0gJ2Z1bmN0aW9uJykge1xuICAgIGRlZmF1bHRzLmNhbGxiYWNrID0gb3B0aW9ucztcbiAgfSBlbHNlIGlmIChvcHRpb25zKSB7XG4gICAgZm9yIChsZXQgbmFtZSBpbiBvcHRpb25zKSB7XG4gICAgICAvKiBpc3RhbmJ1bCBpZ25vcmUgZWxzZSAqL1xuICAgICAgaWYgKG9wdGlvbnMuaGFzT3duUHJvcGVydHkobmFtZSkpIHtcbiAgICAgICAgZGVmYXVsdHNbbmFtZV0gPSBvcHRpb25zW25hbWVdO1xuICAgICAgfVxuICAgIH1cbiAgfVxuICByZXR1cm4gZGVmYXVsdHM7XG59XG4iXX0= +/***/ }), + +/***/ 25211: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +// Copyright (c) 2006, 2008 Tony Garnock-Jones +// Copyright (c) 2006, 2008 LShift Ltd. 
+// +// Permission is hereby granted, free of charge, to any person +// obtaining a copy of this software and associated documentation files +// (the "Software"), to deal in the Software without restriction, +// including without limitation the rights to use, copy, modify, merge, +// publish, distribute, sublicense, and/or sell copies of the Software, +// and to permit persons to whom the Software is furnished to do so, +// subject to the following conditions: +// +// The above copyright notice and this permission notice shall be +// included in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +// BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +// ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +// CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +var onp = __nccwpck_require__(18101); + +function longestCommonSubsequence(file1, file2) { + var diff = new onp(file1, file2); + diff.compose(); + var ses = diff.getses(); + + var root; + var prev; + var file1RevIdx = file1.length - 1, + file2RevIdx = file2.length - 1; + for (var i = ses.length - 1; i >= 0; --i) { + if (ses[i].t === diff.SES_COMMON) { + if (prev) { + prev.chain = { + file1index: file1RevIdx, + file2index: file2RevIdx, + chain: null + }; + prev = prev.chain; + } else { + root = { + file1index: file1RevIdx, + file2index: file2RevIdx, + chain: null + }; + prev = root; + } + file1RevIdx--; + file2RevIdx--; + } else if (ses[i].t === diff.SES_DELETE) { + file1RevIdx--; + } else if (ses[i].t === diff.SES_ADD) { + file2RevIdx--; + } + } + + var tail = { + file1index: -1, + file2index: -1, + chain: null + }; + + if (!prev) { + return tail; + } + + prev.chain = tail; + + return root; +} + +function diffIndices(file1, file2) { + // We apply the LCS to give a simple representation of the + // offsets and lengths of mismatched chunks in the input + // files. This is used by diff3_merge_indices below. + + var result = []; + var tail1 = file1.length; + var tail2 = file2.length; + + for (var candidate = longestCommonSubsequence(file1, file2); candidate !== null; candidate = candidate.chain) { + var mismatchLength1 = tail1 - candidate.file1index - 1; + var mismatchLength2 = tail2 - candidate.file2index - 1; + tail1 = candidate.file1index; + tail2 = candidate.file2index; + + if (mismatchLength1 || mismatchLength2) { + result.push({ + file1: [tail1 + 1, mismatchLength1], + file2: [tail2 + 1, mismatchLength2] + }); + } + } + + result.reverse(); + return result; +} + +function diff3MergeIndices(a, o, b) { + // Given three files, A, O, and B, where both A and B are + // independently derived from O, returns a fairly complicated + // internal representation of merge decisions it's taken. The + // interested reader may wish to consult + // + // Sanjeev Khanna, Keshav Kunal, and Benjamin C. Pierce. "A + // Formal Investigation of Diff3." In Arvind and Prasad, + // editors, Foundations of Software Technology and Theoretical + // Computer Science (FSTTCS), December 2007. 
+ // + // (http://www.cis.upenn.edu/~bcpierce/papers/diff3-short.pdf) + var i; + + var m1 = diffIndices(o, a); + var m2 = diffIndices(o, b); + + var hunks = []; + + function addHunk(h, side) { + hunks.push([h.file1[0], side, h.file1[1], h.file2[0], h.file2[1]]); + } + for (i = 0; i < m1.length; i++) { + addHunk(m1[i], 0); + } + for (i = 0; i < m2.length; i++) { + addHunk(m2[i], 2); + } + hunks.sort(function(x, y) { + return x[0] - y[0] + }); + + var result = []; + var commonOffset = 0; + + function copyCommon(targetOffset) { + if (targetOffset > commonOffset) { + result.push([1, commonOffset, targetOffset - commonOffset]); + commonOffset = targetOffset; + } + } + + for (var hunkIndex = 0; hunkIndex < hunks.length; hunkIndex++) { + var firstHunkIndex = hunkIndex; + var hunk = hunks[hunkIndex]; + var regionLhs = hunk[0]; + var regionRhs = regionLhs + hunk[2]; + while (hunkIndex < hunks.length - 1) { + var maybeOverlapping = hunks[hunkIndex + 1]; + var maybeLhs = maybeOverlapping[0]; + if (maybeLhs > regionRhs) break; + regionRhs = Math.max(regionRhs, maybeLhs + maybeOverlapping[2]); + hunkIndex++; + } + + copyCommon(regionLhs); + if (firstHunkIndex == hunkIndex) { + // The "overlap" was only one hunk long, meaning that + // there's no conflict here. Either a and o were the + // same, or b and o were the same. + if (hunk[4] > 0) { + result.push([hunk[1], hunk[3], hunk[4]]); + } + } else { + // A proper conflict. Determine the extents of the + // regions involved from a, o and b. Effectively merge + // all the hunks on the left into one giant hunk, and + // do the same for the right; then, correct for skew + // in the regions of o that each side changed, and + // report appropriate spans for the three sides. + var regions = { + 0: [a.length, -1, o.length, -1], + 2: [b.length, -1, o.length, -1] + }; + for (i = firstHunkIndex; i <= hunkIndex; i++) { + hunk = hunks[i]; + var side = hunk[1]; + var r = regions[side]; + var oLhs = hunk[0]; + var oRhs = oLhs + hunk[2]; + var abLhs = hunk[3]; + var abRhs = abLhs + hunk[4]; + r[0] = Math.min(abLhs, r[0]); + r[1] = Math.max(abRhs, r[1]); + r[2] = Math.min(oLhs, r[2]); + r[3] = Math.max(oRhs, r[3]); + } + var aLhs = regions[0][0] + (regionLhs - regions[0][2]); + var aRhs = regions[0][1] + (regionRhs - regions[0][3]); + var bLhs = regions[2][0] + (regionLhs - regions[2][2]); + var bRhs = regions[2][1] + (regionRhs - regions[2][3]); + result.push([-1, + aLhs, aRhs - aLhs, + regionLhs, regionRhs - regionLhs, + bLhs, bRhs - bLhs + ]); + } + commonOffset = regionRhs; + } + + copyCommon(o.length); + return result; +} + +function diff3Merge(a, o, b) { + // Applies the output of Diff.diff3_merge_indices to actually + // construct the merged file; the returned result alternates + // between "ok" and "conflict" blocks. 
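+ // An illustrative call (assumed inputs, not from this patch):
+ //   diff3Merge(['a','b'], ['a'], ['a','c'])
+ //   => [{ok: ['a']}, {conflict: {a: ['b'], aIndex: 1, o: [], oIndex: 1, b: ['c'], bIndex: 1}}]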
+ + var result = []; + var files = [a, o, b]; + var indices = diff3MergeIndices(a, o, b); + + var okLines = []; + + function flushOk() { + if (okLines.length) { + result.push({ + ok: okLines + }); + } + okLines = []; + } + + function pushOk(xs) { + for (var j = 0; j < xs.length; j++) { + okLines.push(xs[j]); + } + } + + function isTrueConflict(rec) { + if (rec[2] != rec[6]) return true; + var aoff = rec[1]; + var boff = rec[5]; + for (var j = 0; j < rec[2]; j++) { + if (a[j + aoff] != b[j + boff]) return true; + } + return false; + } + + for (var i = 0; i < indices.length; i++) { + var x = indices[i]; + var side = x[0]; + if (side == -1) { + if (!isTrueConflict(x)) { + pushOk(files[0].slice(x[1], x[1] + x[2])); + } else { + flushOk(); + result.push({ + conflict: { + a: a.slice(x[1], x[1] + x[2]), + aIndex: x[1], + o: o.slice(x[3], x[3] + x[4]), + oIndex: x[3], + b: b.slice(x[5], x[5] + x[6]), + bIndex: x[5] + } + }); + } + } else { + pushOk(files[side].slice(x[1], x[1] + x[2])); + } + } + + flushOk(); + return result; +} + +module.exports = diff3Merge; + + +/***/ }), + +/***/ 18101: +/***/ ((module) => { + +/* + * URL: https://github.com/cubicdaiya/onp + * + * Copyright (c) 2013 Tatsuhiko Kubo + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + * THE SOFTWARE. 
+ */ + +/** + * The algorithm implemented here is based on "An O(NP) Sequence Comparison Algorithm" + * described by Sun Wu, Udi Manber and Gene Myers +*/ +module.exports = function (a_, b_) { + var a = a_, + b = b_, + m = a.length, + n = b.length, + reverse = false, + ed = null, + offset = m + 1, + path = [], + pathposi = [], + ses = [], + lcs = "", + SES_DELETE = -1, + SES_COMMON = 0, + SES_ADD = 1; + + var tmp1, + tmp2; + + var init = function () { + if (m >= n) { + tmp1 = a; + tmp2 = m; + a = b; + b = tmp1; + m = n; + n = tmp2; + reverse = true; + offset = m + 1; + } + }; + + var P = function (x, y, k) { + return { + 'x' : x, + 'y' : y, + 'k' : k, + }; + }; + + var seselem = function (elem, t) { + return { + 'elem' : elem, + 't' : t, + }; + }; + + var snake = function (k, p, pp) { + var r, x, y; + if (p > pp) { + r = path[k-1+offset]; + } else { + r = path[k+1+offset]; + } + + y = Math.max(p, pp); + x = y - k; + while (x < m && y < n && a[x] === b[y]) { + ++x; + ++y; + } + + path[k+offset] = pathposi.length; + pathposi[pathposi.length] = new P(x, y, r); + return y; + }; + + var recordseq = function (epc) { + var x_idx, y_idx, px_idx, py_idx, i; + x_idx = y_idx = 1; + px_idx = py_idx = 0; + for (i=epc.length-1;i>=0;--i) { + while(px_idx < epc[i].x || py_idx < epc[i].y) { + if (epc[i].y - epc[i].x > py_idx - px_idx) { + if (reverse) { + ses[ses.length] = new seselem(b[py_idx], SES_DELETE); + } else { + ses[ses.length] = new seselem(b[py_idx], SES_ADD); + } + ++y_idx; + ++py_idx; + } else if (epc[i].y - epc[i].x < py_idx - px_idx) { + if (reverse) { + ses[ses.length] = new seselem(a[px_idx], SES_ADD); + } else { + ses[ses.length] = new seselem(a[px_idx], SES_DELETE); + } + ++x_idx; + ++px_idx; + } else { + ses[ses.length] = new seselem(a[px_idx], SES_COMMON); + lcs += a[px_idx]; + ++x_idx; + ++y_idx; + ++px_idx; + ++py_idx; + } + } + } + }; + + init(); + + return { + SES_DELETE : -1, + SES_COMMON : 0, + SES_ADD : 1, + editdistance : function () { + return ed; + }, + getlcs : function () { + return lcs; + }, + getses : function () { + return ses; + }, + compose : function () { + var delta, size, fp, p, r, epc, i, k; + delta = n - m; + size = m + n + 3; + fp = {}; + for (i=0;i<size;++i) { + fp[i] = -1; + path[i] = -1; + } + p = -1; + do { + ++p; + for (k=-p;k<=delta-1;++k) { + fp[k+offset] = snake(k, fp[k-1+offset]+1, fp[k+1+offset]); + } + for (k=delta+p;k>=delta+1;--k) { + fp[k+offset] = snake(k, fp[k-1+offset]+1, fp[k+1+offset]); + } + fp[delta+offset] = snake(delta, fp[delta-1+offset]+1, fp[delta+1+offset]); + } while (fp[delta+offset] !== n); + + ed = delta + 2 * p; + + r = path[delta+offset]; + + epc = []; + while (r !== -1) { + epc[epc.length] = new P(pathposi[r].x, pathposi[r].y, null); + r = pathposi[r].k; + } + recordseq(epc); + } + }; +}; + + /***/ }), /***/ 14802: @@ -39996,6 +54561,681 @@ function parseProxyResponse(socket) { exports.parseProxyResponse = parseProxyResponse; //# sourceMappingURL=parse-proxy-response.js.map +/***/ }), + +/***/ 91230: +/***/ ((module) => { + +// A simple implementation of make-array +function makeArray (subject) { + return Array.isArray(subject) + ? subject + : [subject] +} + +const EMPTY = '' +const SPACE = ' ' +const ESCAPE = '\\' +const REGEX_TEST_BLANK_LINE = /^\s+$/ +const REGEX_INVALID_TRAILING_BACKSLASH = /(?:[^\\]|^)\\$/ +const REGEX_REPLACE_LEADING_EXCAPED_EXCLAMATION = /^\\!/ +const REGEX_REPLACE_LEADING_EXCAPED_HASH = /^\\#/ +const REGEX_SPLITALL_CRLF = /\r?\n/g +// /foo, +// ./foo, +// ../foo, +// . +// ..
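+// (Sketch of intent, assuming the public ignore() API: paths tested below must
+// already be relative, so ignore().add('*.log').ignores('src/a.log') works,
+// while .ignores('/src/a.log') throws a RangeError.)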
+const REGEX_TEST_INVALID_PATH = /^\.*\/|^\.+$/ + +const SLASH = '/' + +// Do not use ternary expression here, since "istanbul ignore next" is buggy +let TMP_KEY_IGNORE = 'node-ignore' +/* istanbul ignore else */ +if (typeof Symbol !== 'undefined') { + TMP_KEY_IGNORE = Symbol.for('node-ignore') +} +const KEY_IGNORE = TMP_KEY_IGNORE + +const define = (object, key, value) => + Object.defineProperty(object, key, {value}) + +const REGEX_REGEXP_RANGE = /([0-z])-([0-z])/g + +const RETURN_FALSE = () => false + +// Sanitize the range of a regular expression +// The cases are complicated, see test cases for details +const sanitizeRange = range => range.replace( + REGEX_REGEXP_RANGE, + (match, from, to) => from.charCodeAt(0) <= to.charCodeAt(0) + ? match + // Invalid range (out of order) which is ok for gitignore rules but + // fatal for JavaScript regular expression, so eliminate it. + : EMPTY +) + +// See fixtures #59 +const cleanRangeBackSlash = slashes => { + const {length} = slashes + return slashes.slice(0, length - length % 2) +} + +// > If the pattern ends with a slash, +// > it is removed for the purpose of the following description, +// > but it would only find a match with a directory. +// > In other words, foo/ will match a directory foo and paths underneath it, +// > but will not match a regular file or a symbolic link foo +// > (this is consistent with the way how pathspec works in general in Git). +// '`foo/`' will not match regular file '`foo`' or symbolic link '`foo`' +// -> ignore-rules will not deal with it, because it costs an extra `fs.stat` call +// you could use option `mark: true` with `glob` + +// '`foo/`' should not continue with the '`..`' +const REPLACERS = [ + + // > Trailing spaces are ignored unless they are quoted with backslash ("\") + [ + // (a\ ) -> (a ) + // (a ) -> (a) + // (a \ ) -> (a ) + /\\?\s+$/, + match => match.indexOf('\\') === 0 + ? SPACE + : EMPTY + ], + + // replace (\ ) with ' ' + [ + /\\\s/g, + () => SPACE + ], + + // Escape metacharacters, + // which users write literally but which have special meaning in regular expressions. + + // > There are 12 characters with special meanings: + // > - the backslash \, + // > - the caret ^, + // > - the dollar sign $, + // > - the period or dot ., + // > - the vertical bar or pipe symbol |, + // > - the question mark ?, + // > - the asterisk or star *, + // > - the plus sign +, + // > - the opening parenthesis (, + // > - the closing parenthesis ), + // > - and the opening square bracket [, + // > - the opening curly brace {, + // > These special characters are often called "metacharacters". + [ + /[\\$.|*+(){^]/g, + match => `\\${match}` + ], + + [ + // > a question mark (?) matches a single character + /(?!\\)\?/g, + () => '[^/]' + ], + + // leading slash + [ + + // > A leading slash matches the beginning of the pathname. + // > For example, "/*.c" matches "cat-file.c" but not "mozilla-sha1/sha1.c". + // A leading slash matches the beginning of the pathname + /^\//, + () => '^' + ], + + // replace special metacharacter slash after the leading slash + [ + /\//g, + () => '\\/' + ], + + [ + // > A leading "**" followed by a slash means match in all directories. + // > For example, "**/foo" matches file or directory "foo" anywhere, + // > the same as pattern "foo". + // > "**/foo/bar" matches file or directory "bar" anywhere that is directly + // > under directory "foo". + // Notice that the '*'s have been replaced as '\\*' + /^\^*\\\*\\\*\\\//, + + // '**/foo' <-> 'foo' + () => '^(?:.*\\/)?'
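+      // Illustrative trace (not from this patch): with this replacer plus the
+      // later "ending" rule, '**/foo' compiles to /^(?:.*\/)?foo(?=$|\/$)/,
+      // so 'foo' can match at any directory depth.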
+ ], + + // starting + [ + // there will be no leading '/' + // (which has been replaced by section "leading slash") + // If starts with '**', adding a '^' to the regular expression also works + /^(?=[^^])/, + function startingReplacer () { + // If it has a slash `/` at the beginning or middle + return !/\/(?!$)/.test(this) + // > Prior to 2.22.1 + // > If the pattern does not contain a slash /, + // > Git treats it as a shell glob pattern + // Actually, if there is only a trailing slash, + // git also treats it as a shell glob pattern + + // After 2.22.1 (compatible but clearer) + // > If there is a separator at the beginning or middle (or both) + // > of the pattern, then the pattern is relative to the directory + // > level of the particular .gitignore file itself. + // > Otherwise the pattern may also match at any level below + // > the .gitignore level. + ? '(?:^|\\/)' + + // > Otherwise, Git treats the pattern as a shell glob suitable for + // > consumption by fnmatch(3) + : '^' + } + ], + + // two globstars + [ + // Use lookahead assertions so that we could match more than one `'/**'` + /\\\/\\\*\\\*(?=\\\/|$)/g, + + // Zero, one or several directories + // should not use '*', or it will be replaced by the next replacer + + // Check if it is not the last `'/**'` + (_, index, str) => index + 6 < str.length + + // case: /**/ + // > A slash followed by two consecutive asterisks then a slash matches + // > zero or more directories. + // > For example, "a/**/b" matches "a/b", "a/x/b", "a/x/y/b" and so on. + // '/**/' + ? '(?:\\/[^\\/]+)*' + + // case: /** + // > A trailing `"/**"` matches everything inside. + + // #21: everything inside but it should not include the current folder + : '\\/.+' + ], + + // normal intermediate wildcards + [ + // Never replace escaped '*' + // ignore rule '\*' will match the path '*' + + // 'abc.*/' -> go + // 'abc.*' -> skip this rule, + // because the trailing single wildcard will be handled by [trailing wildcard] + /(^|[^\\]+)(\\\*)+(?=.+)/g, + + // '*.js' matches '.js' + // '*.js' doesn't match 'abc' + (_, p1, p2) => { + // 1. + // > An asterisk "*" matches anything except a slash. + // 2. + // > Other consecutive asterisks are considered regular asterisks + // > and will match according to the previous rules. + const unescaped = p2.replace(/\\\*/g, '[^\\/]*') + return p1 + unescaped + } + ], + + [ + // unescape, revert step 3 except for back slash + // For example, if a user escapes a '\\*', + // after step 3, the result will be '\\\\\\*' + /\\\\\\(?=[$.|*+(){^])/g, + () => ESCAPE + ], + + [ + // '\\\\' -> '\\' + /\\\\/g, + () => ESCAPE + ], + + [ + // > The range notation, e.g. [a-zA-Z], + // > can be used to match one of the characters in a range. + + // `\` is escaped by step 3 + /(\\)?\[([^\]/]*?)(\\*)($|\])/g, + (match, leadEscape, range, endEscape, close) => leadEscape === ESCAPE + // '\\[bar]' -> '\\\\[bar\\]' + ? `\\[${range}${cleanRangeBackSlash(endEscape)}${close}` + : close === ']' + ? endEscape.length % 2 === 0 + // A normal case, and it is a range notation + // '[bar]' + // '[bar\\\\]' + ? `[${sanitizeRange(range)}${endEscape}]` + // Invalid range notation + // '[bar\\]' -> '[bar\\\\]' + : '[]' + : '[]' + ], + + // ending + [ + // 'js' will not match 'js.' + // 'ab' will not match 'abc' + /(?:[^*])$/, + + // WTF!
+ // https://git-scm.com/docs/gitignore + // changes in [2.22.1](https://git-scm.com/docs/gitignore/2.22.1) + // which re-fixes #24, #38 + + // > If there is a separator at the end of the pattern then the pattern + // > will only match directories, otherwise the pattern can match both + // > files and directories. + + // 'js*' will not match 'a.js' + // 'js/' will not match 'a.js' + // 'js' will match 'a.js' and 'a.js/' + match => /\/$/.test(match) + // foo/ will not match 'foo' + ? `${match}$` + // foo matches 'foo' and 'foo/' + : `${match}(?=$|\\/$)` + ], + + // trailing wildcard + [ + /(\^|\\\/)?\\\*$/, + (_, p1) => { + const prefix = p1 + // '\^': + // '/*' does not match EMPTY + // '/*' does not match everything + + // '\\\/': + // 'abc/*' does not match 'abc/' + ? `${p1}[^/]+` + + // 'a*' matches 'a' + // 'a*' matches 'aa' + : '[^/]*' + + return `${prefix}(?=$|\\/$)` + } + ], +] + +// A simple cache, because an ignore rule only has only one certain meaning +const regexCache = Object.create(null) + +// @param {pattern} +const makeRegex = (pattern, ignoreCase) => { + let source = regexCache[pattern] + + if (!source) { + source = REPLACERS.reduce( + (prev, current) => prev.replace(current[0], current[1].bind(pattern)), + pattern + ) + regexCache[pattern] = source + } + + return ignoreCase + ? new RegExp(source, 'i') + : new RegExp(source) +} + +const isString = subject => typeof subject === 'string' + +// > A blank line matches no files, so it can serve as a separator for readability. +const checkPattern = pattern => pattern + && isString(pattern) + && !REGEX_TEST_BLANK_LINE.test(pattern) + && !REGEX_INVALID_TRAILING_BACKSLASH.test(pattern) + + // > A line starting with # serves as a comment. + && pattern.indexOf('#') !== 0 + +const splitPattern = pattern => pattern.split(REGEX_SPLITALL_CRLF) + +class IgnoreRule { + constructor ( + origin, + pattern, + negative, + regex + ) { + this.origin = origin + this.pattern = pattern + this.negative = negative + this.regex = regex + } +} + +const createRule = (pattern, ignoreCase) => { + const origin = pattern + let negative = false + + // > An optional prefix "!" which negates the pattern; + if (pattern.indexOf('!') === 0) { + negative = true + pattern = pattern.substr(1) + } + + pattern = pattern + // > Put a backslash ("\") in front of the first "!" for patterns that + // > begin with a literal "!", for example, `"\!important!.txt"`. + .replace(REGEX_REPLACE_LEADING_EXCAPED_EXCLAMATION, '!') + // > Put a backslash ("\") in front of the first hash for patterns that + // > begin with a hash. 
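+ // (Editor's example, for illustration): '\#comment' passes checkPattern
+ // above because it does not start with '#', and the replace below then
+ // yields a rule matching the literal file name '#comment'.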
+ .replace(REGEX_REPLACE_LEADING_EXCAPED_HASH, '#') + + const regex = makeRegex(pattern, ignoreCase) + + return new IgnoreRule( + origin, + pattern, + negative, + regex + ) +} + +const throwError = (message, Ctor) => { + throw new Ctor(message) +} + +const checkPath = (path, originalPath, doThrow) => { + if (!isString(path)) { + return doThrow( + `path must be a string, but got \`${originalPath}\``, + TypeError + ) + } + + // We don't know if we should ignore EMPTY, so throw + if (!path) { + return doThrow(`path must not be empty`, TypeError) + } + + // Check if it is a relative path + if (checkPath.isNotRelative(path)) { + const r = '`path.relative()`d' + return doThrow( + `path should be a ${r} string, but got "${originalPath}"`, + RangeError + ) + } + + return true +} + +const isNotRelative = path => REGEX_TEST_INVALID_PATH.test(path) + +checkPath.isNotRelative = isNotRelative +checkPath.convert = p => p + +class Ignore { + constructor ({ + ignorecase = true, + ignoreCase = ignorecase, + allowRelativePaths = false + } = {}) { + define(this, KEY_IGNORE, true) + + this._rules = [] + this._ignoreCase = ignoreCase + this._allowRelativePaths = allowRelativePaths + this._initCache() + } + + _initCache () { + this._ignoreCache = Object.create(null) + this._testCache = Object.create(null) + } + + _addPattern (pattern) { + // #32 + if (pattern && pattern[KEY_IGNORE]) { + this._rules = this._rules.concat(pattern._rules) + this._added = true + return + } + + if (checkPattern(pattern)) { + const rule = createRule(pattern, this._ignoreCase) + this._added = true + this._rules.push(rule) + } + } + + // @param {Array | string | Ignore} pattern + add (pattern) { + this._added = false + + makeArray( + isString(pattern) + ? splitPattern(pattern) + : pattern + ).forEach(this._addPattern, this) + + // Some rules have just added to the ignore, + // making the behavior changed. + if (this._added) { + this._initCache() + } + + return this + } + + // legacy + addPattern (pattern) { + return this.add(pattern) + } + + // | ignored : unignored + // negative | 0:0 | 0:1 | 1:0 | 1:1 + // -------- | ------- | ------- | ------- | -------- + // 0 | TEST | TEST | SKIP | X + // 1 | TESTIF | SKIP | TEST | X + + // - SKIP: always skip + // - TEST: always test + // - TESTIF: only test if checkUnignored + // - X: that never happen + + // @param {boolean} whether should check if the path is unignored, + // setting `checkUnignored` to `false` could reduce additional + // path matching. + + // @returns {TestResult} true if a file is ignored + _testOne (path, checkUnignored) { + let ignored = false + let unignored = false + + this._rules.forEach(rule => { + const {negative} = rule + if ( + unignored === negative && ignored !== unignored + || negative && !ignored && !unignored && !checkUnignored + ) { + return + } + + const matched = rule.regex.test(path) + + if (matched) { + ignored = !negative + unignored = negative + } + }) + + return { + ignored, + unignored + } + } + + // @returns {TestResult} + _test (originalPath, cache, checkUnignored, slices) { + const path = originalPath + // Supports nullable path + && checkPath.convert(originalPath) + + checkPath( + path, + originalPath, + this._allowRelativePaths + ? 
RETURN_FALSE + : throwError + ) + + return this._t(path, cache, checkUnignored, slices) + } + + _t (path, cache, checkUnignored, slices) { + if (path in cache) { + return cache[path] + } + + if (!slices) { + // path/to/a.js + // ['path', 'to', 'a.js'] + slices = path.split(SLASH) + } + + slices.pop() + + // If the path has no parent directory, just test it + if (!slices.length) { + return cache[path] = this._testOne(path, checkUnignored) + } + + const parent = this._t( + slices.join(SLASH) + SLASH, + cache, + checkUnignored, + slices + ) + + // If the path contains a parent directory, check the parent first + return cache[path] = parent.ignored + // > It is not possible to re-include a file if a parent directory of + // > that file is excluded. + ? parent + : this._testOne(path, checkUnignored) + } + + ignores (path) { + return this._test(path, this._ignoreCache, false).ignored + } + + createFilter () { + return path => !this.ignores(path) + } + + filter (paths) { + return makeArray(paths).filter(this.createFilter()) + } + + // @returns {TestResult} + test (path) { + return this._test(path, this._testCache, true) + } +} + +const factory = options => new Ignore(options) + +const isPathValid = path => + checkPath(path && checkPath.convert(path), path, RETURN_FALSE) + +factory.isPathValid = isPathValid + +// Fixes typescript +factory.default = factory + +module.exports = factory + +// Windows +// -------------------------------------------------------------- +/* istanbul ignore if */ +if ( + // Detect `process` so that it can run in browsers. + typeof process !== 'undefined' + && ( + process.env && process.env.IGNORE_TEST_WIN32 + || process.platform === 'win32' + ) +) { + /* eslint no-control-regex: "off" */ + const makePosix = str => /^\\\\\?\\/.test(str) + || /["<>|\u0000-\u001F]+/u.test(str) + ? 
str + : str.replace(/\\/g, '/') + + checkPath.convert = makePosix + + // 'C:\\foo' <- 'C:\\foo' has been converted to 'C:/' + // 'd:\\foo' + const REGIX_IS_WINDOWS_PATH_ABSOLUTE = /^[a-z]:\//i + checkPath.isNotRelative = path => + REGIX_IS_WINDOWS_PATH_ABSOLUTE.test(path) + || isNotRelative(path) +} + + +/***/ }), + +/***/ 44124: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +try { + var util = __nccwpck_require__(73837); + /* istanbul ignore next */ + if (typeof util.inherits !== 'function') throw ''; + module.exports = util.inherits; +} catch (e) { + /* istanbul ignore next */ + module.exports = __nccwpck_require__(8544); +} + + +/***/ }), + +/***/ 8544: +/***/ ((module) => { + +if (typeof Object.create === 'function') { + // implementation from standard node.js 'util' module + module.exports = function inherits(ctor, superCtor) { + if (superCtor) { + ctor.super_ = superCtor + ctor.prototype = Object.create(superCtor.prototype, { + constructor: { + value: ctor, + enumerable: false, + writable: true, + configurable: true + } + }) + } + }; +} else { + // old school shim for old browsers + module.exports = function inherits(ctor, superCtor) { + if (superCtor) { + ctor.super_ = superCtor + var TempCtor = function () {} + TempCtor.prototype = superCtor.prototype + ctor.prototype = new TempCtor() + ctor.prototype.constructor = ctor + } + } +} + + /***/ }), /***/ 51389: @@ -50363,6 +65603,6918 @@ function onceStrict (fn) { } +/***/ }), + +/***/ 31726: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// Top level file is just a mixin of submodules & constants + + +var assign = (__nccwpck_require__(5483).assign); + +var deflate = __nccwpck_require__(17265); +var inflate = __nccwpck_require__(96522); +var constants = __nccwpck_require__(58282); + +var pako = {}; + +assign(pako, deflate, inflate, constants); + +module.exports = pako; + + +/***/ }), + +/***/ 17265: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + + +var zlib_deflate = __nccwpck_require__(70978); +var utils = __nccwpck_require__(5483); +var strings = __nccwpck_require__(42380); +var msg = __nccwpck_require__(1890); +var ZStream = __nccwpck_require__(86442); + +var toString = Object.prototype.toString; + +/* Public constants ==========================================================*/ +/* ===========================================================================*/ + +var Z_NO_FLUSH = 0; +var Z_FINISH = 4; + +var Z_OK = 0; +var Z_STREAM_END = 1; +var Z_SYNC_FLUSH = 2; + +var Z_DEFAULT_COMPRESSION = -1; + +var Z_DEFAULT_STRATEGY = 0; + +var Z_DEFLATED = 8; + +/* ===========================================================================*/ + + +/** + * class Deflate + * + * Generic JS-style wrapper for zlib calls. If you don't need + * streaming behaviour - use more simple functions: [[deflate]], + * [[deflateRaw]] and [[gzip]]. + **/ + +/* internal + * Deflate.chunks -> Array + * + * Chunks of output data, if [[Deflate#onData]] not overridden. + **/ + +/** + * Deflate.result -> Uint8Array|Array + * + * Compressed result, generated by default [[Deflate#onData]] + * and [[Deflate#onEnd]] handlers. Filled after you push last chunk + * (call [[Deflate#push]] with `Z_FINISH` / `true` param) or if you + * push a chunk with explicit flush (call [[Deflate#push]] with + * `Z_SYNC_FLUSH` param). + **/ + +/** + * Deflate.err -> Number + * + * Error code after deflate finished. 0 (Z_OK) on success. 
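+ *
+ * ##### Example (an editor's sketch, using only the API documented in
+ * this file):
+ *
+ * ```javascript
+ * var deflator = new pako.Deflate();
+ * deflator.push(new Uint8Array([1, 2, 3]), true); // true -> last chunk
+ * if (deflator.err) { throw new Error(deflator.msg || String(deflator.err)); }
+ * ```
+ *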
+ * You will not need it in real life, because deflate errors + * are possible only on wrong options or bad `onData` / `onEnd` + * custom handlers. + **/ + +/** + * Deflate.msg -> String + * + * Error message, if [[Deflate.err]] != 0 + **/ + + +/** + * new Deflate(options) + * - options (Object): zlib deflate options. + * + * Creates new deflator instance with specified params. Throws exception + * on bad params. Supported options: + * + * - `level` + * - `windowBits` + * - `memLevel` + * - `strategy` + * - `dictionary` + * + * [http://zlib.net/manual.html#Advanced](http://zlib.net/manual.html#Advanced) + * for more information on these. + * + * Additional options, for internal needs: + * + * - `chunkSize` - size of generated data chunks (16K by default) + * - `raw` (Boolean) - do raw deflate + * - `gzip` (Boolean) - create gzip wrapper + * - `to` (String) - if equal to 'string', then result will be "binary string" + * (each char code [0..255]) + * - `header` (Object) - custom header for gzip + * - `text` (Boolean) - true if compressed data believed to be text + * - `time` (Number) - modification time, unix timestamp + * - `os` (Number) - operation system code + * - `extra` (Array) - array of bytes with extra data (max 65536) + * - `name` (String) - file name (binary string) + * - `comment` (String) - comment (binary string) + * - `hcrc` (Boolean) - true if header crc should be added + * + * ##### Example: + * + * ```javascript + * var pako = require('pako') + * , chunk1 = Uint8Array([1,2,3,4,5,6,7,8,9]) + * , chunk2 = Uint8Array([10,11,12,13,14,15,16,17,18,19]); + * + * var deflate = new pako.Deflate({ level: 3}); + * + * deflate.push(chunk1, false); + * deflate.push(chunk2, true); // true -> last chunk + * + * if (deflate.err) { throw new Error(deflate.err); } + * + * console.log(deflate.result); + * ``` + **/ +function Deflate(options) { + if (!(this instanceof Deflate)) return new Deflate(options); + + this.options = utils.assign({ + level: Z_DEFAULT_COMPRESSION, + method: Z_DEFLATED, + chunkSize: 16384, + windowBits: 15, + memLevel: 8, + strategy: Z_DEFAULT_STRATEGY, + to: '' + }, options || {}); + + var opt = this.options; + + if (opt.raw && (opt.windowBits > 0)) { + opt.windowBits = -opt.windowBits; + } + + else if (opt.gzip && (opt.windowBits > 0) && (opt.windowBits < 16)) { + opt.windowBits += 16; + } + + this.err = 0; // error code, if happens (0 = Z_OK) + this.msg = ''; // error message + this.ended = false; // used to avoid multiple onEnd() calls + this.chunks = []; // chunks of compressed data + + this.strm = new ZStream(); + this.strm.avail_out = 0; + + var status = zlib_deflate.deflateInit2( + this.strm, + opt.level, + opt.method, + opt.windowBits, + opt.memLevel, + opt.strategy + ); + + if (status !== Z_OK) { + throw new Error(msg[status]); + } + + if (opt.header) { + zlib_deflate.deflateSetHeader(this.strm, opt.header); + } + + if (opt.dictionary) { + var dict; + // Convert data if needed + if (typeof opt.dictionary === 'string') { + // If we need to compress text, change encoding to utf8. + dict = strings.string2buf(opt.dictionary); + } else if (toString.call(opt.dictionary) === '[object ArrayBuffer]') { + dict = new Uint8Array(opt.dictionary); + } else { + dict = opt.dictionary; + } + + status = zlib_deflate.deflateSetDictionary(this.strm, dict); + + if (status !== Z_OK) { + throw new Error(msg[status]); + } + + this._dict_set = true; + } +} + +/** + * Deflate#push(data[, mode]) -> Boolean + * - data (Uint8Array|Array|ArrayBuffer|String): input data. 
Strings will be + * converted to utf8 byte sequence. + * - mode (Number|Boolean): 0..6 for corresponding Z_NO_FLUSH..Z_TREE modes. + * See constants. Skipped or `false` means Z_NO_FLUSH, `true` means Z_FINISH. + * + * Sends input data to deflate pipe, generating [[Deflate#onData]] calls with + * new compressed chunks. Returns `true` on success. The last data block must have + * mode Z_FINISH (or `true`). That will flush internal pending buffers and call + * [[Deflate#onEnd]]. For interim explicit flushes (without ending the stream) you + * can use mode Z_SYNC_FLUSH, keeping the compression context. + * + * On fail call [[Deflate#onEnd]] with error code and return false. + * + * We strongly recommend to use `Uint8Array` on input for best speed (output + * array format is detected automatically). Also, don't skip last param and always + * use the same type in your code (boolean or number). That will improve JS speed. + * + * For regular `Array`-s make sure all elements are [0..255]. + * + * ##### Example + * + * ```javascript + * push(chunk, false); // push one of data chunks + * ... + * push(chunk, true); // push last chunk + * ``` + **/ +Deflate.prototype.push = function (data, mode) { + var strm = this.strm; + var chunkSize = this.options.chunkSize; + var status, _mode; + + if (this.ended) { return false; } + + _mode = (mode === ~~mode) ? mode : ((mode === true) ? Z_FINISH : Z_NO_FLUSH); + + // Convert data if needed + if (typeof data === 'string') { + // If we need to compress text, change encoding to utf8. + strm.input = strings.string2buf(data); + } else if (toString.call(data) === '[object ArrayBuffer]') { + strm.input = new Uint8Array(data); + } else { + strm.input = data; + } + + strm.next_in = 0; + strm.avail_in = strm.input.length; + + do { + if (strm.avail_out === 0) { + strm.output = new utils.Buf8(chunkSize); + strm.next_out = 0; + strm.avail_out = chunkSize; + } + status = zlib_deflate.deflate(strm, _mode); /* no bad return value */ + + if (status !== Z_STREAM_END && status !== Z_OK) { + this.onEnd(status); + this.ended = true; + return false; + } + if (strm.avail_out === 0 || (strm.avail_in === 0 && (_mode === Z_FINISH || _mode === Z_SYNC_FLUSH))) { + if (this.options.to === 'string') { + this.onData(strings.buf2binstring(utils.shrinkBuf(strm.output, strm.next_out))); + } else { + this.onData(utils.shrinkBuf(strm.output, strm.next_out)); + } + } + } while ((strm.avail_in > 0 || strm.avail_out === 0) && status !== Z_STREAM_END); + + // Finalize on the last chunk. + if (_mode === Z_FINISH) { + status = zlib_deflate.deflateEnd(this.strm); + this.onEnd(status); + this.ended = true; + return status === Z_OK; + } + + // callback interim results if Z_SYNC_FLUSH. + if (_mode === Z_SYNC_FLUSH) { + this.onEnd(Z_OK); + strm.avail_out = 0; + return true; + } + + return true; +}; + + +/** + * Deflate#onData(chunk) -> Void + * - chunk (Uint8Array|Array|String): output data. Type of array depends + * on js engine support. When string output requested, each chunk + * will be string. + * + * By default, stores data blocks in `chunks[]` property and glue + * those in `onEnd`. Override this handler, if you need another behaviour. + **/ +Deflate.prototype.onData = function (chunk) { + this.chunks.push(chunk); +}; + + +/** + * Deflate#onEnd(status) -> Void + * - status (Number): deflate status. 0 (Z_OK) on success, + * other if not. + * + * Called once after you tell deflate that the input stream is + * complete (Z_FINISH) or should be flushed (Z_SYNC_FLUSH) + * or if an error happened. 
By default - join collected chunks, + * free memory and fill `results` / `err` properties. + **/ +Deflate.prototype.onEnd = function (status) { + // On success - join + if (status === Z_OK) { + if (this.options.to === 'string') { + this.result = this.chunks.join(''); + } else { + this.result = utils.flattenChunks(this.chunks); + } + } + this.chunks = []; + this.err = status; + this.msg = this.strm.msg; +}; + + +/** + * deflate(data[, options]) -> Uint8Array|Array|String + * - data (Uint8Array|Array|String): input data to compress. + * - options (Object): zlib deflate options. + * + * Compress `data` with deflate algorithm and `options`. + * + * Supported options are: + * + * - level + * - windowBits + * - memLevel + * - strategy + * - dictionary + * + * [http://zlib.net/manual.html#Advanced](http://zlib.net/manual.html#Advanced) + * for more information on these. + * + * Sugar (options): + * + * - `raw` (Boolean) - say that we work with raw stream, if you don't wish to specify + * negative windowBits implicitly. + * - `to` (String) - if equal to 'string', then result will be "binary string" + * (each char code [0..255]) + * + * ##### Example: + * + * ```javascript + * var pako = require('pako') + * , data = Uint8Array([1,2,3,4,5,6,7,8,9]); + * + * console.log(pako.deflate(data)); + * ``` + **/ +function deflate(input, options) { + var deflator = new Deflate(options); + + deflator.push(input, true); + + // That will never happens, if you don't cheat with options :) + if (deflator.err) { throw deflator.msg || msg[deflator.err]; } + + return deflator.result; +} + + +/** + * deflateRaw(data[, options]) -> Uint8Array|Array|String + * - data (Uint8Array|Array|String): input data to compress. + * - options (Object): zlib deflate options. + * + * The same as [[deflate]], but creates raw data, without wrapper + * (header and adler32 crc). + **/ +function deflateRaw(input, options) { + options = options || {}; + options.raw = true; + return deflate(input, options); +} + + +/** + * gzip(data[, options]) -> Uint8Array|Array|String + * - data (Uint8Array|Array|String): input data to compress. + * - options (Object): zlib deflate options. + * + * The same as [[deflate]], but create gzip wrapper instead of + * deflate one. + **/ +function gzip(input, options) { + options = options || {}; + options.gzip = true; + return deflate(input, options); +} + + +exports.Deflate = Deflate; +exports.deflate = deflate; +exports.deflateRaw = deflateRaw; +exports.gzip = gzip; + + +/***/ }), + +/***/ 96522: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + + +var zlib_inflate = __nccwpck_require__(90409); +var utils = __nccwpck_require__(5483); +var strings = __nccwpck_require__(42380); +var c = __nccwpck_require__(58282); +var msg = __nccwpck_require__(1890); +var ZStream = __nccwpck_require__(86442); +var GZheader = __nccwpck_require__(35105); + +var toString = Object.prototype.toString; + +/** + * class Inflate + * + * Generic JS-style wrapper for zlib calls. If you don't need + * streaming behaviour - use more simple functions: [[inflate]] + * and [[inflateRaw]]. + **/ + +/* internal + * inflate.chunks -> Array + * + * Chunks of output data, if [[Inflate#onData]] not overridden. + **/ + +/** + * Inflate.result -> Uint8Array|Array|String + * + * Uncompressed result, generated by default [[Inflate#onData]] + * and [[Inflate#onEnd]] handlers. 
Filled after you push last chunk + * (call [[Inflate#push]] with `Z_FINISH` / `true` param) or if you + * push a chunk with explicit flush (call [[Inflate#push]] with + * `Z_SYNC_FLUSH` param). + **/ + +/** + * Inflate.err -> Number + * + * Error code after inflate finished. 0 (Z_OK) on success. + * Should be checked if broken data possible. + **/ + +/** + * Inflate.msg -> String + * + * Error message, if [[Inflate.err]] != 0 + **/ + + +/** + * new Inflate(options) + * - options (Object): zlib inflate options. + * + * Creates new inflator instance with specified params. Throws exception + * on bad params. Supported options: + * + * - `windowBits` + * - `dictionary` + * + * [http://zlib.net/manual.html#Advanced](http://zlib.net/manual.html#Advanced) + * for more information on these. + * + * Additional options, for internal needs: + * + * - `chunkSize` - size of generated data chunks (16K by default) + * - `raw` (Boolean) - do raw inflate + * - `to` (String) - if equal to 'string', then result will be converted + * from utf8 to utf16 (javascript) string. When string output requested, + * chunk length can differ from `chunkSize`, depending on content. + * + * By default, when no options set, autodetect deflate/gzip data format via + * wrapper header. + * + * ##### Example: + * + * ```javascript + * var pako = require('pako') + * , chunk1 = Uint8Array([1,2,3,4,5,6,7,8,9]) + * , chunk2 = Uint8Array([10,11,12,13,14,15,16,17,18,19]); + * + * var inflate = new pako.Inflate({ level: 3}); + * + * inflate.push(chunk1, false); + * inflate.push(chunk2, true); // true -> last chunk + * + * if (inflate.err) { throw new Error(inflate.err); } + * + * console.log(inflate.result); + * ``` + **/ +function Inflate(options) { + if (!(this instanceof Inflate)) return new Inflate(options); + + this.options = utils.assign({ + chunkSize: 16384, + windowBits: 0, + to: '' + }, options || {}); + + var opt = this.options; + + // Force window size for `raw` data, if not set directly, + // because we have no header for autodetect. + if (opt.raw && (opt.windowBits >= 0) && (opt.windowBits < 16)) { + opt.windowBits = -opt.windowBits; + if (opt.windowBits === 0) { opt.windowBits = -15; } + } + + // If `windowBits` not defined (and mode not raw) - set autodetect flag for gzip/deflate + if ((opt.windowBits >= 0) && (opt.windowBits < 16) && + !(options && options.windowBits)) { + opt.windowBits += 32; + } + + // Gzip header has no info about windows size, we can do autodetect only + // for deflate. 
So, if window size not set, force it to max when gzip possible + if ((opt.windowBits > 15) && (opt.windowBits < 48)) { + // bit 3 (16) -> gzipped data + // bit 4 (32) -> autodetect gzip/deflate + if ((opt.windowBits & 15) === 0) { + opt.windowBits |= 15; + } + } + + this.err = 0; // error code, if happens (0 = Z_OK) + this.msg = ''; // error message + this.ended = false; // used to avoid multiple onEnd() calls + this.chunks = []; // chunks of compressed data + + this.strm = new ZStream(); + this.strm.avail_out = 0; + + var status = zlib_inflate.inflateInit2( + this.strm, + opt.windowBits + ); + + if (status !== c.Z_OK) { + throw new Error(msg[status]); + } + + this.header = new GZheader(); + + zlib_inflate.inflateGetHeader(this.strm, this.header); + + // Setup dictionary + if (opt.dictionary) { + // Convert data if needed + if (typeof opt.dictionary === 'string') { + opt.dictionary = strings.string2buf(opt.dictionary); + } else if (toString.call(opt.dictionary) === '[object ArrayBuffer]') { + opt.dictionary = new Uint8Array(opt.dictionary); + } + if (opt.raw) { //In raw mode we need to set the dictionary early + status = zlib_inflate.inflateSetDictionary(this.strm, opt.dictionary); + if (status !== c.Z_OK) { + throw new Error(msg[status]); + } + } + } +} + +/** + * Inflate#push(data[, mode]) -> Boolean + * - data (Uint8Array|Array|ArrayBuffer|String): input data + * - mode (Number|Boolean): 0..6 for corresponding Z_NO_FLUSH..Z_TREE modes. + * See constants. Skipped or `false` means Z_NO_FLUSH, `true` means Z_FINISH. + * + * Sends input data to inflate pipe, generating [[Inflate#onData]] calls with + * new output chunks. Returns `true` on success. The last data block must have + * mode Z_FINISH (or `true`). That will flush internal pending buffers and call + * [[Inflate#onEnd]]. For interim explicit flushes (without ending the stream) you + * can use mode Z_SYNC_FLUSH, keeping the decompression context. + * + * On fail call [[Inflate#onEnd]] with error code and return false. + * + * We strongly recommend to use `Uint8Array` on input for best speed (output + * format is detected automatically). Also, don't skip last param and always + * use the same type in your code (boolean or number). That will improve JS speed. + * + * For regular `Array`-s make sure all elements are [0..255]. + * + * ##### Example + * + * ```javascript + * push(chunk, false); // push one of data chunks + * ... + * push(chunk, true); // push last chunk + * ``` + **/ +Inflate.prototype.push = function (data, mode) { + var strm = this.strm; + var chunkSize = this.options.chunkSize; + var dictionary = this.options.dictionary; + var status, _mode; + var next_out_utf8, tail, utf8str; + + // Flag to properly process Z_BUF_ERROR on testing inflate call + // when we check that all output data was flushed. + var allowBufError = false; + + if (this.ended) { return false; } + _mode = (mode === ~~mode) ? mode : ((mode === true) ? 
c.Z_FINISH : c.Z_NO_FLUSH); + + // Convert data if needed + if (typeof data === 'string') { + // Only binary strings can be decompressed on practice + strm.input = strings.binstring2buf(data); + } else if (toString.call(data) === '[object ArrayBuffer]') { + strm.input = new Uint8Array(data); + } else { + strm.input = data; + } + + strm.next_in = 0; + strm.avail_in = strm.input.length; + + do { + if (strm.avail_out === 0) { + strm.output = new utils.Buf8(chunkSize); + strm.next_out = 0; + strm.avail_out = chunkSize; + } + + status = zlib_inflate.inflate(strm, c.Z_NO_FLUSH); /* no bad return value */ + + if (status === c.Z_NEED_DICT && dictionary) { + status = zlib_inflate.inflateSetDictionary(this.strm, dictionary); + } + + if (status === c.Z_BUF_ERROR && allowBufError === true) { + status = c.Z_OK; + allowBufError = false; + } + + if (status !== c.Z_STREAM_END && status !== c.Z_OK) { + this.onEnd(status); + this.ended = true; + return false; + } + + if (strm.next_out) { + if (strm.avail_out === 0 || status === c.Z_STREAM_END || (strm.avail_in === 0 && (_mode === c.Z_FINISH || _mode === c.Z_SYNC_FLUSH))) { + + if (this.options.to === 'string') { + + next_out_utf8 = strings.utf8border(strm.output, strm.next_out); + + tail = strm.next_out - next_out_utf8; + utf8str = strings.buf2string(strm.output, next_out_utf8); + + // move tail + strm.next_out = tail; + strm.avail_out = chunkSize - tail; + if (tail) { utils.arraySet(strm.output, strm.output, next_out_utf8, tail, 0); } + + this.onData(utf8str); + + } else { + this.onData(utils.shrinkBuf(strm.output, strm.next_out)); + } + } + } + + // When no more input data, we should check that internal inflate buffers + // are flushed. The only way to do it when avail_out = 0 - run one more + // inflate pass. But if output data not exists, inflate return Z_BUF_ERROR. + // Here we set flag to process this error properly. + // + // NOTE. Deflate does not return error in this case and does not needs such + // logic. + if (strm.avail_in === 0 && strm.avail_out === 0) { + allowBufError = true; + } + + } while ((strm.avail_in > 0 || strm.avail_out === 0) && status !== c.Z_STREAM_END); + + if (status === c.Z_STREAM_END) { + _mode = c.Z_FINISH; + } + + // Finalize on the last chunk. + if (_mode === c.Z_FINISH) { + status = zlib_inflate.inflateEnd(this.strm); + this.onEnd(status); + this.ended = true; + return status === c.Z_OK; + } + + // callback interim results if Z_SYNC_FLUSH. + if (_mode === c.Z_SYNC_FLUSH) { + this.onEnd(c.Z_OK); + strm.avail_out = 0; + return true; + } + + return true; +}; + + +/** + * Inflate#onData(chunk) -> Void + * - chunk (Uint8Array|Array|String): output data. Type of array depends + * on js engine support. When string output requested, each chunk + * will be string. + * + * By default, stores data blocks in `chunks[]` property and glue + * those in `onEnd`. Override this handler, if you need another behaviour. + **/ +Inflate.prototype.onData = function (chunk) { + this.chunks.push(chunk); +}; + + +/** + * Inflate#onEnd(status) -> Void + * - status (Number): inflate status. 0 (Z_OK) on success, + * other if not. + * + * Called either after you tell inflate that the input stream is + * complete (Z_FINISH) or should be flushed (Z_SYNC_FLUSH) + * or if an error happened. By default - join collected chunks, + * free memory and fill `results` / `err` properties. 
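+ *
+ * ##### Example (an editor's sketch of overriding both handlers;
+ * `compressedBytes` is a placeholder Uint8Array):
+ *
+ * ```javascript
+ * var inflator = new pako.Inflate({ to: 'string' });
+ * inflator.onData = function (chunk) { console.log('chunk of', chunk.length); };
+ * inflator.onEnd = function (status) {
+ *   if (status !== 0) { console.log('inflate failed with code', status); }
+ * };
+ * inflator.push(compressedBytes, true); // true -> last chunk
+ * ```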
+ **/ +Inflate.prototype.onEnd = function (status) { + // On success - join + if (status === c.Z_OK) { + if (this.options.to === 'string') { + // Glue & convert here, until we teach pako to send + // utf8 aligned strings to onData + this.result = this.chunks.join(''); + } else { + this.result = utils.flattenChunks(this.chunks); + } + } + this.chunks = []; + this.err = status; + this.msg = this.strm.msg; +}; + + +/** + * inflate(data[, options]) -> Uint8Array|Array|String + * - data (Uint8Array|Array|String): input data to decompress. + * - options (Object): zlib inflate options. + * + * Decompress `data` with inflate/ungzip and `options`. Autodetect + * format via wrapper header by default. That's why we don't provide + * separate `ungzip` method. + * + * Supported options are: + * + * - windowBits + * + * [http://zlib.net/manual.html#Advanced](http://zlib.net/manual.html#Advanced) + * for more information. + * + * Sugar (options): + * + * - `raw` (Boolean) - say that we work with raw stream, if you don't wish to specify + * negative windowBits implicitly. + * - `to` (String) - if equal to 'string', then result will be converted + * from utf8 to utf16 (javascript) string. When string output requested, + * chunk length can differ from `chunkSize`, depending on content. + * + * + * ##### Example: + * + * ```javascript + * var pako = require('pako') + * , input = pako.deflate([1,2,3,4,5,6,7,8,9]) + * , output; + * + * try { + * output = pako.inflate(input); + * } catch (err) + * console.log(err); + * } + * ``` + **/ +function inflate(input, options) { + var inflator = new Inflate(options); + + inflator.push(input, true); + + // That will never happens, if you don't cheat with options :) + if (inflator.err) { throw inflator.msg || msg[inflator.err]; } + + return inflator.result; +} + + +/** + * inflateRaw(data[, options]) -> Uint8Array|Array|String + * - data (Uint8Array|Array|String): input data to decompress. + * - options (Object): zlib inflate options. + * + * The same as [[inflate]], but creates raw data, without wrapper + * (header and adler32 crc). + **/ +function inflateRaw(input, options) { + options = options || {}; + options.raw = true; + return inflate(input, options); +} + + +/** + * ungzip(data[, options]) -> Uint8Array|Array|String + * - data (Uint8Array|Array|String): input data to decompress. + * - options (Object): zlib inflate options. + * + * Just shortcut to [[inflate]], because it autodetects format + * by header.content. Done for convenience. 
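+ *
+ * ##### Example (an editor's sketch; `gzippedBytes` is a placeholder
+ * Uint8Array holding gzip data):
+ *
+ * ```javascript
+ * var pako = require('pako');
+ * var text = pako.ungzip(gzippedBytes, { to: 'string' });
+ * ```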
+ **/ + + +exports.Inflate = Inflate; +exports.inflate = inflate; +exports.inflateRaw = inflateRaw; +exports.ungzip = inflate; + + +/***/ }), + +/***/ 5483: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + + +var TYPED_OK = (typeof Uint8Array !== 'undefined') && + (typeof Uint16Array !== 'undefined') && + (typeof Int32Array !== 'undefined'); + +function _has(obj, key) { + return Object.prototype.hasOwnProperty.call(obj, key); +} + +exports.assign = function (obj /*from1, from2, from3, ...*/) { + var sources = Array.prototype.slice.call(arguments, 1); + while (sources.length) { + var source = sources.shift(); + if (!source) { continue; } + + if (typeof source !== 'object') { + throw new TypeError(source + 'must be non-object'); + } + + for (var p in source) { + if (_has(source, p)) { + obj[p] = source[p]; + } + } + } + + return obj; +}; + + +// reduce buffer size, avoiding mem copy +exports.shrinkBuf = function (buf, size) { + if (buf.length === size) { return buf; } + if (buf.subarray) { return buf.subarray(0, size); } + buf.length = size; + return buf; +}; + + +var fnTyped = { + arraySet: function (dest, src, src_offs, len, dest_offs) { + if (src.subarray && dest.subarray) { + dest.set(src.subarray(src_offs, src_offs + len), dest_offs); + return; + } + // Fallback to ordinary array + for (var i = 0; i < len; i++) { + dest[dest_offs + i] = src[src_offs + i]; + } + }, + // Join array of chunks to single array. + flattenChunks: function (chunks) { + var i, l, len, pos, chunk, result; + + // calculate data length + len = 0; + for (i = 0, l = chunks.length; i < l; i++) { + len += chunks[i].length; + } + + // join chunks + result = new Uint8Array(len); + pos = 0; + for (i = 0, l = chunks.length; i < l; i++) { + chunk = chunks[i]; + result.set(chunk, pos); + pos += chunk.length; + } + + return result; + } +}; + +var fnUntyped = { + arraySet: function (dest, src, src_offs, len, dest_offs) { + for (var i = 0; i < len; i++) { + dest[dest_offs + i] = src[src_offs + i]; + } + }, + // Join array of chunks to single array. + flattenChunks: function (chunks) { + return [].concat.apply([], chunks); + } +}; + + +// Enable/Disable typed arrays use, for testing +// +exports.setTyped = function (on) { + if (on) { + exports.Buf8 = Uint8Array; + exports.Buf16 = Uint16Array; + exports.Buf32 = Int32Array; + exports.assign(exports, fnTyped); + } else { + exports.Buf8 = Array; + exports.Buf16 = Array; + exports.Buf32 = Array; + exports.assign(exports, fnUntyped); + } +}; + +exports.setTyped(TYPED_OK); + + +/***/ }), + +/***/ 42380: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; +// String encode/decode helpers + + + +var utils = __nccwpck_require__(5483); + + +// Quick check if we can use fast array to bin string conversion +// +// - apply(Array) can fail on Android 2.2 +// - apply(Uint8Array) can fail on iOS 5.1 Safari +// +var STR_APPLY_OK = true; +var STR_APPLY_UIA_OK = true; + +try { String.fromCharCode.apply(null, [ 0 ]); } catch (__) { STR_APPLY_OK = false; } +try { String.fromCharCode.apply(null, new Uint8Array(1)); } catch (__) { STR_APPLY_UIA_OK = false; } + + +// Table with utf8 lengths (calculated by first byte of sequence) +// Note, that 5 & 6-byte values and some 4-byte values can not be represented in JS, +// because max possible codepoint is 0x10ffff +var _utf8len = new utils.Buf8(256); +for (var q = 0; q < 256; q++) { + _utf8len[q] = (q >= 252 ? 6 : q >= 248 ? 5 : q >= 240 ? 4 : q >= 224 ? 3 : q >= 192 ? 
2 : 1); +} +_utf8len[254] = _utf8len[254] = 1; // Invalid sequence start + + +// convert string to array (typed, when possible) +exports.string2buf = function (str) { + var buf, c, c2, m_pos, i, str_len = str.length, buf_len = 0; + + // count binary size + for (m_pos = 0; m_pos < str_len; m_pos++) { + c = str.charCodeAt(m_pos); + if ((c & 0xfc00) === 0xd800 && (m_pos + 1 < str_len)) { + c2 = str.charCodeAt(m_pos + 1); + if ((c2 & 0xfc00) === 0xdc00) { + c = 0x10000 + ((c - 0xd800) << 10) + (c2 - 0xdc00); + m_pos++; + } + } + buf_len += c < 0x80 ? 1 : c < 0x800 ? 2 : c < 0x10000 ? 3 : 4; + } + + // allocate buffer + buf = new utils.Buf8(buf_len); + + // convert + for (i = 0, m_pos = 0; i < buf_len; m_pos++) { + c = str.charCodeAt(m_pos); + if ((c & 0xfc00) === 0xd800 && (m_pos + 1 < str_len)) { + c2 = str.charCodeAt(m_pos + 1); + if ((c2 & 0xfc00) === 0xdc00) { + c = 0x10000 + ((c - 0xd800) << 10) + (c2 - 0xdc00); + m_pos++; + } + } + if (c < 0x80) { + /* one byte */ + buf[i++] = c; + } else if (c < 0x800) { + /* two bytes */ + buf[i++] = 0xC0 | (c >>> 6); + buf[i++] = 0x80 | (c & 0x3f); + } else if (c < 0x10000) { + /* three bytes */ + buf[i++] = 0xE0 | (c >>> 12); + buf[i++] = 0x80 | (c >>> 6 & 0x3f); + buf[i++] = 0x80 | (c & 0x3f); + } else { + /* four bytes */ + buf[i++] = 0xf0 | (c >>> 18); + buf[i++] = 0x80 | (c >>> 12 & 0x3f); + buf[i++] = 0x80 | (c >>> 6 & 0x3f); + buf[i++] = 0x80 | (c & 0x3f); + } + } + + return buf; +}; + +// Helper (used in 2 places) +function buf2binstring(buf, len) { + // On Chrome, the arguments in a function call that are allowed is `65534`. + // If the length of the buffer is smaller than that, we can use this optimization, + // otherwise we will take a slower path. + if (len < 65534) { + if ((buf.subarray && STR_APPLY_UIA_OK) || (!buf.subarray && STR_APPLY_OK)) { + return String.fromCharCode.apply(null, utils.shrinkBuf(buf, len)); + } + } + + var result = ''; + for (var i = 0; i < len; i++) { + result += String.fromCharCode(buf[i]); + } + return result; +} + + +// Convert byte array to binary string +exports.buf2binstring = function (buf) { + return buf2binstring(buf, buf.length); +}; + + +// Convert binary string (typed, when possible) +exports.binstring2buf = function (str) { + var buf = new utils.Buf8(str.length); + for (var i = 0, len = buf.length; i < len; i++) { + buf[i] = str.charCodeAt(i); + } + return buf; +}; + + +// convert array to string +exports.buf2string = function (buf, max) { + var i, out, c, c_len; + var len = max || buf.length; + + // Reserve max possible length (2 words per char) + // NB: by unknown reasons, Array is significantly faster for + // String.fromCharCode.apply than Uint16Array. + var utf16buf = new Array(len * 2); + + for (out = 0, i = 0; i < len;) { + c = buf[i++]; + // quick process ascii + if (c < 0x80) { utf16buf[out++] = c; continue; } + + c_len = _utf8len[c]; + // skip 5 & 6 byte codes + if (c_len > 4) { utf16buf[out++] = 0xfffd; i += c_len - 1; continue; } + + // apply mask on first byte + c &= c_len === 2 ? 0x1f : c_len === 3 ? 0x0f : 0x07; + // join the rest + while (c_len > 1 && i < len) { + c = (c << 6) | (buf[i++] & 0x3f); + c_len--; + } + + // terminated by end of string? 
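+ // (Editor's worked example, arithmetic checked by hand): for the utf8
+ // bytes [0xE2, 0x82, 0xAC] ('€'), c_len is 3; masking the first byte
+ // gives c = 0x02, and folding in the two continuation bytes yields
+ // c = 0x20AC, emitted below as a single utf16 code unit.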
+ if (c_len > 1) { utf16buf[out++] = 0xfffd; continue; } + + if (c < 0x10000) { + utf16buf[out++] = c; + } else { + c -= 0x10000; + utf16buf[out++] = 0xd800 | ((c >> 10) & 0x3ff); + utf16buf[out++] = 0xdc00 | (c & 0x3ff); + } + } + + return buf2binstring(utf16buf, out); +}; + + +// Calculate max possible position in utf8 buffer, +// that will not break sequence. If that's not possible +// - (very small limits) return max size as is. +// +// buf[] - utf8 bytes array +// max - length limit (mandatory); +exports.utf8border = function (buf, max) { + var pos; + + max = max || buf.length; + if (max > buf.length) { max = buf.length; } + + // go back from last position, until start of sequence found + pos = max - 1; + while (pos >= 0 && (buf[pos] & 0xC0) === 0x80) { pos--; } + + // Very small and broken sequence, + // return max, because we should return something anyway. + if (pos < 0) { return max; } + + // If we came to start of buffer - that means buffer is too small, + // return max too. + if (pos === 0) { return max; } + + return (pos + _utf8len[buf[pos]] > max) ? pos : max; +}; + + +/***/ }), + +/***/ 86924: +/***/ ((module) => { + +"use strict"; + + +// Note: adler32 takes 12% for level 0 and 2% for level 6. +// It isn't worth it to make additional optimizations as in original. +// Small size is preferable. + +// (C) 1995-2013 Jean-loup Gailly and Mark Adler +// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin +// +// This software is provided 'as-is', without any express or implied +// warranty. In no event will the authors be held liable for any damages +// arising from the use of this software. +// +// Permission is granted to anyone to use this software for any purpose, +// including commercial applications, and to alter it and redistribute it +// freely, subject to the following restrictions: +// +// 1. The origin of this software must not be misrepresented; you must not +// claim that you wrote the original software. If you use this software +// in a product, an acknowledgment in the product documentation would be +// appreciated but is not required. +// 2. Altered source versions must be plainly marked as such, and must not be +// misrepresented as being the original software. +// 3. This notice may not be removed or altered from any source distribution. + +function adler32(adler, buf, len, pos) { + var s1 = (adler & 0xffff) |0, + s2 = ((adler >>> 16) & 0xffff) |0, + n = 0; + + while (len !== 0) { + // Set limit ~ twice less than 5552, to keep + // s2 in 31-bits, because we force signed ints. + // in other case %= will fail. + n = len > 2000 ? 2000 : len; + len -= n; + + do { + s1 = (s1 + buf[pos++]) |0; + s2 = (s2 + s1) |0; + } while (--n); + + s1 %= 65521; + s2 %= 65521; + } + + return (s1 | (s2 << 16)) |0; +} + + +module.exports = adler32; + + +/***/ }), + +/***/ 58282: +/***/ ((module) => { + +"use strict"; + + +// (C) 1995-2013 Jean-loup Gailly and Mark Adler +// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin +// +// This software is provided 'as-is', without any express or implied +// warranty. In no event will the authors be held liable for any damages +// arising from the use of this software. +// +// Permission is granted to anyone to use this software for any purpose, +// including commercial applications, and to alter it and redistribute it +// freely, subject to the following restrictions: +// +// 1. The origin of this software must not be misrepresented; you must not +// claim that you wrote the original software. 
If you use this software +// in a product, an acknowledgment in the product documentation would be +// appreciated but is not required. +// 2. Altered source versions must be plainly marked as such, and must not be +// misrepresented as being the original software. +// 3. This notice may not be removed or altered from any source distribution. + +module.exports = { + + /* Allowed flush values; see deflate() and inflate() below for details */ + Z_NO_FLUSH: 0, + Z_PARTIAL_FLUSH: 1, + Z_SYNC_FLUSH: 2, + Z_FULL_FLUSH: 3, + Z_FINISH: 4, + Z_BLOCK: 5, + Z_TREES: 6, + + /* Return codes for the compression/decompression functions. Negative values + * are errors, positive values are used for special but normal events. + */ + Z_OK: 0, + Z_STREAM_END: 1, + Z_NEED_DICT: 2, + Z_ERRNO: -1, + Z_STREAM_ERROR: -2, + Z_DATA_ERROR: -3, + //Z_MEM_ERROR: -4, + Z_BUF_ERROR: -5, + //Z_VERSION_ERROR: -6, + + /* compression levels */ + Z_NO_COMPRESSION: 0, + Z_BEST_SPEED: 1, + Z_BEST_COMPRESSION: 9, + Z_DEFAULT_COMPRESSION: -1, + + + Z_FILTERED: 1, + Z_HUFFMAN_ONLY: 2, + Z_RLE: 3, + Z_FIXED: 4, + Z_DEFAULT_STRATEGY: 0, + + /* Possible values of the data_type field (though see inflate()) */ + Z_BINARY: 0, + Z_TEXT: 1, + //Z_ASCII: 1, // = Z_TEXT (deprecated) + Z_UNKNOWN: 2, + + /* The deflate compression method */ + Z_DEFLATED: 8 + //Z_NULL: null // Use -1 or null inline, depending on var type +}; + + +/***/ }), + +/***/ 87242: +/***/ ((module) => { + +"use strict"; + + +// Note: we can't get significant speed boost here. +// So write code to minimize size - no pregenerated tables +// and array tools dependencies. + +// (C) 1995-2013 Jean-loup Gailly and Mark Adler +// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin +// +// This software is provided 'as-is', without any express or implied +// warranty. In no event will the authors be held liable for any damages +// arising from the use of this software. +// +// Permission is granted to anyone to use this software for any purpose, +// including commercial applications, and to alter it and redistribute it +// freely, subject to the following restrictions: +// +// 1. The origin of this software must not be misrepresented; you must not +// claim that you wrote the original software. If you use this software +// in a product, an acknowledgment in the product documentation would be +// appreciated but is not required. +// 2. Altered source versions must be plainly marked as such, and must not be +// misrepresented as being the original software. +// 3. This notice may not be removed or altered from any source distribution. + +// Use ordinary array, since untyped makes no boost here +function makeTable() { + var c, table = []; + + for (var n = 0; n < 256; n++) { + c = n; + for (var k = 0; k < 8; k++) { + c = ((c & 1) ? (0xEDB88320 ^ (c >>> 1)) : (c >>> 1)); + } + table[n] = c; + } + + return table; +} + +// Create table on load. Just 255 signed longs. Not a problem. +var crcTable = makeTable(); + + +function crc32(crc, buf, len, pos) { + var t = crcTable, + end = pos + len; + + crc ^= -1; + + for (var i = pos; i < end; i++) { + crc = (crc >>> 8) ^ t[(crc ^ buf[i]) & 0xFF]; + } + + return (crc ^ (-1)); // >>> 0; +} + + +module.exports = crc32; + + +/***/ }), + +/***/ 70978: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +// (C) 1995-2013 Jean-loup Gailly and Mark Adler +// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin +// +// This software is provided 'as-is', without any express or implied +// warranty. 
In no event will the authors be held liable for any damages +// arising from the use of this software. +// +// Permission is granted to anyone to use this software for any purpose, +// including commercial applications, and to alter it and redistribute it +// freely, subject to the following restrictions: +// +// 1. The origin of this software must not be misrepresented; you must not +// claim that you wrote the original software. If you use this software +// in a product, an acknowledgment in the product documentation would be +// appreciated but is not required. +// 2. Altered source versions must be plainly marked as such, and must not be +// misrepresented as being the original software. +// 3. This notice may not be removed or altered from any source distribution. + +var utils = __nccwpck_require__(5483); +var trees = __nccwpck_require__(78754); +var adler32 = __nccwpck_require__(86924); +var crc32 = __nccwpck_require__(87242); +var msg = __nccwpck_require__(1890); + +/* Public constants ==========================================================*/ +/* ===========================================================================*/ + + +/* Allowed flush values; see deflate() and inflate() below for details */ +var Z_NO_FLUSH = 0; +var Z_PARTIAL_FLUSH = 1; +//var Z_SYNC_FLUSH = 2; +var Z_FULL_FLUSH = 3; +var Z_FINISH = 4; +var Z_BLOCK = 5; +//var Z_TREES = 6; + + +/* Return codes for the compression/decompression functions. Negative values + * are errors, positive values are used for special but normal events. + */ +var Z_OK = 0; +var Z_STREAM_END = 1; +//var Z_NEED_DICT = 2; +//var Z_ERRNO = -1; +var Z_STREAM_ERROR = -2; +var Z_DATA_ERROR = -3; +//var Z_MEM_ERROR = -4; +var Z_BUF_ERROR = -5; +//var Z_VERSION_ERROR = -6; + + +/* compression levels */ +//var Z_NO_COMPRESSION = 0; +//var Z_BEST_SPEED = 1; +//var Z_BEST_COMPRESSION = 9; +var Z_DEFAULT_COMPRESSION = -1; + + +var Z_FILTERED = 1; +var Z_HUFFMAN_ONLY = 2; +var Z_RLE = 3; +var Z_FIXED = 4; +var Z_DEFAULT_STRATEGY = 0; + +/* Possible values of the data_type field (though see inflate()) */ +//var Z_BINARY = 0; +//var Z_TEXT = 1; +//var Z_ASCII = 1; // = Z_TEXT +var Z_UNKNOWN = 2; + + +/* The deflate compression method */ +var Z_DEFLATED = 8; + +/*============================================================================*/ + + +var MAX_MEM_LEVEL = 9; +/* Maximum value for memLevel in deflateInit2 */ +var MAX_WBITS = 15; +/* 32K LZ77 window */ +var DEF_MEM_LEVEL = 8; + + +var LENGTH_CODES = 29; +/* number of length codes, not counting the special END_BLOCK code */ +var LITERALS = 256; +/* number of literal bytes 0..255 */ +var L_CODES = LITERALS + 1 + LENGTH_CODES; +/* number of Literal or Length codes, including the END_BLOCK code */ +var D_CODES = 30; +/* number of distance codes */ +var BL_CODES = 19; +/* number of codes used to transfer the bit lengths */ +var HEAP_SIZE = 2 * L_CODES + 1; +/* maximum heap size */ +var MAX_BITS = 15; +/* All codes must not exceed MAX_BITS bits */ + +var MIN_MATCH = 3; +var MAX_MATCH = 258; +var MIN_LOOKAHEAD = (MAX_MATCH + MIN_MATCH + 1); + +var PRESET_DICT = 0x20; + +var INIT_STATE = 42; +var EXTRA_STATE = 69; +var NAME_STATE = 73; +var COMMENT_STATE = 91; +var HCRC_STATE = 103; +var BUSY_STATE = 113; +var FINISH_STATE = 666; + +var BS_NEED_MORE = 1; /* block not completed, need more input or more output */ +var BS_BLOCK_DONE = 2; /* block flush performed */ +var BS_FINISH_STARTED = 3; /* finish started, need only more output at next deflate */ +var BS_FINISH_DONE = 4; /* finish done, accept no 
more input or output */ + +var OS_CODE = 0x03; // Unix :) . Don't detect, use this default. + +function err(strm, errorCode) { + strm.msg = msg[errorCode]; + return errorCode; +} + +function rank(f) { + return ((f) << 1) - ((f) > 4 ? 9 : 0); +} + +function zero(buf) { var len = buf.length; while (--len >= 0) { buf[len] = 0; } } + + +/* ========================================================================= + * Flush as much pending output as possible. All deflate() output goes + * through this function so some applications may wish to modify it + * to avoid allocating a large strm->output buffer and copying into it. + * (See also read_buf()). + */ +function flush_pending(strm) { + var s = strm.state; + + //_tr_flush_bits(s); + var len = s.pending; + if (len > strm.avail_out) { + len = strm.avail_out; + } + if (len === 0) { return; } + + utils.arraySet(strm.output, s.pending_buf, s.pending_out, len, strm.next_out); + strm.next_out += len; + s.pending_out += len; + strm.total_out += len; + strm.avail_out -= len; + s.pending -= len; + if (s.pending === 0) { + s.pending_out = 0; + } +} + + +function flush_block_only(s, last) { + trees._tr_flush_block(s, (s.block_start >= 0 ? s.block_start : -1), s.strstart - s.block_start, last); + s.block_start = s.strstart; + flush_pending(s.strm); +} + + +function put_byte(s, b) { + s.pending_buf[s.pending++] = b; +} + + +/* ========================================================================= + * Put a short in the pending buffer. The 16-bit value is put in MSB order. + * IN assertion: the stream state is correct and there is enough room in + * pending_buf. + */ +function putShortMSB(s, b) { +// put_byte(s, (Byte)(b >> 8)); +// put_byte(s, (Byte)(b & 0xff)); + s.pending_buf[s.pending++] = (b >>> 8) & 0xff; + s.pending_buf[s.pending++] = b & 0xff; +} + + +/* =========================================================================== + * Read a new buffer from the current input stream, update the adler32 + * and total number of bytes read. All deflate() input goes through + * this function so some applications may wish to modify it to avoid + * allocating a large strm->input buffer and copying from it. + * (See also flush_pending()). + */ +function read_buf(strm, buf, start, size) { + var len = strm.avail_in; + + if (len > size) { len = size; } + if (len === 0) { return 0; } + + strm.avail_in -= len; + + // zmemcpy(buf, strm->next_in, len); + utils.arraySet(buf, strm.input, strm.next_in, len, start); + if (strm.state.wrap === 1) { + strm.adler = adler32(strm.adler, buf, len, start); + } + + else if (strm.state.wrap === 2) { + strm.adler = crc32(strm.adler, buf, len, start); + } + + strm.next_in += len; + strm.total_in += len; + + return len; +} + + +/* =========================================================================== + * Set match_start to the longest match starting at the given string and + * return its length. Matches shorter or equal to prev_length are discarded, + * in which case the result is equal to prev_length and match_start is + * garbage. + * IN assertions: cur_match is the head of the hash chain for the current + * string (strstart) and its distance is <= MAX_DIST, and prev_length >= 1 + * OUT assertion: the match length is not greater than s->lookahead. 
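+ *
+ * (Editor's illustration, not part of the original zlib comment): for a
+ * window holding "abcabcabc" with strstart at offset 3, the chain walk can
+ * match the run starting at offset 0 for 6 bytes; such overlapping matches
+ * are legal in LZ77 and are capped here by s->lookahead.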
+ */ +function longest_match(s, cur_match) { + var chain_length = s.max_chain_length; /* max hash chain length */ + var scan = s.strstart; /* current string */ + var match; /* matched string */ + var len; /* length of current match */ + var best_len = s.prev_length; /* best match length so far */ + var nice_match = s.nice_match; /* stop if match long enough */ + var limit = (s.strstart > (s.w_size - MIN_LOOKAHEAD)) ? + s.strstart - (s.w_size - MIN_LOOKAHEAD) : 0/*NIL*/; + + var _win = s.window; // shortcut + + var wmask = s.w_mask; + var prev = s.prev; + + /* Stop when cur_match becomes <= limit. To simplify the code, + * we prevent matches with the string of window index 0. + */ + + var strend = s.strstart + MAX_MATCH; + var scan_end1 = _win[scan + best_len - 1]; + var scan_end = _win[scan + best_len]; + + /* The code is optimized for HASH_BITS >= 8 and MAX_MATCH-2 multiple of 16. + * It is easy to get rid of this optimization if necessary. + */ + // Assert(s->hash_bits >= 8 && MAX_MATCH == 258, "Code too clever"); + + /* Do not waste too much time if we already have a good match: */ + if (s.prev_length >= s.good_match) { + chain_length >>= 2; + } + /* Do not look for matches beyond the end of the input. This is necessary + * to make deflate deterministic. + */ + if (nice_match > s.lookahead) { nice_match = s.lookahead; } + + // Assert((ulg)s->strstart <= s->window_size-MIN_LOOKAHEAD, "need lookahead"); + + do { + // Assert(cur_match < s->strstart, "no future"); + match = cur_match; + + /* Skip to next match if the match length cannot increase + * or if the match length is less than 2. Note that the checks below + * for insufficient lookahead only occur occasionally for performance + * reasons. Therefore uninitialized memory will be accessed, and + * conditional jumps will be made that depend on those values. + * However the length of the match is limited to the lookahead, so + * the output of deflate is not affected by the uninitialized values. + */ + + if (_win[match + best_len] !== scan_end || + _win[match + best_len - 1] !== scan_end1 || + _win[match] !== _win[scan] || + _win[++match] !== _win[scan + 1]) { + continue; + } + + /* The check at best_len-1 can be removed because it will be made + * again later. (This heuristic is not always a win.) + * It is not necessary to compare scan[2] and match[2] since they + * are always equal when the other bytes match, given that + * the hash keys are equal and that HASH_BITS >= 8. + */ + scan += 2; + match++; + // Assert(*scan == *match, "match[2]?"); + + /* We check for insufficient lookahead only every 8th comparison; + * the 256th check will be made at strstart+258. 
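+ * (Editor's note on the loop below, inferred from its shape): the do/while
+ * is unrolled to compare eight byte pairs per iteration, so the
+ * scan < strend bound is only tested once per eight comparisons.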
+ */ + do { + /*jshint noempty:false*/ + } while (_win[++scan] === _win[++match] && _win[++scan] === _win[++match] && + _win[++scan] === _win[++match] && _win[++scan] === _win[++match] && + _win[++scan] === _win[++match] && _win[++scan] === _win[++match] && + _win[++scan] === _win[++match] && _win[++scan] === _win[++match] && + scan < strend); + + // Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan"); + + len = MAX_MATCH - (strend - scan); + scan = strend - MAX_MATCH; + + if (len > best_len) { + s.match_start = cur_match; + best_len = len; + if (len >= nice_match) { + break; + } + scan_end1 = _win[scan + best_len - 1]; + scan_end = _win[scan + best_len]; + } + } while ((cur_match = prev[cur_match & wmask]) > limit && --chain_length !== 0); + + if (best_len <= s.lookahead) { + return best_len; + } + return s.lookahead; +} + + +/* =========================================================================== + * Fill the window when the lookahead becomes insufficient. + * Updates strstart and lookahead. + * + * IN assertion: lookahead < MIN_LOOKAHEAD + * OUT assertions: strstart <= window_size-MIN_LOOKAHEAD + * At least one byte has been read, or avail_in == 0; reads are + * performed for at least two bytes (required for the zip translate_eol + * option -- not supported here). + */ +function fill_window(s) { + var _w_size = s.w_size; + var p, n, m, more, str; + + //Assert(s->lookahead < MIN_LOOKAHEAD, "already enough lookahead"); + + do { + more = s.window_size - s.lookahead - s.strstart; + + // JS ints have 32 bit, block below not needed + /* Deal with !@#$% 64K limit: */ + //if (sizeof(int) <= 2) { + // if (more == 0 && s->strstart == 0 && s->lookahead == 0) { + // more = wsize; + // + // } else if (more == (unsigned)(-1)) { + // /* Very unlikely, but possible on 16 bit machine if + // * strstart == 0 && lookahead == 1 (input done a byte at time) + // */ + // more--; + // } + //} + + + /* If the window is almost full and there is insufficient lookahead, + * move the upper half to the lower one to make room in the upper half. + */ + if (s.strstart >= _w_size + (_w_size - MIN_LOOKAHEAD)) { + + utils.arraySet(s.window, s.window, _w_size, _w_size, 0); + s.match_start -= _w_size; + s.strstart -= _w_size; + /* we now have strstart >= MAX_DIST */ + s.block_start -= _w_size; + + /* Slide the hash table (could be avoided with 32 bit values + at the expense of memory usage). We slide even when level == 0 + to keep the hash table consistent if we switch back to level > 0 + later. (Using level 0 permanently is not an optimal usage of + zlib, so we don't care about this pathological case.) + */ + + n = s.hash_size; + p = n; + do { + m = s.head[--p]; + s.head[p] = (m >= _w_size ? m - _w_size : 0); + } while (--n); + + n = _w_size; + p = n; + do { + m = s.prev[--p]; + s.prev[p] = (m >= _w_size ? m - _w_size : 0); + /* If n is not on any hash chain, prev[n] is garbage but + * its value will never be used. + */ + } while (--n); + + more += _w_size; + } + if (s.strm.avail_in === 0) { + break; + } + + /* If there was no sliding: + * strstart <= WSIZE+MAX_DIST-1 && lookahead <= MIN_LOOKAHEAD - 1 && + * more == window_size - lookahead - strstart + * => more >= window_size - (MIN_LOOKAHEAD-1 + WSIZE + MAX_DIST-1) + * => more >= window_size - 2*WSIZE + 2 + * In the BIG_MEM or MMAP case (not yet supported), + * window_size == input_size + MIN_LOOKAHEAD && + * strstart + s->lookahead <= input_size => more >= MIN_LOOKAHEAD. + * Otherwise, window_size == 2*WSIZE so more >= 2. 
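+ * (Editor's spelled-out step, assuming zlib's usual MAX_DIST ==
+ * WSIZE - MIN_LOOKAHEAD: more >= 2*WSIZE - (MIN_LOOKAHEAD - 1 + WSIZE +
+ * MAX_DIST - 1) = 2*WSIZE - (2*WSIZE - 2) = 2.)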
+ * If there was sliding, more >= WSIZE. So in all cases, more >= 2. + */ + //Assert(more >= 2, "more < 2"); + n = read_buf(s.strm, s.window, s.strstart + s.lookahead, more); + s.lookahead += n; + + /* Initialize the hash value now that we have some input: */ + if (s.lookahead + s.insert >= MIN_MATCH) { + str = s.strstart - s.insert; + s.ins_h = s.window[str]; + + /* UPDATE_HASH(s, s->ins_h, s->window[str + 1]); */ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[str + 1]) & s.hash_mask; +//#if MIN_MATCH != 3 +// Call update_hash() MIN_MATCH-3 more times +//#endif + while (s.insert) { + /* UPDATE_HASH(s, s->ins_h, s->window[str + MIN_MATCH-1]); */ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[str + MIN_MATCH - 1]) & s.hash_mask; + + s.prev[str & s.w_mask] = s.head[s.ins_h]; + s.head[s.ins_h] = str; + str++; + s.insert--; + if (s.lookahead + s.insert < MIN_MATCH) { + break; + } + } + } + /* If the whole input has less than MIN_MATCH bytes, ins_h is garbage, + * but this is not important since only literal bytes will be emitted. + */ + + } while (s.lookahead < MIN_LOOKAHEAD && s.strm.avail_in !== 0); + + /* If the WIN_INIT bytes after the end of the current data have never been + * written, then zero those bytes in order to avoid memory check reports of + * the use of uninitialized (or uninitialised as Julian writes) bytes by + * the longest match routines. Update the high water mark for the next + * time through here. WIN_INIT is set to MAX_MATCH since the longest match + * routines allow scanning to strstart + MAX_MATCH, ignoring lookahead. + */ +// if (s.high_water < s.window_size) { +// var curr = s.strstart + s.lookahead; +// var init = 0; +// +// if (s.high_water < curr) { +// /* Previous high water mark below current data -- zero WIN_INIT +// * bytes or up to end of window, whichever is less. +// */ +// init = s.window_size - curr; +// if (init > WIN_INIT) +// init = WIN_INIT; +// zmemzero(s->window + curr, (unsigned)init); +// s->high_water = curr + init; +// } +// else if (s->high_water < (ulg)curr + WIN_INIT) { +// /* High water mark at or above current data, but below current data +// * plus WIN_INIT -- zero out to current data plus WIN_INIT, or up +// * to end of window, whichever is less. +// */ +// init = (ulg)curr + WIN_INIT - s->high_water; +// if (init > s->window_size - s->high_water) +// init = s->window_size - s->high_water; +// zmemzero(s->window + s->high_water, (unsigned)init); +// s->high_water += init; +// } +// } +// +// Assert((ulg)s->strstart <= s->window_size - MIN_LOOKAHEAD, +// "not enough room for search"); +} + +/* =========================================================================== + * Copy without compression as much as possible from the input stream, return + * the current block state. + * This function does not insert new strings in the dictionary since + * uncompressible data is probably not useful. This function is used + * only for the level=0 compression option. + * NOTE: this function should be optimized to avoid extra copying from + * window to pending_buf. 
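+ *
+ * For reference (standard DEFLATE stored-block framing, not specific to
+ * this port): the "5 byte header" counted below is the 3-bit block header
+ * rounded up to a byte boundary, followed by LEN and its one's complement
+ * NLEN as two 16-bit words; LEN being 16 bits is also why each block is
+ * capped at 0xffff bytes.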
+ */ +function deflate_stored(s, flush) { + /* Stored blocks are limited to 0xffff bytes, pending_buf is limited + * to pending_buf_size, and each stored block has a 5 byte header: + */ + var max_block_size = 0xffff; + + if (max_block_size > s.pending_buf_size - 5) { + max_block_size = s.pending_buf_size - 5; + } + + /* Copy as much as possible from input to output: */ + for (;;) { + /* Fill the window as much as possible: */ + if (s.lookahead <= 1) { + + //Assert(s->strstart < s->w_size+MAX_DIST(s) || + // s->block_start >= (long)s->w_size, "slide too late"); +// if (!(s.strstart < s.w_size + (s.w_size - MIN_LOOKAHEAD) || +// s.block_start >= s.w_size)) { +// throw new Error("slide too late"); +// } + + fill_window(s); + if (s.lookahead === 0 && flush === Z_NO_FLUSH) { + return BS_NEED_MORE; + } + + if (s.lookahead === 0) { + break; + } + /* flush the current block */ + } + //Assert(s->block_start >= 0L, "block gone"); +// if (s.block_start < 0) throw new Error("block gone"); + + s.strstart += s.lookahead; + s.lookahead = 0; + + /* Emit a stored block if pending_buf will be full: */ + var max_start = s.block_start + max_block_size; + + if (s.strstart === 0 || s.strstart >= max_start) { + /* strstart == 0 is possible when wraparound on 16-bit machine */ + s.lookahead = s.strstart - max_start; + s.strstart = max_start; + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + + + } + /* Flush if we may have to slide, otherwise block_start may become + * negative and the data will be gone: + */ + if (s.strstart - s.block_start >= (s.w_size - MIN_LOOKAHEAD)) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + } + + s.insert = 0; + + if (flush === Z_FINISH) { + /*** FLUSH_BLOCK(s, 1); ***/ + flush_block_only(s, true); + if (s.strm.avail_out === 0) { + return BS_FINISH_STARTED; + } + /***/ + return BS_FINISH_DONE; + } + + if (s.strstart > s.block_start) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + + return BS_NEED_MORE; +} + +/* =========================================================================== + * Compress as much as possible from the input stream, return the current + * block state. + * This function does not perform lazy evaluation of matches and inserts + * new strings in the dictionary only for unmatched strings or for short + * matches. It is used only for the fast compression options. + */ +function deflate_fast(s, flush) { + var hash_head; /* head of the hash chain */ + var bflush; /* set if current block must be flushed */ + + for (;;) { + /* Make sure that we always have enough lookahead, except + * at the end of the input file. We need MAX_MATCH bytes + * for the next match, plus MIN_MATCH bytes to insert the + * string following the next match. + */ + if (s.lookahead < MIN_LOOKAHEAD) { + fill_window(s); + if (s.lookahead < MIN_LOOKAHEAD && flush === Z_NO_FLUSH) { + return BS_NEED_MORE; + } + if (s.lookahead === 0) { + break; /* flush the current block */ + } + } + + /* Insert the string window[strstart .. 
strstart+2] in the + * dictionary, and set hash_head to the head of the hash chain: + */ + hash_head = 0/*NIL*/; + if (s.lookahead >= MIN_MATCH) { + /*** INSERT_STRING(s, s.strstart, hash_head); ***/ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[s.strstart + MIN_MATCH - 1]) & s.hash_mask; + hash_head = s.prev[s.strstart & s.w_mask] = s.head[s.ins_h]; + s.head[s.ins_h] = s.strstart; + /***/ + } + + /* Find the longest match, discarding those <= prev_length. + * At this point we have always match_length < MIN_MATCH + */ + if (hash_head !== 0/*NIL*/ && ((s.strstart - hash_head) <= (s.w_size - MIN_LOOKAHEAD))) { + /* To simplify the code, we prevent matches with the string + * of window index 0 (in particular we have to avoid a match + * of the string with itself at the start of the input file). + */ + s.match_length = longest_match(s, hash_head); + /* longest_match() sets match_start */ + } + if (s.match_length >= MIN_MATCH) { + // check_match(s, s.strstart, s.match_start, s.match_length); // for debug only + + /*** _tr_tally_dist(s, s.strstart - s.match_start, + s.match_length - MIN_MATCH, bflush); ***/ + bflush = trees._tr_tally(s, s.strstart - s.match_start, s.match_length - MIN_MATCH); + + s.lookahead -= s.match_length; + + /* Insert new strings in the hash table only if the match length + * is not too large. This saves time but degrades compression. + */ + if (s.match_length <= s.max_lazy_match/*max_insert_length*/ && s.lookahead >= MIN_MATCH) { + s.match_length--; /* string at strstart already in table */ + do { + s.strstart++; + /*** INSERT_STRING(s, s.strstart, hash_head); ***/ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[s.strstart + MIN_MATCH - 1]) & s.hash_mask; + hash_head = s.prev[s.strstart & s.w_mask] = s.head[s.ins_h]; + s.head[s.ins_h] = s.strstart; + /***/ + /* strstart never exceeds WSIZE-MAX_MATCH, so there are + * always MIN_MATCH bytes ahead. + */ + } while (--s.match_length !== 0); + s.strstart++; + } else + { + s.strstart += s.match_length; + s.match_length = 0; + s.ins_h = s.window[s.strstart]; + /* UPDATE_HASH(s, s.ins_h, s.window[s.strstart+1]); */ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[s.strstart + 1]) & s.hash_mask; + +//#if MIN_MATCH != 3 +// Call UPDATE_HASH() MIN_MATCH-3 more times +//#endif + /* If lookahead < MIN_MATCH, ins_h is garbage, but it does not + * matter since it will be recomputed at next deflate call. + */ + } + } else { + /* No match, output a literal byte */ + //Tracevv((stderr,"%c", s.window[s.strstart])); + /*** _tr_tally_lit(s, s.window[s.strstart], bflush); ***/ + bflush = trees._tr_tally(s, 0, s.window[s.strstart]); + + s.lookahead--; + s.strstart++; + } + if (bflush) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + } + s.insert = ((s.strstart < (MIN_MATCH - 1)) ? s.strstart : MIN_MATCH - 1); + if (flush === Z_FINISH) { + /*** FLUSH_BLOCK(s, 1); ***/ + flush_block_only(s, true); + if (s.strm.avail_out === 0) { + return BS_FINISH_STARTED; + } + /***/ + return BS_FINISH_DONE; + } + if (s.last_lit) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + return BS_BLOCK_DONE; +} + +/* =========================================================================== + * Same as above, but achieves better compression. We use a lazy + * evaluation for matches: a match is finally adopted only if there is + * no better match at the next window position. 
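+ *
+ * Put differently: the match found at strstart is held back while the
+ * search is repeated at strstart + 1. Only when the second position yields
+ * no longer match is the held-back match emitted; otherwise the byte at
+ * strstart becomes a single literal and the longer match competes at the
+ * next step.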
+ */ +function deflate_slow(s, flush) { + var hash_head; /* head of hash chain */ + var bflush; /* set if current block must be flushed */ + + var max_insert; + + /* Process the input block. */ + for (;;) { + /* Make sure that we always have enough lookahead, except + * at the end of the input file. We need MAX_MATCH bytes + * for the next match, plus MIN_MATCH bytes to insert the + * string following the next match. + */ + if (s.lookahead < MIN_LOOKAHEAD) { + fill_window(s); + if (s.lookahead < MIN_LOOKAHEAD && flush === Z_NO_FLUSH) { + return BS_NEED_MORE; + } + if (s.lookahead === 0) { break; } /* flush the current block */ + } + + /* Insert the string window[strstart .. strstart+2] in the + * dictionary, and set hash_head to the head of the hash chain: + */ + hash_head = 0/*NIL*/; + if (s.lookahead >= MIN_MATCH) { + /*** INSERT_STRING(s, s.strstart, hash_head); ***/ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[s.strstart + MIN_MATCH - 1]) & s.hash_mask; + hash_head = s.prev[s.strstart & s.w_mask] = s.head[s.ins_h]; + s.head[s.ins_h] = s.strstart; + /***/ + } + + /* Find the longest match, discarding those <= prev_length. + */ + s.prev_length = s.match_length; + s.prev_match = s.match_start; + s.match_length = MIN_MATCH - 1; + + if (hash_head !== 0/*NIL*/ && s.prev_length < s.max_lazy_match && + s.strstart - hash_head <= (s.w_size - MIN_LOOKAHEAD)/*MAX_DIST(s)*/) { + /* To simplify the code, we prevent matches with the string + * of window index 0 (in particular we have to avoid a match + * of the string with itself at the start of the input file). + */ + s.match_length = longest_match(s, hash_head); + /* longest_match() sets match_start */ + + if (s.match_length <= 5 && + (s.strategy === Z_FILTERED || (s.match_length === MIN_MATCH && s.strstart - s.match_start > 4096/*TOO_FAR*/))) { + + /* If prev_match is also MIN_MATCH, match_start is garbage + * but we will ignore the current match anyway. + */ + s.match_length = MIN_MATCH - 1; + } + } + /* If there was a match at the previous step and the current + * match is not better, output the previous match: + */ + if (s.prev_length >= MIN_MATCH && s.match_length <= s.prev_length) { + max_insert = s.strstart + s.lookahead - MIN_MATCH; + /* Do not insert strings in hash table beyond this. */ + + //check_match(s, s.strstart-1, s.prev_match, s.prev_length); + + /***_tr_tally_dist(s, s.strstart - 1 - s.prev_match, + s.prev_length - MIN_MATCH, bflush);***/ + bflush = trees._tr_tally(s, s.strstart - 1 - s.prev_match, s.prev_length - MIN_MATCH); + /* Insert in hash table all strings up to the end of the match. + * strstart-1 and strstart are already inserted. If there is not + * enough lookahead, the last two strings are not inserted in + * the hash table. + */ + s.lookahead -= s.prev_length - 1; + s.prev_length -= 2; + do { + if (++s.strstart <= max_insert) { + /*** INSERT_STRING(s, s.strstart, hash_head); ***/ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[s.strstart + MIN_MATCH - 1]) & s.hash_mask; + hash_head = s.prev[s.strstart & s.w_mask] = s.head[s.ins_h]; + s.head[s.ins_h] = s.strstart; + /***/ + } + } while (--s.prev_length !== 0); + s.match_available = 0; + s.match_length = MIN_MATCH - 1; + s.strstart++; + + if (bflush) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + + } else if (s.match_available) { + /* If there was no match at the previous position, output a + * single literal. 
If there was a match but the current match + * is longer, truncate the previous match to a single literal. + */ + //Tracevv((stderr,"%c", s->window[s->strstart-1])); + /*** _tr_tally_lit(s, s.window[s.strstart-1], bflush); ***/ + bflush = trees._tr_tally(s, 0, s.window[s.strstart - 1]); + + if (bflush) { + /*** FLUSH_BLOCK_ONLY(s, 0) ***/ + flush_block_only(s, false); + /***/ + } + s.strstart++; + s.lookahead--; + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + } else { + /* There is no previous match to compare with, wait for + * the next step to decide. + */ + s.match_available = 1; + s.strstart++; + s.lookahead--; + } + } + //Assert (flush != Z_NO_FLUSH, "no flush?"); + if (s.match_available) { + //Tracevv((stderr,"%c", s->window[s->strstart-1])); + /*** _tr_tally_lit(s, s.window[s.strstart-1], bflush); ***/ + bflush = trees._tr_tally(s, 0, s.window[s.strstart - 1]); + + s.match_available = 0; + } + s.insert = s.strstart < MIN_MATCH - 1 ? s.strstart : MIN_MATCH - 1; + if (flush === Z_FINISH) { + /*** FLUSH_BLOCK(s, 1); ***/ + flush_block_only(s, true); + if (s.strm.avail_out === 0) { + return BS_FINISH_STARTED; + } + /***/ + return BS_FINISH_DONE; + } + if (s.last_lit) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + + return BS_BLOCK_DONE; +} + + +/* =========================================================================== + * For Z_RLE, simply look for runs of bytes, generate matches only of distance + * one. Do not maintain a hash table. (It will be regenerated if this run of + * deflate switches away from Z_RLE.) + */ +function deflate_rle(s, flush) { + var bflush; /* set if current block must be flushed */ + var prev; /* byte at distance one to match */ + var scan, strend; /* scan goes up to strend for length of run */ + + var _win = s.window; + + for (;;) { + /* Make sure that we always have enough lookahead, except + * at the end of the input file. We need MAX_MATCH bytes + * for the longest run, plus one for the unrolled loop. 
+ */ + if (s.lookahead <= MAX_MATCH) { + fill_window(s); + if (s.lookahead <= MAX_MATCH && flush === Z_NO_FLUSH) { + return BS_NEED_MORE; + } + if (s.lookahead === 0) { break; } /* flush the current block */ + } + + /* See how many times the previous byte repeats */ + s.match_length = 0; + if (s.lookahead >= MIN_MATCH && s.strstart > 0) { + scan = s.strstart - 1; + prev = _win[scan]; + if (prev === _win[++scan] && prev === _win[++scan] && prev === _win[++scan]) { + strend = s.strstart + MAX_MATCH; + do { + /*jshint noempty:false*/ + } while (prev === _win[++scan] && prev === _win[++scan] && + prev === _win[++scan] && prev === _win[++scan] && + prev === _win[++scan] && prev === _win[++scan] && + prev === _win[++scan] && prev === _win[++scan] && + scan < strend); + s.match_length = MAX_MATCH - (strend - scan); + if (s.match_length > s.lookahead) { + s.match_length = s.lookahead; + } + } + //Assert(scan <= s->window+(uInt)(s->window_size-1), "wild scan"); + } + + /* Emit match if have run of MIN_MATCH or longer, else emit literal */ + if (s.match_length >= MIN_MATCH) { + //check_match(s, s.strstart, s.strstart - 1, s.match_length); + + /*** _tr_tally_dist(s, 1, s.match_length - MIN_MATCH, bflush); ***/ + bflush = trees._tr_tally(s, 1, s.match_length - MIN_MATCH); + + s.lookahead -= s.match_length; + s.strstart += s.match_length; + s.match_length = 0; + } else { + /* No match, output a literal byte */ + //Tracevv((stderr,"%c", s->window[s->strstart])); + /*** _tr_tally_lit(s, s.window[s.strstart], bflush); ***/ + bflush = trees._tr_tally(s, 0, s.window[s.strstart]); + + s.lookahead--; + s.strstart++; + } + if (bflush) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + } + s.insert = 0; + if (flush === Z_FINISH) { + /*** FLUSH_BLOCK(s, 1); ***/ + flush_block_only(s, true); + if (s.strm.avail_out === 0) { + return BS_FINISH_STARTED; + } + /***/ + return BS_FINISH_DONE; + } + if (s.last_lit) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + return BS_BLOCK_DONE; +} + +/* =========================================================================== + * For Z_HUFFMAN_ONLY, do not look for matches. Do not maintain a hash table. + * (It will be regenerated if this run of deflate switches away from Huffman.) + */ +function deflate_huff(s, flush) { + var bflush; /* set if current block must be flushed */ + + for (;;) { + /* Make sure that we have a literal to write. 
*/ + if (s.lookahead === 0) { + fill_window(s); + if (s.lookahead === 0) { + if (flush === Z_NO_FLUSH) { + return BS_NEED_MORE; + } + break; /* flush the current block */ + } + } + + /* Output a literal byte */ + s.match_length = 0; + //Tracevv((stderr,"%c", s->window[s->strstart])); + /*** _tr_tally_lit(s, s.window[s.strstart], bflush); ***/ + bflush = trees._tr_tally(s, 0, s.window[s.strstart]); + s.lookahead--; + s.strstart++; + if (bflush) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + } + s.insert = 0; + if (flush === Z_FINISH) { + /*** FLUSH_BLOCK(s, 1); ***/ + flush_block_only(s, true); + if (s.strm.avail_out === 0) { + return BS_FINISH_STARTED; + } + /***/ + return BS_FINISH_DONE; + } + if (s.last_lit) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + return BS_BLOCK_DONE; +} + +/* Values for max_lazy_match, good_match and max_chain_length, depending on + * the desired pack level (0..9). The values given below have been tuned to + * exclude worst case performance for pathological files. Better values may be + * found for specific files. + */ +function Config(good_length, max_lazy, nice_length, max_chain, func) { + this.good_length = good_length; + this.max_lazy = max_lazy; + this.nice_length = nice_length; + this.max_chain = max_chain; + this.func = func; +} + +var configuration_table; + +configuration_table = [ + /* good lazy nice chain */ + new Config(0, 0, 0, 0, deflate_stored), /* 0 store only */ + new Config(4, 4, 8, 4, deflate_fast), /* 1 max speed, no lazy matches */ + new Config(4, 5, 16, 8, deflate_fast), /* 2 */ + new Config(4, 6, 32, 32, deflate_fast), /* 3 */ + + new Config(4, 4, 16, 16, deflate_slow), /* 4 lazy matches */ + new Config(8, 16, 32, 32, deflate_slow), /* 5 */ + new Config(8, 16, 128, 128, deflate_slow), /* 6 */ + new Config(8, 32, 128, 256, deflate_slow), /* 7 */ + new Config(32, 128, 258, 1024, deflate_slow), /* 8 */ + new Config(32, 258, 258, 4096, deflate_slow) /* 9 max compression */ +]; + + +/* =========================================================================== + * Initialize the "longest match" routines for a new zlib stream + */ +function lm_init(s) { + s.window_size = 2 * s.w_size; + + /*** CLEAR_HASH(s); ***/ + zero(s.head); // Fill with NIL (= 0); + + /* Set the default configuration parameters: + */ + s.max_lazy_match = configuration_table[s.level].max_lazy; + s.good_match = configuration_table[s.level].good_length; + s.nice_match = configuration_table[s.level].nice_length; + s.max_chain_length = configuration_table[s.level].max_chain; + + s.strstart = 0; + s.block_start = 0; + s.lookahead = 0; + s.insert = 0; + s.match_length = s.prev_length = MIN_MATCH - 1; + s.match_available = 0; + s.ins_h = 0; +} + + +function DeflateState() { + this.strm = null; /* pointer back to this zlib stream */ + this.status = 0; /* as the name implies */ + this.pending_buf = null; /* output still pending */ + this.pending_buf_size = 0; /* size of pending_buf */ + this.pending_out = 0; /* next pending byte to output to the stream */ + this.pending = 0; /* nb of bytes in the pending buffer */ + this.wrap = 0; /* bit 0 true for zlib, bit 1 true for gzip */ + this.gzhead = null; /* gzip header information to write */ + this.gzindex = 0; /* where in extra, name, or comment */ + this.method = Z_DEFLATED; /* can only be DEFLATED */ + this.last_flush = -1; /* value of flush param for previous 
deflate call */ + + this.w_size = 0; /* LZ77 window size (32K by default) */ + this.w_bits = 0; /* log2(w_size) (8..16) */ + this.w_mask = 0; /* w_size - 1 */ + + this.window = null; + /* Sliding window. Input bytes are read into the second half of the window, + * and move to the first half later to keep a dictionary of at least wSize + * bytes. With this organization, matches are limited to a distance of + * wSize-MAX_MATCH bytes, but this ensures that IO is always + * performed with a length multiple of the block size. + */ + + this.window_size = 0; + /* Actual size of window: 2*wSize, except when the user input buffer + * is directly used as sliding window. + */ + + this.prev = null; + /* Link to older string with same hash index. To limit the size of this + * array to 64K, this link is maintained only for the last 32K strings. + * An index in this array is thus a window index modulo 32K. + */ + + this.head = null; /* Heads of the hash chains or NIL. */ + + this.ins_h = 0; /* hash index of string to be inserted */ + this.hash_size = 0; /* number of elements in hash table */ + this.hash_bits = 0; /* log2(hash_size) */ + this.hash_mask = 0; /* hash_size-1 */ + + this.hash_shift = 0; + /* Number of bits by which ins_h must be shifted at each input + * step. It must be such that after MIN_MATCH steps, the oldest + * byte no longer takes part in the hash key, that is: + * hash_shift * MIN_MATCH >= hash_bits + */ + + this.block_start = 0; + /* Window position at the beginning of the current output block. Gets + * negative when the window is moved backwards. + */ + + this.match_length = 0; /* length of best match */ + this.prev_match = 0; /* previous match */ + this.match_available = 0; /* set if previous match exists */ + this.strstart = 0; /* start of string to insert */ + this.match_start = 0; /* start of matching string */ + this.lookahead = 0; /* number of valid bytes ahead in window */ + + this.prev_length = 0; + /* Length of the best match at previous step. Matches not greater than this + * are discarded. This is used in the lazy match evaluation. + */ + + this.max_chain_length = 0; + /* To speed up deflation, hash chains are never searched beyond this + * length. A higher limit improves compression ratio but degrades the + * speed. + */ + + this.max_lazy_match = 0; + /* Attempt to find a better match only when the current match is strictly + * smaller than this value. This mechanism is used only for compression + * levels >= 4. + */ + // That's alias to max_lazy_match, don't use directly + //this.max_insert_length = 0; + /* Insert new strings in the hash table only if the match length is not + * greater than this length. This saves time but degrades compression. + * max_insert_length is used only for compression levels <= 3. 
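+   * (Cross-reference: lm_init() fills this field from configuration_table,
+   * so levels 1-3 read it via deflate_fast() as max_insert_length, while
+   * levels 4-9 use it in deflate_slow() as the lazy-evaluation cutoff.)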
+   */
+
+  this.level = 0;     /* compression level (1..9) */
+  this.strategy = 0;  /* favor or force Huffman coding */
+
+  this.good_match = 0;
+  /* Use a faster search when the previous match is longer than this */
+
+  this.nice_match = 0; /* Stop searching when current match exceeds this */
+
+              /* used by trees.c: */
+
+  /* Didn't use ct_data typedef below to suppress compiler warning */
+
+  // struct ct_data_s dyn_ltree[HEAP_SIZE];   /* literal and length tree */
+  // struct ct_data_s dyn_dtree[2*D_CODES+1]; /* distance tree */
+  // struct ct_data_s bl_tree[2*BL_CODES+1];  /* Huffman tree for bit lengths */
+
+  // Use a flat array of DOUBLE size, with interleaved data,
+  // because JS does not support structs efficiently
+  this.dyn_ltree  = new utils.Buf16(HEAP_SIZE * 2);
+  this.dyn_dtree  = new utils.Buf16((2 * D_CODES + 1) * 2);
+  this.bl_tree    = new utils.Buf16((2 * BL_CODES + 1) * 2);
+  zero(this.dyn_ltree);
+  zero(this.dyn_dtree);
+  zero(this.bl_tree);
+
+  this.l_desc   = null;         /* desc. for literal tree */
+  this.d_desc   = null;         /* desc. for distance tree */
+  this.bl_desc  = null;         /* desc. for bit length tree */
+
+  //ush bl_count[MAX_BITS+1];
+  this.bl_count = new utils.Buf16(MAX_BITS + 1);
+  /* number of codes at each bit length for an optimal tree */
+
+  //int heap[2*L_CODES+1];      /* heap used to build the Huffman trees */
+  this.heap = new utils.Buf16(2 * L_CODES + 1);  /* heap used to build the Huffman trees */
+  zero(this.heap);
+
+  this.heap_len = 0;               /* number of elements in the heap */
+  this.heap_max = 0;               /* element of largest frequency */
+  /* The sons of heap[n] are heap[2*n] and heap[2*n+1]. heap[0] is not used.
+   * The same heap array is used to build all trees.
+   */
+
+  this.depth = new utils.Buf16(2 * L_CODES + 1); //uch depth[2*L_CODES+1];
+  zero(this.depth);
+  /* Depth of each subtree used as tie breaker for trees of equal frequency
+   */
+
+  this.l_buf = 0;                  /* buffer index for literals or lengths */
+
+  this.lit_bufsize = 0;
+  /* Size of match buffer for literals/lengths. There are 4 reasons for
+   * limiting lit_bufsize to 64K:
+   *   - frequencies can be kept in 16 bit counters
+   *   - if compression is not successful for the first block, all input
+   *     data is still in the window so we can still emit a stored block even
+   *     when input comes from standard input. (This can also be done for
+   *     all blocks if lit_bufsize is not greater than 32K.)
+   *   - if compression is not successful for a file smaller than 64K, we can
+   *     even emit a stored file instead of a stored block (saving 5 bytes).
+   *     This is applicable only for zip (not gzip or zlib).
+   *   - creating new Huffman trees less frequently may not provide fast
+   *     adaptation to changes in the input data statistics. (Take for
+   *     example a binary file with poorly compressible code followed by
+   *     a highly compressible string table.) Smaller buffer sizes give
+   *     fast adaptation but have of course the overhead of transmitting
+   *     trees more frequently.
+   *   - I can't count above 4
+   */
+
+  this.last_lit = 0;      /* running index in l_buf */
+
+  this.d_buf = 0;
+  /* Buffer index for distances. To simplify the code, d_buf and l_buf have
+   * the same number of elements. To use different lengths, an extra flag
+   * array would be necessary.
+ */ + + this.opt_len = 0; /* bit length of current block with optimal trees */ + this.static_len = 0; /* bit length of current block with static trees */ + this.matches = 0; /* number of string matches in current block */ + this.insert = 0; /* bytes at end of window left to insert */ + + + this.bi_buf = 0; + /* Output buffer. bits are inserted starting at the bottom (least + * significant bits). + */ + this.bi_valid = 0; + /* Number of valid bits in bi_buf. All bits above the last valid bit + * are always zero. + */ + + // Used for window memory init. We safely ignore it for JS. That makes + // sense only for pointers and memory check tools. + //this.high_water = 0; + /* High water mark offset in window for initialized bytes -- bytes above + * this are set to zero in order to avoid memory check warnings when + * longest match routines access bytes past the input. This is then + * updated to the new high water mark. + */ +} + + +function deflateResetKeep(strm) { + var s; + + if (!strm || !strm.state) { + return err(strm, Z_STREAM_ERROR); + } + + strm.total_in = strm.total_out = 0; + strm.data_type = Z_UNKNOWN; + + s = strm.state; + s.pending = 0; + s.pending_out = 0; + + if (s.wrap < 0) { + s.wrap = -s.wrap; + /* was made negative by deflate(..., Z_FINISH); */ + } + s.status = (s.wrap ? INIT_STATE : BUSY_STATE); + strm.adler = (s.wrap === 2) ? + 0 // crc32(0, Z_NULL, 0) + : + 1; // adler32(0, Z_NULL, 0) + s.last_flush = Z_NO_FLUSH; + trees._tr_init(s); + return Z_OK; +} + + +function deflateReset(strm) { + var ret = deflateResetKeep(strm); + if (ret === Z_OK) { + lm_init(strm.state); + } + return ret; +} + + +function deflateSetHeader(strm, head) { + if (!strm || !strm.state) { return Z_STREAM_ERROR; } + if (strm.state.wrap !== 2) { return Z_STREAM_ERROR; } + strm.state.gzhead = head; + return Z_OK; +} + + +function deflateInit2(strm, level, method, windowBits, memLevel, strategy) { + if (!strm) { // === Z_NULL + return Z_STREAM_ERROR; + } + var wrap = 1; + + if (level === Z_DEFAULT_COMPRESSION) { + level = 6; + } + + if (windowBits < 0) { /* suppress zlib wrapper */ + wrap = 0; + windowBits = -windowBits; + } + + else if (windowBits > 15) { + wrap = 2; /* write gzip wrapper instead */ + windowBits -= 16; + } + + + if (memLevel < 1 || memLevel > MAX_MEM_LEVEL || method !== Z_DEFLATED || + windowBits < 8 || windowBits > 15 || level < 0 || level > 9 || + strategy < 0 || strategy > Z_FIXED) { + return err(strm, Z_STREAM_ERROR); + } + + + if (windowBits === 8) { + windowBits = 9; + } + /* until 256-byte window bug fixed */ + + var s = new DeflateState(); + + strm.state = s; + s.strm = strm; + + s.wrap = wrap; + s.gzhead = null; + s.w_bits = windowBits; + s.w_size = 1 << s.w_bits; + s.w_mask = s.w_size - 1; + + s.hash_bits = memLevel + 7; + s.hash_size = 1 << s.hash_bits; + s.hash_mask = s.hash_size - 1; + s.hash_shift = ~~((s.hash_bits + MIN_MATCH - 1) / MIN_MATCH); + + s.window = new utils.Buf8(s.w_size * 2); + s.head = new utils.Buf16(s.hash_size); + s.prev = new utils.Buf16(s.w_size); + + // Don't need mem init magic for JS. 
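+  // For orientation, the sizes these formulas produce for the zlib defaults
+  // (windowBits = 15, memLevel = 8): w_size = 32768 and window is 64 KiB;
+  // hash_bits = 15, hash_size = 32768, hash_shift = 5; further down,
+  // lit_bufsize = 1 << 14 = 16384 and pending_buf_size = 64 KiB.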
+ //s.high_water = 0; /* nothing written to s->window yet */ + + s.lit_bufsize = 1 << (memLevel + 6); /* 16K elements by default */ + + s.pending_buf_size = s.lit_bufsize * 4; + + //overlay = (ushf *) ZALLOC(strm, s->lit_bufsize, sizeof(ush)+2); + //s->pending_buf = (uchf *) overlay; + s.pending_buf = new utils.Buf8(s.pending_buf_size); + + // It is offset from `s.pending_buf` (size is `s.lit_bufsize * 2`) + //s->d_buf = overlay + s->lit_bufsize/sizeof(ush); + s.d_buf = 1 * s.lit_bufsize; + + //s->l_buf = s->pending_buf + (1+sizeof(ush))*s->lit_bufsize; + s.l_buf = (1 + 2) * s.lit_bufsize; + + s.level = level; + s.strategy = strategy; + s.method = method; + + return deflateReset(strm); +} + +function deflateInit(strm, level) { + return deflateInit2(strm, level, Z_DEFLATED, MAX_WBITS, DEF_MEM_LEVEL, Z_DEFAULT_STRATEGY); +} + + +function deflate(strm, flush) { + var old_flush, s; + var beg, val; // for gzip header write only + + if (!strm || !strm.state || + flush > Z_BLOCK || flush < 0) { + return strm ? err(strm, Z_STREAM_ERROR) : Z_STREAM_ERROR; + } + + s = strm.state; + + if (!strm.output || + (!strm.input && strm.avail_in !== 0) || + (s.status === FINISH_STATE && flush !== Z_FINISH)) { + return err(strm, (strm.avail_out === 0) ? Z_BUF_ERROR : Z_STREAM_ERROR); + } + + s.strm = strm; /* just in case */ + old_flush = s.last_flush; + s.last_flush = flush; + + /* Write the header */ + if (s.status === INIT_STATE) { + + if (s.wrap === 2) { // GZIP header + strm.adler = 0; //crc32(0L, Z_NULL, 0); + put_byte(s, 31); + put_byte(s, 139); + put_byte(s, 8); + if (!s.gzhead) { // s->gzhead == Z_NULL + put_byte(s, 0); + put_byte(s, 0); + put_byte(s, 0); + put_byte(s, 0); + put_byte(s, 0); + put_byte(s, s.level === 9 ? 2 : + (s.strategy >= Z_HUFFMAN_ONLY || s.level < 2 ? + 4 : 0)); + put_byte(s, OS_CODE); + s.status = BUSY_STATE; + } + else { + put_byte(s, (s.gzhead.text ? 1 : 0) + + (s.gzhead.hcrc ? 2 : 0) + + (!s.gzhead.extra ? 0 : 4) + + (!s.gzhead.name ? 0 : 8) + + (!s.gzhead.comment ? 0 : 16) + ); + put_byte(s, s.gzhead.time & 0xff); + put_byte(s, (s.gzhead.time >> 8) & 0xff); + put_byte(s, (s.gzhead.time >> 16) & 0xff); + put_byte(s, (s.gzhead.time >> 24) & 0xff); + put_byte(s, s.level === 9 ? 2 : + (s.strategy >= Z_HUFFMAN_ONLY || s.level < 2 ? 
+ 4 : 0)); + put_byte(s, s.gzhead.os & 0xff); + if (s.gzhead.extra && s.gzhead.extra.length) { + put_byte(s, s.gzhead.extra.length & 0xff); + put_byte(s, (s.gzhead.extra.length >> 8) & 0xff); + } + if (s.gzhead.hcrc) { + strm.adler = crc32(strm.adler, s.pending_buf, s.pending, 0); + } + s.gzindex = 0; + s.status = EXTRA_STATE; + } + } + else // DEFLATE header + { + var header = (Z_DEFLATED + ((s.w_bits - 8) << 4)) << 8; + var level_flags = -1; + + if (s.strategy >= Z_HUFFMAN_ONLY || s.level < 2) { + level_flags = 0; + } else if (s.level < 6) { + level_flags = 1; + } else if (s.level === 6) { + level_flags = 2; + } else { + level_flags = 3; + } + header |= (level_flags << 6); + if (s.strstart !== 0) { header |= PRESET_DICT; } + header += 31 - (header % 31); + + s.status = BUSY_STATE; + putShortMSB(s, header); + + /* Save the adler32 of the preset dictionary: */ + if (s.strstart !== 0) { + putShortMSB(s, strm.adler >>> 16); + putShortMSB(s, strm.adler & 0xffff); + } + strm.adler = 1; // adler32(0L, Z_NULL, 0); + } + } + +//#ifdef GZIP + if (s.status === EXTRA_STATE) { + if (s.gzhead.extra/* != Z_NULL*/) { + beg = s.pending; /* start of bytes to update crc */ + + while (s.gzindex < (s.gzhead.extra.length & 0xffff)) { + if (s.pending === s.pending_buf_size) { + if (s.gzhead.hcrc && s.pending > beg) { + strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg); + } + flush_pending(strm); + beg = s.pending; + if (s.pending === s.pending_buf_size) { + break; + } + } + put_byte(s, s.gzhead.extra[s.gzindex] & 0xff); + s.gzindex++; + } + if (s.gzhead.hcrc && s.pending > beg) { + strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg); + } + if (s.gzindex === s.gzhead.extra.length) { + s.gzindex = 0; + s.status = NAME_STATE; + } + } + else { + s.status = NAME_STATE; + } + } + if (s.status === NAME_STATE) { + if (s.gzhead.name/* != Z_NULL*/) { + beg = s.pending; /* start of bytes to update crc */ + //int val; + + do { + if (s.pending === s.pending_buf_size) { + if (s.gzhead.hcrc && s.pending > beg) { + strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg); + } + flush_pending(strm); + beg = s.pending; + if (s.pending === s.pending_buf_size) { + val = 1; + break; + } + } + // JS specific: little magic to add zero terminator to end of string + if (s.gzindex < s.gzhead.name.length) { + val = s.gzhead.name.charCodeAt(s.gzindex++) & 0xff; + } else { + val = 0; + } + put_byte(s, val); + } while (val !== 0); + + if (s.gzhead.hcrc && s.pending > beg) { + strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg); + } + if (val === 0) { + s.gzindex = 0; + s.status = COMMENT_STATE; + } + } + else { + s.status = COMMENT_STATE; + } + } + if (s.status === COMMENT_STATE) { + if (s.gzhead.comment/* != Z_NULL*/) { + beg = s.pending; /* start of bytes to update crc */ + //int val; + + do { + if (s.pending === s.pending_buf_size) { + if (s.gzhead.hcrc && s.pending > beg) { + strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg); + } + flush_pending(strm); + beg = s.pending; + if (s.pending === s.pending_buf_size) { + val = 1; + break; + } + } + // JS specific: little magic to add zero terminator to end of string + if (s.gzindex < s.gzhead.comment.length) { + val = s.gzhead.comment.charCodeAt(s.gzindex++) & 0xff; + } else { + val = 0; + } + put_byte(s, val); + } while (val !== 0); + + if (s.gzhead.hcrc && s.pending > beg) { + strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg); + } + if (val === 0) { + s.status = HCRC_STATE; + } + } + else { + 
s.status = HCRC_STATE; + } + } + if (s.status === HCRC_STATE) { + if (s.gzhead.hcrc) { + if (s.pending + 2 > s.pending_buf_size) { + flush_pending(strm); + } + if (s.pending + 2 <= s.pending_buf_size) { + put_byte(s, strm.adler & 0xff); + put_byte(s, (strm.adler >> 8) & 0xff); + strm.adler = 0; //crc32(0L, Z_NULL, 0); + s.status = BUSY_STATE; + } + } + else { + s.status = BUSY_STATE; + } + } +//#endif + + /* Flush as much pending output as possible */ + if (s.pending !== 0) { + flush_pending(strm); + if (strm.avail_out === 0) { + /* Since avail_out is 0, deflate will be called again with + * more output space, but possibly with both pending and + * avail_in equal to zero. There won't be anything to do, + * but this is not an error situation so make sure we + * return OK instead of BUF_ERROR at next call of deflate: + */ + s.last_flush = -1; + return Z_OK; + } + + /* Make sure there is something to do and avoid duplicate consecutive + * flushes. For repeated and useless calls with Z_FINISH, we keep + * returning Z_STREAM_END instead of Z_BUF_ERROR. + */ + } else if (strm.avail_in === 0 && rank(flush) <= rank(old_flush) && + flush !== Z_FINISH) { + return err(strm, Z_BUF_ERROR); + } + + /* User must not provide more input after the first FINISH: */ + if (s.status === FINISH_STATE && strm.avail_in !== 0) { + return err(strm, Z_BUF_ERROR); + } + + /* Start a new block or continue the current one. + */ + if (strm.avail_in !== 0 || s.lookahead !== 0 || + (flush !== Z_NO_FLUSH && s.status !== FINISH_STATE)) { + var bstate = (s.strategy === Z_HUFFMAN_ONLY) ? deflate_huff(s, flush) : + (s.strategy === Z_RLE ? deflate_rle(s, flush) : + configuration_table[s.level].func(s, flush)); + + if (bstate === BS_FINISH_STARTED || bstate === BS_FINISH_DONE) { + s.status = FINISH_STATE; + } + if (bstate === BS_NEED_MORE || bstate === BS_FINISH_STARTED) { + if (strm.avail_out === 0) { + s.last_flush = -1; + /* avoid BUF_ERROR next call, see above */ + } + return Z_OK; + /* If flush != Z_NO_FLUSH && avail_out == 0, the next call + * of deflate should use the same flush parameter to make sure + * that the flush is complete. So we don't have to output an + * empty block here, this will be done at next call. This also + * ensures that for a very small output buffer, we emit at most + * one empty block. + */ + } + if (bstate === BS_BLOCK_DONE) { + if (flush === Z_PARTIAL_FLUSH) { + trees._tr_align(s); + } + else if (flush !== Z_BLOCK) { /* FULL_FLUSH or SYNC_FLUSH */ + + trees._tr_stored_block(s, 0, 0, false); + /* For a full flush, this empty block will be recognized + * as a special marker by inflate_sync(). 
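+           * (Concretely: an empty stored block contributes the bytes
+           * 00 00 FF FF once the three header bits are flushed to a byte
+           * boundary, and that is the pattern inflate_sync() scans for.)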
+ */ + if (flush === Z_FULL_FLUSH) { + /*** CLEAR_HASH(s); ***/ /* forget history */ + zero(s.head); // Fill with NIL (= 0); + + if (s.lookahead === 0) { + s.strstart = 0; + s.block_start = 0; + s.insert = 0; + } + } + } + flush_pending(strm); + if (strm.avail_out === 0) { + s.last_flush = -1; /* avoid BUF_ERROR at next call, see above */ + return Z_OK; + } + } + } + //Assert(strm->avail_out > 0, "bug2"); + //if (strm.avail_out <= 0) { throw new Error("bug2");} + + if (flush !== Z_FINISH) { return Z_OK; } + if (s.wrap <= 0) { return Z_STREAM_END; } + + /* Write the trailer */ + if (s.wrap === 2) { + put_byte(s, strm.adler & 0xff); + put_byte(s, (strm.adler >> 8) & 0xff); + put_byte(s, (strm.adler >> 16) & 0xff); + put_byte(s, (strm.adler >> 24) & 0xff); + put_byte(s, strm.total_in & 0xff); + put_byte(s, (strm.total_in >> 8) & 0xff); + put_byte(s, (strm.total_in >> 16) & 0xff); + put_byte(s, (strm.total_in >> 24) & 0xff); + } + else + { + putShortMSB(s, strm.adler >>> 16); + putShortMSB(s, strm.adler & 0xffff); + } + + flush_pending(strm); + /* If avail_out is zero, the application will call deflate again + * to flush the rest. + */ + if (s.wrap > 0) { s.wrap = -s.wrap; } + /* write the trailer only once! */ + return s.pending !== 0 ? Z_OK : Z_STREAM_END; +} + +function deflateEnd(strm) { + var status; + + if (!strm/*== Z_NULL*/ || !strm.state/*== Z_NULL*/) { + return Z_STREAM_ERROR; + } + + status = strm.state.status; + if (status !== INIT_STATE && + status !== EXTRA_STATE && + status !== NAME_STATE && + status !== COMMENT_STATE && + status !== HCRC_STATE && + status !== BUSY_STATE && + status !== FINISH_STATE + ) { + return err(strm, Z_STREAM_ERROR); + } + + strm.state = null; + + return status === BUSY_STATE ? err(strm, Z_DATA_ERROR) : Z_OK; +} + + +/* ========================================================================= + * Initializes the compression dictionary from the given byte + * sequence without producing any compressed output. 
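+ *
+ * A minimal usage sketch (illustrative only; strm and dict are hypothetical
+ * names, and per the checks below the stream must use the zlib wrapper and
+ * must not have consumed any input yet):
+ *
+ *   deflateInit(strm, 6);              // wrap === 1 (zlib wrapper)
+ *   deflateSetDictionary(strm, dict);  // before the first deflate() call
+ *   // ... deflate(strm, flush) as usual; the decompressor must pass the
+ *   // same bytes to inflateSetDictionary() when inflate() returns
+ *   // Z_NEED_DICT.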
+ */ +function deflateSetDictionary(strm, dictionary) { + var dictLength = dictionary.length; + + var s; + var str, n; + var wrap; + var avail; + var next; + var input; + var tmpDict; + + if (!strm/*== Z_NULL*/ || !strm.state/*== Z_NULL*/) { + return Z_STREAM_ERROR; + } + + s = strm.state; + wrap = s.wrap; + + if (wrap === 2 || (wrap === 1 && s.status !== INIT_STATE) || s.lookahead) { + return Z_STREAM_ERROR; + } + + /* when using zlib wrappers, compute Adler-32 for provided dictionary */ + if (wrap === 1) { + /* adler32(strm->adler, dictionary, dictLength); */ + strm.adler = adler32(strm.adler, dictionary, dictLength, 0); + } + + s.wrap = 0; /* avoid computing Adler-32 in read_buf */ + + /* if dictionary would fill window, just replace the history */ + if (dictLength >= s.w_size) { + if (wrap === 0) { /* already empty otherwise */ + /*** CLEAR_HASH(s); ***/ + zero(s.head); // Fill with NIL (= 0); + s.strstart = 0; + s.block_start = 0; + s.insert = 0; + } + /* use the tail */ + // dictionary = dictionary.slice(dictLength - s.w_size); + tmpDict = new utils.Buf8(s.w_size); + utils.arraySet(tmpDict, dictionary, dictLength - s.w_size, s.w_size, 0); + dictionary = tmpDict; + dictLength = s.w_size; + } + /* insert dictionary into window and hash */ + avail = strm.avail_in; + next = strm.next_in; + input = strm.input; + strm.avail_in = dictLength; + strm.next_in = 0; + strm.input = dictionary; + fill_window(s); + while (s.lookahead >= MIN_MATCH) { + str = s.strstart; + n = s.lookahead - (MIN_MATCH - 1); + do { + /* UPDATE_HASH(s, s->ins_h, s->window[str + MIN_MATCH-1]); */ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[str + MIN_MATCH - 1]) & s.hash_mask; + + s.prev[str & s.w_mask] = s.head[s.ins_h]; + + s.head[s.ins_h] = str; + str++; + } while (--n); + s.strstart = str; + s.lookahead = MIN_MATCH - 1; + fill_window(s); + } + s.strstart += s.lookahead; + s.block_start = s.strstart; + s.insert = s.lookahead; + s.lookahead = 0; + s.match_length = s.prev_length = MIN_MATCH - 1; + s.match_available = 0; + strm.next_in = next; + strm.input = input; + strm.avail_in = avail; + s.wrap = wrap; + return Z_OK; +} + + +exports.deflateInit = deflateInit; +exports.deflateInit2 = deflateInit2; +exports.deflateReset = deflateReset; +exports.deflateResetKeep = deflateResetKeep; +exports.deflateSetHeader = deflateSetHeader; +exports.deflate = deflate; +exports.deflateEnd = deflateEnd; +exports.deflateSetDictionary = deflateSetDictionary; +exports.deflateInfo = 'pako deflate (from Nodeca project)'; + +/* Not implemented +exports.deflateBound = deflateBound; +exports.deflateCopy = deflateCopy; +exports.deflateParams = deflateParams; +exports.deflatePending = deflatePending; +exports.deflatePrime = deflatePrime; +exports.deflateTune = deflateTune; +*/ + + +/***/ }), + +/***/ 35105: +/***/ ((module) => { + +"use strict"; + + +// (C) 1995-2013 Jean-loup Gailly and Mark Adler +// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin +// +// This software is provided 'as-is', without any express or implied +// warranty. In no event will the authors be held liable for any damages +// arising from the use of this software. +// +// Permission is granted to anyone to use this software for any purpose, +// including commercial applications, and to alter it and redistribute it +// freely, subject to the following restrictions: +// +// 1. The origin of this software must not be misrepresented; you must not +// claim that you wrote the original software. 
If you use this software
+//   in a product, an acknowledgment in the product documentation would be
+//   appreciated but is not required.
+// 2. Altered source versions must be plainly marked as such, and must not be
+//   misrepresented as being the original software.
+// 3. This notice may not be removed or altered from any source distribution.
+
+function GZheader() {
+  /* true if compressed data believed to be text */
+  this.text       = 0;
+  /* modification time */
+  this.time       = 0;
+  /* extra flags (not used when writing a gzip file) */
+  this.xflags     = 0;
+  /* operating system */
+  this.os         = 0;
+  /* pointer to extra field or Z_NULL if none */
+  this.extra      = null;
+  /* extra field length (valid if extra != Z_NULL) */
+  this.extra_len  = 0; // Actually, we don't need it in JS,
+                       // but it is kept to minimize changes from the C code
+
+  //
+  // Setting up size limits is unnecessary: in JS we do not preallocate
+  // memory, and inflate uses a constant 65536-byte limit.
+  //
+
+  /* space at extra (only when reading header) */
+  // this.extra_max  = 0;
+  /* pointer to zero-terminated file name or Z_NULL */
+  this.name       = '';
+  /* space at name (only when reading header) */
+  // this.name_max   = 0;
+  /* pointer to zero-terminated comment or Z_NULL */
+  this.comment    = '';
+  /* space at comment (only when reading header) */
+  // this.comm_max   = 0;
+  /* true if there was or will be a header crc */
+  this.hcrc       = 0;
+  /* true when done reading gzip header (not used when writing a gzip file) */
+  this.done       = false;
+}
+
+module.exports = GZheader;
+
+
+/***/ }),
+
+/***/ 65349:
+/***/ ((module) => {
+
+"use strict";
+
+
+// (C) 1995-2013 Jean-loup Gailly and Mark Adler
+// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin
+//
+// This software is provided 'as-is', without any express or implied
+// warranty. In no event will the authors be held liable for any damages
+// arising from the use of this software.
+//
+// Permission is granted to anyone to use this software for any purpose,
+// including commercial applications, and to alter it and redistribute it
+// freely, subject to the following restrictions:
+//
+// 1. The origin of this software must not be misrepresented; you must not
+//   claim that you wrote the original software. If you use this software
+//   in a product, an acknowledgment in the product documentation would be
+//   appreciated but is not required.
+// 2. Altered source versions must be plainly marked as such, and must not be
+//   misrepresented as being the original software.
+// 3. This notice may not be removed or altered from any source distribution.
+
+// See state defs from inflate.js
+var BAD = 30;       /* got a data error -- remain here until reset */
+var TYPE = 12;      /* i: waiting for type bits, including last-flag bit */
+
+/*
+   Decode literal, length, and distance codes and write out the resulting
+   literal and match bytes until either not enough input or output is
+   available, an end-of-block is encountered, or a data error is encountered.
+   When large enough input and output buffers are supplied to inflate(), for
+   example, a 16K input buffer and a 64K output buffer, more than 95% of the
+   inflate execution time is spent in this routine.
+ + Entry assumptions: + + state.mode === LEN + strm.avail_in >= 6 + strm.avail_out >= 258 + start >= strm.avail_out + state.bits < 8 + + On return, state.mode is one of: + + LEN -- ran out of enough output space or enough available input + TYPE -- reached end of block code, inflate() to interpret next block + BAD -- error in block data + + Notes: + + - The maximum input bits used by a length/distance pair is 15 bits for the + length code, 5 bits for the length extra, 15 bits for the distance code, + and 13 bits for the distance extra. This totals 48 bits, or six bytes. + Therefore if strm.avail_in >= 6, then there is enough input to avoid + checking for available input while decoding. + + - The maximum bytes that a single length/distance pair can output is 258 + bytes, which is the maximum length that can be coded. inflate_fast() + requires strm.avail_out >= 258 for each loop to avoid checking for + output space. + */ +module.exports = function inflate_fast(strm, start) { + var state; + var _in; /* local strm.input */ + var last; /* have enough input while in < last */ + var _out; /* local strm.output */ + var beg; /* inflate()'s initial strm.output */ + var end; /* while out < end, enough space available */ +//#ifdef INFLATE_STRICT + var dmax; /* maximum distance from zlib header */ +//#endif + var wsize; /* window size or zero if not using window */ + var whave; /* valid bytes in the window */ + var wnext; /* window write index */ + // Use `s_window` instead `window`, avoid conflict with instrumentation tools + var s_window; /* allocated sliding window, if wsize != 0 */ + var hold; /* local strm.hold */ + var bits; /* local strm.bits */ + var lcode; /* local strm.lencode */ + var dcode; /* local strm.distcode */ + var lmask; /* mask for first level of length codes */ + var dmask; /* mask for first level of distance codes */ + var here; /* retrieved table entry */ + var op; /* code bits, operation, extra bits, or */ + /* window position, window bytes to copy */ + var len; /* match length, unused bytes */ + var dist; /* match distance */ + var from; /* where to copy match from */ + var from_source; + + + var input, output; // JS specific, because we have no pointers + + /* copy state to local variables */ + state = strm.state; + //here = state.here; + _in = strm.next_in; + input = strm.input; + last = _in + (strm.avail_in - 5); + _out = strm.next_out; + output = strm.output; + beg = _out - (start - strm.avail_out); + end = _out + (strm.avail_out - 257); +//#ifdef INFLATE_STRICT + dmax = state.dmax; +//#endif + wsize = state.wsize; + whave = state.whave; + wnext = state.wnext; + s_window = state.window; + hold = state.hold; + bits = state.bits; + lcode = state.lencode; + dcode = state.distcode; + lmask = (1 << state.lenbits) - 1; + dmask = (1 << state.distbits) - 1; + + + /* decode literals and length/distances until end-of-block or not enough + input data or output space */ + + top: + do { + if (bits < 15) { + hold += input[_in++] << bits; + bits += 8; + hold += input[_in++] << bits; + bits += 8; + } + + here = lcode[hold & lmask]; + + dolen: + for (;;) { // Goto emulation + op = here >>> 24/*here.bits*/; + hold >>>= op; + bits -= op; + op = (here >>> 16) & 0xff/*here.op*/; + if (op === 0) { /* literal */ + //Tracevv((stderr, here.val >= 0x20 && here.val < 0x7f ? 
+ // "inflate: literal '%c'\n" : + // "inflate: literal 0x%02x\n", here.val)); + output[_out++] = here & 0xffff/*here.val*/; + } + else if (op & 16) { /* length base */ + len = here & 0xffff/*here.val*/; + op &= 15; /* number of extra bits */ + if (op) { + if (bits < op) { + hold += input[_in++] << bits; + bits += 8; + } + len += hold & ((1 << op) - 1); + hold >>>= op; + bits -= op; + } + //Tracevv((stderr, "inflate: length %u\n", len)); + if (bits < 15) { + hold += input[_in++] << bits; + bits += 8; + hold += input[_in++] << bits; + bits += 8; + } + here = dcode[hold & dmask]; + + dodist: + for (;;) { // goto emulation + op = here >>> 24/*here.bits*/; + hold >>>= op; + bits -= op; + op = (here >>> 16) & 0xff/*here.op*/; + + if (op & 16) { /* distance base */ + dist = here & 0xffff/*here.val*/; + op &= 15; /* number of extra bits */ + if (bits < op) { + hold += input[_in++] << bits; + bits += 8; + if (bits < op) { + hold += input[_in++] << bits; + bits += 8; + } + } + dist += hold & ((1 << op) - 1); +//#ifdef INFLATE_STRICT + if (dist > dmax) { + strm.msg = 'invalid distance too far back'; + state.mode = BAD; + break top; + } +//#endif + hold >>>= op; + bits -= op; + //Tracevv((stderr, "inflate: distance %u\n", dist)); + op = _out - beg; /* max distance in output */ + if (dist > op) { /* see if copy from window */ + op = dist - op; /* distance back in window */ + if (op > whave) { + if (state.sane) { + strm.msg = 'invalid distance too far back'; + state.mode = BAD; + break top; + } + +// (!) This block is disabled in zlib defaults, +// don't enable it for binary compatibility +//#ifdef INFLATE_ALLOW_INVALID_DISTANCE_TOOFAR_ARRR +// if (len <= op - whave) { +// do { +// output[_out++] = 0; +// } while (--len); +// continue top; +// } +// len -= op - whave; +// do { +// output[_out++] = 0; +// } while (--op > whave); +// if (op === 0) { +// from = _out - dist; +// do { +// output[_out++] = output[from++]; +// } while (--len); +// continue top; +// } +//#endif + } + from = 0; // window index + from_source = s_window; + if (wnext === 0) { /* very common case */ + from += wsize - op; + if (op < len) { /* some from window */ + len -= op; + do { + output[_out++] = s_window[from++]; + } while (--op); + from = _out - dist; /* rest from output */ + from_source = output; + } + } + else if (wnext < op) { /* wrap around window */ + from += wsize + wnext - op; + op -= wnext; + if (op < len) { /* some from end of window */ + len -= op; + do { + output[_out++] = s_window[from++]; + } while (--op); + from = 0; + if (wnext < len) { /* some from start of window */ + op = wnext; + len -= op; + do { + output[_out++] = s_window[from++]; + } while (--op); + from = _out - dist; /* rest from output */ + from_source = output; + } + } + } + else { /* contiguous in window */ + from += wnext - op; + if (op < len) { /* some from window */ + len -= op; + do { + output[_out++] = s_window[from++]; + } while (--op); + from = _out - dist; /* rest from output */ + from_source = output; + } + } + while (len > 2) { + output[_out++] = from_source[from++]; + output[_out++] = from_source[from++]; + output[_out++] = from_source[from++]; + len -= 3; + } + if (len) { + output[_out++] = from_source[from++]; + if (len > 1) { + output[_out++] = from_source[from++]; + } + } + } + else { + from = _out - dist; /* copy direct from output */ + do { /* minimum length is three */ + output[_out++] = output[from++]; + output[_out++] = output[from++]; + output[_out++] = output[from++]; + len -= 3; + } while (len > 2); + if (len) { + 
output[_out++] = output[from++]; + if (len > 1) { + output[_out++] = output[from++]; + } + } + } + } + else if ((op & 64) === 0) { /* 2nd level distance code */ + here = dcode[(here & 0xffff)/*here.val*/ + (hold & ((1 << op) - 1))]; + continue dodist; + } + else { + strm.msg = 'invalid distance code'; + state.mode = BAD; + break top; + } + + break; // need to emulate goto via "continue" + } + } + else if ((op & 64) === 0) { /* 2nd level length code */ + here = lcode[(here & 0xffff)/*here.val*/ + (hold & ((1 << op) - 1))]; + continue dolen; + } + else if (op & 32) { /* end-of-block */ + //Tracevv((stderr, "inflate: end of block\n")); + state.mode = TYPE; + break top; + } + else { + strm.msg = 'invalid literal/length code'; + state.mode = BAD; + break top; + } + + break; // need to emulate goto via "continue" + } + } while (_in < last && _out < end); + + /* return unused bytes (on entry, bits < 8, so in won't go too far back) */ + len = bits >> 3; + _in -= len; + bits -= len << 3; + hold &= (1 << bits) - 1; + + /* update state and return */ + strm.next_in = _in; + strm.next_out = _out; + strm.avail_in = (_in < last ? 5 + (last - _in) : 5 - (_in - last)); + strm.avail_out = (_out < end ? 257 + (end - _out) : 257 - (_out - end)); + state.hold = hold; + state.bits = bits; + return; +}; + + +/***/ }), + +/***/ 90409: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +// (C) 1995-2013 Jean-loup Gailly and Mark Adler +// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin +// +// This software is provided 'as-is', without any express or implied +// warranty. In no event will the authors be held liable for any damages +// arising from the use of this software. +// +// Permission is granted to anyone to use this software for any purpose, +// including commercial applications, and to alter it and redistribute it +// freely, subject to the following restrictions: +// +// 1. The origin of this software must not be misrepresented; you must not +// claim that you wrote the original software. If you use this software +// in a product, an acknowledgment in the product documentation would be +// appreciated but is not required. +// 2. Altered source versions must be plainly marked as such, and must not be +// misrepresented as being the original software. +// 3. This notice may not be removed or altered from any source distribution. + +var utils = __nccwpck_require__(5483); +var adler32 = __nccwpck_require__(86924); +var crc32 = __nccwpck_require__(87242); +var inflate_fast = __nccwpck_require__(65349); +var inflate_table = __nccwpck_require__(56895); + +var CODES = 0; +var LENS = 1; +var DISTS = 2; + +/* Public constants ==========================================================*/ +/* ===========================================================================*/ + + +/* Allowed flush values; see deflate() and inflate() below for details */ +//var Z_NO_FLUSH = 0; +//var Z_PARTIAL_FLUSH = 1; +//var Z_SYNC_FLUSH = 2; +//var Z_FULL_FLUSH = 3; +var Z_FINISH = 4; +var Z_BLOCK = 5; +var Z_TREES = 6; + + +/* Return codes for the compression/decompression functions. Negative values + * are errors, positive values are used for special but normal events. 
+ */ +var Z_OK = 0; +var Z_STREAM_END = 1; +var Z_NEED_DICT = 2; +//var Z_ERRNO = -1; +var Z_STREAM_ERROR = -2; +var Z_DATA_ERROR = -3; +var Z_MEM_ERROR = -4; +var Z_BUF_ERROR = -5; +//var Z_VERSION_ERROR = -6; + +/* The deflate compression method */ +var Z_DEFLATED = 8; + + +/* STATES ====================================================================*/ +/* ===========================================================================*/ + + +var HEAD = 1; /* i: waiting for magic header */ +var FLAGS = 2; /* i: waiting for method and flags (gzip) */ +var TIME = 3; /* i: waiting for modification time (gzip) */ +var OS = 4; /* i: waiting for extra flags and operating system (gzip) */ +var EXLEN = 5; /* i: waiting for extra length (gzip) */ +var EXTRA = 6; /* i: waiting for extra bytes (gzip) */ +var NAME = 7; /* i: waiting for end of file name (gzip) */ +var COMMENT = 8; /* i: waiting for end of comment (gzip) */ +var HCRC = 9; /* i: waiting for header crc (gzip) */ +var DICTID = 10; /* i: waiting for dictionary check value */ +var DICT = 11; /* waiting for inflateSetDictionary() call */ +var TYPE = 12; /* i: waiting for type bits, including last-flag bit */ +var TYPEDO = 13; /* i: same, but skip check to exit inflate on new block */ +var STORED = 14; /* i: waiting for stored size (length and complement) */ +var COPY_ = 15; /* i/o: same as COPY below, but only first time in */ +var COPY = 16; /* i/o: waiting for input or output to copy stored block */ +var TABLE = 17; /* i: waiting for dynamic block table lengths */ +var LENLENS = 18; /* i: waiting for code length code lengths */ +var CODELENS = 19; /* i: waiting for length/lit and distance code lengths */ +var LEN_ = 20; /* i: same as LEN below, but only first time in */ +var LEN = 21; /* i: waiting for length/lit/eob code */ +var LENEXT = 22; /* i: waiting for length extra bits */ +var DIST = 23; /* i: waiting for distance code */ +var DISTEXT = 24; /* i: waiting for distance extra bits */ +var MATCH = 25; /* o: waiting for output space to copy string */ +var LIT = 26; /* o: waiting for output space to write literal */ +var CHECK = 27; /* i: waiting for 32-bit check value */ +var LENGTH = 28; /* i: waiting for 32-bit length (gzip) */ +var DONE = 29; /* finished check, done -- remain here until reset */ +var BAD = 30; /* got a data error -- remain here until reset */ +var MEM = 31; /* got an inflate() memory error -- remain here until reset */ +var SYNC = 32; /* looking for synchronization bytes to restart inflate() */ + +/* ===========================================================================*/ + + + +var ENOUGH_LENS = 852; +var ENOUGH_DISTS = 592; +//var ENOUGH = (ENOUGH_LENS+ENOUGH_DISTS); + +var MAX_WBITS = 15; +/* 32K LZ77 window */ +var DEF_WBITS = MAX_WBITS; + + +function zswap32(q) { + return (((q >>> 24) & 0xff) + + ((q >>> 8) & 0xff00) + + ((q & 0xff00) << 8) + + ((q & 0xff) << 24)); +} + + +function InflateState() { + this.mode = 0; /* current inflate mode */ + this.last = false; /* true if processing last block */ + this.wrap = 0; /* bit 0 true for zlib, bit 1 true for gzip */ + this.havedict = false; /* true if dictionary provided */ + this.flags = 0; /* gzip header method and flags (0 if zlib) */ + this.dmax = 0; /* zlib header max distance (INFLATE_STRICT) */ + this.check = 0; /* protected copy of check value */ + this.total = 0; /* protected copy of output count */ + // TODO: may be {} + this.head = null; /* where to save gzip header information */ + + /* sliding window */ + this.wbits = 0; /* log base 2 of requested 
window size */ + this.wsize = 0; /* window size or zero if not using window */ + this.whave = 0; /* valid bytes in the window */ + this.wnext = 0; /* window write index */ + this.window = null; /* allocated sliding window, if needed */ + + /* bit accumulator */ + this.hold = 0; /* input bit accumulator */ + this.bits = 0; /* number of bits in "in" */ + + /* for string and stored block copying */ + this.length = 0; /* literal or length of data to copy */ + this.offset = 0; /* distance back to copy string from */ + + /* for table and code decoding */ + this.extra = 0; /* extra bits needed */ + + /* fixed and dynamic code tables */ + this.lencode = null; /* starting table for length/literal codes */ + this.distcode = null; /* starting table for distance codes */ + this.lenbits = 0; /* index bits for lencode */ + this.distbits = 0; /* index bits for distcode */ + + /* dynamic table building */ + this.ncode = 0; /* number of code length code lengths */ + this.nlen = 0; /* number of length code lengths */ + this.ndist = 0; /* number of distance code lengths */ + this.have = 0; /* number of code lengths in lens[] */ + this.next = null; /* next available space in codes[] */ + + this.lens = new utils.Buf16(320); /* temporary storage for code lengths */ + this.work = new utils.Buf16(288); /* work area for code table building */ + + /* + because we don't have pointers in js, we use lencode and distcode directly + as buffers so we don't need codes + */ + //this.codes = new utils.Buf32(ENOUGH); /* space for code tables */ + this.lendyn = null; /* dynamic table for length/literal codes (JS specific) */ + this.distdyn = null; /* dynamic table for distance codes (JS specific) */ + this.sane = 0; /* if false, allow invalid distance too far */ + this.back = 0; /* bits back of last unprocessed length/lit */ + this.was = 0; /* initial length of match */ +} + +function inflateResetKeep(strm) { + var state; + + if (!strm || !strm.state) { return Z_STREAM_ERROR; } + state = strm.state; + strm.total_in = strm.total_out = state.total = 0; + strm.msg = ''; /*Z_NULL*/ + if (state.wrap) { /* to support ill-conceived Java test suite */ + strm.adler = state.wrap & 1; + } + state.mode = HEAD; + state.last = 0; + state.havedict = 0; + state.dmax = 32768; + state.head = null/*Z_NULL*/; + state.hold = 0; + state.bits = 0; + //state.lencode = state.distcode = state.next = state.codes; + state.lencode = state.lendyn = new utils.Buf32(ENOUGH_LENS); + state.distcode = state.distdyn = new utils.Buf32(ENOUGH_DISTS); + + state.sane = 1; + state.back = -1; + //Tracev((stderr, "inflate: reset\n")); + return Z_OK; +} + +function inflateReset(strm) { + var state; + + if (!strm || !strm.state) { return Z_STREAM_ERROR; } + state = strm.state; + state.wsize = 0; + state.whave = 0; + state.wnext = 0; + return inflateResetKeep(strm); + +} + +function inflateReset2(strm, windowBits) { + var wrap; + var state; + + /* get the state */ + if (!strm || !strm.state) { return Z_STREAM_ERROR; } + state = strm.state; + + /* extract wrap request from windowBits parameter */ + if (windowBits < 0) { + wrap = 0; + windowBits = -windowBits; + } + else { + wrap = (windowBits >> 4) + 1; + if (windowBits < 48) { + windowBits &= 15; + } + } + + /* set number of window bits, free window if different */ + if (windowBits && (windowBits < 8 || windowBits > 15)) { + return Z_STREAM_ERROR; + } + if (state.window !== null && state.wbits !== windowBits) { + state.window = null; + } + + /* update state and reset the rest of it */ + state.wrap = wrap; + state.wbits = 
windowBits; + return inflateReset(strm); +} + +function inflateInit2(strm, windowBits) { + var ret; + var state; + + if (!strm) { return Z_STREAM_ERROR; } + //strm.msg = Z_NULL; /* in case we return an error */ + + state = new InflateState(); + + //if (state === Z_NULL) return Z_MEM_ERROR; + //Tracev((stderr, "inflate: allocated\n")); + strm.state = state; + state.window = null/*Z_NULL*/; + ret = inflateReset2(strm, windowBits); + if (ret !== Z_OK) { + strm.state = null/*Z_NULL*/; + } + return ret; +} + +function inflateInit(strm) { + return inflateInit2(strm, DEF_WBITS); +} + + +/* + Return state with length and distance decoding tables and index sizes set to + fixed code decoding. Normally this returns fixed tables from inffixed.h. + If BUILDFIXED is defined, then instead this routine builds the tables the + first time it's called, and returns those tables the first time and + thereafter. This reduces the size of the code by about 2K bytes, in + exchange for a little execution time. However, BUILDFIXED should not be + used for threaded applications, since the rewriting of the tables and virgin + may not be thread-safe. + */ +var virgin = true; + +var lenfix, distfix; // We have no pointers in JS, so keep tables separate + +function fixedtables(state) { + /* build fixed huffman tables if first call (may not be thread safe) */ + if (virgin) { + var sym; + + lenfix = new utils.Buf32(512); + distfix = new utils.Buf32(32); + + /* literal/length table */ + sym = 0; + while (sym < 144) { state.lens[sym++] = 8; } + while (sym < 256) { state.lens[sym++] = 9; } + while (sym < 280) { state.lens[sym++] = 7; } + while (sym < 288) { state.lens[sym++] = 8; } + + inflate_table(LENS, state.lens, 0, 288, lenfix, 0, state.work, { bits: 9 }); + + /* distance table */ + sym = 0; + while (sym < 32) { state.lens[sym++] = 5; } + + inflate_table(DISTS, state.lens, 0, 32, distfix, 0, state.work, { bits: 5 }); + + /* do this just once */ + virgin = false; + } + + state.lencode = lenfix; + state.lenbits = 9; + state.distcode = distfix; + state.distbits = 5; +} + + +/* + Update the window with the last wsize (normally 32K) bytes written before + returning. If window does not exist yet, create it. This is only called + when a window is already in use, or when output has been written during this + inflate call, but the end of the deflate stream has not been reached yet. + It is also called to create a window for dictionary data when a dictionary + is loaded. + + Providing output buffers larger than 32K to inflate() should provide a speed + advantage, since only the last 32K of output is copied to the sliding window + upon return from inflate(), and since all distances after the first 32K of + output will fall in the output data, making match copies simpler and faster. + The advantage may be dependent on the size of the processor's data caches. 
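+
+ A small worked case of the wrap-around copy below: with wsize = 8,
+ wnext = 6 and copy = 4, the first two output bytes land at window
+ indices 6..7, the remaining two wrap around to indices 0..1, and
+ afterwards wnext = 2 and whave = 8 (the window is full).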
+ */ +function updatewindow(strm, src, end, copy) { + var dist; + var state = strm.state; + + /* if it hasn't been done already, allocate space for the window */ + if (state.window === null) { + state.wsize = 1 << state.wbits; + state.wnext = 0; + state.whave = 0; + + state.window = new utils.Buf8(state.wsize); + } + + /* copy state->wsize or less output bytes into the circular window */ + if (copy >= state.wsize) { + utils.arraySet(state.window, src, end - state.wsize, state.wsize, 0); + state.wnext = 0; + state.whave = state.wsize; + } + else { + dist = state.wsize - state.wnext; + if (dist > copy) { + dist = copy; + } + //zmemcpy(state->window + state->wnext, end - copy, dist); + utils.arraySet(state.window, src, end - copy, dist, state.wnext); + copy -= dist; + if (copy) { + //zmemcpy(state->window, end - copy, copy); + utils.arraySet(state.window, src, end - copy, copy, 0); + state.wnext = copy; + state.whave = state.wsize; + } + else { + state.wnext += dist; + if (state.wnext === state.wsize) { state.wnext = 0; } + if (state.whave < state.wsize) { state.whave += dist; } + } + } + return 0; +} + +function inflate(strm, flush) { + var state; + var input, output; // input/output buffers + var next; /* next input INDEX */ + var put; /* next output INDEX */ + var have, left; /* available input and output */ + var hold; /* bit buffer */ + var bits; /* bits in bit buffer */ + var _in, _out; /* save starting available input and output */ + var copy; /* number of stored or match bytes to copy */ + var from; /* where to copy match bytes from */ + var from_source; + var here = 0; /* current decoding table entry */ + var here_bits, here_op, here_val; // paked "here" denormalized (JS specific) + //var last; /* parent table entry */ + var last_bits, last_op, last_val; // paked "last" denormalized (JS specific) + var len; /* length to copy for repeats, bits to drop */ + var ret; /* return code */ + var hbuf = new utils.Buf8(4); /* buffer for gzip header crc calculation */ + var opts; + + var n; // temporary var for NEED_BITS + + var order = /* permutation of code lengths */ + [ 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15 ]; + + + if (!strm || !strm.state || !strm.output || + (!strm.input && strm.avail_in !== 0)) { + return Z_STREAM_ERROR; + } + + state = strm.state; + if (state.mode === TYPE) { state.mode = TYPEDO; } /* skip check */ + + + //--- LOAD() --- + put = strm.next_out; + output = strm.output; + left = strm.avail_out; + next = strm.next_in; + input = strm.input; + have = strm.avail_in; + hold = state.hold; + bits = state.bits; + //--- + + _in = have; + _out = left; + ret = Z_OK; + + inf_leave: // goto emulation + for (;;) { + switch (state.mode) { + case HEAD: + if (state.wrap === 0) { + state.mode = TYPEDO; + break; + } + //=== NEEDBITS(16); + while (bits < 16) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + if ((state.wrap & 2) && hold === 0x8b1f) { /* gzip header */ + state.check = 0/*crc32(0L, Z_NULL, 0)*/; + //=== CRC2(state.check, hold); + hbuf[0] = hold & 0xff; + hbuf[1] = (hold >>> 8) & 0xff; + state.check = crc32(state.check, hbuf, 2, 0); + //===// + + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + state.mode = FLAGS; + break; + } + state.flags = 0; /* expect zlib header */ + if (state.head) { + state.head.done = false; + } + if (!(state.wrap & 1) || /* check if zlib header allowed */ + (((hold & 0xff)/*BITS(8)*/ << 8) + (hold >> 8)) % 31) { + strm.msg = 'incorrect header check'; + 
state.mode = BAD; + break; + } + if ((hold & 0x0f)/*BITS(4)*/ !== Z_DEFLATED) { + strm.msg = 'unknown compression method'; + state.mode = BAD; + break; + } + //--- DROPBITS(4) ---// + hold >>>= 4; + bits -= 4; + //---// + len = (hold & 0x0f)/*BITS(4)*/ + 8; + if (state.wbits === 0) { + state.wbits = len; + } + else if (len > state.wbits) { + strm.msg = 'invalid window size'; + state.mode = BAD; + break; + } + state.dmax = 1 << len; + //Tracev((stderr, "inflate: zlib header ok\n")); + strm.adler = state.check = 1/*adler32(0L, Z_NULL, 0)*/; + state.mode = hold & 0x200 ? DICTID : TYPE; + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + break; + case FLAGS: + //=== NEEDBITS(16); */ + while (bits < 16) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + state.flags = hold; + if ((state.flags & 0xff) !== Z_DEFLATED) { + strm.msg = 'unknown compression method'; + state.mode = BAD; + break; + } + if (state.flags & 0xe000) { + strm.msg = 'unknown header flags set'; + state.mode = BAD; + break; + } + if (state.head) { + state.head.text = ((hold >> 8) & 1); + } + if (state.flags & 0x0200) { + //=== CRC2(state.check, hold); + hbuf[0] = hold & 0xff; + hbuf[1] = (hold >>> 8) & 0xff; + state.check = crc32(state.check, hbuf, 2, 0); + //===// + } + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + state.mode = TIME; + /* falls through */ + case TIME: + //=== NEEDBITS(32); */ + while (bits < 32) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + if (state.head) { + state.head.time = hold; + } + if (state.flags & 0x0200) { + //=== CRC4(state.check, hold) + hbuf[0] = hold & 0xff; + hbuf[1] = (hold >>> 8) & 0xff; + hbuf[2] = (hold >>> 16) & 0xff; + hbuf[3] = (hold >>> 24) & 0xff; + state.check = crc32(state.check, hbuf, 4, 0); + //=== + } + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + state.mode = OS; + /* falls through */ + case OS: + //=== NEEDBITS(16); */ + while (bits < 16) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + if (state.head) { + state.head.xflags = (hold & 0xff); + state.head.os = (hold >> 8); + } + if (state.flags & 0x0200) { + //=== CRC2(state.check, hold); + hbuf[0] = hold & 0xff; + hbuf[1] = (hold >>> 8) & 0xff; + state.check = crc32(state.check, hbuf, 2, 0); + //===// + } + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + state.mode = EXLEN; + /* falls through */ + case EXLEN: + if (state.flags & 0x0400) { + //=== NEEDBITS(16); */ + while (bits < 16) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + state.length = hold; + if (state.head) { + state.head.extra_len = hold; + } + if (state.flags & 0x0200) { + //=== CRC2(state.check, hold); + hbuf[0] = hold & 0xff; + hbuf[1] = (hold >>> 8) & 0xff; + state.check = crc32(state.check, hbuf, 2, 0); + //===// + } + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + } + else if (state.head) { + state.head.extra = null/*Z_NULL*/; + } + state.mode = EXTRA; + /* falls through */ + case EXTRA: + if (state.flags & 0x0400) { + copy = state.length; + if (copy > have) { copy = have; } + if (copy) { + if (state.head) { + len = state.head.extra_len - state.length; + if (!state.head.extra) { + // Use untyped array for more convenient processing later + state.head.extra = new Array(state.head.extra_len); + } + utils.arraySet( + state.head.extra, + input, + next, + // extra field is limited 
to 65536 bytes + // - no need for additional size check + copy, + /*len + copy > state.head.extra_max - len ? state.head.extra_max : copy,*/ + len + ); + //zmemcpy(state.head.extra + len, next, + // len + copy > state.head.extra_max ? + // state.head.extra_max - len : copy); + } + if (state.flags & 0x0200) { + state.check = crc32(state.check, input, copy, next); + } + have -= copy; + next += copy; + state.length -= copy; + } + if (state.length) { break inf_leave; } + } + state.length = 0; + state.mode = NAME; + /* falls through */ + case NAME: + if (state.flags & 0x0800) { + if (have === 0) { break inf_leave; } + copy = 0; + do { + // TODO: 2 or 1 bytes? + len = input[next + copy++]; + /* use constant limit because in js we should not preallocate memory */ + if (state.head && len && + (state.length < 65536 /*state.head.name_max*/)) { + state.head.name += String.fromCharCode(len); + } + } while (len && copy < have); + + if (state.flags & 0x0200) { + state.check = crc32(state.check, input, copy, next); + } + have -= copy; + next += copy; + if (len) { break inf_leave; } + } + else if (state.head) { + state.head.name = null; + } + state.length = 0; + state.mode = COMMENT; + /* falls through */ + case COMMENT: + if (state.flags & 0x1000) { + if (have === 0) { break inf_leave; } + copy = 0; + do { + len = input[next + copy++]; + /* use constant limit because in js we should not preallocate memory */ + if (state.head && len && + (state.length < 65536 /*state.head.comm_max*/)) { + state.head.comment += String.fromCharCode(len); + } + } while (len && copy < have); + if (state.flags & 0x0200) { + state.check = crc32(state.check, input, copy, next); + } + have -= copy; + next += copy; + if (len) { break inf_leave; } + } + else if (state.head) { + state.head.comment = null; + } + state.mode = HCRC; + /* falls through */ + case HCRC: + if (state.flags & 0x0200) { + //=== NEEDBITS(16); */ + while (bits < 16) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + if (hold !== (state.check & 0xffff)) { + strm.msg = 'header crc mismatch'; + state.mode = BAD; + break; + } + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + } + if (state.head) { + state.head.hcrc = ((state.flags >> 9) & 1); + state.head.done = true; + } + strm.adler = state.check = 0; + state.mode = TYPE; + break; + case DICTID: + //=== NEEDBITS(32); */ + while (bits < 32) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + strm.adler = state.check = zswap32(hold); + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + state.mode = DICT; + /* falls through */ + case DICT: + if (state.havedict === 0) { + //--- RESTORE() --- + strm.next_out = put; + strm.avail_out = left; + strm.next_in = next; + strm.avail_in = have; + state.hold = hold; + state.bits = bits; + //--- + return Z_NEED_DICT; + } + strm.adler = state.check = 1/*adler32(0L, Z_NULL, 0)*/; + state.mode = TYPE; + /* falls through */ + case TYPE: + if (flush === Z_BLOCK || flush === Z_TREES) { break inf_leave; } + /* falls through */ + case TYPEDO: + if (state.last) { + //--- BYTEBITS() ---// + hold >>>= bits & 7; + bits -= bits & 7; + //---// + state.mode = CHECK; + break; + } + //=== NEEDBITS(3); */ + while (bits < 3) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + state.last = (hold & 0x01)/*BITS(1)*/; + //--- DROPBITS(1) ---// + hold >>>= 1; + bits -= 1; + //---// + + switch ((hold & 
0x03)/*BITS(2)*/) { + case 0: /* stored block */ + //Tracev((stderr, "inflate: stored block%s\n", + // state.last ? " (last)" : "")); + state.mode = STORED; + break; + case 1: /* fixed block */ + fixedtables(state); + //Tracev((stderr, "inflate: fixed codes block%s\n", + // state.last ? " (last)" : "")); + state.mode = LEN_; /* decode codes */ + if (flush === Z_TREES) { + //--- DROPBITS(2) ---// + hold >>>= 2; + bits -= 2; + //---// + break inf_leave; + } + break; + case 2: /* dynamic block */ + //Tracev((stderr, "inflate: dynamic codes block%s\n", + // state.last ? " (last)" : "")); + state.mode = TABLE; + break; + case 3: + strm.msg = 'invalid block type'; + state.mode = BAD; + } + //--- DROPBITS(2) ---// + hold >>>= 2; + bits -= 2; + //---// + break; + case STORED: + //--- BYTEBITS() ---// /* go to byte boundary */ + hold >>>= bits & 7; + bits -= bits & 7; + //---// + //=== NEEDBITS(32); */ + while (bits < 32) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + if ((hold & 0xffff) !== ((hold >>> 16) ^ 0xffff)) { + strm.msg = 'invalid stored block lengths'; + state.mode = BAD; + break; + } + state.length = hold & 0xffff; + //Tracev((stderr, "inflate: stored length %u\n", + // state.length)); + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + state.mode = COPY_; + if (flush === Z_TREES) { break inf_leave; } + /* falls through */ + case COPY_: + state.mode = COPY; + /* falls through */ + case COPY: + copy = state.length; + if (copy) { + if (copy > have) { copy = have; } + if (copy > left) { copy = left; } + if (copy === 0) { break inf_leave; } + //--- zmemcpy(put, next, copy); --- + utils.arraySet(output, input, next, copy, put); + //---// + have -= copy; + next += copy; + left -= copy; + put += copy; + state.length -= copy; + break; + } + //Tracev((stderr, "inflate: stored end\n")); + state.mode = TYPE; + break; + case TABLE: + //=== NEEDBITS(14); */ + while (bits < 14) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + state.nlen = (hold & 0x1f)/*BITS(5)*/ + 257; + //--- DROPBITS(5) ---// + hold >>>= 5; + bits -= 5; + //---// + state.ndist = (hold & 0x1f)/*BITS(5)*/ + 1; + //--- DROPBITS(5) ---// + hold >>>= 5; + bits -= 5; + //---// + state.ncode = (hold & 0x0f)/*BITS(4)*/ + 4; + //--- DROPBITS(4) ---// + hold >>>= 4; + bits -= 4; + //---// +//#ifndef PKZIP_BUG_WORKAROUND + if (state.nlen > 286 || state.ndist > 30) { + strm.msg = 'too many length or distance symbols'; + state.mode = BAD; + break; + } +//#endif + //Tracev((stderr, "inflate: table sizes ok\n")); + state.have = 0; + state.mode = LENLENS; + /* falls through */ + case LENLENS: + while (state.have < state.ncode) { + //=== NEEDBITS(3); + while (bits < 3) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + state.lens[order[state.have++]] = (hold & 0x07);//BITS(3); + //--- DROPBITS(3) ---// + hold >>>= 3; + bits -= 3; + //---// + } + while (state.have < 19) { + state.lens[order[state.have++]] = 0; + } + // We have separate tables & no pointers. 2 commented lines below not needed. 
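+        // (Aside: the order[] permutation used above matches the bl_order
+        // table on the compressor side -- the most frequently used code
+        // length codes come first, so unused trailing lengths need not be
+        // transmitted at all.)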
+ //state.next = state.codes; + //state.lencode = state.next; + // Switch to use dynamic table + state.lencode = state.lendyn; + state.lenbits = 7; + + opts = { bits: state.lenbits }; + ret = inflate_table(CODES, state.lens, 0, 19, state.lencode, 0, state.work, opts); + state.lenbits = opts.bits; + + if (ret) { + strm.msg = 'invalid code lengths set'; + state.mode = BAD; + break; + } + //Tracev((stderr, "inflate: code lengths ok\n")); + state.have = 0; + state.mode = CODELENS; + /* falls through */ + case CODELENS: + while (state.have < state.nlen + state.ndist) { + for (;;) { + here = state.lencode[hold & ((1 << state.lenbits) - 1)];/*BITS(state.lenbits)*/ + here_bits = here >>> 24; + here_op = (here >>> 16) & 0xff; + here_val = here & 0xffff; + + if ((here_bits) <= bits) { break; } + //--- PULLBYTE() ---// + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + //---// + } + if (here_val < 16) { + //--- DROPBITS(here.bits) ---// + hold >>>= here_bits; + bits -= here_bits; + //---// + state.lens[state.have++] = here_val; + } + else { + if (here_val === 16) { + //=== NEEDBITS(here.bits + 2); + n = here_bits + 2; + while (bits < n) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + //--- DROPBITS(here.bits) ---// + hold >>>= here_bits; + bits -= here_bits; + //---// + if (state.have === 0) { + strm.msg = 'invalid bit length repeat'; + state.mode = BAD; + break; + } + len = state.lens[state.have - 1]; + copy = 3 + (hold & 0x03);//BITS(2); + //--- DROPBITS(2) ---// + hold >>>= 2; + bits -= 2; + //---// + } + else if (here_val === 17) { + //=== NEEDBITS(here.bits + 3); + n = here_bits + 3; + while (bits < n) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + //--- DROPBITS(here.bits) ---// + hold >>>= here_bits; + bits -= here_bits; + //---// + len = 0; + copy = 3 + (hold & 0x07);//BITS(3); + //--- DROPBITS(3) ---// + hold >>>= 3; + bits -= 3; + //---// + } + else { + //=== NEEDBITS(here.bits + 7); + n = here_bits + 7; + while (bits < n) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + //--- DROPBITS(here.bits) ---// + hold >>>= here_bits; + bits -= here_bits; + //---// + len = 0; + copy = 11 + (hold & 0x7f);//BITS(7); + //--- DROPBITS(7) ---// + hold >>>= 7; + bits -= 7; + //---// + } + if (state.have + copy > state.nlen + state.ndist) { + strm.msg = 'invalid bit length repeat'; + state.mode = BAD; + break; + } + while (copy--) { + state.lens[state.have++] = len; + } + } + } + + /* handle error breaks in while */ + if (state.mode === BAD) { break; } + + /* check for end-of-block code (better have one) */ + if (state.lens[256] === 0) { + strm.msg = 'invalid code -- missing end-of-block'; + state.mode = BAD; + break; + } + + /* build code tables -- note: do not change the lenbits or distbits + values here (9 and 6) without reading the comments in inftrees.h + concerning the ENOUGH constants, which depend on those values */ + state.lenbits = 9; + + opts = { bits: state.lenbits }; + ret = inflate_table(LENS, state.lens, 0, state.nlen, state.lencode, 0, state.work, opts); + // We have separate tables & no pointers. 2 commented lines below not needed. 
+ // state.next_index = opts.table_index; + state.lenbits = opts.bits; + // state.lencode = state.next; + + if (ret) { + strm.msg = 'invalid literal/lengths set'; + state.mode = BAD; + break; + } + + state.distbits = 6; + //state.distcode.copy(state.codes); + // Switch to use dynamic table + state.distcode = state.distdyn; + opts = { bits: state.distbits }; + ret = inflate_table(DISTS, state.lens, state.nlen, state.ndist, state.distcode, 0, state.work, opts); + // We have separate tables & no pointers. 2 commented lines below not needed. + // state.next_index = opts.table_index; + state.distbits = opts.bits; + // state.distcode = state.next; + + if (ret) { + strm.msg = 'invalid distances set'; + state.mode = BAD; + break; + } + //Tracev((stderr, 'inflate: codes ok\n')); + state.mode = LEN_; + if (flush === Z_TREES) { break inf_leave; } + /* falls through */ + case LEN_: + state.mode = LEN; + /* falls through */ + case LEN: + if (have >= 6 && left >= 258) { + //--- RESTORE() --- + strm.next_out = put; + strm.avail_out = left; + strm.next_in = next; + strm.avail_in = have; + state.hold = hold; + state.bits = bits; + //--- + inflate_fast(strm, _out); + //--- LOAD() --- + put = strm.next_out; + output = strm.output; + left = strm.avail_out; + next = strm.next_in; + input = strm.input; + have = strm.avail_in; + hold = state.hold; + bits = state.bits; + //--- + + if (state.mode === TYPE) { + state.back = -1; + } + break; + } + state.back = 0; + for (;;) { + here = state.lencode[hold & ((1 << state.lenbits) - 1)]; /*BITS(state.lenbits)*/ + here_bits = here >>> 24; + here_op = (here >>> 16) & 0xff; + here_val = here & 0xffff; + + if (here_bits <= bits) { break; } + //--- PULLBYTE() ---// + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + //---// + } + if (here_op && (here_op & 0xf0) === 0) { + last_bits = here_bits; + last_op = here_op; + last_val = here_val; + for (;;) { + here = state.lencode[last_val + + ((hold & ((1 << (last_bits + last_op)) - 1))/*BITS(last.bits + last.op)*/ >> last_bits)]; + here_bits = here >>> 24; + here_op = (here >>> 16) & 0xff; + here_val = here & 0xffff; + + if ((last_bits + here_bits) <= bits) { break; } + //--- PULLBYTE() ---// + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + //---// + } + //--- DROPBITS(last.bits) ---// + hold >>>= last_bits; + bits -= last_bits; + //---// + state.back += last_bits; + } + //--- DROPBITS(here.bits) ---// + hold >>>= here_bits; + bits -= here_bits; + //---// + state.back += here_bits; + state.length = here_val; + if (here_op === 0) { + //Tracevv((stderr, here.val >= 0x20 && here.val < 0x7f ? 
+ // "inflate: literal '%c'\n" : + // "inflate: literal 0x%02x\n", here.val)); + state.mode = LIT; + break; + } + if (here_op & 32) { + //Tracevv((stderr, "inflate: end of block\n")); + state.back = -1; + state.mode = TYPE; + break; + } + if (here_op & 64) { + strm.msg = 'invalid literal/length code'; + state.mode = BAD; + break; + } + state.extra = here_op & 15; + state.mode = LENEXT; + /* falls through */ + case LENEXT: + if (state.extra) { + //=== NEEDBITS(state.extra); + n = state.extra; + while (bits < n) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + state.length += hold & ((1 << state.extra) - 1)/*BITS(state.extra)*/; + //--- DROPBITS(state.extra) ---// + hold >>>= state.extra; + bits -= state.extra; + //---// + state.back += state.extra; + } + //Tracevv((stderr, "inflate: length %u\n", state.length)); + state.was = state.length; + state.mode = DIST; + /* falls through */ + case DIST: + for (;;) { + here = state.distcode[hold & ((1 << state.distbits) - 1)];/*BITS(state.distbits)*/ + here_bits = here >>> 24; + here_op = (here >>> 16) & 0xff; + here_val = here & 0xffff; + + if ((here_bits) <= bits) { break; } + //--- PULLBYTE() ---// + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + //---// + } + if ((here_op & 0xf0) === 0) { + last_bits = here_bits; + last_op = here_op; + last_val = here_val; + for (;;) { + here = state.distcode[last_val + + ((hold & ((1 << (last_bits + last_op)) - 1))/*BITS(last.bits + last.op)*/ >> last_bits)]; + here_bits = here >>> 24; + here_op = (here >>> 16) & 0xff; + here_val = here & 0xffff; + + if ((last_bits + here_bits) <= bits) { break; } + //--- PULLBYTE() ---// + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + //---// + } + //--- DROPBITS(last.bits) ---// + hold >>>= last_bits; + bits -= last_bits; + //---// + state.back += last_bits; + } + //--- DROPBITS(here.bits) ---// + hold >>>= here_bits; + bits -= here_bits; + //---// + state.back += here_bits; + if (here_op & 64) { + strm.msg = 'invalid distance code'; + state.mode = BAD; + break; + } + state.offset = here_val; + state.extra = (here_op) & 15; + state.mode = DISTEXT; + /* falls through */ + case DISTEXT: + if (state.extra) { + //=== NEEDBITS(state.extra); + n = state.extra; + while (bits < n) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + state.offset += hold & ((1 << state.extra) - 1)/*BITS(state.extra)*/; + //--- DROPBITS(state.extra) ---// + hold >>>= state.extra; + bits -= state.extra; + //---// + state.back += state.extra; + } +//#ifdef INFLATE_STRICT + if (state.offset > state.dmax) { + strm.msg = 'invalid distance too far back'; + state.mode = BAD; + break; + } +//#endif + //Tracevv((stderr, "inflate: distance %u\n", state.offset)); + state.mode = MATCH; + /* falls through */ + case MATCH: + if (left === 0) { break inf_leave; } + copy = _out - left; + if (state.offset > copy) { /* copy from window */ + copy = state.offset - copy; + if (copy > state.whave) { + if (state.sane) { + strm.msg = 'invalid distance too far back'; + state.mode = BAD; + break; + } +// (!) 
This block is disabled in zlib defaults, +// don't enable it for binary compatibility +//#ifdef INFLATE_ALLOW_INVALID_DISTANCE_TOOFAR_ARRR +// Trace((stderr, "inflate.c too far\n")); +// copy -= state.whave; +// if (copy > state.length) { copy = state.length; } +// if (copy > left) { copy = left; } +// left -= copy; +// state.length -= copy; +// do { +// output[put++] = 0; +// } while (--copy); +// if (state.length === 0) { state.mode = LEN; } +// break; +//#endif + } + if (copy > state.wnext) { + copy -= state.wnext; + from = state.wsize - copy; + } + else { + from = state.wnext - copy; + } + if (copy > state.length) { copy = state.length; } + from_source = state.window; + } + else { /* copy from output */ + from_source = output; + from = put - state.offset; + copy = state.length; + } + if (copy > left) { copy = left; } + left -= copy; + state.length -= copy; + do { + output[put++] = from_source[from++]; + } while (--copy); + if (state.length === 0) { state.mode = LEN; } + break; + case LIT: + if (left === 0) { break inf_leave; } + output[put++] = state.length; + left--; + state.mode = LEN; + break; + case CHECK: + if (state.wrap) { + //=== NEEDBITS(32); + while (bits < 32) { + if (have === 0) { break inf_leave; } + have--; + // Use '|' instead of '+' to make sure that result is signed + hold |= input[next++] << bits; + bits += 8; + } + //===// + _out -= left; + strm.total_out += _out; + state.total += _out; + if (_out) { + strm.adler = state.check = + /*UPDATE(state.check, put - _out, _out);*/ + (state.flags ? crc32(state.check, output, _out, put - _out) : adler32(state.check, output, _out, put - _out)); + + } + _out = left; + // NB: crc32 stored as signed 32-bit int, zswap32 returns signed too + if ((state.flags ? hold : zswap32(hold)) !== state.check) { + strm.msg = 'incorrect data check'; + state.mode = BAD; + break; + } + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + //Tracev((stderr, "inflate: check matches trailer\n")); + } + state.mode = LENGTH; + /* falls through */ + case LENGTH: + if (state.wrap && state.flags) { + //=== NEEDBITS(32); + while (bits < 32) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + if (hold !== (state.total & 0xffffffff)) { + strm.msg = 'incorrect length check'; + state.mode = BAD; + break; + } + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + //Tracev((stderr, "inflate: length matches trailer\n")); + } + state.mode = DONE; + /* falls through */ + case DONE: + ret = Z_STREAM_END; + break inf_leave; + case BAD: + ret = Z_DATA_ERROR; + break inf_leave; + case MEM: + return Z_MEM_ERROR; + case SYNC: + /* falls through */ + default: + return Z_STREAM_ERROR; + } + } + + // inf_leave <- here is real place for "goto inf_leave", emulated via "break inf_leave" + + /* + Return from inflate(), updating the total counts and the check value. + If there was no progress during the inflate() call, return a buffer + error. Call updatewindow() to create and/or update the window state. + Note: a memory error from inflate() is non-recoverable. 
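+
+     ("No progress" means the call neither consumed input nor produced
+     output, or Z_FINISH was requested without reaching the end of the
+     stream; the Z_BUF_ERROR set below is a hint to supply more input
+     or more output space, not a fatal error.)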
+ */ + + //--- RESTORE() --- + strm.next_out = put; + strm.avail_out = left; + strm.next_in = next; + strm.avail_in = have; + state.hold = hold; + state.bits = bits; + //--- + + if (state.wsize || (_out !== strm.avail_out && state.mode < BAD && + (state.mode < CHECK || flush !== Z_FINISH))) { + if (updatewindow(strm, strm.output, strm.next_out, _out - strm.avail_out)) { + state.mode = MEM; + return Z_MEM_ERROR; + } + } + _in -= strm.avail_in; + _out -= strm.avail_out; + strm.total_in += _in; + strm.total_out += _out; + state.total += _out; + if (state.wrap && _out) { + strm.adler = state.check = /*UPDATE(state.check, strm.next_out - _out, _out);*/ + (state.flags ? crc32(state.check, output, _out, strm.next_out - _out) : adler32(state.check, output, _out, strm.next_out - _out)); + } + strm.data_type = state.bits + (state.last ? 64 : 0) + + (state.mode === TYPE ? 128 : 0) + + (state.mode === LEN_ || state.mode === COPY_ ? 256 : 0); + if (((_in === 0 && _out === 0) || flush === Z_FINISH) && ret === Z_OK) { + ret = Z_BUF_ERROR; + } + return ret; +} + +function inflateEnd(strm) { + + if (!strm || !strm.state /*|| strm->zfree == (free_func)0*/) { + return Z_STREAM_ERROR; + } + + var state = strm.state; + if (state.window) { + state.window = null; + } + strm.state = null; + return Z_OK; +} + +function inflateGetHeader(strm, head) { + var state; + + /* check state */ + if (!strm || !strm.state) { return Z_STREAM_ERROR; } + state = strm.state; + if ((state.wrap & 2) === 0) { return Z_STREAM_ERROR; } + + /* save header structure */ + state.head = head; + head.done = false; + return Z_OK; +} + +function inflateSetDictionary(strm, dictionary) { + var dictLength = dictionary.length; + + var state; + var dictid; + var ret; + + /* check state */ + if (!strm /* == Z_NULL */ || !strm.state /* == Z_NULL */) { return Z_STREAM_ERROR; } + state = strm.state; + + if (state.wrap !== 0 && state.mode !== DICT) { + return Z_STREAM_ERROR; + } + + /* check for correct dictionary identifier */ + if (state.mode === DICT) { + dictid = 1; /* adler32(0, null, 0)*/ + /* dictid = adler32(dictid, dictionary, dictLength); */ + dictid = adler32(dictid, dictionary, dictLength, 0); + if (dictid !== state.check) { + return Z_DATA_ERROR; + } + } + /* copy dictionary to window using updatewindow(), which will amend the + existing dictionary if appropriate */ + ret = updatewindow(strm, dictionary, dictLength, dictLength); + if (ret) { + state.mode = MEM; + return Z_MEM_ERROR; + } + state.havedict = 1; + // Tracev((stderr, "inflate: dictionary set\n")); + return Z_OK; +} + +exports.inflateReset = inflateReset; +exports.inflateReset2 = inflateReset2; +exports.inflateResetKeep = inflateResetKeep; +exports.inflateInit = inflateInit; +exports.inflateInit2 = inflateInit2; +exports.inflate = inflate; +exports.inflateEnd = inflateEnd; +exports.inflateGetHeader = inflateGetHeader; +exports.inflateSetDictionary = inflateSetDictionary; +exports.inflateInfo = 'pako inflate (from Nodeca project)'; + +/* Not implemented +exports.inflateCopy = inflateCopy; +exports.inflateGetDictionary = inflateGetDictionary; +exports.inflateMark = inflateMark; +exports.inflatePrime = inflatePrime; +exports.inflateSync = inflateSync; +exports.inflateSyncPoint = inflateSyncPoint; +exports.inflateUndermine = inflateUndermine; +*/ + + +/***/ }), + +/***/ 56895: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +// (C) 1995-2013 Jean-loup Gailly and Mark Adler +// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin +// +// This 
software is provided 'as-is', without any express or implied +// warranty. In no event will the authors be held liable for any damages +// arising from the use of this software. +// +// Permission is granted to anyone to use this software for any purpose, +// including commercial applications, and to alter it and redistribute it +// freely, subject to the following restrictions: +// +// 1. The origin of this software must not be misrepresented; you must not +// claim that you wrote the original software. If you use this software +// in a product, an acknowledgment in the product documentation would be +// appreciated but is not required. +// 2. Altered source versions must be plainly marked as such, and must not be +// misrepresented as being the original software. +// 3. This notice may not be removed or altered from any source distribution. + +var utils = __nccwpck_require__(5483); + +var MAXBITS = 15; +var ENOUGH_LENS = 852; +var ENOUGH_DISTS = 592; +//var ENOUGH = (ENOUGH_LENS+ENOUGH_DISTS); + +var CODES = 0; +var LENS = 1; +var DISTS = 2; + +var lbase = [ /* Length codes 257..285 base */ + 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31, + 35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258, 0, 0 +]; + +var lext = [ /* Length codes 257..285 extra */ + 16, 16, 16, 16, 16, 16, 16, 16, 17, 17, 17, 17, 18, 18, 18, 18, + 19, 19, 19, 19, 20, 20, 20, 20, 21, 21, 21, 21, 16, 72, 78 +]; + +var dbase = [ /* Distance codes 0..29 base */ + 1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193, + 257, 385, 513, 769, 1025, 1537, 2049, 3073, 4097, 6145, + 8193, 12289, 16385, 24577, 0, 0 +]; + +var dext = [ /* Distance codes 0..29 extra */ + 16, 16, 16, 16, 17, 17, 18, 18, 19, 19, 20, 20, 21, 21, 22, 22, + 23, 23, 24, 24, 25, 25, 26, 26, 27, 27, + 28, 28, 29, 29, 64, 64 +]; + +module.exports = function inflate_table(type, lens, lens_index, codes, table, table_index, work, opts) +{ + var bits = opts.bits; + //here = opts.here; /* table entry for duplication */ + + var len = 0; /* a code's length in bits */ + var sym = 0; /* index of code symbols */ + var min = 0, max = 0; /* minimum and maximum code lengths */ + var root = 0; /* number of index bits for root table */ + var curr = 0; /* number of index bits for current table */ + var drop = 0; /* code bits to drop for sub-table */ + var left = 0; /* number of prefix codes available */ + var used = 0; /* code entries in table used */ + var huff = 0; /* Huffman code */ + var incr; /* for incrementing code, index */ + var fill; /* index for replicating entries */ + var low; /* low bits for current root entry */ + var mask; /* mask for low root bits */ + var next; /* next available space in table */ + var base = null; /* base value table to use */ + var base_index = 0; +// var shoextra; /* extra bits table to use */ + var end; /* use base and extra for symbol > end */ + var count = new utils.Buf16(MAXBITS + 1); //[MAXBITS+1]; /* number of codes of each length */ + var offs = new utils.Buf16(MAXBITS + 1); //[MAXBITS+1]; /* offsets in table for each length */ + var extra = null; + var extra_index = 0; + + var here_bits, here_op, here_val; + + /* + Process a set of code lengths to create a canonical Huffman code. The + code lengths are lens[0..codes-1]. Each length corresponds to the + symbols 0..codes-1. The Huffman code is generated by first sorting the + symbols by length from short to long, and retaining the symbol order + for codes with equal lengths. 
Then the code starts with all zero bits + for the first code of the shortest length, and the codes are integer + increments for the same length, and zeros are appended as the length + increases. For the deflate format, these bits are stored backwards + from their more natural integer increment ordering, and so when the + decoding tables are built in the large loop below, the integer codes + are incremented backwards. + + This routine assumes, but does not check, that all of the entries in + lens[] are in the range 0..MAXBITS. The caller must assure this. + 1..MAXBITS is interpreted as that code length. zero means that that + symbol does not occur in this code. + + The codes are sorted by computing a count of codes for each length, + creating from that a table of starting indices for each length in the + sorted table, and then entering the symbols in order in the sorted + table. The sorted table is work[], with that space being provided by + the caller. + + The length counts are used for other purposes as well, i.e. finding + the minimum and maximum length codes, determining if there are any + codes at all, checking for a valid set of lengths, and looking ahead + at length counts to determine sub-table sizes when building the + decoding tables. + */ + + /* accumulate lengths for codes (assumes lens[] all in 0..MAXBITS) */ + for (len = 0; len <= MAXBITS; len++) { + count[len] = 0; + } + for (sym = 0; sym < codes; sym++) { + count[lens[lens_index + sym]]++; + } + + /* bound code lengths, force root to be within code lengths */ + root = bits; + for (max = MAXBITS; max >= 1; max--) { + if (count[max] !== 0) { break; } + } + if (root > max) { + root = max; + } + if (max === 0) { /* no symbols to code at all */ + //table.op[opts.table_index] = 64; //here.op = (var char)64; /* invalid code marker */ + //table.bits[opts.table_index] = 1; //here.bits = (var char)1; + //table.val[opts.table_index++] = 0; //here.val = (var short)0; + table[table_index++] = (1 << 24) | (64 << 16) | 0; + + + //table.op[opts.table_index] = 64; + //table.bits[opts.table_index] = 1; + //table.val[opts.table_index++] = 0; + table[table_index++] = (1 << 24) | (64 << 16) | 0; + + opts.bits = 1; + return 0; /* no symbols, but wait for decoding to report error */ + } + for (min = 1; min < max; min++) { + if (count[min] !== 0) { break; } + } + if (root < min) { + root = min; + } + + /* check for an over-subscribed or incomplete set of lengths */ + left = 1; + for (len = 1; len <= MAXBITS; len++) { + left <<= 1; + left -= count[len]; + if (left < 0) { + return -1; + } /* over-subscribed */ + } + if (left > 0 && (type === CODES || max !== 1)) { + return -1; /* incomplete set */ + } + + /* generate offsets into symbol table for each length for sorting */ + offs[1] = 0; + for (len = 1; len < MAXBITS; len++) { + offs[len + 1] = offs[len] + count[len]; + } + + /* sort symbols by length, by symbol order within each length */ + for (sym = 0; sym < codes; sym++) { + if (lens[lens_index + sym] !== 0) { + work[offs[lens[lens_index + sym]]++] = sym; + } + } + + /* + Create and fill in decoding tables. In this loop, the table being + filled is at next and has curr index bits. The code being used is huff + with length len. That code is converted to an index by dropping drop + bits off of the bottom. For codes where len is less than drop + curr, + those top drop + curr - len bits are incremented through all values to + fill the table with replicated entries. + + root is the number of index bits for the root table. 
When len exceeds + root, sub-tables are created pointed to by the root entry with an index + of the low root bits of huff. This is saved in low to check for when a + new sub-table should be started. drop is zero when the root table is + being filled, and drop is root when sub-tables are being filled. + + When a new sub-table is needed, it is necessary to look ahead in the + code lengths to determine what size sub-table is needed. The length + counts are used for this, and so count[] is decremented as codes are + entered in the tables. + + used keeps track of how many table entries have been allocated from the + provided *table space. It is checked for LENS and DIST tables against + the constants ENOUGH_LENS and ENOUGH_DISTS to guard against changes in + the initial root table size constants. See the comments in inftrees.h + for more information. + + sym increments through all symbols, and the loop terminates when + all codes of length max, i.e. all codes, have been processed. This + routine permits incomplete codes, so another loop after this one fills + in the rest of the decoding tables with invalid code markers. + */ + + /* set up for code type */ + // poor man optimization - use if-else instead of switch, + // to avoid deopts in old v8 + if (type === CODES) { + base = extra = work; /* dummy value--not used */ + end = 19; + + } else if (type === LENS) { + base = lbase; + base_index -= 257; + extra = lext; + extra_index -= 257; + end = 256; + + } else { /* DISTS */ + base = dbase; + extra = dext; + end = -1; + } + + /* initialize opts for loop */ + huff = 0; /* starting code */ + sym = 0; /* starting code symbol */ + len = min; /* starting code length */ + next = table_index; /* current table to fill in */ + curr = root; /* current table index bits */ + drop = 0; /* current bits to drop from code for index */ + low = -1; /* trigger new sub-table when len > root */ + used = 1 << root; /* use root table entries */ + mask = used - 1; /* mask for comparing low */ + + /* check available table space */ + if ((type === LENS && used > ENOUGH_LENS) || + (type === DISTS && used > ENOUGH_DISTS)) { + return 1; + } + + /* process all codes and make table entries */ + for (;;) { + /* create table entry */ + here_bits = len - drop; + if (work[sym] < end) { + here_op = 0; + here_val = work[sym]; + } + else if (work[sym] > end) { + here_op = extra[extra_index + work[sym]]; + here_val = base[base_index + work[sym]]; + } + else { + here_op = 32 + 64; /* end of block */ + here_val = 0; + } + + /* replicate for those indices with low len bits equal to huff */ + incr = 1 << (len - drop); + fill = 1 << curr; + min = fill; /* save offset to next table */ + do { + fill -= incr; + table[next + (huff >> drop) + fill] = (here_bits << 24) | (here_op << 16) | here_val |0; + } while (fill !== 0); + + /* backwards increment the len-bit code huff */ + incr = 1 << (len - 1); + while (huff & incr) { + incr >>= 1; + } + if (incr !== 0) { + huff &= incr - 1; + huff += incr; + } else { + huff = 0; + } + + /* go to next symbol, update count, len */ + sym++; + if (--count[len] === 0) { + if (len === max) { break; } + len = lens[lens_index + work[sym]]; + } + + /* create new sub-table if needed */ + if (len > root && (huff & mask) !== low) { + /* if first time, transition to sub-tables */ + if (drop === 0) { + drop = root; + } + + /* increment past last table */ + next += min; /* here min is 1 << curr */ + + /* determine length of next table */ + curr = len - drop; + left = 1 << curr; + while (curr + drop < max) { + left -= 
count[curr + drop]; + if (left <= 0) { break; } + curr++; + left <<= 1; + } + + /* check for enough space */ + used += 1 << curr; + if ((type === LENS && used > ENOUGH_LENS) || + (type === DISTS && used > ENOUGH_DISTS)) { + return 1; + } + + /* point entry in root table to sub-table */ + low = huff & mask; + /*table.op[low] = curr; + table.bits[low] = root; + table.val[low] = next - opts.table_index;*/ + table[low] = (root << 24) | (curr << 16) | (next - table_index) |0; + } + } + + /* fill in remaining table entry if code is incomplete (guaranteed to have + at most one remaining entry, since if the code is incomplete, the + maximum code length that was allowed to get this far is one bit) */ + if (huff !== 0) { + //table.op[next + huff] = 64; /* invalid code marker */ + //table.bits[next + huff] = len - drop; + //table.val[next + huff] = 0; + table[next + huff] = ((len - drop) << 24) | (64 << 16) |0; + } + + /* set return parameters */ + //opts.table_index += used; + opts.bits = root; + return 0; +}; + + +/***/ }), + +/***/ 1890: +/***/ ((module) => { + +"use strict"; + + +// (C) 1995-2013 Jean-loup Gailly and Mark Adler +// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin +// +// This software is provided 'as-is', without any express or implied +// warranty. In no event will the authors be held liable for any damages +// arising from the use of this software. +// +// Permission is granted to anyone to use this software for any purpose, +// including commercial applications, and to alter it and redistribute it +// freely, subject to the following restrictions: +// +// 1. The origin of this software must not be misrepresented; you must not +// claim that you wrote the original software. If you use this software +// in a product, an acknowledgment in the product documentation would be +// appreciated but is not required. +// 2. Altered source versions must be plainly marked as such, and must not be +// misrepresented as being the original software. +// 3. This notice may not be removed or altered from any source distribution. + +module.exports = { + 2: 'need dictionary', /* Z_NEED_DICT 2 */ + 1: 'stream end', /* Z_STREAM_END 1 */ + 0: '', /* Z_OK 0 */ + '-1': 'file error', /* Z_ERRNO (-1) */ + '-2': 'stream error', /* Z_STREAM_ERROR (-2) */ + '-3': 'data error', /* Z_DATA_ERROR (-3) */ + '-4': 'insufficient memory', /* Z_MEM_ERROR (-4) */ + '-5': 'buffer error', /* Z_BUF_ERROR (-5) */ + '-6': 'incompatible version' /* Z_VERSION_ERROR (-6) */ +}; + + +/***/ }), + +/***/ 78754: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +// (C) 1995-2013 Jean-loup Gailly and Mark Adler +// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin +// +// This software is provided 'as-is', without any express or implied +// warranty. In no event will the authors be held liable for any damages +// arising from the use of this software. +// +// Permission is granted to anyone to use this software for any purpose, +// including commercial applications, and to alter it and redistribute it +// freely, subject to the following restrictions: +// +// 1. The origin of this software must not be misrepresented; you must not +// claim that you wrote the original software. If you use this software +// in a product, an acknowledgment in the product documentation would be +// appreciated but is not required. +// 2. Altered source versions must be plainly marked as such, and must not be +// misrepresented as being the original software. +// 3. 
This notice may not be removed or altered from any source distribution. + +/* eslint-disable space-unary-ops */ + +var utils = __nccwpck_require__(5483); + +/* Public constants ==========================================================*/ +/* ===========================================================================*/ + + +//var Z_FILTERED = 1; +//var Z_HUFFMAN_ONLY = 2; +//var Z_RLE = 3; +var Z_FIXED = 4; +//var Z_DEFAULT_STRATEGY = 0; + +/* Possible values of the data_type field (though see inflate()) */ +var Z_BINARY = 0; +var Z_TEXT = 1; +//var Z_ASCII = 1; // = Z_TEXT +var Z_UNKNOWN = 2; + +/*============================================================================*/ + + +function zero(buf) { var len = buf.length; while (--len >= 0) { buf[len] = 0; } } + +// From zutil.h + +var STORED_BLOCK = 0; +var STATIC_TREES = 1; +var DYN_TREES = 2; +/* The three kinds of block type */ + +var MIN_MATCH = 3; +var MAX_MATCH = 258; +/* The minimum and maximum match lengths */ + +// From deflate.h +/* =========================================================================== + * Internal compression state. + */ + +var LENGTH_CODES = 29; +/* number of length codes, not counting the special END_BLOCK code */ + +var LITERALS = 256; +/* number of literal bytes 0..255 */ + +var L_CODES = LITERALS + 1 + LENGTH_CODES; +/* number of Literal or Length codes, including the END_BLOCK code */ + +var D_CODES = 30; +/* number of distance codes */ + +var BL_CODES = 19; +/* number of codes used to transfer the bit lengths */ + +var HEAP_SIZE = 2 * L_CODES + 1; +/* maximum heap size */ + +var MAX_BITS = 15; +/* All codes must not exceed MAX_BITS bits */ + +var Buf_size = 16; +/* size of bit buffer in bi_buf */ + + +/* =========================================================================== + * Constants + */ + +var MAX_BL_BITS = 7; +/* Bit length codes must not exceed MAX_BL_BITS bits */ + +var END_BLOCK = 256; +/* end of block literal code */ + +var REP_3_6 = 16; +/* repeat previous bit length 3-6 times (2 bits of repeat count) */ + +var REPZ_3_10 = 17; +/* repeat a zero length 3-10 times (3 bits of repeat count) */ + +var REPZ_11_138 = 18; +/* repeat a zero length 11-138 times (7 bits of repeat count) */ + +/* eslint-disable comma-spacing,array-bracket-spacing */ +var extra_lbits = /* extra bits for each length code */ + [0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,0]; + +var extra_dbits = /* extra bits for each distance code */ + [0,0,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13]; + +var extra_blbits = /* extra bits for each bit length code */ + [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,7]; + +var bl_order = + [16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15]; +/* eslint-enable comma-spacing,array-bracket-spacing */ + +/* The lengths of the bit length codes are sent in order of decreasing + * probability, to avoid transmitting the lengths for unused bit length codes. + */ + +/* =========================================================================== + * Local data. These are initialized only once. + */ + +// We pre-fill arrays with 0 to avoid uninitialized gaps + +var DIST_CODE_LEN = 512; /* see definition of array dist_code below */ + +// !!!! Use flat array instead of structure, Freq = i*2, Len = i*2+1 +var static_ltree = new Array((L_CODES + 2) * 2); +zero(static_ltree); +/* The static literal tree. Since the bit lengths are imposed, there is no + * need for the L_CODES extra codes used during heap construction. 
However + * The codes 286 and 287 are needed to build a canonical tree (see _tr_init + * below). + */ + +var static_dtree = new Array(D_CODES * 2); +zero(static_dtree); +/* The static distance tree. (Actually a trivial tree since all codes use + * 5 bits.) + */ + +var _dist_code = new Array(DIST_CODE_LEN); +zero(_dist_code); +/* Distance codes. The first 256 values correspond to the distances + * 3 .. 258, the last 256 values correspond to the top 8 bits of + * the 15 bit distances. + */ + +var _length_code = new Array(MAX_MATCH - MIN_MATCH + 1); +zero(_length_code); +/* length code for each normalized match length (0 == MIN_MATCH) */ + +var base_length = new Array(LENGTH_CODES); +zero(base_length); +/* First normalized length for each code (0 = MIN_MATCH) */ + +var base_dist = new Array(D_CODES); +zero(base_dist); +/* First normalized distance for each code (0 = distance of 1) */ + + +function StaticTreeDesc(static_tree, extra_bits, extra_base, elems, max_length) { + + this.static_tree = static_tree; /* static tree or NULL */ + this.extra_bits = extra_bits; /* extra bits for each code or NULL */ + this.extra_base = extra_base; /* base index for extra_bits */ + this.elems = elems; /* max number of elements in the tree */ + this.max_length = max_length; /* max bit length for the codes */ + + // show if `static_tree` has data or dummy - needed for monomorphic objects + this.has_stree = static_tree && static_tree.length; +} + + +var static_l_desc; +var static_d_desc; +var static_bl_desc; + + +function TreeDesc(dyn_tree, stat_desc) { + this.dyn_tree = dyn_tree; /* the dynamic tree */ + this.max_code = 0; /* largest code with non zero frequency */ + this.stat_desc = stat_desc; /* the corresponding static tree */ +} + + + +function d_code(dist) { + return dist < 256 ? _dist_code[dist] : _dist_code[256 + (dist >>> 7)]; +} + + +/* =========================================================================== + * Output a short LSB first on the stream. + * IN assertion: there is enough room in pendingBuf. + */ +function put_short(s, w) { +// put_byte(s, (uch)((w) & 0xff)); +// put_byte(s, (uch)((ush)(w) >> 8)); + s.pending_buf[s.pending++] = (w) & 0xff; + s.pending_buf[s.pending++] = (w >>> 8) & 0xff; +} + + +/* =========================================================================== + * Send a value on a given number of bits. + * IN assertion: length <= 16 and value fits in length bits. + */ +function send_bits(s, value, length) { + if (s.bi_valid > (Buf_size - length)) { + s.bi_buf |= (value << s.bi_valid) & 0xffff; + put_short(s, s.bi_buf); + s.bi_buf = value >> (Buf_size - s.bi_valid); + s.bi_valid += length - Buf_size; + } else { + s.bi_buf |= (value << s.bi_valid) & 0xffff; + s.bi_valid += length; + } +} + + +function send_code(s, c, tree) { + send_bits(s, tree[c * 2]/*.Code*/, tree[c * 2 + 1]/*.Len*/); +} + + +/* =========================================================================== + * Reverse the first len bits of a code, using straightforward code (a faster + * method would use a table) + * IN assertion: 1 <= len <= 15 + */ +function bi_reverse(code, len) { + var res = 0; + do { + res |= code & 1; + code >>>= 1; + res <<= 1; + } while (--len > 0); + return res >>> 1; +} + + +/* =========================================================================== + * Flush the bit buffer, keeping at most 7 bits in it. 
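+ * An illustrative trace (editorial note, not from the zlib sources): with
+ * bi_valid = 13 and bi_buf = 0b1011010110100, the second branch below emits
+ * the low byte 0xB4, shifts bi_buf right by 8, and leaves bi_valid = 5
+ * pending bits in the buffer.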
+ */ +function bi_flush(s) { + if (s.bi_valid === 16) { + put_short(s, s.bi_buf); + s.bi_buf = 0; + s.bi_valid = 0; + + } else if (s.bi_valid >= 8) { + s.pending_buf[s.pending++] = s.bi_buf & 0xff; + s.bi_buf >>= 8; + s.bi_valid -= 8; + } +} + + +/* =========================================================================== + * Compute the optimal bit lengths for a tree and update the total bit length + * for the current block. + * IN assertion: the fields freq and dad are set, heap[heap_max] and + * above are the tree nodes sorted by increasing frequency. + * OUT assertions: the field len is set to the optimal bit length, the + * array bl_count contains the frequencies for each bit length. + * The length opt_len is updated; static_len is also updated if stree is + * not null. + */ +function gen_bitlen(s, desc) +// deflate_state *s; +// tree_desc *desc; /* the tree descriptor */ +{ + var tree = desc.dyn_tree; + var max_code = desc.max_code; + var stree = desc.stat_desc.static_tree; + var has_stree = desc.stat_desc.has_stree; + var extra = desc.stat_desc.extra_bits; + var base = desc.stat_desc.extra_base; + var max_length = desc.stat_desc.max_length; + var h; /* heap index */ + var n, m; /* iterate over the tree elements */ + var bits; /* bit length */ + var xbits; /* extra bits */ + var f; /* frequency */ + var overflow = 0; /* number of elements with bit length too large */ + + for (bits = 0; bits <= MAX_BITS; bits++) { + s.bl_count[bits] = 0; + } + + /* In a first pass, compute the optimal bit lengths (which may + * overflow in the case of the bit length tree). + */ + tree[s.heap[s.heap_max] * 2 + 1]/*.Len*/ = 0; /* root of the heap */ + + for (h = s.heap_max + 1; h < HEAP_SIZE; h++) { + n = s.heap[h]; + bits = tree[tree[n * 2 + 1]/*.Dad*/ * 2 + 1]/*.Len*/ + 1; + if (bits > max_length) { + bits = max_length; + overflow++; + } + tree[n * 2 + 1]/*.Len*/ = bits; + /* We overwrite tree[n].Dad which is no longer needed */ + + if (n > max_code) { continue; } /* not a leaf node */ + + s.bl_count[bits]++; + xbits = 0; + if (n >= base) { + xbits = extra[n - base]; + } + f = tree[n * 2]/*.Freq*/; + s.opt_len += f * (bits + xbits); + if (has_stree) { + s.static_len += f * (stree[n * 2 + 1]/*.Len*/ + xbits); + } + } + if (overflow === 0) { return; } + + // Trace((stderr,"\nbit length overflow\n")); + /* This happens for example on obj2 and pic of the Calgary corpus */ + + /* Find the first bit length which could increase: */ + do { + bits = max_length - 1; + while (s.bl_count[bits] === 0) { bits--; } + s.bl_count[bits]--; /* move one leaf down the tree */ + s.bl_count[bits + 1] += 2; /* move one overflow item as its brother */ + s.bl_count[max_length]--; + /* The brother of the overflow item also moves one step up, + * but this does not affect bl_count[max_length] + */ + overflow -= 2; + } while (overflow > 0); + + /* Now recompute all bit lengths, scanning in increasing frequency. + * h is still equal to HEAP_SIZE. (It is simpler to reconstruct all + * lengths instead of fixing only the wrong ones. This idea is taken + * from 'ar' written by Haruhiko Okumura.) 
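+ * An illustrative pass (editorial note): with max_length = 7 and the first
+ * non-empty shorter length at 6, one iteration performs bl_count[6]--,
+ * bl_count[7] += 2, bl_count[7]--, i.e. one leaf moves from depth 6 to
+ * depth 7 so that an over-long code can sit as its brother at depth 7,
+ * and overflow drops by 2.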
+ */
+  for (bits = max_length; bits !== 0; bits--) {
+    n = s.bl_count[bits];
+    while (n !== 0) {
+      m = s.heap[--h];
+      if (m > max_code) { continue; }
+      if (tree[m * 2 + 1]/*.Len*/ !== bits) {
+        // Trace((stderr,"code %d bits %d->%d\n", m, tree[m].Len, bits));
+        s.opt_len += (bits - tree[m * 2 + 1]/*.Len*/) * tree[m * 2]/*.Freq*/;
+        tree[m * 2 + 1]/*.Len*/ = bits;
+      }
+      n--;
+    }
+  }
+}
+
+
+/* ===========================================================================
+ * Generate the codes for a given tree and bit counts (which need not be
+ * optimal).
+ * IN assertion: the array bl_count contains the bit length statistics for
+ * the given tree and the field len is set for all tree elements.
+ * OUT assertion: the field code is set for all tree elements of non
+ *     zero code length.
+ */
+function gen_codes(tree, max_code, bl_count)
+//    ct_data *tree;             /* the tree to decorate */
+//    int max_code;              /* largest code with non zero frequency */
+//    ushf *bl_count;            /* number of codes at each bit length */
+{
+  var next_code = new Array(MAX_BITS + 1); /* next code value for each bit length */
+  var code = 0;              /* running code value */
+  var bits;                  /* bit index */
+  var n;                     /* code index */
+
+  /* The distribution counts are first used to generate the code values
+   * without bit reversal.
+   */
+  for (bits = 1; bits <= MAX_BITS; bits++) {
+    next_code[bits] = code = (code + bl_count[bits - 1]) << 1;
+  }
+  /* Check that the bit counts in bl_count are consistent. The last code
+   * must be all ones.
+   */
+  //Assert (code + bl_count[MAX_BITS]-1 == (1<<MAX_BITS)-1,
+  //        "inconsistent bit counts");
+
+  for (n = 0; n <= max_code; n++) {
+    var len = tree[n * 2 + 1]/*.Len*/;
+    if (len === 0) { continue; }
+    /* Now reverse the bits */
+    tree[n * 2]/*.Code*/ = bi_reverse(next_code[len]++, len);
+  }
+}
+
+
+/* ===========================================================================
+ * Initialize the various 'constant' tables.
+ */
+function tr_static_init() {
+  var n;        /* iterates over tree elements */
+  var bits;     /* bit counter */
+  var length;   /* length value */
+  var code;     /* code value */
+  var dist;     /* distance index */
+  var bl_count = new Array(MAX_BITS + 1);
+  /* number of codes at each bit length for an optimal tree */
+
+  /* Initialize the mapping length (0..255) -> length code (0..28) */
+  length = 0;
+  for (code = 0; code < LENGTH_CODES - 1; code++) {
+    base_length[code] = length;
+    for (n = 0; n < (1 << extra_lbits[code]); n++) {
+      _length_code[length++] = code;
+    }
+  }
+  //Assert (length == 256, "tr_static_init: length != 256");
+  /* Note that the length 255 (match length 258) can be represented
+   * in two different ways: code 284 + 5 bits or code 285, so we
+   * overwrite length_code[255] to use the best encoding:
+   */
+  _length_code[length - 1] = code;
+
+  /* Initialize the mapping dist (0..32K) -> dist code (0..29) */
+  dist = 0;
+  for (code = 0; code < 16; code++) {
+    base_dist[code] = dist;
+    for (n = 0; n < (1 << extra_dbits[code]); n++) {
+      _dist_code[dist++] = code;
+    }
+  }
+  //Assert (dist == 256, "tr_static_init: dist != 256");
+  dist >>= 7; /* from now on, all distances are divided by 128 */
+  for (; code < D_CODES; code++) {
+    base_dist[code] = dist << 7;
+    for (n = 0; n < (1 << (extra_dbits[code] - 7)); n++) {
+      _dist_code[256 + dist++] = code;
+    }
+  }
+  //Assert (dist == 256, "tr_static_init: 256+dist != 512");
+
+  /* Construct the codes of the static literal tree */
+  for (bits = 0; bits <= MAX_BITS; bits++) {
+    bl_count[bits] = 0;
+  }
+
+  n = 0;
+  while (n <= 143) {
+    static_ltree[n * 2 + 1]/*.Len*/ = 8;
+    n++;
+    bl_count[8]++;
+  }
+  while (n <= 255) {
+    static_ltree[n * 2 + 1]/*.Len*/ = 9;
+    n++;
+    bl_count[9]++;
+  }
+  while (n <= 279) {
+    static_ltree[n * 2 + 1]/*.Len*/ = 7;
+    n++;
+    bl_count[7]++;
+  }
+  while (n <= 287) {
+    static_ltree[n * 2 + 1]/*.Len*/ = 8;
+    n++;
+    bl_count[8]++;
+  }
+  /* Codes 286 and 287 do not exist, but we must include them in the
+   * tree construction to get a canonical Huffman tree (longest code
+   * all ones)
+   */
+  gen_codes(static_ltree, L_CODES + 1, bl_count);
+
+  /* The static distance tree is trivial: */
+  for (n = 0; n < D_CODES; n++) {
+    static_dtree[n * 2 + 1]/*.Len*/ = 5;
+    static_dtree[n * 2]/*.Code*/ = bi_reverse(n, 5);
+  }
+
+  // Now data ready and we can init
static trees + static_l_desc = new StaticTreeDesc(static_ltree, extra_lbits, LITERALS + 1, L_CODES, MAX_BITS); + static_d_desc = new StaticTreeDesc(static_dtree, extra_dbits, 0, D_CODES, MAX_BITS); + static_bl_desc = new StaticTreeDesc(new Array(0), extra_blbits, 0, BL_CODES, MAX_BL_BITS); + + //static_init_done = true; +} + + +/* =========================================================================== + * Initialize a new block. + */ +function init_block(s) { + var n; /* iterates over tree elements */ + + /* Initialize the trees. */ + for (n = 0; n < L_CODES; n++) { s.dyn_ltree[n * 2]/*.Freq*/ = 0; } + for (n = 0; n < D_CODES; n++) { s.dyn_dtree[n * 2]/*.Freq*/ = 0; } + for (n = 0; n < BL_CODES; n++) { s.bl_tree[n * 2]/*.Freq*/ = 0; } + + s.dyn_ltree[END_BLOCK * 2]/*.Freq*/ = 1; + s.opt_len = s.static_len = 0; + s.last_lit = s.matches = 0; +} + + +/* =========================================================================== + * Flush the bit buffer and align the output on a byte boundary + */ +function bi_windup(s) +{ + if (s.bi_valid > 8) { + put_short(s, s.bi_buf); + } else if (s.bi_valid > 0) { + //put_byte(s, (Byte)s->bi_buf); + s.pending_buf[s.pending++] = s.bi_buf; + } + s.bi_buf = 0; + s.bi_valid = 0; +} + +/* =========================================================================== + * Copy a stored block, storing first the length and its + * one's complement if requested. + */ +function copy_block(s, buf, len, header) +//DeflateState *s; +//charf *buf; /* the input data */ +//unsigned len; /* its length */ +//int header; /* true if block header must be written */ +{ + bi_windup(s); /* align on byte boundary */ + + if (header) { + put_short(s, len); + put_short(s, ~len); + } +// while (len--) { +// put_byte(s, *buf++); +// } + utils.arraySet(s.pending_buf, s.window, buf, len, s.pending); + s.pending += len; +} + +/* =========================================================================== + * Compares to subtrees, using the tree depth as tie breaker when + * the subtrees have equal frequency. This minimizes the worst case length. + */ +function smaller(tree, n, m, depth) { + var _n2 = n * 2; + var _m2 = m * 2; + return (tree[_n2]/*.Freq*/ < tree[_m2]/*.Freq*/ || + (tree[_n2]/*.Freq*/ === tree[_m2]/*.Freq*/ && depth[n] <= depth[m])); +} + +/* =========================================================================== + * Restore the heap property by moving down the tree starting at node k, + * exchanging a node with the smallest of its two sons if necessary, stopping + * when the heap property is re-established (each father smaller than its + * two sons). 
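+ * An illustrative step (editorial note): if s.heap = [_, A, B, C] with
+ * freq(A) = 9, freq(B) = 4, freq(C) = 7, then pqdownheap(s, tree, 1) swaps
+ * A with its smaller son B, yielding [_, B, A, C], which is a valid
+ * min-heap.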
+ */ +function pqdownheap(s, tree, k) +// deflate_state *s; +// ct_data *tree; /* the tree to restore */ +// int k; /* node to move down */ +{ + var v = s.heap[k]; + var j = k << 1; /* left son of k */ + while (j <= s.heap_len) { + /* Set j to the smallest of the two sons: */ + if (j < s.heap_len && + smaller(tree, s.heap[j + 1], s.heap[j], s.depth)) { + j++; + } + /* Exit if v is smaller than both sons */ + if (smaller(tree, v, s.heap[j], s.depth)) { break; } + + /* Exchange v with the smallest son */ + s.heap[k] = s.heap[j]; + k = j; + + /* And continue down the tree, setting j to the left son of k */ + j <<= 1; + } + s.heap[k] = v; +} + + +// inlined manually +// var SMALLEST = 1; + +/* =========================================================================== + * Send the block data compressed using the given Huffman trees + */ +function compress_block(s, ltree, dtree) +// deflate_state *s; +// const ct_data *ltree; /* literal tree */ +// const ct_data *dtree; /* distance tree */ +{ + var dist; /* distance of matched string */ + var lc; /* match length or unmatched char (if dist == 0) */ + var lx = 0; /* running index in l_buf */ + var code; /* the code to send */ + var extra; /* number of extra bits to send */ + + if (s.last_lit !== 0) { + do { + dist = (s.pending_buf[s.d_buf + lx * 2] << 8) | (s.pending_buf[s.d_buf + lx * 2 + 1]); + lc = s.pending_buf[s.l_buf + lx]; + lx++; + + if (dist === 0) { + send_code(s, lc, ltree); /* send a literal byte */ + //Tracecv(isgraph(lc), (stderr," '%c' ", lc)); + } else { + /* Here, lc is the match length - MIN_MATCH */ + code = _length_code[lc]; + send_code(s, code + LITERALS + 1, ltree); /* send the length code */ + extra = extra_lbits[code]; + if (extra !== 0) { + lc -= base_length[code]; + send_bits(s, lc, extra); /* send the extra length bits */ + } + dist--; /* dist is now the match distance - 1 */ + code = d_code(dist); + //Assert (code < D_CODES, "bad d_code"); + + send_code(s, code, dtree); /* send the distance code */ + extra = extra_dbits[code]; + if (extra !== 0) { + dist -= base_dist[code]; + send_bits(s, dist, extra); /* send the extra distance bits */ + } + } /* literal or match pair ? */ + + /* Check that the overlay between pending_buf and d_buf+l_buf is ok: */ + //Assert((uInt)(s->pending) < s->lit_bufsize + 2*lx, + // "pendingBuf overflow"); + + } while (lx < s.last_lit); + } + + send_code(s, END_BLOCK, ltree); +} + + +/* =========================================================================== + * Construct one Huffman tree and assigns the code bit strings and lengths. + * Update the total bit length for the current block. + * IN assertion: the field freq is set for all tree elements. + * OUT assertions: the fields len and code are set to the optimal bit length + * and corresponding code. The length opt_len is updated; static_len is + * also updated if stree is not null. The field max_code is set. + */ +function build_tree(s, desc) +// deflate_state *s; +// tree_desc *desc; /* the tree descriptor */ +{ + var tree = desc.dyn_tree; + var stree = desc.stat_desc.static_tree; + var has_stree = desc.stat_desc.has_stree; + var elems = desc.stat_desc.elems; + var n, m; /* iterate over heap elements */ + var max_code = -1; /* largest code with non zero frequency */ + var node; /* new node being created */ + + /* Construct the initial heap, with least frequent element in + * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1]. + * heap[0] is not used. 
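+ * An illustrative run (editorial note): for three leaves with frequencies
+ * {1, 2, 3}, the loop below first merges the 1 and 2 into an internal node
+ * of frequency 3, then merges that node with the remaining leaf, giving a
+ * root of frequency 6; gen_bitlen() and gen_codes() then assign the code
+ * lengths and bit strings.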
+ */ + s.heap_len = 0; + s.heap_max = HEAP_SIZE; + + for (n = 0; n < elems; n++) { + if (tree[n * 2]/*.Freq*/ !== 0) { + s.heap[++s.heap_len] = max_code = n; + s.depth[n] = 0; + + } else { + tree[n * 2 + 1]/*.Len*/ = 0; + } + } + + /* The pkzip format requires that at least one distance code exists, + * and that at least one bit should be sent even if there is only one + * possible code. So to avoid special checks later on we force at least + * two codes of non zero frequency. + */ + while (s.heap_len < 2) { + node = s.heap[++s.heap_len] = (max_code < 2 ? ++max_code : 0); + tree[node * 2]/*.Freq*/ = 1; + s.depth[node] = 0; + s.opt_len--; + + if (has_stree) { + s.static_len -= stree[node * 2 + 1]/*.Len*/; + } + /* node is 0 or 1 so it does not have extra bits */ + } + desc.max_code = max_code; + + /* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree, + * establish sub-heaps of increasing lengths: + */ + for (n = (s.heap_len >> 1/*int /2*/); n >= 1; n--) { pqdownheap(s, tree, n); } + + /* Construct the Huffman tree by repeatedly combining the least two + * frequent nodes. + */ + node = elems; /* next internal node of the tree */ + do { + //pqremove(s, tree, n); /* n = node of least frequency */ + /*** pqremove ***/ + n = s.heap[1/*SMALLEST*/]; + s.heap[1/*SMALLEST*/] = s.heap[s.heap_len--]; + pqdownheap(s, tree, 1/*SMALLEST*/); + /***/ + + m = s.heap[1/*SMALLEST*/]; /* m = node of next least frequency */ + + s.heap[--s.heap_max] = n; /* keep the nodes sorted by frequency */ + s.heap[--s.heap_max] = m; + + /* Create a new node father of n and m */ + tree[node * 2]/*.Freq*/ = tree[n * 2]/*.Freq*/ + tree[m * 2]/*.Freq*/; + s.depth[node] = (s.depth[n] >= s.depth[m] ? s.depth[n] : s.depth[m]) + 1; + tree[n * 2 + 1]/*.Dad*/ = tree[m * 2 + 1]/*.Dad*/ = node; + + /* and insert the new node in the heap */ + s.heap[1/*SMALLEST*/] = node++; + pqdownheap(s, tree, 1/*SMALLEST*/); + + } while (s.heap_len >= 2); + + s.heap[--s.heap_max] = s.heap[1/*SMALLEST*/]; + + /* At this point, the fields freq and dad are set. We can now + * generate the bit lengths. + */ + gen_bitlen(s, desc); + + /* The field len is now set, we can generate the bit codes */ + gen_codes(tree, max_code, s.bl_count); +} + + +/* =========================================================================== + * Scan a literal or distance tree to determine the frequencies of the codes + * in the bit length tree. 
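+ * An illustrative case (editorial note): a run of four zero lengths is
+ * tallied below as a single REPZ_3_10 increment (count 4 <= 10) rather
+ * than as four separate entries in the bit length tree.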
+ */ +function scan_tree(s, tree, max_code) +// deflate_state *s; +// ct_data *tree; /* the tree to be scanned */ +// int max_code; /* and its largest code of non zero frequency */ +{ + var n; /* iterates over all tree elements */ + var prevlen = -1; /* last emitted length */ + var curlen; /* length of current code */ + + var nextlen = tree[0 * 2 + 1]/*.Len*/; /* length of next code */ + + var count = 0; /* repeat count of the current code */ + var max_count = 7; /* max repeat count */ + var min_count = 4; /* min repeat count */ + + if (nextlen === 0) { + max_count = 138; + min_count = 3; + } + tree[(max_code + 1) * 2 + 1]/*.Len*/ = 0xffff; /* guard */ + + for (n = 0; n <= max_code; n++) { + curlen = nextlen; + nextlen = tree[(n + 1) * 2 + 1]/*.Len*/; + + if (++count < max_count && curlen === nextlen) { + continue; + + } else if (count < min_count) { + s.bl_tree[curlen * 2]/*.Freq*/ += count; + + } else if (curlen !== 0) { + + if (curlen !== prevlen) { s.bl_tree[curlen * 2]/*.Freq*/++; } + s.bl_tree[REP_3_6 * 2]/*.Freq*/++; + + } else if (count <= 10) { + s.bl_tree[REPZ_3_10 * 2]/*.Freq*/++; + + } else { + s.bl_tree[REPZ_11_138 * 2]/*.Freq*/++; + } + + count = 0; + prevlen = curlen; + + if (nextlen === 0) { + max_count = 138; + min_count = 3; + + } else if (curlen === nextlen) { + max_count = 6; + min_count = 3; + + } else { + max_count = 7; + min_count = 4; + } + } +} + + +/* =========================================================================== + * Send a literal or distance tree in compressed form, using the codes in + * bl_tree. + */ +function send_tree(s, tree, max_code) +// deflate_state *s; +// ct_data *tree; /* the tree to be scanned */ +// int max_code; /* and its largest code of non zero frequency */ +{ + var n; /* iterates over all tree elements */ + var prevlen = -1; /* last emitted length */ + var curlen; /* length of current code */ + + var nextlen = tree[0 * 2 + 1]/*.Len*/; /* length of next code */ + + var count = 0; /* repeat count of the current code */ + var max_count = 7; /* max repeat count */ + var min_count = 4; /* min repeat count */ + + /* tree[max_code+1].Len = -1; */ /* guard already set */ + if (nextlen === 0) { + max_count = 138; + min_count = 3; + } + + for (n = 0; n <= max_code; n++) { + curlen = nextlen; + nextlen = tree[(n + 1) * 2 + 1]/*.Len*/; + + if (++count < max_count && curlen === nextlen) { + continue; + + } else if (count < min_count) { + do { send_code(s, curlen, s.bl_tree); } while (--count !== 0); + + } else if (curlen !== 0) { + if (curlen !== prevlen) { + send_code(s, curlen, s.bl_tree); + count--; + } + //Assert(count >= 3 && count <= 6, " 3_6?"); + send_code(s, REP_3_6, s.bl_tree); + send_bits(s, count - 3, 2); + + } else if (count <= 10) { + send_code(s, REPZ_3_10, s.bl_tree); + send_bits(s, count - 3, 3); + + } else { + send_code(s, REPZ_11_138, s.bl_tree); + send_bits(s, count - 11, 7); + } + + count = 0; + prevlen = curlen; + if (nextlen === 0) { + max_count = 138; + min_count = 3; + + } else if (curlen === nextlen) { + max_count = 6; + min_count = 3; + + } else { + max_count = 7; + min_count = 4; + } + } +} + + +/* =========================================================================== + * Construct the Huffman tree for the bit lengths and return the index in + * bl_order of the last bit length code to send. 
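+ * An illustrative result (editorial note): when only a few bit length
+ * codes are used, the loop below trims trailing bl_order entries with zero
+ * code length, so fewer than the full 19 three-bit lengths need to be
+ * transmitted (but never fewer than 4).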
+ */ +function build_bl_tree(s) { + var max_blindex; /* index of last bit length code of non zero freq */ + + /* Determine the bit length frequencies for literal and distance trees */ + scan_tree(s, s.dyn_ltree, s.l_desc.max_code); + scan_tree(s, s.dyn_dtree, s.d_desc.max_code); + + /* Build the bit length tree: */ + build_tree(s, s.bl_desc); + /* opt_len now includes the length of the tree representations, except + * the lengths of the bit lengths codes and the 5+5+4 bits for the counts. + */ + + /* Determine the number of bit length codes to send. The pkzip format + * requires that at least 4 bit length codes be sent. (appnote.txt says + * 3 but the actual value used is 4.) + */ + for (max_blindex = BL_CODES - 1; max_blindex >= 3; max_blindex--) { + if (s.bl_tree[bl_order[max_blindex] * 2 + 1]/*.Len*/ !== 0) { + break; + } + } + /* Update opt_len to include the bit length tree and counts */ + s.opt_len += 3 * (max_blindex + 1) + 5 + 5 + 4; + //Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld", + // s->opt_len, s->static_len)); + + return max_blindex; +} + + +/* =========================================================================== + * Send the header for a block using dynamic Huffman trees: the counts, the + * lengths of the bit length codes, the literal tree and the distance tree. + * IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4. + */ +function send_all_trees(s, lcodes, dcodes, blcodes) +// deflate_state *s; +// int lcodes, dcodes, blcodes; /* number of codes for each tree */ +{ + var rank; /* index in bl_order */ + + //Assert (lcodes >= 257 && dcodes >= 1 && blcodes >= 4, "not enough codes"); + //Assert (lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES, + // "too many codes"); + //Tracev((stderr, "\nbl counts: ")); + send_bits(s, lcodes - 257, 5); /* not +255 as stated in appnote.txt */ + send_bits(s, dcodes - 1, 5); + send_bits(s, blcodes - 4, 4); /* not -3 as stated in appnote.txt */ + for (rank = 0; rank < blcodes; rank++) { + //Tracev((stderr, "\nbl code %2d ", bl_order[rank])); + send_bits(s, s.bl_tree[bl_order[rank] * 2 + 1]/*.Len*/, 3); + } + //Tracev((stderr, "\nbl tree: sent %ld", s->bits_sent)); + + send_tree(s, s.dyn_ltree, lcodes - 1); /* literal tree */ + //Tracev((stderr, "\nlit tree: sent %ld", s->bits_sent)); + + send_tree(s, s.dyn_dtree, dcodes - 1); /* distance tree */ + //Tracev((stderr, "\ndist tree: sent %ld", s->bits_sent)); +} + + +/* =========================================================================== + * Check if the data type is TEXT or BINARY, using the following algorithm: + * - TEXT if the two conditions below are satisfied: + * a) There are no non-portable control characters belonging to the + * "black list" (0..6, 14..25, 28..31). + * b) There is at least one printable character belonging to the + * "white list" (9 {TAB}, 10 {LF}, 13 {CR}, 32..255). + * - BINARY otherwise. + * - The following partially-portable control characters form a + * "gray list" that is ignored in this detection algorithm: + * (7 {BEL}, 8 {BS}, 11 {VT}, 12 {FF}, 26 {SUB}, 27 {ESC}). + * IN assertion: the fields Freq of dyn_ltree are set. + */ +function detect_data_type(s) { + /* black_mask is the bit mask of black-listed bytes + * set bits 0..6, 14..25, and 28..31 + * 0xf3ffc07f = binary 11110011111111111100000001111111 + */ + var black_mask = 0xf3ffc07f; + var n; + + /* Check for non-textual ("black-listed") bytes. 
*/ + for (n = 0; n <= 31; n++, black_mask >>>= 1) { + if ((black_mask & 1) && (s.dyn_ltree[n * 2]/*.Freq*/ !== 0)) { + return Z_BINARY; + } + } + + /* Check for textual ("white-listed") bytes. */ + if (s.dyn_ltree[9 * 2]/*.Freq*/ !== 0 || s.dyn_ltree[10 * 2]/*.Freq*/ !== 0 || + s.dyn_ltree[13 * 2]/*.Freq*/ !== 0) { + return Z_TEXT; + } + for (n = 32; n < LITERALS; n++) { + if (s.dyn_ltree[n * 2]/*.Freq*/ !== 0) { + return Z_TEXT; + } + } + + /* There are no "black-listed" or "white-listed" bytes: + * this stream either is empty or has tolerated ("gray-listed") bytes only. + */ + return Z_BINARY; +} + + +var static_init_done = false; + +/* =========================================================================== + * Initialize the tree data structures for a new zlib stream. + */ +function _tr_init(s) +{ + + if (!static_init_done) { + tr_static_init(); + static_init_done = true; + } + + s.l_desc = new TreeDesc(s.dyn_ltree, static_l_desc); + s.d_desc = new TreeDesc(s.dyn_dtree, static_d_desc); + s.bl_desc = new TreeDesc(s.bl_tree, static_bl_desc); + + s.bi_buf = 0; + s.bi_valid = 0; + + /* Initialize the first block of the first file: */ + init_block(s); +} + + +/* =========================================================================== + * Send a stored block + */ +function _tr_stored_block(s, buf, stored_len, last) +//DeflateState *s; +//charf *buf; /* input block */ +//ulg stored_len; /* length of input block */ +//int last; /* one if this is the last block for a file */ +{ + send_bits(s, (STORED_BLOCK << 1) + (last ? 1 : 0), 3); /* send block type */ + copy_block(s, buf, stored_len, true); /* with header */ +} + + +/* =========================================================================== + * Send one empty static block to give enough lookahead for inflate. + * This takes 10 bits, of which 7 may remain in the bit buffer. + */ +function _tr_align(s) { + send_bits(s, STATIC_TREES << 1, 3); + send_code(s, END_BLOCK, static_ltree); + bi_flush(s); +} + + +/* =========================================================================== + * Determine the best encoding for the current block: dynamic trees, static + * trees or store, and output the encoded block to the zip file. + */ +function _tr_flush_block(s, buf, stored_len, last) +//DeflateState *s; +//charf *buf; /* input block, or NULL if too old */ +//ulg stored_len; /* length of input block */ +//int last; /* one if this is the last block for a file */ +{ + var opt_lenb, static_lenb; /* opt_len and static_len in bytes */ + var max_blindex = 0; /* index of last bit length code of non zero freq */ + + /* Build the Huffman trees unless a stored block is forced */ + if (s.level > 0) { + + /* Check if the file is binary or text */ + if (s.strm.data_type === Z_UNKNOWN) { + s.strm.data_type = detect_data_type(s); + } + + /* Construct the literal and distance trees */ + build_tree(s, s.l_desc); + // Tracev((stderr, "\nlit data: dyn %ld, stat %ld", s->opt_len, + // s->static_len)); + + build_tree(s, s.d_desc); + // Tracev((stderr, "\ndist data: dyn %ld, stat %ld", s->opt_len, + // s->static_len)); + /* At this point, opt_len and static_len are the total bit lengths of + * the compressed block data, excluding the tree representations. + */ + + /* Build the bit length tree for the above two trees, and get the index + * in bl_order of the last bit length code to send. + */ + max_blindex = build_bl_tree(s); + + /* Determine the best encoding. Compute the block lengths in bytes. 
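+ * Illustrative numbers (editorial note): with opt_lenb = 120 bytes,
+ * static_lenb = 150 and stored_len + 4 = 200, the dynamic-tree branch is
+ * taken; a short incompressible block where stored_len + 4 <= opt_lenb
+ * would instead be emitted as a stored block.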
*/ + opt_lenb = (s.opt_len + 3 + 7) >>> 3; + static_lenb = (s.static_len + 3 + 7) >>> 3; + + // Tracev((stderr, "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u ", + // opt_lenb, s->opt_len, static_lenb, s->static_len, stored_len, + // s->last_lit)); + + if (static_lenb <= opt_lenb) { opt_lenb = static_lenb; } + + } else { + // Assert(buf != (char*)0, "lost buf"); + opt_lenb = static_lenb = stored_len + 5; /* force a stored block */ + } + + if ((stored_len + 4 <= opt_lenb) && (buf !== -1)) { + /* 4: two words for the lengths */ + + /* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE. + * Otherwise we can't have processed more than WSIZE input bytes since + * the last block flush, because compression would have been + * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to + * transform a block into a stored block. + */ + _tr_stored_block(s, buf, stored_len, last); + + } else if (s.strategy === Z_FIXED || static_lenb === opt_lenb) { + + send_bits(s, (STATIC_TREES << 1) + (last ? 1 : 0), 3); + compress_block(s, static_ltree, static_dtree); + + } else { + send_bits(s, (DYN_TREES << 1) + (last ? 1 : 0), 3); + send_all_trees(s, s.l_desc.max_code + 1, s.d_desc.max_code + 1, max_blindex + 1); + compress_block(s, s.dyn_ltree, s.dyn_dtree); + } + // Assert (s->compressed_len == s->bits_sent, "bad compressed size"); + /* The above check is made mod 2^32, for files larger than 512 MB + * and uLong implemented on 32 bits. + */ + init_block(s); + + if (last) { + bi_windup(s); + } + // Tracev((stderr,"\ncomprlen %lu(%lu) ", s->compressed_len>>3, + // s->compressed_len-7*last)); +} + +/* =========================================================================== + * Save the match info and tally the frequency counts. Return true if + * the current block must be flushed. + */ +function _tr_tally(s, dist, lc) +// deflate_state *s; +// unsigned dist; /* distance of matched string */ +// unsigned lc; /* match length-MIN_MATCH or unmatched char (if dist==0) */ +{ + //var out_length, in_length, dcode; + + s.pending_buf[s.d_buf + s.last_lit * 2] = (dist >>> 8) & 0xff; + s.pending_buf[s.d_buf + s.last_lit * 2 + 1] = dist & 0xff; + + s.pending_buf[s.l_buf + s.last_lit] = lc & 0xff; + s.last_lit++; + + if (dist === 0) { + /* lc is the unmatched char */ + s.dyn_ltree[lc * 2]/*.Freq*/++; + } else { + s.matches++; + /* Here, lc is the match length - MIN_MATCH */ + dist--; /* dist = match distance - 1 */ + //Assert((ush)dist < (ush)MAX_DIST(s) && + // (ush)lc <= (ush)(MAX_MATCH-MIN_MATCH) && + // (ush)d_code(dist) < (ush)D_CODES, "_tr_tally: bad match"); + + s.dyn_ltree[(_length_code[lc] + LITERALS + 1) * 2]/*.Freq*/++; + s.dyn_dtree[d_code(dist) * 2]/*.Freq*/++; + } + +// (!) 
This block is disabled in zlib defaults, +// don't enable it for binary compatibility + +//#ifdef TRUNCATE_BLOCK +// /* Try to guess if it is profitable to stop the current block here */ +// if ((s.last_lit & 0x1fff) === 0 && s.level > 2) { +// /* Compute an upper bound for the compressed length */ +// out_length = s.last_lit*8; +// in_length = s.strstart - s.block_start; +// +// for (dcode = 0; dcode < D_CODES; dcode++) { +// out_length += s.dyn_dtree[dcode*2]/*.Freq*/ * (5 + extra_dbits[dcode]); +// } +// out_length >>>= 3; +// //Tracev((stderr,"\nlast_lit %u, in %ld, out ~%ld(%ld%%) ", +// // s->last_lit, in_length, out_length, +// // 100L - out_length*100L/in_length)); +// if (s.matches < (s.last_lit>>1)/*int /2*/ && out_length < (in_length>>1)/*int /2*/) { +// return true; +// } +// } +//#endif + + return (s.last_lit === s.lit_bufsize - 1); + /* We avoid equality with lit_bufsize because of wraparound at 64K + * on 16 bit machines and because stored blocks are restricted to + * 64K-1 bytes. + */ +} + +exports._tr_init = _tr_init; +exports._tr_stored_block = _tr_stored_block; +exports._tr_flush_block = _tr_flush_block; +exports._tr_tally = _tr_tally; +exports._tr_align = _tr_align; + + +/***/ }), + +/***/ 86442: +/***/ ((module) => { + +"use strict"; + + +// (C) 1995-2013 Jean-loup Gailly and Mark Adler +// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin +// +// This software is provided 'as-is', without any express or implied +// warranty. In no event will the authors be held liable for any damages +// arising from the use of this software. +// +// Permission is granted to anyone to use this software for any purpose, +// including commercial applications, and to alter it and redistribute it +// freely, subject to the following restrictions: +// +// 1. The origin of this software must not be misrepresented; you must not +// claim that you wrote the original software. If you use this software +// in a product, an acknowledgment in the product documentation would be +// appreciated but is not required. +// 2. Altered source versions must be plainly marked as such, and must not be +// misrepresented as being the original software. +// 3. This notice may not be removed or altered from any source distribution. + +function ZStream() { + /* next input byte */ + this.input = null; // JS specific, because we have no pointers + this.next_in = 0; + /* number of bytes available at input */ + this.avail_in = 0; + /* total number of input bytes read so far */ + this.total_in = 0; + /* next output byte should be put there */ + this.output = null; // JS specific, because we have no pointers + this.next_out = 0; + /* remaining free space at output */ + this.avail_out = 0; + /* total number of bytes output so far */ + this.total_out = 0; + /* last error message, NULL if no error */ + this.msg = ''/*Z_NULL*/; + /* not visible by applications */ + this.state = null; + /* best guess about the data type: binary or text */ + this.data_type = 2/*Z_UNKNOWN*/; + /* adler32 value of the uncompressed data */ + this.adler = 0; +} + +module.exports = ZStream; + + /***/ }), /***/ 94833: @@ -50374,13472 +72526,79 @@ function _typeof(obj){"@babel/helpers - typeof";return _typeof="function"==typeo /***/ }), -/***/ 71480: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 64810: +/***/ ((module) => { "use strict"; -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.DefaultChangelogNotes = void 0; -// eslint-disable-next-line @typescript-eslint/no-var-requires -const conventionalChangelogWriter = __nccwpck_require__(86207); -// eslint-disable-next-line @typescript-eslint/no-var-requires -const presetFactory = __nccwpck_require__(88761); -const DEFAULT_HOST = 'https://github.com'; -class DefaultChangelogNotes { - constructor(options = {}) { - this.commitPartial = options.commitPartial; - this.headerPartial = options.headerPartial; - this.mainTemplate = options.mainTemplate; - } - async buildNotes(commits, options) { - const context = { - host: options.host || DEFAULT_HOST, - owner: options.owner, - repository: options.repository, - version: options.version, - previousTag: options.previousTag, - currentTag: options.currentTag, - linkCompare: !!options.previousTag, - }; - const config = {}; - if (options.changelogSections) { - config.types = options.changelogSections; - } - const preset = await presetFactory(config); - preset.writerOpts.commitPartial = - this.commitPartial || preset.writerOpts.commitPartial; - preset.writerOpts.headerPartial = - this.headerPartial || preset.writerOpts.headerPartial; - preset.writerOpts.mainTemplate = - this.mainTemplate || preset.writerOpts.mainTemplate; - const changelogCommits = commits.map(commit => { - const notes = commit.notes - .filter(note => note.title === 'BREAKING CHANGE') - .map(note => replaceIssueLink(note, context.host, context.owner, context.repository)); - return { - body: '', - subject: htmlEscape(commit.bareMessage), - type: commit.type, - scope: commit.scope, - notes, - references: commit.references, - mentions: [], - merge: null, - revert: null, - header: commit.message, - footer: commit.notes - .filter(note => note.title === 'RELEASE AS') - .map(note => `Release-As: ${note.text}`) - .join('\n'), - hash: commit.sha, - }; - }); - return conventionalChangelogWriter - .parseArray(changelogCommits, context, preset.writerOpts) - .trim(); - } -} -exports.DefaultChangelogNotes = DefaultChangelogNotes; -function replaceIssueLink(note, host, owner, repo) { - note.text = note.text.replace(/\(#(\d+)\)/, `([#$1](${host}/${owner}/${repo}/issues/$1))`); - return note; -} -function htmlEscape(message) { - return message.replace(/``[^`].*[^`]``|`[^`]*`|<|>/g, match => match.length > 1 ? match : match === '<' ? 
'&lt;' : '&gt;');
-}
-//# sourceMappingURL=default.js.map
-
-/***/ }),
-
-/***/ 88433:
-/***/ ((__unused_webpack_module, exports) => {
-
-"use strict";
-
-// Copyright 2021 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.GitHubChangelogNotes = void 0;
-class GitHubChangelogNotes {
-    constructor(github) {
-        this.github = github;
-    }
-    async buildNotes(_commits, options) {
-        const body = await this.github.generateReleaseNotes(options.currentTag, options.targetBranch, options.previousTag);
-        const date = new Date().toLocaleDateString('en-CA');
-        const header = `## ${options.version} (${date})`;
-        return `${header}\n\n${body}`;
-    }
-}
-exports.GitHubChangelogNotes = GitHubChangelogNotes;
-//# sourceMappingURL=github.js.map
-
-/***/ }),
-
-/***/ 69158:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
-
-"use strict";
-
-// Copyright 2021 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.parseConventionalCommits = void 0; -// eslint-disable-next-line @typescript-eslint/no-var-requires -const visit = __nccwpck_require__(80199); -// eslint-disable-next-line @typescript-eslint/no-var-requires -const visitWithAncestors = __nccwpck_require__(13246); -const NUMBER_REGEX = /^[0-9]+$/; -const logger_1 = __nccwpck_require__(68809); -const parser = __nccwpck_require__(74523); -// eslint-disable-next-line @typescript-eslint/no-var-requires -const conventionalCommitsFilter = __nccwpck_require__(55003); -function getBlankConventionalCommit() { - return { - body: '', - subject: '', - type: '', - scope: null, - notes: [], - references: [], - mentions: [], - merge: null, - revert: null, - header: '', - footer: null, - }; -} -// Converts conventional commit AST into conventional-changelog's -// output format, see: https://www.npmjs.com/package/conventional-commits-parser -function toConventionalChangelogFormat(ast) { - const commits = []; - const headerCommit = getBlankConventionalCommit(); - // Separate the body and summary nodes, this simplifies the subsequent - // tree walking logic: - let body; - let summary; - visit(ast, ['body', 'summary'], (node) => { - switch (node.type) { - case 'body': - body = node; - break; - case 'summary': - summary = node; - break; - } - }); - // , "(", , ")", ["!"], ":", *, - visit(summary, (node) => { - switch (node.type) { - case 'type': - headerCommit.type = node.value; - headerCommit.header += node.value; - break; - case 'scope': - headerCommit.scope = node.value; - headerCommit.header += `(${node.value})`; - break; - case 'breaking-change': - headerCommit.header += '!'; - break; - case 'text': - headerCommit.subject = node.value; - headerCommit.header += `: ${node.value}`; - break; - default: - break; - } - }); - // [] - if (body) { - visit(body, ['text', 'newline'], (node) => { - headerCommit.body += node.value; - }); - } - // Extract BREAKING CHANGE notes, regardless of whether they fall in - // summary, body, or footer: - const breaking = { - title: 'BREAKING CHANGE', - text: '', // "text" will be populated if a BREAKING CHANGE token is parsed. - }; - visitWithAncestors(ast, ['breaking-change'], (node, ancestors) => { - let hitBreakingMarker = false; - let parent = ancestors.pop(); - if (!parent) { - return; - } - switch (parent.type) { - case 'summary': - breaking.text = headerCommit.subject; - break; - case 'body': - breaking.text = ''; - // We treat text from the BREAKING CHANGE marker forward as - // the breaking change notes: - visit(parent, ['breaking-change', 'text', 'newline'], (node) => { - if (node.type === 'breaking-change') { - hitBreakingMarker = true; - return; - } - if (!hitBreakingMarker) - return; - breaking.text += node.value; - }); - break; - case 'token': - // If the '!' 
breaking change marker is used, the breaking change - // will be identified when the footer is parsed as a commit: - if (!node.value.includes('BREAKING')) - return; - parent = ancestors.pop(); - visit(parent, ['text', 'newline'], (node) => { - breaking.text = node.value; - }); - break; - } - }); - // Add additional breaking change detection from commit body - if (body) { - const bodyString = String(body); - const breakingChangeMatch = bodyString.match(/BREAKING-CHANGE:\s*(.*)/); - if (breakingChangeMatch && breakingChangeMatch[1]) { - if (breaking.text) { - breaking.text += '\n'; - } - breaking.text += breakingChangeMatch[1].trim(); - } - } - if (breaking.text !== '') - headerCommit.notes.push(breaking); - // Populates references array from footers: - // references: [{ - // action: 'Closes', - // owner: null, - // repository: null, - // issue: '1', raw: '#1', - // prefix: '#' - // }] - visit(ast, ['footer'], (node) => { - const reference = { - prefix: '#', - action: '', - issue: '', - }; - let hasRefSepartor = false; - visit(node, ['type', 'separator', 'text'], (node) => { - switch (node.type) { - case 'type': - // refs, closes, etc: - // TODO(@bcoe): conventional-changelog does not currently use - // "reference.action" in its templates: - reference.action = node.value; - break; - case 'separator': - // Footer of the form "Refs #99": - if (node.value.includes('#')) - hasRefSepartor = true; - break; - case 'text': - // Footer of the form "Refs: #99" - if (node.value.charAt(0) === '#') { - hasRefSepartor = true; - reference.issue = node.value.substring(1); - // TODO(@bcoe): what about references like "Refs: #99, #102"? - } - else { - reference.issue = node.value; - } - break; - } - }); - // TODO(@bcoe): how should references like "Refs: v8:8940" work. - if (hasRefSepartor && reference.issue.match(NUMBER_REGEX)) { - headerCommit.references.push(reference); - } - }); - /* - * Split footers that resemble commits into additional commits, e.g., - * chore: multiple commits - * chore(recaptchaenterprise): migrate recaptchaenterprise to the Java microgenerator - * Committer: @miraleung - * PiperOrigin-RevId: 345559154 - * ... - */ - visitWithAncestors(ast, ['type'], (node, ancestors) => { - let parent = ancestors.pop(); - if (!parent) { - return; - } - if (parent.type === 'token') { - parent = ancestors.pop(); - let footerText = ''; - const semanticFooter = node.value.toLowerCase() === 'release-as'; - visit(parent, ['type', 'scope', 'breaking-change', 'separator', 'text', 'newline'], (node) => { - switch (node.type) { - case 'scope': - footerText += `(${node.value})`; - break; - case 'separator': - // Footers of the form Fixes #99, should not be parsed. - if (node.value.includes('#')) - return; - footerText += `${node.value} `; - break; - default: - footerText += node.value; - break; - } - }); - // Any footers that carry semantic meaning, e.g., Release-As, should - // be added to the footer field, for the benefits of post-processing: - if (semanticFooter) { - let releaseAs = ''; - visit(parent, ['text'], (node) => { - releaseAs = node.value; - }); - // record Release-As footer as a note - headerCommit.notes.push({ - title: 'RELEASE AS', - text: releaseAs, - }); - if (!headerCommit.footer) - headerCommit.footer = ''; - headerCommit.footer += `\n${footerText.toLowerCase()}`.trimStart(); - } - try { - for (const commit of toConventionalChangelogFormat(parser.parser(footerText))) { - commits.push(commit); - } - } - catch (err) { - // Footer does not appear to be an additional commit. 
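// Editorial sketch (not part of this diff): for a message such as
//   "chore: prepare release\n\nRelease-As: 2.0.0"
// the semantic-footer branch above records the note
//   { title: 'RELEASE AS', text: '2.0.0' }
// which DefaultChangelogNotes later renders as a `Release-As: 2.0.0` line.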
- } - } - }); - commits.push(headerCommit); - return commits; -} -// TODO(@bcoe): now that we walk the actual AST of conventional commits -// we should be able to move post processing into -// to-conventional-changelog.ts. -function postProcessCommits(commit) { - var _a; - commit.notes.forEach(note => { - let text = ''; - let i = 0; - let extendedContext = false; - for (const chunk of note.text.split(/\r?\n/)) { - if (i > 0 && hasExtendedContext(chunk) && !extendedContext) { - text = `${text.trim()}\n`; - extendedContext = true; - } - if (chunk === '') - break; - else if (extendedContext) { - text += ` ${chunk}\n`; - } - else { - text += `${chunk} `; - } - i++; - } - note.text = text.trim(); - }); - const breakingChangeMatch = (_a = commit.body) === null || _a === void 0 ? void 0 : _a.match(/BREAKING-CHANGE:\s*(.*)/); - if (breakingChangeMatch && breakingChangeMatch[1]) { - const existingNote = commit.notes.find(note => note.title === 'BREAKING CHANGE'); - if (existingNote) { - existingNote.text += `\n${breakingChangeMatch[1].trim()}`; - } - else { - commit.notes.push({ - title: 'BREAKING CHANGE', - text: breakingChangeMatch[1].trim(), - }); - } - } - return commit; -} -// If someone wishes to include additional contextual information for a -// BREAKING CHANGE using markdown, they can do so by starting the line after the initial -// breaking change description with either: -// -// 1. a fourth-level header. -// 2. a bulleted list (using either '*' or '-'). -// -// BREAKING CHANGE: there were breaking changes -// #### Deleted Endpoints -// - endpoint 1 -// - endpoint 2 -function hasExtendedContext(line) { - if (line.match(/^#### |^[*-] /)) - return true; - return false; -} -function parseCommits(message) { - return conventionalCommitsFilter(toConventionalChangelogFormat(parser.parser(message))).map(postProcessCommits); -} -/** - * Splits a commit message into multiple messages based on conventional commit format and nested commit blocks. - * This function is capable of: - * 1. Separating conventional commits (feat, fix, docs, etc.) within the main message. - * 2. Extracting nested commits enclosed in BEGIN_NESTED_COMMIT/END_NESTED_COMMIT blocks. - * 3. Preserving the original message structure outside of nested commit blocks. - * 4. Handling multiple nested commits and conventional commits in a single message. - * - * @param message The input commit message string - * @returns An array of individual commit messages - */ -function splitMessages(message) { - const parts = message.split('BEGIN_NESTED_COMMIT'); - const messages = [parts.shift()]; - for (const part of parts) { - const [newMessage, ...rest] = part.split('END_NESTED_COMMIT'); - messages.push(newMessage); - messages[0] = messages[0] + rest.join('END_NESTED_COMMIT'); - } - const conventionalCommits = messages[0] - .split(/\r?\n\r?\n(?=(?:feat|fix|docs|style|refactor|perf|test|build|ci|chore|revert)(?:\(.*?\))?: )/) - .filter(Boolean); - return [...conventionalCommits, ...messages.slice(1)]; -} -/** - * Given a list of raw commits, parse and expand into conventional commits. - * - * @param commits {Commit[]} The input commits - * - * @returns {ConventionalCommit[]} Parsed and expanded commits. There may be - * more commits returned as a single raw commit may contain multiple release - * messages. 
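 * An illustrative split (editorial note): the message
 *   "feat: a\n\nfix: b\n\nBEGIN_NESTED_COMMIT\ndocs: c\nEND_NESTED_COMMIT"
 * yields three messages from splitMessages() ("feat: a", "fix: b" and
 * "docs: c"), so a single squashed commit can produce three entries here.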
- */ -function parseConventionalCommits(commits, logger = logger_1.logger) { - const conventionalCommits = []; - for (const commit of commits) { - for (const commitMessage of splitMessages(preprocessCommitMessage(commit))) { - try { - for (const parsedCommit of parseCommits(commitMessage)) { - const breaking = parsedCommit.notes.filter(note => note.title === 'BREAKING CHANGE') - .length > 0; - conventionalCommits.push({ - sha: commit.sha, - message: parsedCommit.header, - files: commit.files, - pullRequest: commit.pullRequest, - type: parsedCommit.type, - scope: parsedCommit.scope, - bareMessage: parsedCommit.subject, - notes: parsedCommit.notes, - references: parsedCommit.references, - breaking, - }); - } - } - catch (_err) { - logger.debug(`commit could not be parsed: ${commit.sha} ${commit.message.split('\n')[0]}`); - logger.debug(`error message: ${_err}`); - } - } - } - return conventionalCommits; -} -exports.parseConventionalCommits = parseConventionalCommits; -function preprocessCommitMessage(commit) { - // look for 'BEGIN_COMMIT_OVERRIDE' section of pull request body - if (commit.pullRequest) { - const overrideMessage = (commit.pullRequest.body.split('BEGIN_COMMIT_OVERRIDE')[1] || '') - .split('END_COMMIT_OVERRIDE')[0] - .trim(); - if (overrideMessage) { - return overrideMessage; - } - } - return commit.message; -} -//# sourceMappingURL=commit.js.map - -/***/ }), - -/***/ 93637: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.FileNotFoundError = exports.DuplicateReleaseError = exports.AuthError = exports.GitHubAPIError = exports.MissingRequiredFileError = exports.ConfigurationError = void 0; -class ConfigurationError extends Error { - constructor(message, releaserName, repository) { - super(`${releaserName} (${repository}): ${message}`); - this.releaserName = releaserName; - this.repository = repository; - this.name = ConfigurationError.name; - } -} -exports.ConfigurationError = ConfigurationError; -class MissingRequiredFileError extends ConfigurationError { - constructor(file, releaserName, repository) { - super(`Missing required file: ${file}`, releaserName, repository); - this.file = file; - this.name = MissingRequiredFileError.name; - } -} -exports.MissingRequiredFileError = MissingRequiredFileError; -class GitHubAPIError extends Error { - constructor(requestError, message) { - super(message !== null && message !== void 0 ? message : requestError.message); - this.status = requestError.status; - this.body = GitHubAPIError.parseErrorBody(requestError); - this.name = GitHubAPIError.name; - this.cause = requestError; - this.stack = requestError.stack; - } - static parseErrorBody(requestError) { - const body = requestError.response; - return (body === null || body === void 0 ? 
void 0 : body.data) || undefined; - } - static parseErrors(requestError) { - var _a; - return ((_a = GitHubAPIError.parseErrorBody(requestError)) === null || _a === void 0 ? void 0 : _a.errors) || []; - } -} -exports.GitHubAPIError = GitHubAPIError; -class AuthError extends GitHubAPIError { - constructor(requestError) { - super(requestError, 'unauthorized'); - this.status = 401; - this.name = AuthError.name; - } -} -exports.AuthError = AuthError; -class DuplicateReleaseError extends GitHubAPIError { - constructor(requestError, tag) { - super(requestError); - this.tag = tag; - this.name = DuplicateReleaseError.name; - } -} -exports.DuplicateReleaseError = DuplicateReleaseError; -class FileNotFoundError extends Error { - constructor(path) { - super(`Failed to find file: ${path}`); - this.path = path; - this.name = FileNotFoundError.name; - } -} -exports.FileNotFoundError = FileNotFoundError; -//# sourceMappingURL=index.js.map - -/***/ }), - -/***/ 3095: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getChangelogTypes = exports.unregisterChangelogNotes = exports.registerChangelogNotes = exports.buildChangelogNotes = void 0; -const github_1 = __nccwpck_require__(88433); -const default_1 = __nccwpck_require__(71480); -const errors_1 = __nccwpck_require__(93637); -const changelogNotesFactories = { - github: options => new github_1.GitHubChangelogNotes(options.github), - default: options => new default_1.DefaultChangelogNotes(options), + fn.apply(this, args); + }); }; -function buildChangelogNotes(options) { - const builder = changelogNotesFactories[options.type]; - if (builder) { - return builder(options); - } - throw new errors_1.ConfigurationError(`Unknown changelog type: ${options.type}`, 'core', `${options.github.repository.owner}/${options.github.repository.repo}`); -} -exports.buildChangelogNotes = buildChangelogNotes; -function registerChangelogNotes(name, changelogNotesBuilder) { - changelogNotesFactories[name] = changelogNotesBuilder; -} -exports.registerChangelogNotes = registerChangelogNotes; -function unregisterChangelogNotes(name) { - delete changelogNotesFactories[name]; -} -exports.unregisterChangelogNotes = unregisterChangelogNotes; -function getChangelogTypes() { - return Object.keys(changelogNotesFactories).sort(); -} -exports.getChangelogTypes = getChangelogTypes; -//# sourceMappingURL=changelog-notes-factory.js.map -/***/ }), +module.exports = (input, options) => { + options = Object.assign({ + exclude: [/.+(Sync|Stream)$/], + errorFirst: true, + promiseModule: Promise + }, options); -/***/ 56259: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + const objType = typeof input; + if (!(input !== null && (objType === 'object' || objType === 'function'))) { + throw new TypeError(`Expected \`input\` to be a \`Function\` or \`Object\`, got \`${input === null ? 
'null' : objType}\``); + } -"use strict"; + const filter = key => { + const match = pattern => typeof pattern === 'string' ? key === pattern : pattern.test(key); + return options.include ? options.include.some(match) : !options.exclude.some(match); + }; -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getPluginTypes = exports.unregisterPlugin = exports.registerPlugin = exports.buildPlugin = void 0; -const linked_versions_1 = __nccwpck_require__(59641); -const cargo_workspace_1 = __nccwpck_require__(77430); -const node_workspace_1 = __nccwpck_require__(23256); -const maven_workspace_1 = __nccwpck_require__(66113); -const errors_1 = __nccwpck_require__(93637); -const sentence_case_1 = __nccwpck_require__(36662); -const group_priority_1 = __nccwpck_require__(83172); -const pluginFactories = { - 'linked-versions': options => new linked_versions_1.LinkedVersions(options.github, options.targetBranch, options.repositoryConfig, options.type.groupName, options.type.components, { - ...options, - ...options.type, - }), - 'cargo-workspace': options => new cargo_workspace_1.CargoWorkspace(options.github, options.targetBranch, options.repositoryConfig, { - ...options, - ...options.type, - }), - 'node-workspace': options => new node_workspace_1.NodeWorkspace(options.github, options.targetBranch, options.repositoryConfig, { - ...options, - ...options.type, - }), - 'maven-workspace': options => new maven_workspace_1.MavenWorkspace(options.github, options.targetBranch, options.repositoryConfig, { - ...options, - ...options.type, - }), - 'sentence-case': options => new sentence_case_1.SentenceCase(options.github, options.targetBranch, options.repositoryConfig, options.type.specialWords), - 'group-priority': options => new group_priority_1.GroupPriority(options.github, options.targetBranch, options.repositoryConfig, options.type.groups), + let ret; + if (objType === 'function') { + ret = function (...args) { + return options.excludeMain ? input(...args) : processFn(input, options).apply(this, args); + }; + } else { + ret = Object.create(Object.getPrototypeOf(input)); + } + + for (const key in input) { // eslint-disable-line guard-for-in + const property = input[key]; + ret[key] = typeof property === 'function' && filter(key) ? 
processFn(property, options) : property; + } + + return ret; }; -function buildPlugin(options) { - if (typeof options.type === 'object') { - const builder = pluginFactories[options.type.type]; - if (builder) { - return builder({ - ...options.type, - ...options, - }); - } - throw new errors_1.ConfigurationError(`Unknown plugin type: ${options.type.type}`, 'core', `${options.github.repository.owner}/${options.github.repository.repo}`); - } - else { - const builder = pluginFactories[options.type]; - if (builder) { - return builder(options); - } - throw new errors_1.ConfigurationError(`Unknown plugin type: ${options.type}`, 'core', `${options.github.repository.owner}/${options.github.repository.repo}`); - } -} -exports.buildPlugin = buildPlugin; -function registerPlugin(name, pluginBuilder) { - pluginFactories[name] = pluginBuilder; -} -exports.registerPlugin = registerPlugin; -function unregisterPlugin(name) { - delete pluginFactories[name]; -} -exports.unregisterPlugin = unregisterPlugin; -function getPluginTypes() { - return Object.keys(pluginFactories).sort(); -} -exports.getPluginTypes = getPluginTypes; -//# sourceMappingURL=plugin-factory.js.map -/***/ }), - -/***/ 11833: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
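// Illustrative sketch (editor-added, not part of the bundle): the plugin
// factory above is a plain registry keyed by name, so a consumer can install
// a custom plugin type before building a manifest. `MyPlugin` is a
// hypothetical class standing in for an implementation of the plugin
// interface the built-in entries satisfy.
const {registerPlugin, getPluginTypes} = require('release-please');
class MyPlugin {
    constructor(github, targetBranch, repositoryConfig) {
        this.github = github;
        this.targetBranch = targetBranch;
        this.repositoryConfig = repositoryConfig;
    }
}
registerPlugin('my-plugin', options =>
    // mirror the built-in factory entries: build the plugin from the options
    new MyPlugin(options.github, options.targetBranch, options.repositoryConfig));
console.log(getPluginTypes()); // sorted list now includes 'my-plugin'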
-Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getVersioningStrategyTypes = exports.unregisterVersioningStrategy = exports.registerVersioningStrategy = exports.buildVersioningStrategy = void 0; -const default_1 = __nccwpck_require__(94073); -const always_bump_patch_1 = __nccwpck_require__(82926); -const always_bump_minor_1 = __nccwpck_require__(9657); -const always_bump_major_1 = __nccwpck_require__(51346); -const service_pack_1 = __nccwpck_require__(56772); -const errors_1 = __nccwpck_require__(93637); -const prerelease_1 = __nccwpck_require__(86185); -const versioningTypes = { - default: options => new default_1.DefaultVersioningStrategy(options), - 'always-bump-patch': options => new always_bump_patch_1.AlwaysBumpPatch(options), - 'always-bump-minor': options => new always_bump_minor_1.AlwaysBumpMinor(options), - 'always-bump-major': options => new always_bump_major_1.AlwaysBumpMajor(options), - 'service-pack': options => new service_pack_1.ServicePackVersioningStrategy(options), - prerelease: options => new prerelease_1.PrereleaseVersioningStrategy(options), -}; -function buildVersioningStrategy(options) { - const builder = versioningTypes[options.type || 'default']; - if (builder) { - return builder(options); - } - throw new errors_1.ConfigurationError(`Unknown versioning strategy type: ${options.type}`, 'core', `${options.github.repository.owner}/${options.github.repository.repo}`); -} -exports.buildVersioningStrategy = buildVersioningStrategy; -function registerVersioningStrategy(name, versioningStrategyBuilder) { - versioningTypes[name] = versioningStrategyBuilder; -} -exports.registerVersioningStrategy = registerVersioningStrategy; -function unregisterVersioningStrategy(name) { - delete versioningTypes[name]; -} -exports.unregisterVersioningStrategy = unregisterVersioningStrategy; -function getVersioningStrategyTypes() { - return Object.keys(versioningTypes).sort(); -} -exports.getVersioningStrategyTypes = getVersioningStrategyTypes; -//# sourceMappingURL=versioning-strategy-factory.js.map - -/***/ }), - -/***/ 75695: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -// Copyright 2019 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __exportStar = (this && this.__exportStar) || function(m, exports) { - for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getReleaserTypes = exports.unregisterReleaseType = exports.registerReleaseType = exports.buildStrategy = void 0; -const errors_1 = __nccwpck_require__(93637); -const changelog_notes_factory_1 = __nccwpck_require__(3095); -const versioning_strategy_factory_1 = __nccwpck_require__(11833); -const bazel_1 = __nccwpck_require__(76082); -const dart_1 = __nccwpck_require__(86518); -const dotnet_yoshi_1 = __nccwpck_require__(98175); -const elixir_1 = __nccwpck_require__(91731); -const expo_1 = __nccwpck_require__(181); -const go_1 = __nccwpck_require__(45953); -const go_yoshi_1 = __nccwpck_require__(6492); -const helm_1 = __nccwpck_require__(97687); -const java_1 = __nccwpck_require__(46892); -const java_yoshi_1 = __nccwpck_require__(35330); -const java_yoshi_mono_repo_1 = __nccwpck_require__(77664); -const krm_blueprint_1 = __nccwpck_require__(76397); -const maven_1 = __nccwpck_require__(60899); -const node_1 = __nccwpck_require__(78957); -const ocaml_1 = __nccwpck_require__(72064); -const php_1 = __nccwpck_require__(57658); -const php_yoshi_1 = __nccwpck_require__(88460); -const python_1 = __nccwpck_require__(32109); -const ruby_1 = __nccwpck_require__(68142); -const ruby_yoshi_1 = __nccwpck_require__(72294); -const rust_1 = __nccwpck_require__(43066); -const sfdx_1 = __nccwpck_require__(87648); -const simple_1 = __nccwpck_require__(10591); -const terraform_module_1 = __nccwpck_require__(80908); -const always_bump_patch_1 = __nccwpck_require__(82926); -const dependency_manifest_1 = __nccwpck_require__(25029); -const service_pack_1 = __nccwpck_require__(56772); -__exportStar(__nccwpck_require__(3095), exports); -__exportStar(__nccwpck_require__(56259), exports); -__exportStar(__nccwpck_require__(11833), exports); -const releasers = { - 'dotnet-yoshi': options => new dotnet_yoshi_1.DotnetYoshi(options), - go: options => new go_1.Go(options), - 'go-yoshi': options => new go_yoshi_1.GoYoshi(options), - java: options => new java_1.Java(options), - maven: options => new maven_1.Maven(options), - 'java-yoshi': options => new java_yoshi_1.JavaYoshi(options), - 'java-yoshi-mono-repo': options => new java_yoshi_mono_repo_1.JavaYoshiMonoRepo(options), - 'java-backport': options => new java_yoshi_1.JavaYoshi({ - ...options, - versioningStrategy: new always_bump_patch_1.AlwaysBumpPatch(), - }), - 'java-bom': options => new java_yoshi_1.JavaYoshi({ - ...options, - versioningStrategy: new dependency_manifest_1.DependencyManifest({ - bumpMinorPreMajor: options.bumpMinorPreMajor, - bumpPatchForMinorPreMajor: options.bumpPatchForMinorPreMajor, - }), - }), - 'java-lts': options => new java_yoshi_1.JavaYoshi({ - ...options, - versioningStrategy: new service_pack_1.ServicePackVersioningStrategy(), - }), - 'krm-blueprint': options => new krm_blueprint_1.KRMBlueprint(options), - node: options => new node_1.Node(options), - expo: options => new expo_1.Expo(options), - ocaml: options => new ocaml_1.OCaml(options), - php: options => new php_1.PHP(options), - 'php-yoshi': options => new 
php_yoshi_1.PHPYoshi(options), - python: options => new python_1.Python(options), - ruby: options => new ruby_1.Ruby(options), - 'ruby-yoshi': options => new ruby_yoshi_1.RubyYoshi(options), - rust: options => new rust_1.Rust(options), - salesforce: options => new sfdx_1.Sfdx(options), - sfdx: options => new sfdx_1.Sfdx(options), - simple: options => new simple_1.Simple(options), - 'terraform-module': options => new terraform_module_1.TerraformModule(options), - helm: options => new helm_1.Helm(options), - elixir: options => new elixir_1.Elixir(options), - dart: options => new dart_1.Dart(options), - bazel: options => new bazel_1.Bazel(options), -}; -async function buildStrategy(options) { - var _a; - const targetBranch = (_a = options.targetBranch) !== null && _a !== void 0 ? _a : options.github.repository.defaultBranch; - const versioningStrategy = (0, versioning_strategy_factory_1.buildVersioningStrategy)({ - github: options.github, - type: options.versioning, - bumpMinorPreMajor: options.bumpMinorPreMajor, - bumpPatchForMinorPreMajor: options.bumpPatchForMinorPreMajor, - prereleaseType: options.prereleaseType, - prerelease: options.prerelease, - }); - const changelogNotes = (0, changelog_notes_factory_1.buildChangelogNotes)({ - type: options.changelogType || 'default', - github: options.github, - changelogSections: options.changelogSections, - }); - const strategyOptions = { - skipGitHubRelease: options.skipGithubRelease, - ...options, - targetBranch, - versioningStrategy, - changelogNotes, - }; - const builder = releasers[options.releaseType]; - if (builder) { - return builder(strategyOptions); - } - throw new errors_1.ConfigurationError(`Unknown release type: ${options.releaseType}`, 'core', `${options.github.repository.owner}/${options.github.repository.repo}`); -} -exports.buildStrategy = buildStrategy; -function registerReleaseType(name, strategyBuilder) { - releasers[name] = strategyBuilder; -} -exports.registerReleaseType = registerReleaseType; -function unregisterReleaseType(name) { - delete releasers[name]; -} -exports.unregisterReleaseType = unregisterReleaseType; -function getReleaserTypes() { - return Object.keys(releasers).sort(); -} -exports.getReleaserTypes = getReleaserTypes; -//# sourceMappingURL=factory.js.map - -/***/ }), - -/***/ 19746: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
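// Illustrative sketch (editor-added): buildStrategy above resolves
// `options.releaseType` against the same kind of registry, so a custom
// release strategy can be hooked in with registerReleaseType. `MyStrategy`
// is hypothetical; it would need to implement the Strategy interface that
// the built-in releasers implement.
const {registerReleaseType, getReleaserTypes} = require('release-please');
registerReleaseType('my-release-type', options => new MyStrategy(options));
console.log(getReleaserTypes()); // sorted list now includes 'my-release-type'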
-Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.sleepInMs = exports.GitHub = exports.GH_GRAPHQL_URL = exports.GH_API_URL = void 0; -const code_suggester_1 = __nccwpck_require__(77103); -const rest_1 = __nccwpck_require__(55375); -const request_1 = __nccwpck_require__(36234); -const graphql_1 = __nccwpck_require__(88467); -const request_error_1 = __nccwpck_require__(10537); -const errors_1 = __nccwpck_require__(93637); -const MAX_ISSUE_BODY_SIZE = 65536; -const MAX_SLEEP_SECONDS = 20; -exports.GH_API_URL = 'https://api.github.com'; -exports.GH_GRAPHQL_URL = 'https://api.github.com'; -const logger_1 = __nccwpck_require__(68809); -const manifest_1 = __nccwpck_require__(31999); -const signoff_commit_message_1 = __nccwpck_require__(2686); -const git_file_utils_1 = __nccwpck_require__(32997); -const https_proxy_agent_1 = __nccwpck_require__(77219); -const http_proxy_agent_1 = __nccwpck_require__(23764); -class GitHub { - constructor(options) { - var _a; - /** - * Get the list of file paths modified in a given commit. - * - * @param {string} sha The commit SHA - * @returns {string[]} File paths - * @throws {GitHubAPIError} on an API error - */ - this.getCommitFiles = wrapAsync(async (sha) => { - this.logger.debug(`Backfilling file list for commit: ${sha}`); - const files = []; - for await (const resp of this.octokit.paginate.iterator('GET /repos/{owner}/{repo}/commits/{ref}', { - owner: this.repository.owner, - repo: this.repository.repo, - ref: sha, - })) { - // Paginate plugin doesn't have types for listing files on a commit - const data = resp.data; - for (const f of data.files || []) { - if (f.filename) { - files.push(f.filename); - } - } - } - if (files.length >= 3000) { - this.logger.warn(`Found ${files.length} files. This may not include all the files.`); - } - else { - this.logger.debug(`Found ${files.length} files`); - } - return files; - }); - this.graphqlRequest = wrapAsync(async (opts, options) => { - var _a; - let maxRetries = (_a = options === null || options === void 0 ? void 0 : options.maxRetries) !== null && _a !== void 0 ? _a : 5; - let seconds = 1; - while (maxRetries >= 0) { - try { - const response = await this.graphql(opts); - if (response) { - return response; - } - this.logger.trace('no GraphQL response, retrying'); - } - catch (err) { - if (err.status !== 502) { - throw err; - } - if (maxRetries === 0) { - this.logger.warn('ran out of retries and response is required'); - throw err; - } - this.logger.info(`received 502 error, ${maxRetries} attempts remaining`); - } - maxRetries -= 1; - if (maxRetries >= 0) { - this.logger.trace(`sleeping ${seconds} seconds`); - await (0, exports.sleepInMs)(1000 * seconds); - seconds = Math.min(seconds * 2, MAX_SLEEP_SECONDS); - } - } - this.logger.trace('ran out of retries'); - return undefined; - }); - /** - * Returns a list of paths to all files with a given name. - * - * If a prefix is specified, only return paths that match - * the provided prefix. 
- * - * @param filename The name of the file to find - * @param ref Git reference to search files in - * @param prefix Optional path prefix used to filter results - * @throws {GitHubAPIError} on an API error - */ - this.findFilesByFilenameAndRef = wrapAsync(async (filename, ref, prefix) => { - if (prefix) { - prefix = normalizePrefix(prefix); - } - this.logger.debug(`finding files by filename: ${filename}, ref: ${ref}, prefix: ${prefix}`); - return await this.fileCache.findFilesByFilename(filename, ref, prefix); - }); - /** - * Returns a list of paths to all files matching a glob pattern. - * - * If a prefix is specified, only return paths that match - * the provided prefix. - * - * @param glob The glob to match - * @param ref Git reference to search files in - * @param prefix Optional path prefix used to filter results - * @throws {GitHubAPIError} on an API error - */ - this.findFilesByGlobAndRef = wrapAsync(async (glob, ref, prefix) => { - if (prefix) { - prefix = normalizePrefix(prefix); - } - this.logger.debug(`finding files by glob: ${glob}, ref: ${ref}, prefix: ${prefix}`); - return await this.fileCache.findFilesByGlob(glob, ref, prefix); - }); - /** - * Open a pull request - * - * @param {PullRequest} pullRequest Pull request data to update - * @param {string} targetBranch The base branch of the pull request - * @param {string} message The commit message for the commit - * @param {Update[]} updates The files to update - * @param {CreatePullRequestOptions} options The pull request options - * @throws {GitHubAPIError} on an API error - */ - this.createPullRequest = wrapAsync(async (pullRequest, targetBranch, message, updates, options) => { - // Update the files for the release if not already supplied - const changes = await this.buildChangeSet(updates, targetBranch); - const prNumber = await (0, code_suggester_1.createPullRequest)(this.octokit, changes, { - upstreamOwner: this.repository.owner, - upstreamRepo: this.repository.repo, - title: pullRequest.title, - branch: pullRequest.headBranchName, - description: pullRequest.body, - primary: targetBranch, - force: true, - fork: !!(options === null || options === void 0 ? void 0 : options.fork), - message, - logger: this.logger, - draft: !!(options === null || options === void 0 ? void 0 : options.draft), - labels: pullRequest.labels, - }); - return await this.getPullRequest(prNumber); - }); - /** - * Fetch a pull request given the pull number - * @param {number} number The pull request number - * @returns {PullRequest} - */ - this.getPullRequest = wrapAsync(async (number) => { - const response = await this.octokit.pulls.get({ - owner: this.repository.owner, - repo: this.repository.repo, - pull_number: number, - }); - return { - headBranchName: response.data.head.ref, - baseBranchName: response.data.base.ref, - number: response.data.number, - title: response.data.title, - body: response.data.body || '', - files: [], - labels: response.data.labels - .map(label => label.name) - .filter(name => !!name), - }; - }); - /** - * Update a pull request's title and body. - * @param {number} number The pull request number - * @param {ReleasePullRequest} releasePullRequest Pull request data to update - * @param {string} targetBranch The target branch of the pull request - * @param {string} options.signoffUser Optional. Commit signoff message - * @param {boolean} options.fork Optional. Whether to open the pull request from - * a fork or not. Defaults to `false` - * @param {PullRequestOverflowHandler} options.pullRequestOverflowHandler Optional. 
- * Handles extra large pull request body messages. - */ - this.updatePullRequest = wrapAsync(async (number, releasePullRequest, targetBranch, options) => { - // Update the files for the release if not already supplied - const changes = await this.buildChangeSet(releasePullRequest.updates, targetBranch); - let message = releasePullRequest.title.toString(); - if (options === null || options === void 0 ? void 0 : options.signoffUser) { - message = (0, signoff_commit_message_1.signoffCommitMessage)(message, options.signoffUser); - } - const title = releasePullRequest.title.toString(); - const body = ((options === null || options === void 0 ? void 0 : options.pullRequestOverflowHandler) - ? await options.pullRequestOverflowHandler.handleOverflow(releasePullRequest) - : releasePullRequest.body) - .toString() - .slice(0, MAX_ISSUE_BODY_SIZE); - const prNumber = await (0, code_suggester_1.createPullRequest)(this.octokit, changes, { - upstreamOwner: this.repository.owner, - upstreamRepo: this.repository.repo, - title, - branch: releasePullRequest.headRefName, - description: body, - primary: targetBranch, - force: true, - fork: (options === null || options === void 0 ? void 0 : options.fork) === false ? false : true, - message, - logger: this.logger, - draft: releasePullRequest.draft, - }); - if (prNumber !== number) { - this.logger.warn(`updated code for ${prNumber}, but update requested for ${number}`); - } - const response = await this.octokit.pulls.update({ - owner: this.repository.owner, - repo: this.repository.repo, - pull_number: number, - title: releasePullRequest.title.toString(), - body, - state: 'open', - }); - return { - headBranchName: response.data.head.ref, - baseBranchName: response.data.base.ref, - number: response.data.number, - title: response.data.title, - body: response.data.body || '', - files: [], - labels: response.data.labels - .map(label => label.name) - .filter(name => !!name), - }; - }); - /** - * Returns a list of paths to all files with a given file - * extension. - * - * If a prefix is specified, only return paths that match - * the provided prefix. - * - * @param extension The file extension used to filter results. 
- * Example: `js`, `java`
- * @param ref Git reference to search files in
- * @param prefix Optional path prefix used to filter results
- * @returns {string[]} List of file paths
- * @throws {GitHubAPIError} on an API error
- */
- this.findFilesByExtensionAndRef = wrapAsync(async (extension, ref, prefix) => {
- if (prefix) {
- prefix = normalizePrefix(prefix);
- }
- return this.fileCache.findFilesByExtension(extension, ref, prefix);
- });
- /**
- * Create a GitHub release
- *
- * @param {Release} release Release parameters
- * @param {ReleaseOptions} options Release option parameters
- * @throws {DuplicateReleaseError} if the release tag already exists
- * @throws {GitHubAPIError} on other API errors
- */
- this.createRelease = wrapAsync(async (release, options = {}) => {
- const resp = await this.octokit.repos.createRelease({
- name: release.name,
- owner: this.repository.owner,
- repo: this.repository.repo,
- tag_name: release.tag.toString(),
- body: release.notes,
- draft: !!options.draft,
- prerelease: !!options.prerelease,
- target_commitish: release.sha,
- });
- return {
- id: resp.data.id,
- name: resp.data.name || undefined,
- tagName: resp.data.tag_name,
- sha: resp.data.target_commitish,
- notes: resp.data.body_text ||
- resp.data.body ||
- resp.data.body_html ||
- undefined,
- url: resp.data.html_url,
- draft: resp.data.draft,
- uploadUrl: resp.data.upload_url,
- };
- }, e => {
- if (e instanceof request_error_1.RequestError) {
- if (e.status === 422 &&
- errors_1.GitHubAPIError.parseErrors(e).some(error => {
- return error.code === 'already_exists';
- })) {
- throw new errors_1.DuplicateReleaseError(e, 'tagName');
- }
- }
- });
- /**
- * Makes a comment on an issue/pull request.
- *
- * @param {string} comment - The body of the comment to post.
- * @param {number} number - The issue or pull request number.
- * @throws {GitHubAPIError} on an API error
- */
- this.commentOnIssue = wrapAsync(async (comment, number) => {
- this.logger.debug(`adding comment to https://github.com/${this.repository.owner}/${this.repository.repo}/issues/${number}`);
- const resp = await this.octokit.issues.createComment({
- owner: this.repository.owner,
- repo: this.repository.repo,
- issue_number: number,
- body: comment,
- });
- return resp.data.html_url;
- });
- /**
- * Removes labels from an issue/pull request.
- *
- * @param {string[]} labels The labels to remove.
- * @param {number} number The issue/pull request number.
- */
- this.removeIssueLabels = wrapAsync(async (labels, number) => {
- if (labels.length === 0) {
- return;
- }
- this.logger.debug(`removing labels: ${labels} from issue/pull ${number}`);
- await Promise.all(labels.map(label => this.octokit.issues.removeLabel({
- owner: this.repository.owner,
- repo: this.repository.repo,
- issue_number: number,
- name: label,
- })));
- });
- /**
- * Adds labels to an issue/pull request.
- *
- * @param {string[]} labels The labels to add.
- * @param {number} number The issue/pull request number.
- */
- this.addIssueLabels = wrapAsync(async (labels, number) => {
- if (labels.length === 0) {
- return;
- }
- this.logger.debug(`adding labels: ${labels} to issue/pull ${number}`);
- await this.octokit.issues.addLabels({
- owner: this.repository.owner,
- repo: this.repository.repo,
- issue_number: number,
- labels,
- });
- });
- this.repository = options.repository;
- this.octokit = options.octokitAPIs.octokit;
- this.request = options.octokitAPIs.request;
- this.graphql = options.octokitAPIs.graphql;
- this.fileCache = new git_file_utils_1.RepositoryFileCache(this.octokit, this.repository);
- this.logger = (_a = options.logger) !== null && _a !== void 0 ? _a : logger_1.logger;
- }
- static createDefaultAgent(baseUrl, defaultProxy) {
- if (!defaultProxy) {
- return undefined;
- }
- const { host, port } = defaultProxy;
- if (new URL(baseUrl).protocol.replace(':', '') === 'http') {
- return new http_proxy_agent_1.HttpProxyAgent(`http://${host}:${port}`);
- }
- else {
- return new https_proxy_agent_1.HttpsProxyAgent(`https://${host}:${port}`);
- }
- }
- /**
- * Build a new GitHub client with auto-detected default branch.
- *
- * @param {GitHubCreateOptions} options Configuration options
- * @param {string} options.owner The repository owner.
- * @param {string} options.repo The repository name.
- * @param {string} options.defaultBranch Optional. The repository's default branch.
- * Defaults to the value fetched via the API.
- * @param {string} options.apiUrl Optional. The base url of the GitHub API.
- * @param {string} options.graphqlUrl Optional. The base url of the GraphQL API.
- * @param {OctokitAPIs} options.octokitAPIs Optional. Override the internal
- * client instances with a pre-authenticated instance.
- * @param {string} options.token Optional. A GitHub API token used for authentication.
- */
- static async create(options) {
- var _a, _b, _c, _d;
- const apiUrl = (_a = options.apiUrl) !== null && _a !== void 0 ? _a : exports.GH_API_URL;
- const graphqlUrl = (_b = options.graphqlUrl) !== null && _b !== void 0 ? _b : exports.GH_GRAPHQL_URL;
- const releasePleaseVersion = (__nccwpck_require__(15833)/* .version */ .i8);
- const apis = (_c = options.octokitAPIs) !== null && _c !== void 0 ? _c : {
- octokit: new rest_1.Octokit({
- baseUrl: apiUrl,
- auth: options.token,
- request: {
- agent: this.createDefaultAgent(apiUrl, options.proxy),
- },
- }),
- request: request_1.request.defaults({
- baseUrl: apiUrl,
- headers: {
- 'user-agent': `release-please/${releasePleaseVersion}`,
- Authorization: `token ${options.token}`,
- },
- }),
- graphql: graphql_1.graphql.defaults({
- baseUrl: graphqlUrl,
- request: {
- agent: this.createDefaultAgent(graphqlUrl, options.proxy),
- },
- headers: {
- 'user-agent': `release-please/${releasePleaseVersion}`,
- Authorization: `token ${options.token}`,
- 'content-type': 'application/vnd.github.v3+json',
- },
- }),
- };
- const opts = {
- repository: {
- owner: options.owner,
- repo: options.repo,
- defaultBranch: (_d = options.defaultBranch) !== null && _d !== void 0 ? _d : (await GitHub.defaultBranch(options.owner, options.repo, apis.octokit)),
- },
- octokitAPIs: apis,
- logger: options.logger,
- };
- return new GitHub(opts);
- }
- /**
- * Returns the default branch for a given repository.
- *
- * @param {string} owner The GitHub repository owner
- * @param {string} repo The GitHub repository name
- * @param {OctokitType} octokit An authenticated octokit instance
- * @returns {string} Name of the default branch
- */
- static async defaultBranch(owner, repo, octokit) {
- const { data } = await octokit.repos.get({
- repo,
- owner,
- });
- return data.default_branch;
- }
- /**
- * Returns the list of commits to the default branch after the provided filter
- * query has been satisfied.
- *
- * @param {string} targetBranch Target branch of commit
- * @param {CommitFilter} filter Callback function that returns whether a
- * commit/pull request matches certain criteria
- * @param {CommitIteratorOptions} options Query options
- * @param {number} options.maxResults Limit the number of results searched.
- * Defaults to unlimited.
- * @param {boolean} options.backfillFiles If set, use the REST API for
- * fetching the list of touched files in this commit. Defaults to `false`.
- * @returns {Commit[]} List of commits to current branch
- * @throws {GitHubAPIError} on an API error
- */
- async commitsSince(targetBranch, filter, options = {}) {
- const commits = [];
- const generator = this.mergeCommitIterator(targetBranch, options);
- for await (const commit of generator) {
- if (filter(commit)) {
- break;
- }
- commits.push(commit);
- }
- return commits;
- }
- /**
- * Iterate through commit history with a max number of results scanned.
- *
- * @param {string} targetBranch target branch of commit
- * @param {CommitIteratorOptions} options Query options
- * @param {number} options.maxResults Limit the number of results searched.
- * Defaults to unlimited.
- * @param {boolean} options.backfillFiles If set, use the REST API for
- * fetching the list of touched files in this commit. Defaults to `false`.
- * @yields {Commit}
- * @throws {GitHubAPIError} on an API error
- */
- async *mergeCommitIterator(targetBranch, options = {}) {
- var _a;
- const maxResults = (_a = options.maxResults) !== null && _a !== void 0 ? _a : Number.MAX_SAFE_INTEGER;
- let cursor = undefined;
- let results = 0;
- while (results < maxResults) {
- const response = await this.mergeCommitsGraphQL(targetBranch, cursor, options);
- // no response usually means that the branch can't be found
- if (!response) {
- break;
- }
- for (let i = 0; i < response.data.length; i++) {
- results += 1;
- yield response.data[i];
- }
- if (!response.pageInfo.hasNextPage) {
- break;
- }
- cursor = response.pageInfo.endCursor;
- }
- }
- async mergeCommitsGraphQL(targetBranch, cursor, options = {}) {
- var _a, _b, _c, _d, _e, _f, _g, _h;
- var _j;
- this.logger.debug(`Fetching merge commits on branch ${targetBranch} with cursor: ${cursor}`);
- const query = `query pullRequestsSince($owner: String!, $repo: String!, $num: Int!, $maxFilesChanged: Int, $targetBranch: String!, $cursor: String) {
- repository(owner: $owner, name: $repo) {
- ref(qualifiedName: $targetBranch) {
- target {
- ...
 on Commit {
- history(first: $num, after: $cursor) {
- nodes {
- associatedPullRequests(first: 10) {
- nodes {
- number
- title
- baseRefName
- headRefName
- labels(first: 10) {
- nodes {
- name
- }
- }
- body
- mergeCommit {
- oid
- }
- files(first: $maxFilesChanged) {
- nodes {
- path
- }
- pageInfo {
- endCursor
- hasNextPage
- }
- }
- }
- }
- sha: oid
- message
- }
- pageInfo {
- hasNextPage
- endCursor
- }
- }
- }
- }
- }
- }`;
- const params = {
- cursor,
- owner: this.repository.owner,
- repo: this.repository.repo,
- num: 25,
- targetBranch,
- maxFilesChanged: 100, // max is 100
- };
- const response = await this.graphqlRequest({
- query,
- ...params,
- });
- if (!response) {
- this.logger.warn(`Did not receive a response for query: ${query}`, params);
- return null;
- }
- // if the branch does not exist, return null
- if (!((_a = response.repository) === null || _a === void 0 ? void 0 : _a.ref)) {
- this.logger.warn(`Could not find commits for branch ${targetBranch} - it likely does not exist.`);
- return null;
- }
- const history = response.repository.ref.target.history;
- const commits = (history.nodes || []);
- // Count the number of pull requests associated with each merge commit. This is
- // used in the next step to make sure we only find pull requests with a
- // merge commit that contains exactly 1 merged commit.
- const mergeCommitCount = {};
- for (const commit of commits) {
- for (const pr of commit.associatedPullRequests.nodes) {
- if ((_b = pr.mergeCommit) === null || _b === void 0 ? void 0 : _b.oid) {
- (_c = mergeCommitCount[_j = pr.mergeCommit.oid]) !== null && _c !== void 0 ? _c : (mergeCommitCount[_j] = 0);
- mergeCommitCount[pr.mergeCommit.oid]++;
- }
- }
- }
- const commitData = [];
- for (const graphCommit of commits) {
- const commit = {
- sha: graphCommit.sha,
- message: graphCommit.message,
- };
- const mergePullRequest = graphCommit.associatedPullRequests.nodes.find(pr => {
- return (
- // Only match the pull request with a merge commit if there is a
- // single merged commit in the PR. This means merge commits and squash
- // merges will be matched, but rebase merged PRs will only be matched
- // if they contain a single commit. This is so PRs that are rebased
- // and merged will have files backfilled from each commit instead of
- // the whole PR.
- pr.mergeCommit &&
- pr.mergeCommit.oid === graphCommit.sha &&
- mergeCommitCount[pr.mergeCommit.oid] === 1);
- });
- const pullRequest = mergePullRequest || graphCommit.associatedPullRequests.nodes[0];
- if (pullRequest) {
- commit.pullRequest = {
- sha: commit.sha,
- number: pullRequest.number,
- baseBranchName: pullRequest.baseRefName,
- headBranchName: pullRequest.headRefName,
- mergeCommitOid: (_d = pullRequest.mergeCommit) === null || _d === void 0 ? void 0 : _d.oid,
- title: pullRequest.title,
- body: pullRequest.body,
- labels: pullRequest.labels.nodes.map(node => node.name),
- files: (((_e = pullRequest.files) === null || _e === void 0 ? void 0 : _e.nodes) || []).map(node => node.path),
- };
- }
- if (mergePullRequest) {
- if (((_g = (_f = mergePullRequest.files) === null || _f === void 0 ? void 0 : _f.pageInfo) === null || _g === void 0 ?
void 0 : _g.hasNextPage) && - options.backfillFiles) { - this.logger.info(`PR #${mergePullRequest.number} has many files, backfilling`); - commit.files = await this.getCommitFiles(graphCommit.sha); - } - else { - // We cannot directly fetch files on commits via graphql, only provide file - // information for commits with associated pull requests - commit.files = (((_h = mergePullRequest.files) === null || _h === void 0 ? void 0 : _h.nodes) || []).map(node => node.path); - } - } - else if (options.backfillFiles) { - // In this case, there is no squashed merge commit. This could be a simple - // merge commit, a rebase merge commit, or a direct commit to the branch. - // Fallback to fetching the list of commits from the REST API. In the future - // we can perhaps lazy load these. - commit.files = await this.getCommitFiles(graphCommit.sha); - } - commitData.push(commit); - } - return { - pageInfo: history.pageInfo, - data: commitData, - }; - } - /** - * Iterate through merged pull requests with a max number of results scanned. - * - * @param {string} targetBranch The base branch of the pull request - * @param {string} status The status of the pull request - * @param {number} maxResults Limit the number of results searched. Defaults to - * unlimited. - * @param {boolean} includeFiles Whether to fetch the list of files included in - * the pull request. Defaults to `true`. - * @yields {PullRequest} - * @throws {GitHubAPIError} on an API error - */ - async *pullRequestIterator(targetBranch, status = 'MERGED', maxResults = Number.MAX_SAFE_INTEGER, includeFiles = true) { - const generator = includeFiles - ? this.pullRequestIteratorWithFiles(targetBranch, status, maxResults) - : this.pullRequestIteratorWithoutFiles(targetBranch, status, maxResults); - for await (const pullRequest of generator) { - yield pullRequest; - } - } - /** - * Helper implementation of pullRequestIterator that includes files via - * the graphQL API. - * - * @param {string} targetBranch The base branch of the pull request - * @param {string} status The status of the pull request - * @param {number} maxResults Limit the number of results searched - */ - async *pullRequestIteratorWithFiles(targetBranch, status = 'MERGED', maxResults = Number.MAX_SAFE_INTEGER) { - let cursor = undefined; - let results = 0; - while (results < maxResults) { - const response = await this.pullRequestsGraphQL(targetBranch, status, cursor); - // no response usually means we ran out of results - if (!response) { - break; - } - for (let i = 0; i < response.data.length; i++) { - results += 1; - yield response.data[i]; - } - if (!response.pageInfo.hasNextPage) { - break; - } - cursor = response.pageInfo.endCursor; - } - } - /** - * Helper implementation of pullRequestIterator that excludes files - * via the REST API. 
- *
- * @param {string} targetBranch The base branch of the pull request
- * @param {string} status The status of the pull request
- * @param {number} maxResults Limit the number of results searched
- */
- async *pullRequestIteratorWithoutFiles(targetBranch, status = 'MERGED', maxResults = Number.MAX_SAFE_INTEGER) {
- const statusMap = {
- OPEN: 'open',
- CLOSED: 'closed',
- MERGED: 'closed',
- };
- let results = 0;
- for await (const { data: pulls } of this.octokit.paginate.iterator('GET /repos/{owner}/{repo}/pulls', {
- state: statusMap[status],
- owner: this.repository.owner,
- repo: this.repository.repo,
- base: targetBranch,
- sort: 'updated',
- direction: 'desc',
- })) {
- for (const pull of pulls) {
- // The REST API does not have an option for "merged"
- // pull requests - they are closed with a `merged_at` timestamp
- if (status !== 'MERGED' || pull.merged_at) {
- results += 1;
- yield {
- headBranchName: pull.head.ref,
- baseBranchName: pull.base.ref,
- number: pull.number,
- title: pull.title,
- body: pull.body || '',
- labels: pull.labels.map(label => label.name),
- files: [],
- sha: pull.merge_commit_sha || undefined,
- };
- if (results >= maxResults) {
- break;
- }
- }
- }
- if (results >= maxResults) {
- break;
- }
- }
- }
- /**
- * Return a list of merged pull requests. The list is not guaranteed to be sorted
- * by merged_at, but is generally most recent first.
- *
- * @param {string} targetBranch - Base branch of the pull request. Defaults to
- * the configured default branch.
- * @param {string} states - State(s) of the pull requests to fetch. Defaults to `MERGED`.
- * @param {string} cursor - Optional pagination cursor from a previous page of results.
- * @returns {PullRequestHistory | null} - List of merged pull requests
- * @throws {GitHubAPIError} on an API error
- */
- async pullRequestsGraphQL(targetBranch, states = 'MERGED', cursor) {
- var _a;
- this.logger.debug(`Fetching ${states} pull requests on branch ${targetBranch} with cursor ${cursor}`);
- const response = await this.graphqlRequest({
- query: `query mergedPullRequests($owner: String!, $repo: String!, $num: Int!, $maxFilesChanged: Int, $targetBranch: String!, $states: [PullRequestState!], $cursor: String) {
- repository(owner: $owner, name: $repo) {
- pullRequests(first: $num, after: $cursor, baseRefName: $targetBranch, states: $states, orderBy: {field: CREATED_AT, direction: DESC}) {
- nodes {
- number
- title
- baseRefName
- headRefName
- labels(first: 10) {
- nodes {
- name
- }
- }
- body
- mergeCommit {
- oid
- }
- files(first: $maxFilesChanged) {
- nodes {
- path
- }
- pageInfo {
- endCursor
- hasNextPage
- }
- }
- }
- pageInfo {
- endCursor
- hasNextPage
- }
- }
- }
- }`,
- cursor,
- owner: this.repository.owner,
- repo: this.repository.repo,
- num: 25,
- targetBranch,
- states,
- maxFilesChanged: 64,
- });
- if (!((_a = response === null || response === void 0 ? void 0 : response.repository) === null || _a === void 0 ? void 0 : _a.pullRequests)) {
- this.logger.warn(`Could not find merged pull requests for branch ${targetBranch} - it likely does not exist.`);
- return null;
- }
- const pullRequests = (response.repository.pullRequests.nodes ||
- []);
- return {
- pageInfo: response.repository.pullRequests.pageInfo,
- data: pullRequests.map(pullRequest => {
- var _a, _b, _c;
- return {
- sha: (_a = pullRequest.mergeCommit) === null || _a === void 0 ? void 0 : _a.oid,
- number: pullRequest.number,
- baseBranchName: pullRequest.baseRefName,
- headBranchName: pullRequest.headRefName,
- labels: (((_b = pullRequest.labels) === null || _b === void 0 ?
void 0 : _b.nodes) || []).map(l => l.name), - title: pullRequest.title, - body: pullRequest.body + '', - files: (((_c = pullRequest.files) === null || _c === void 0 ? void 0 : _c.nodes) || []).map(node => node.path), - }; - }), - }; - } - /** - * Iterate through releases with a max number of results scanned. - * - * @param {ReleaseIteratorOptions} options Query options - * @param {number} options.maxResults Limit the number of results searched. - * Defaults to unlimited. - * @yields {GitHubRelease} - * @throws {GitHubAPIError} on an API error - */ - async *releaseIterator(options = {}) { - var _a; - const maxResults = (_a = options.maxResults) !== null && _a !== void 0 ? _a : Number.MAX_SAFE_INTEGER; - let results = 0; - let cursor = undefined; - while (true) { - const response = await this.releaseGraphQL(cursor); - if (!response) { - break; - } - for (let i = 0; i < response.data.length; i++) { - if ((results += 1) > maxResults) { - break; - } - yield response.data[i]; - } - if (results > maxResults || !response.pageInfo.hasNextPage) { - break; - } - cursor = response.pageInfo.endCursor; - } - } - async releaseGraphQL(cursor) { - this.logger.debug(`Fetching releases with cursor ${cursor}`); - const response = await this.graphqlRequest({ - query: `query releases($owner: String!, $repo: String!, $num: Int!, $cursor: String) { - repository(owner: $owner, name: $repo) { - releases(first: $num, after: $cursor, orderBy: {field: CREATED_AT, direction: DESC}) { - nodes { - name - tag { - name - } - tagCommit { - oid - } - url - description - isDraft - } - pageInfo { - endCursor - hasNextPage - } - } - } - }`, - cursor, - owner: this.repository.owner, - repo: this.repository.repo, - num: 25, - }); - if (!response.repository.releases.nodes.length) { - this.logger.warn('Could not find releases.'); - return null; - } - const releases = response.repository.releases.nodes; - return { - pageInfo: response.repository.releases.pageInfo, - data: releases - .filter(release => !!release.tagCommit) - .map(release => { - if (!release.tag || !release.tagCommit) { - this.logger.debug(release); - } - return { - name: release.name || undefined, - tagName: release.tag ? release.tag.name : 'unknown', - sha: release.tagCommit.oid, - notes: release.description, - url: release.url, - draft: release.isDraft, - }; - }), - }; - } - /** - * Iterate through tags with a max number of results scanned. - * - * @param {TagIteratorOptions} options Query options - * @param {number} options.maxResults Limit the number of results searched. - * Defaults to unlimited. 
- * @yields {GitHubTag} - * @throws {GitHubAPIError} on an API error - */ - async *tagIterator(options = {}) { - const maxResults = options.maxResults || Number.MAX_SAFE_INTEGER; - let results = 0; - for await (const response of this.octokit.paginate.iterator('GET /repos/{owner}/{repo}/tags', { - owner: this.repository.owner, - repo: this.repository.repo, - })) { - for (const tag of response.data) { - if ((results += 1) > maxResults) { - break; - } - yield { - name: tag.name, - sha: tag.commit.sha, - }; - } - if (results > maxResults) - break; - } - } - /** - * Fetch the contents of a file from the configured branch - * - * @param {string} path The path to the file in the repository - * @returns {GitHubFileContents} - * @throws {GitHubAPIError} on other API errors - */ - async getFileContents(path) { - return await this.getFileContentsOnBranch(path, this.repository.defaultBranch); - } - /** - * Fetch the contents of a file - * - * @param {string} path The path to the file in the repository - * @param {string} branch The branch to fetch from - * @returns {GitHubFileContents} - * @throws {FileNotFoundError} if the file cannot be found - * @throws {GitHubAPIError} on other API errors - */ - async getFileContentsOnBranch(path, branch) { - this.logger.debug(`Fetching ${path} from branch ${branch}`); - try { - return await this.fileCache.getFileContents(path, branch); - } - catch (e) { - if (e instanceof git_file_utils_1.FileNotFoundError) { - throw new errors_1.FileNotFoundError(path); - } - throw e; - } - } - async getFileJson(path, branch) { - const content = await this.getFileContentsOnBranch(path, branch); - return JSON.parse(content.parsedContent); - } - /** - * Returns a list of paths to all files with a given name. - * - * If a prefix is specified, only return paths that match - * the provided prefix. - * - * @param filename The name of the file to find - * @param prefix Optional path prefix used to filter results - * @returns {string[]} List of file paths - * @throws {GitHubAPIError} on an API error - */ - async findFilesByFilename(filename, prefix) { - return this.findFilesByFilenameAndRef(filename, this.repository.defaultBranch, prefix); - } - /** - * Returns a list of paths to all files matching a glob pattern. - * - * If a prefix is specified, only return paths that match - * the provided prefix. - * - * @param glob The glob to match - * @param prefix Optional path prefix used to filter results - * @returns {string[]} List of file paths - * @throws {GitHubAPIError} on an API error - */ - async findFilesByGlob(glob, prefix) { - return this.findFilesByGlobAndRef(glob, this.repository.defaultBranch, prefix); - } - /** - * Open a pull request - * - * @deprecated This logic is handled by the Manifest class now as it - * can be more complicated if the release notes are too big - * @param {ReleasePullRequest} releasePullRequest Pull request data to update - * @param {string} targetBranch The base branch of the pull request - * @param {GitHubPR} options The pull request options - * @throws {GitHubAPIError} on an API error - */ - async createReleasePullRequest(releasePullRequest, targetBranch, options) { - let message = releasePullRequest.title.toString(); - if (options === null || options === void 0 ? void 0 : options.signoffUser) { - message = (0, signoff_commit_message_1.signoffCommitMessage)(message, options.signoffUser); - } - const pullRequestLabels = (options === null || options === void 0 ? void 0 : options.skipLabeling) - ? 
[] - : releasePullRequest.labels; - return await this.createPullRequest({ - headBranchName: releasePullRequest.headRefName, - baseBranchName: targetBranch, - number: -1, - title: releasePullRequest.title.toString(), - body: releasePullRequest.body.toString().slice(0, MAX_ISSUE_BODY_SIZE), - labels: pullRequestLabels, - files: [], - }, targetBranch, message, releasePullRequest.updates, { - fork: options === null || options === void 0 ? void 0 : options.fork, - draft: releasePullRequest.draft, - }); - } - /** - * Given a set of proposed updates, build a changeset to suggest. - * - * @param {Update[]} updates The proposed updates - * @param {string} defaultBranch The target branch - * @return {Changes} The changeset to suggest. - * @throws {GitHubAPIError} on an API error - */ - async buildChangeSet(updates, defaultBranch) { - const changes = new Map(); - for (const update of updates) { - let content; - try { - content = await this.getFileContentsOnBranch(update.path, defaultBranch); - } - catch (err) { - if (!(err instanceof errors_1.FileNotFoundError)) - throw err; - // if the file is missing and create = false, just continue - // to the next update, otherwise create the file. - if (!update.createIfMissing) { - this.logger.warn(`file ${update.path} did not exist`); - continue; - } - } - const contentText = content - ? Buffer.from(content.content, 'base64').toString('utf8') - : undefined; - const updatedContent = update.updater.updateContent(contentText, this.logger); - if (updatedContent) { - changes.set(update.path, { - content: updatedContent, - originalContent: (content === null || content === void 0 ? void 0 : content.parsedContent) || null, - mode: (content === null || content === void 0 ? void 0 : content.mode) || git_file_utils_1.DEFAULT_FILE_MODE, - }); - } - } - return changes; - } - /** - * Returns a list of paths to all files with a given file - * extension. - * - * If a prefix is specified, only return paths that match - * the provided prefix. - * - * @param extension The file extension used to filter results. - * Example: `js`, `java` - * @param prefix Optional path prefix used to filter results - * @returns {string[]} List of file paths - * @throws {GitHubAPIError} on an API error - */ - async findFilesByExtension(extension, prefix) { - return this.findFilesByExtensionAndRef(extension, this.repository.defaultBranch, prefix); - } - /** - * Generate release notes from GitHub at tag - * @param {string} tagName Name of new release tag - * @param {string} targetCommitish Target commitish for new tag - * @param {string} previousTag Optional. Name of previous tag to analyze commits since - */ - async generateReleaseNotes(tagName, targetCommitish, previousTag) { - const resp = await this.octokit.repos.generateReleaseNotes({ - owner: this.repository.owner, - repo: this.repository.repo, - tag_name: tagName, - previous_tag_name: previousTag, - target_commitish: targetCommitish, - }); - return resp.data.body; - } - /** - * Create a single file on a new branch based on an existing - * branch. This will force-push to that branch. 
- * @param {string} filename Filename with path in the repository - * @param {string} contents Contents of the file - * @param {string} newBranchName Name of the new branch - * @param {string} baseBranchName Name of the base branch (where - * new branch is forked from) - * @returns {string} HTML URL of the new file - */ - async createFileOnNewBranch(filename, contents, newBranchName, baseBranchName) { - // create or update new branch to match base branch - await this.forkBranch(newBranchName, baseBranchName); - // use the single file upload API - const { data: { content }, } = await this.octokit.repos.createOrUpdateFileContents({ - owner: this.repository.owner, - repo: this.repository.repo, - path: filename, - // contents need to be base64 encoded - content: Buffer.from(contents, 'binary').toString('base64'), - message: 'Saving release notes', - branch: newBranchName, - }); - if (!(content === null || content === void 0 ? void 0 : content.html_url)) { - throw new Error(`Failed to write to file: ${filename} on branch: ${newBranchName}`); - } - return content.html_url; - } - /** - * Helper to fetch the SHA of a branch - * @param {string} branchName The name of the branch - * @return {string | undefined} Returns the SHA of the branch - * or undefined if it can't be found. - */ - async getBranchSha(branchName) { - this.logger.debug(`Looking up SHA for branch: ${branchName}`); - try { - const { data: { object: { sha }, }, } = await this.octokit.git.getRef({ - owner: this.repository.owner, - repo: this.repository.repo, - ref: `heads/${branchName}`, - }); - this.logger.debug(`SHA for branch: ${sha}`); - return sha; - } - catch (e) { - if (e instanceof request_error_1.RequestError && e.status === 404) { - this.logger.debug(`Branch: ${branchName} does not exist`); - return undefined; - } - throw e; - } - } - /** - * Helper to fork a branch from an existing branch. Uses `force` so - * it will overwrite the contents of `targetBranchName` to match - * the current contents of `baseBranchName`. - * - * @param {string} targetBranchName The name of the new forked branch - * @param {string} baseBranchName The base branch from which to fork. - * @returns {string} The branch SHA - * @throws {ConfigurationError} if the base branch cannot be found. - */ - async forkBranch(targetBranchName, baseBranchName) { - const baseBranchSha = await this.getBranchSha(baseBranchName); - if (!baseBranchSha) { - // this is highly unlikely to be thrown as we will have - // already attempted to read from the branch - throw new errors_1.ConfigurationError(`Unable to find base branch: ${baseBranchName}`, 'core', `${this.repository.owner}/${this.repository.repo}`); - } - // see if newBranchName exists - if (await this.getBranchSha(targetBranchName)) { - // branch already exists, update it to the match the base branch - const branchSha = await this.updateBranchSha(targetBranchName, baseBranchSha); - this.logger.debug(`Updated ${targetBranchName} to match ${baseBranchName} at ${branchSha}`); - return branchSha; - } - else { - // branch does not exist, create a new branch from the base branch - const branchSha = await this.createNewBranch(targetBranchName, baseBranchSha); - this.logger.debug(`Forked ${targetBranchName} from ${baseBranchName} at ${branchSha}`); - return branchSha; - } - } - /** - * Helper to create a new branch from a given SHA. 
- * @param {string} branchName The new branch name - * @param {string} branchSha The SHA of the branch - * @returns {string} The SHA of the new branch - */ - async createNewBranch(branchName, branchSha) { - this.logger.debug(`Creating new branch: ${branchName} at ${branchSha}`); - const { data: { object: { sha }, }, } = await this.octokit.git.createRef({ - owner: this.repository.owner, - repo: this.repository.repo, - ref: `refs/heads/${branchName}`, - sha: branchSha, - }); - this.logger.debug(`New branch: ${branchName} at ${sha}`); - return sha; - } - async updateBranchSha(branchName, branchSha) { - this.logger.debug(`Updating branch ${branchName} to ${branchSha}`); - const { data: { object: { sha }, }, } = await this.octokit.git.updateRef({ - owner: this.repository.owner, - repo: this.repository.repo, - ref: `heads/${branchName}`, - sha: branchSha, - force: true, - }); - this.logger.debug(`Updated branch: ${branchName} to ${sha}`); - return sha; - } -} -exports.GitHub = GitHub; -/** - * Normalize a provided prefix by removing leading and trailing - * slashes. - * - * @param prefix String to normalize - */ -function normalizePrefix(prefix) { - const normalized = prefix.replace(/^[/\\]/, '').replace(/[/\\]$/, ''); - if (normalized === manifest_1.ROOT_PROJECT_PATH) { - return ''; - } - return normalized; -} -/** - * Wrap an async method with error handling - * - * @param fn Async function that can throw Errors - * @param errorHandler An optional error handler for rethrowing custom exceptions - */ -/* eslint-disable @typescript-eslint/no-explicit-any */ -const wrapAsync = (fn, errorHandler) => { - return async (...args) => { - try { - return await fn(...args); - } - catch (e) { - if (errorHandler) { - errorHandler(e); - } - if (e instanceof request_error_1.RequestError) { - throw new errors_1.GitHubAPIError(e); - } - throw e; - } - }; -}; -const sleepInMs = (ms) => new Promise(resolve => setTimeout(resolve, ms)); -exports.sleepInMs = sleepInMs; -//# sourceMappingURL=github.js.map - -/***/ }), - -/***/ 24363: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
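// Illustrative sketch (editor-added): the graphqlRequest helper above retries
// HTTP 502s with exponential backoff capped at MAX_SLEEP_SECONDS. The same
// pattern in isolation, for a generic async `fn` (all names here are
// placeholders, not bundle exports):
async function withBackoff(fn, maxRetries = 5, maxSleepSeconds = 20) {
    let seconds = 1;
    for (let attempt = 0; ; attempt++) {
        try {
            return await fn();
        }
        catch (err) {
            // only 502s are retried; anything else, or exhausted retries, rethrows
            if (err.status !== 502 || attempt >= maxRetries) throw err;
        }
        await new Promise(resolve => setTimeout(resolve, 1000 * seconds));
        seconds = Math.min(seconds * 2, maxSleepSeconds); // double the sleep, with a cap
    }
}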
-Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.VERSION = exports.manifestSchema = exports.configSchema = exports.GitHub = exports.setLogger = exports.registerVersioningStrategy = exports.getVersioningStrategyTypes = exports.registerPlugin = exports.getPluginTypes = exports.registerChangelogNotes = exports.getChangelogTypes = exports.registerReleaseType = exports.getReleaserTypes = exports.Manifest = exports.Errors = void 0; -exports.Errors = __nccwpck_require__(93637); -var manifest_1 = __nccwpck_require__(31999); -Object.defineProperty(exports, "Manifest", ({ enumerable: true, get: function () { return manifest_1.Manifest; } })); -var factory_1 = __nccwpck_require__(75695); -Object.defineProperty(exports, "getReleaserTypes", ({ enumerable: true, get: function () { return factory_1.getReleaserTypes; } })); -Object.defineProperty(exports, "registerReleaseType", ({ enumerable: true, get: function () { return factory_1.registerReleaseType; } })); -var changelog_notes_factory_1 = __nccwpck_require__(3095); -Object.defineProperty(exports, "getChangelogTypes", ({ enumerable: true, get: function () { return changelog_notes_factory_1.getChangelogTypes; } })); -Object.defineProperty(exports, "registerChangelogNotes", ({ enumerable: true, get: function () { return changelog_notes_factory_1.registerChangelogNotes; } })); -var plugin_factory_1 = __nccwpck_require__(56259); -Object.defineProperty(exports, "getPluginTypes", ({ enumerable: true, get: function () { return plugin_factory_1.getPluginTypes; } })); -Object.defineProperty(exports, "registerPlugin", ({ enumerable: true, get: function () { return plugin_factory_1.registerPlugin; } })); -var versioning_strategy_factory_1 = __nccwpck_require__(11833); -Object.defineProperty(exports, "getVersioningStrategyTypes", ({ enumerable: true, get: function () { return versioning_strategy_factory_1.getVersioningStrategyTypes; } })); -Object.defineProperty(exports, "registerVersioningStrategy", ({ enumerable: true, get: function () { return versioning_strategy_factory_1.registerVersioningStrategy; } })); -var logger_1 = __nccwpck_require__(68809); -Object.defineProperty(exports, "setLogger", ({ enumerable: true, get: function () { return logger_1.setLogger; } })); -var github_1 = __nccwpck_require__(19746); -Object.defineProperty(exports, "GitHub", ({ enumerable: true, get: function () { return github_1.GitHub; } })); -exports.configSchema = __nccwpck_require__(38623); -exports.manifestSchema = __nccwpck_require__(45314); -// x-release-please-start-version -exports.VERSION = '16.14.1'; -// x-release-please-end -//# sourceMappingURL=index.js.map - -/***/ }), - -/***/ 31999: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
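// Module 24363 above is the package's public surface. A sketch of how a
// consumer might drive it end to end, assuming the `GitHub.create` factory and
// the option names re-exported above (owner/repo values are placeholders):
const { GitHub, Manifest } = require('release-please');

async function release() {
  const github = await GitHub.create({
    owner: 'my-org',   // placeholder
    repo: 'my-repo',   // placeholder
    token: process.env.GITHUB_TOKEN,
  });
  // reads release-please-config.json and .release-please-manifest.json
  const manifest = await Manifest.fromManifest(github, 'main');
  await manifest.createPullRequests(); // open or update release PRs
  await manifest.createReleases();     // tag merged, untagged release PRs
}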
-Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Manifest = exports.MANIFEST_PULL_REQUEST_TITLE_PATTERN = exports.SNOOZE_LABEL = exports.DEFAULT_SNAPSHOT_LABELS = exports.DEFAULT_RELEASE_LABELS = exports.DEFAULT_LABELS = exports.DEFAULT_COMPONENT_NAME = exports.ROOT_PROJECT_PATH = exports.DEFAULT_RELEASE_PLEASE_MANIFEST = exports.DEFAULT_RELEASE_PLEASE_CONFIG = void 0; -const version_1 = __nccwpck_require__(17348); -const commit_1 = __nccwpck_require__(69158); -const logger_1 = __nccwpck_require__(68809); -const commit_split_1 = __nccwpck_require__(6941); -const tag_name_1 = __nccwpck_require__(36503); -const branch_name_1 = __nccwpck_require__(16344); -const pull_request_title_1 = __nccwpck_require__(1158); -const factory_1 = __nccwpck_require__(75695); -const merge_1 = __nccwpck_require__(90514); -const release_please_manifest_1 = __nccwpck_require__(9817); -const errors_1 = __nccwpck_require__(93637); -const pull_request_overflow_handler_1 = __nccwpck_require__(93937); -const signoff_commit_message_1 = __nccwpck_require__(2686); -const commit_exclude_1 = __nccwpck_require__(14702); -exports.DEFAULT_RELEASE_PLEASE_CONFIG = 'release-please-config.json'; -exports.DEFAULT_RELEASE_PLEASE_MANIFEST = '.release-please-manifest.json'; -exports.ROOT_PROJECT_PATH = '.'; -exports.DEFAULT_COMPONENT_NAME = ''; -exports.DEFAULT_LABELS = ['autorelease: pending']; -exports.DEFAULT_RELEASE_LABELS = ['autorelease: tagged']; -exports.DEFAULT_SNAPSHOT_LABELS = ['autorelease: snapshot']; -exports.SNOOZE_LABEL = 'autorelease: snooze'; -const DEFAULT_RELEASE_SEARCH_DEPTH = 400; -const DEFAULT_COMMIT_SEARCH_DEPTH = 500; -exports.MANIFEST_PULL_REQUEST_TITLE_PATTERN = 'chore: release ${branch}'; -class Manifest { - /** - * Create a Manifest from explicit config in code. This assumes that the - * repository has a single component at the root path. - * - * @param {GitHub} github GitHub client - * @param {string} targetBranch The releaseable base branch - * @param {RepositoryConfig} repositoryConfig Parsed configuration of path => release configuration - * @param {ReleasedVersions} releasedVersions Parsed versions of path => latest release version - * @param {ManifestOptions} manifestOptions Optional. Manifest options - * @param {string} manifestOptions.bootstrapSha If provided, use this SHA - * as the point to consider commits after - * @param {boolean} manifestOptions.alwaysLinkLocal Option for the node-workspace - * plugin - * @param {boolean} manifestOptions.updatePeerDependencies Option for the node-workspace - * plugin - * @param {boolean} manifestOptions.separatePullRequests If true, create separate pull - * requests instead of a single manifest release pull request - * @param {PluginType[]} manifestOptions.plugins Any plugins to use for this repository - * @param {boolean} manifestOptions.fork If true, create pull requests from a fork. Defaults - * to `false` - * @param {string} manifestOptions.signoff Add a Signed-off-by annotation to the commit - * @param {string} manifestOptions.manifestPath Path to the versions manifest - * @param {string[]} manifestOptions.labels Labels that denote a pending, untagged release - * pull request. Defaults to `[autorelease: pending]` - * @param {string[]} manifestOptions.releaseLabels Labels to apply to a tagged release - * pull request. 
Defaults to `[autorelease: tagged]` - */ - constructor(github, targetBranch, repositoryConfig, releasedVersions, manifestOptions) { - var _a, _b; - this.repository = github.repository; - this.github = github; - this.targetBranch = targetBranch; - this.repositoryConfig = repositoryConfig; - this.releasedVersions = releasedVersions; - this.manifestPath = - (manifestOptions === null || manifestOptions === void 0 ? void 0 : manifestOptions.manifestPath) || exports.DEFAULT_RELEASE_PLEASE_MANIFEST; - this.separatePullRequests = - (_a = manifestOptions === null || manifestOptions === void 0 ? void 0 : manifestOptions.separatePullRequests) !== null && _a !== void 0 ? _a : Object.keys(repositoryConfig).length === 1; - this.fork = (manifestOptions === null || manifestOptions === void 0 ? void 0 : manifestOptions.fork) || false; - this.signoffUser = manifestOptions === null || manifestOptions === void 0 ? void 0 : manifestOptions.signoff; - this.releaseLabels = - (manifestOptions === null || manifestOptions === void 0 ? void 0 : manifestOptions.releaseLabels) || exports.DEFAULT_RELEASE_LABELS; - this.labels = (manifestOptions === null || manifestOptions === void 0 ? void 0 : manifestOptions.labels) || exports.DEFAULT_LABELS; - this.skipLabeling = (manifestOptions === null || manifestOptions === void 0 ? void 0 : manifestOptions.skipLabeling) || false; - this.sequentialCalls = (manifestOptions === null || manifestOptions === void 0 ? void 0 : manifestOptions.sequentialCalls) || false; - this.snapshotLabels = - (manifestOptions === null || manifestOptions === void 0 ? void 0 : manifestOptions.snapshotLabels) || exports.DEFAULT_SNAPSHOT_LABELS; - this.bootstrapSha = manifestOptions === null || manifestOptions === void 0 ? void 0 : manifestOptions.bootstrapSha; - this.lastReleaseSha = manifestOptions === null || manifestOptions === void 0 ? void 0 : manifestOptions.lastReleaseSha; - this.draft = manifestOptions === null || manifestOptions === void 0 ? void 0 : manifestOptions.draft; - this.draftPullRequest = manifestOptions === null || manifestOptions === void 0 ? void 0 : manifestOptions.draftPullRequest; - this.groupPullRequestTitlePattern = - manifestOptions === null || manifestOptions === void 0 ? void 0 : manifestOptions.groupPullRequestTitlePattern; - this.releaseSearchDepth = - (manifestOptions === null || manifestOptions === void 0 ? void 0 : manifestOptions.releaseSearchDepth) || DEFAULT_RELEASE_SEARCH_DEPTH; - this.commitSearchDepth = - (manifestOptions === null || manifestOptions === void 0 ? void 0 : manifestOptions.commitSearchDepth) || DEFAULT_COMMIT_SEARCH_DEPTH; - this.logger = (_b = manifestOptions === null || manifestOptions === void 0 ? void 0 : manifestOptions.logger) !== null && _b !== void 0 ? _b : logger_1.logger; - this.plugins = ((manifestOptions === null || manifestOptions === void 0 ? void 0 : manifestOptions.plugins) || []).map(pluginType => (0, factory_1.buildPlugin)({ - type: pluginType, - github: this.github, - targetBranch: this.targetBranch, - repositoryConfig: this.repositoryConfig, - manifestPath: this.manifestPath, - })); - this.pullRequestOverflowHandler = new pull_request_overflow_handler_1.FilePullRequestOverflowHandler(this.github, this.logger); - } - /** - * Create a Manifest from config files in the repository. - * - * @param {GitHub} github GitHub client - * @param {string} targetBranch The releaseable base branch - * @param {string} configFile Optional. The path to the manifest config file - * @param {string} manifestFile Optional. 
The path to the manifest versions file - * @param {string} path The single path to check. Optional - * @returns {Manifest} - */ - static async fromManifest(github, targetBranch, configFile = exports.DEFAULT_RELEASE_PLEASE_CONFIG, manifestFile = exports.DEFAULT_RELEASE_PLEASE_MANIFEST, manifestOptionOverrides = {}, path, releaseAs) { - const [{ config: repositoryConfig, options: manifestOptions }, releasedVersions,] = await Promise.all([ - parseConfig(github, configFile, targetBranch, path, releaseAs), - parseReleasedVersions(github, manifestFile, targetBranch), - ]); - return new Manifest(github, targetBranch, repositoryConfig, releasedVersions, { - manifestPath: manifestFile, - ...manifestOptions, - ...manifestOptionOverrides, - }); - } - /** - * Create a Manifest from explicit config in code. This assumes that the - * repository has a single component at the root path. - * - * @param {GitHub} github GitHub client - * @param {string} targetBranch The releaseable base branch - * @param {ReleaserConfig} config Release strategy options - * @param {ManifestOptions} manifestOptions Optional. Manifest options - * @param {string} manifestOptions.bootstrapSha If provided, use this SHA - * as the point to consider commits after - * @param {boolean} manifestOptions.alwaysLinkLocal Option for the node-workspace - * plugin - * @param {boolean} manifestOptions.updatePeerDependencies Option for the node-workspace - * plugin - * @param {boolean} manifestOptions.separatePullRequests If true, create separate pull - * requests instead of a single manifest release pull request - * @param {PluginType[]} manifestOptions.plugins Any plugins to use for this repository - * @param {boolean} manifestOptions.fork If true, create pull requests from a fork. Defaults - * to `false` - * @param {string} manifestOptions.signoff Add a Signed-off-by annotation to the commit - * @param {string} manifestOptions.manifestPath Path to the versions manifest - * @param {string[]} manifestOptions.labels Labels that denote a pending, untagged release - * pull request. Defaults to `[autorelease: pending]` - * @param {string[]} manifestOptions.releaseLabels Labels to apply to a tagged release - * pull request. Defaults to `[autorelease: tagged]` - * @returns {Manifest} - */ - static async fromConfig(github, targetBranch, config, manifestOptions, path = exports.ROOT_PROJECT_PATH) { - const repositoryConfig = {}; - repositoryConfig[path] = config; - const strategy = await (0, factory_1.buildStrategy)({ - github, - ...config, - }); - const component = await strategy.getBranchComponent(); - const releasedVersions = {}; - const latestVersion = await latestReleaseVersion(github, targetBranch, version => isPublishedVersion(strategy, version), config, component, manifestOptions === null || manifestOptions === void 0 ? void 0 : manifestOptions.logger); - if (latestVersion) { - releasedVersions[path] = latestVersion; - } - return new Manifest(github, targetBranch, repositoryConfig, releasedVersions, { - separatePullRequests: true, - ...manifestOptions, - }); - } - /** - * Build all candidate pull requests for this repository. - * - * Iterates through each path and builds a candidate pull request for component. - * Applies any configured plugins. - * - * @returns {ReleasePullRequest[]} The candidate pull requests to open or update. 
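// `fromConfig` above is the config-in-code alternative to `fromManifest` for a
// repository with a single component at the root path. A minimal sketch based
// on the signature shown (release type and target branch are illustrative):
const singleManifest = await Manifest.fromConfig(github, 'main', {
  releaseType: 'node',
  bumpMinorPreMajor: true,
});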
- */ - async buildPullRequests() { - var _a; - this.logger.info('Building pull requests'); - const pathsByComponent = await this.getPathsByComponent(); - const strategiesByPath = await this.getStrategiesByPath(); - // Collect all the SHAs of the latest release packages - this.logger.info('Collecting release commit SHAs'); - let releasesFound = 0; - const expectedReleases = Object.keys(strategiesByPath).length; - // SHAs by path - const releaseShasByPath = {}; - // Releases by path - const releasesByPath = {}; - this.logger.debug(`release search depth: ${this.releaseSearchDepth}`); - for await (const release of this.github.releaseIterator({ - maxResults: this.releaseSearchDepth, - })) { - const tagName = tag_name_1.TagName.parse(release.tagName); - if (!tagName) { - this.logger.warn(`Unable to parse release name: ${release.name}`); - continue; - } - const component = tagName.component || exports.DEFAULT_COMPONENT_NAME; - const path = pathsByComponent[component]; - if (!path) { - this.logger.warn(`Found release tag with component '${component}', but not configured in manifest`); - continue; - } - const expectedVersion = this.releasedVersions[path]; - if (!expectedVersion) { - this.logger.warn(`Unable to find expected version for path '${path}' in manifest`); - continue; - } - if (expectedVersion.toString() === tagName.version.toString()) { - this.logger.debug(`Found release for path ${path}, ${release.tagName}`); - releaseShasByPath[path] = release.sha; - releasesByPath[path] = { - name: release.name, - tag: tagName, - sha: release.sha, - notes: release.notes || '', - }; - releasesFound += 1; - } - if (releasesFound >= expectedReleases) { - break; - } - } - if (releasesFound < expectedReleases) { - this.logger.warn(`Expected ${expectedReleases} releases, only found ${releasesFound}`); - // Fall back to looking for missing releases using expected tags - const missingPaths = Object.keys(strategiesByPath).filter(path => !releasesByPath[path]); - this.logger.warn(`Missing ${missingPaths.length} paths: ${missingPaths}`); - const missingReleases = await this.backfillReleasesFromTags(missingPaths, strategiesByPath); - for (const path in missingReleases) { - releaseShasByPath[path] = missingReleases[path].sha; - releasesByPath[path] = missingReleases[path]; - releasesFound++; - } - } - const needsBootstrap = releasesFound < expectedReleases; - if (releasesFound < expectedReleases) { - this.logger.warn(`Expected ${expectedReleases} releases, only found ${releasesFound}`); - } - for (const path in releasesByPath) { - const release = releasesByPath[path]; - this.logger.debug(`release for path: ${path}, version: ${release.tag.version.toString()}, sha: ${release.sha}`); - } - // iterate through commits and collect commits until we have - // seen all release commits - this.logger.info('Collecting commits since all latest releases'); - const commits = []; - this.logger.debug(`commit search depth: ${this.commitSearchDepth}`); - const commitGenerator = this.github.mergeCommitIterator(this.targetBranch, { - maxResults: this.commitSearchDepth, - backfillFiles: true, - }); - const releaseShas = new Set(Object.values(releaseShasByPath)); - this.logger.debug(releaseShas); - const expectedShas = releaseShas.size; - // sha => release pull request - const releasePullRequestsBySha = {}; - let releaseCommitsFound = 0; - for await (const commit of commitGenerator) { - if (releaseShas.has(commit.sha)) { - if (commit.pullRequest) { - releasePullRequestsBySha[commit.sha] = commit.pullRequest; - } - else { - 
this.logger.warn(`Release SHA ${commit.sha} did not have an associated pull request`); - } - releaseCommitsFound += 1; - } - if (this.lastReleaseSha && this.lastReleaseSha === commit.sha) { - this.logger.info(`Using configured lastReleaseSha ${this.lastReleaseSha} as last commit.`); - break; - } - else if (needsBootstrap && commit.sha === this.bootstrapSha) { - this.logger.info(`Needed bootstrapping, found configured bootstrapSha ${this.bootstrapSha}`); - break; - } - else if (!needsBootstrap && releaseCommitsFound >= expectedShas) { - // found enough commits - break; - } - commits.push({ - sha: commit.sha, - message: commit.message, - files: commit.files, - pullRequest: commit.pullRequest, - }); - } - if (releaseCommitsFound < expectedShas) { - this.logger.warn(`Expected ${expectedShas} commits, only found ${releaseCommitsFound}`); - } - // split commits by path - this.logger.info(`Splitting ${commits.length} commits by path`); - const cs = new commit_split_1.CommitSplit({ - includeEmpty: true, - packagePaths: Object.keys(this.repositoryConfig), - }); - const splitCommits = cs.split(commits); - // limit paths to ones since the last release - let commitsPerPath = {}; - for (const path in this.repositoryConfig) { - commitsPerPath[path] = commitsAfterSha(path === exports.ROOT_PROJECT_PATH ? commits : splitCommits[path], releaseShasByPath[path]); - } - const commitExclude = new commit_exclude_1.CommitExclude(this.repositoryConfig); - commitsPerPath = commitExclude.excludeCommits(commitsPerPath); - // backfill latest release tags from manifest - for (const path in this.repositoryConfig) { - const latestRelease = releasesByPath[path]; - if (!latestRelease && - this.releasedVersions[path] && - this.releasedVersions[path].toString() !== '0.0.0') { - const version = this.releasedVersions[path]; - const strategy = strategiesByPath[path]; - const component = await strategy.getComponent(); - this.logger.info(`No latest release found for path: ${path}, component: ${component}, but a previous version (${version.toString()}) was specified in the manifest.`); - releasesByPath[path] = { - tag: new tag_name_1.TagName(version, component, this.repositoryConfig[path].tagSeparator, this.repositoryConfig[path].includeVInTag), - sha: '', - notes: '', - }; - } - } - let strategies = strategiesByPath; - for (const plugin of this.plugins) { - strategies = await plugin.preconfigure(strategies, commitsPerPath, releasesByPath); - } - let newReleasePullRequests = []; - for (const path in this.repositoryConfig) { - const config = this.repositoryConfig[path]; - this.logger.info(`Building candidate release pull request for path: ${path}`); - this.logger.debug(`type: ${config.releaseType}`); - this.logger.debug(`targetBranch: ${this.targetBranch}`); - let pathCommits = (0, commit_1.parseConventionalCommits)(commitsPerPath[path], this.logger); - // The processCommits hook can be implemented by plugins to - // post-process commits. 
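// The processCommits hook mentioned above is the plugin extension point for
// commit cleanup. A sketch of a sentence-casing plugin, under the assumption
// that it subclasses the ManifestPlugin base class defined later in this file
// (the class name is illustrative):
class SentenceCasePlugin extends ManifestPlugin {
  processCommits(commits) {
    return commits.map(commit => {
      // capitalize the description after the conventional-commit "type: " prefix
      const m = commit.message.match(/^(\w+(?:\(.+\))?!?: )(.*)$/);
      if (!m) return commit;
      return { ...commit, message: m[1] + m[2].charAt(0).toUpperCase() + m[2].slice(1) };
    });
  }
}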
This can be used to perform cleanup, e.g,, sentence - // casing all commit messages: - for (const plugin of this.plugins) { - pathCommits = plugin.processCommits(pathCommits); - } - this.logger.debug(`commits: ${pathCommits.length}`); - const latestReleasePullRequest = releasePullRequestsBySha[releaseShasByPath[path]]; - if (!latestReleasePullRequest) { - this.logger.warn('No latest release pull request found.'); - } - const strategy = strategies[path]; - const latestRelease = releasesByPath[path]; - const releasePullRequest = await strategy.buildReleasePullRequest(pathCommits, latestRelease, (_a = config.draftPullRequest) !== null && _a !== void 0 ? _a : this.draftPullRequest, this.labels); - if (releasePullRequest) { - // Update manifest, but only for valid release version - this will skip SNAPSHOT from java strategy - if (releasePullRequest.version && - isPublishedVersion(strategy, releasePullRequest.version)) { - const versionsMap = new Map(); - versionsMap.set(path, releasePullRequest.version); - releasePullRequest.updates.push({ - path: this.manifestPath, - createIfMissing: false, - updater: new release_please_manifest_1.ReleasePleaseManifest({ - version: releasePullRequest.version, - versionsMap, - }), - }); - } - newReleasePullRequests.push({ - path, - config, - pullRequest: releasePullRequest, - }); - } - } - // Combine pull requests into 1 unless configured for separate - // pull requests - if (!this.separatePullRequests) { - const mergeOptions = { - pullRequestTitlePattern: this.groupPullRequestTitlePattern, - }; - // Find the first repositoryConfig item that has a set value - // for the options that can be passed to the merge plugin - for (const path in this.repositoryConfig) { - const config = this.repositoryConfig[path]; - if ('pullRequestHeader' in config && - !('pullRequestHeader' in mergeOptions)) { - mergeOptions.pullRequestHeader = config.pullRequestHeader; - } - if ('pullRequestFooter' in config && - !('pullRequestFooter' in mergeOptions)) { - mergeOptions.pullRequestFooter = config.pullRequestFooter; - } - if ('componentNoSpace' in config && - !('componentNoSpace' in mergeOptions)) { - mergeOptions.componentNoSpace = config.componentNoSpace; - } - } - this.plugins.push(new merge_1.Merge(this.github, this.targetBranch, this.repositoryConfig, mergeOptions)); - } - for (const plugin of this.plugins) { - this.logger.debug(`running plugin: ${plugin.constructor.name}`); - newReleasePullRequests = await plugin.run(newReleasePullRequests); - } - return newReleasePullRequests.map(pullRequestWithConfig => pullRequestWithConfig.pullRequest); - } - async backfillReleasesFromTags(missingPaths, strategiesByPath) { - const releasesByPath = {}; - const allTags = await this.getAllTags(); - for (const path of missingPaths) { - const expectedVersion = this.releasedVersions[path]; - if (!expectedVersion) { - this.logger.warn(`No version for path ${path}`); - continue; - } - const component = await strategiesByPath[path].getComponent(); - const expectedTag = new tag_name_1.TagName(expectedVersion, component, this.repositoryConfig[path].tagSeparator, this.repositoryConfig[path].includeVInTag); - this.logger.debug(`looking for tagName: ${expectedTag.toString()}`); - const foundTag = allTags[expectedTag.toString()]; - if (foundTag) { - this.logger.debug(`found: ${foundTag.name} ${foundTag.sha}`); - releasesByPath[path] = { - name: foundTag.name, - tag: expectedTag, - sha: foundTag.sha, - notes: '', - }; - } - else { - if (strategiesByPath[exports.ROOT_PROJECT_PATH] && - 
this.repositoryConfig[path].skipGithubRelease) { - this.logger.debug('could not find release, checking root package'); - const rootComponent = await strategiesByPath[exports.ROOT_PROJECT_PATH].getComponent(); - const rootTag = new tag_name_1.TagName(expectedVersion, rootComponent, this.repositoryConfig[exports.ROOT_PROJECT_PATH].tagSeparator, this.repositoryConfig[exports.ROOT_PROJECT_PATH].includeVInTag); - const foundTag = allTags[rootTag.toString()]; - if (foundTag) { - this.logger.debug(`found rootTag: ${foundTag.name} ${foundTag.sha}`); - releasesByPath[path] = { - name: foundTag.name, - tag: rootTag, - sha: foundTag.sha, - notes: '', - }; - } - } - } - } - return releasesByPath; - } - async getAllTags() { - const allTags = {}; - for await (const tag of this.github.tagIterator()) { - allTags[tag.name] = tag; - } - return allTags; - } - /** - * Opens/updates all candidate release pull requests for this repository. - * - * @returns {PullRequest[]} Pull request numbers of release pull requests - */ - async createPullRequests() { - const candidatePullRequests = await this.buildPullRequests(); - if (candidatePullRequests.length === 0) { - return []; - } - // if there are any merged, pending release pull requests, don't open - // any new release PRs - const mergedPullRequestsGenerator = this.findMergedReleasePullRequests(); - for await (const _ of mergedPullRequestsGenerator) { - this.logger.warn('There are untagged, merged release PRs outstanding - aborting'); - return []; - } - // collect open and snoozed release pull requests - const openPullRequests = await this.findOpenReleasePullRequests(); - const snoozedPullRequests = await this.findSnoozedReleasePullRequests(); - if (this.sequentialCalls) { - const pullRequests = []; - for (const pullRequest of candidatePullRequests) { - const resultPullRequest = await this.createOrUpdatePullRequest(pullRequest, openPullRequests, snoozedPullRequests); - if (resultPullRequest) - pullRequests.push(resultPullRequest); - } - return pullRequests; - } - else { - const promises = []; - for (const pullRequest of candidatePullRequests) { - promises.push(this.createOrUpdatePullRequest(pullRequest, openPullRequests, snoozedPullRequests)); - } - const pullNumbers = await Promise.all(promises); - // reject any pull numbers that were not created or updated - return pullNumbers.filter(number => !!number); - } - } - async findOpenReleasePullRequests() { - this.logger.info('Looking for open release pull requests'); - const openPullRequests = []; - const generator = this.github.pullRequestIterator(this.targetBranch, 'OPEN', Number.MAX_SAFE_INTEGER, false); - for await (const openPullRequest of generator) { - if (hasAllLabels(this.labels, openPullRequest.labels) || - hasAllLabels(this.snapshotLabels, openPullRequest.labels)) { - const body = await this.pullRequestOverflowHandler.parseOverflow(openPullRequest); - if (body) { - // maybe replace with overflow body - openPullRequests.push({ - ...openPullRequest, - body: body.toString(), - }); - } - } - } - this.logger.info(`found ${openPullRequests.length} open release pull requests.`); - return openPullRequests; - } - async findSnoozedReleasePullRequests() { - this.logger.info('Looking for snoozed release pull requests'); - const snoozedPullRequests = []; - const closedGenerator = this.github.pullRequestIterator(this.targetBranch, 'CLOSED', 200, false); - for await (const closedPullRequest of closedGenerator) { - if (hasAllLabels([exports.SNOOZE_LABEL], closedPullRequest.labels) && - 
branch_name_1.BranchName.parse(closedPullRequest.headBranchName, this.logger)) { - const body = await this.pullRequestOverflowHandler.parseOverflow(closedPullRequest); - if (body) { - // maybe replace with overflow body - snoozedPullRequests.push({ - ...closedPullRequest, - body: body.toString(), - }); - } - } - } - this.logger.info(`found ${snoozedPullRequests.length} snoozed release pull requests.`); - return snoozedPullRequests; - } - async createOrUpdatePullRequest(pullRequest, openPullRequests, snoozedPullRequests) { - // look for existing, open pull request - const existing = openPullRequests.find(openPullRequest => openPullRequest.headBranchName === pullRequest.headRefName); - if (existing) { - return await this.maybeUpdateExistingPullRequest(existing, pullRequest); - } - // look for closed, snoozed pull request - const snoozed = snoozedPullRequests.find(openPullRequest => openPullRequest.headBranchName === pullRequest.headRefName); - if (snoozed) { - return await this.maybeUpdateSnoozedPullRequest(snoozed, pullRequest); - } - const body = await this.pullRequestOverflowHandler.handleOverflow(pullRequest); - const message = this.signoffUser - ? (0, signoff_commit_message_1.signoffCommitMessage)(pullRequest.title.toString(), this.signoffUser) - : pullRequest.title.toString(); - const newPullRequest = await this.github.createPullRequest({ - headBranchName: pullRequest.headRefName, - baseBranchName: this.targetBranch, - number: -1, - title: pullRequest.title.toString(), - body, - labels: this.skipLabeling ? [] : pullRequest.labels, - files: [], - }, this.targetBranch, message, pullRequest.updates, { - fork: this.fork, - draft: pullRequest.draft, - }); - return newPullRequest; - } - /// only update an existing pull request if it has release note changes - async maybeUpdateExistingPullRequest(existing, pullRequest) { - // If unchanged, no need to push updates - if (existing.body === pullRequest.body.toString()) { - this.logger.info(`PR https://github.com/${this.repository.owner}/${this.repository.repo}/pull/${existing.number} remained the same`); - return undefined; - } - const updatedPullRequest = await this.github.updatePullRequest(existing.number, pullRequest, this.targetBranch, { - fork: this.fork, - signoffUser: this.signoffUser, - pullRequestOverflowHandler: this.pullRequestOverflowHandler, - }); - return updatedPullRequest; - } - /// only update an snoozed pull request if it has release note changes - async maybeUpdateSnoozedPullRequest(snoozed, pullRequest) { - // If unchanged, no need to push updates - if (snoozed.body === pullRequest.body.toString()) { - this.logger.info(`PR https://github.com/${this.repository.owner}/${this.repository.repo}/pull/${snoozed.number} remained the same`); - return undefined; - } - const updatedPullRequest = await this.github.updatePullRequest(snoozed.number, pullRequest, this.targetBranch, { - fork: this.fork, - signoffUser: this.signoffUser, - pullRequestOverflowHandler: this.pullRequestOverflowHandler, - }); - // TODO: consider leaving the snooze label - await this.github.removeIssueLabels([exports.SNOOZE_LABEL], snoozed.number); - return updatedPullRequest; - } - async *findMergedReleasePullRequests() { - // Find merged release pull requests - const pullRequestGenerator = this.github.pullRequestIterator(this.targetBranch, 'MERGED', 200, false); - for await (const pullRequest of pullRequestGenerator) { - if (!hasAllLabels(this.labels, pullRequest.labels)) { - continue; - } - this.logger.debug(`Found pull request #${pullRequest.number}: 
'${pullRequest.title}'`); - // if the pull request body overflows, handle it - const pullRequestBody = await this.pullRequestOverflowHandler.parseOverflow(pullRequest); - if (!pullRequestBody) { - this.logger.debug('could not parse pull request body as a release PR'); - continue; - } - // replace with the complete fetched body - yield { - ...pullRequest, - body: pullRequestBody.toString(), - }; - } - } - /** - * Find merged, untagged releases and build candidate releases to tag. - * - * @returns {CandidateRelease[]} List of release candidates - */ - async buildReleases() { - var _a; - this.logger.info('Building releases'); - const strategiesByPath = await this.getStrategiesByPath(); - // Find merged release pull requests - const generator = await this.findMergedReleasePullRequests(); - const candidateReleases = []; - for await (const pullRequest of generator) { - for (const path in this.repositoryConfig) { - const config = this.repositoryConfig[path]; - this.logger.info(`Building release for path: ${path}`); - this.logger.debug(`type: ${config.releaseType}`); - this.logger.debug(`targetBranch: ${this.targetBranch}`); - const strategy = strategiesByPath[path]; - const releases = await strategy.buildReleases(pullRequest, { - groupPullRequestTitlePattern: this.groupPullRequestTitlePattern, - }); - for (const release of releases) { - candidateReleases.push({ - ...release, - path, - pullRequest, - draft: (_a = config.draft) !== null && _a !== void 0 ? _a : this.draft, - prerelease: config.prerelease && - (!!release.tag.version.preRelease || - release.tag.version.major === 0), - }); - } - } - } - return candidateReleases; - } - /** - * Find merged, untagged releases. For each release, create a GitHub release, - * comment on the pull request used to generate it and update the pull request - * labels.
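// Note the prerelease computation in buildReleases above: a release is marked
// prerelease only when the config opts in AND the version is either a
// pre-release or still 0.x. Restated as a standalone predicate (the helper
// name is illustrative):
function isPrerelease(config, version) {
  return Boolean(config.prerelease && (version.preRelease || version.major === 0));
}
// isPrerelease({ prerelease: true }, { major: 1, minor: 2, patch: 3 })                     -> false
// isPrerelease({ prerelease: true }, { major: 0, minor: 4, patch: 0 })                     -> true
// isPrerelease({ prerelease: true }, { major: 1, minor: 0, patch: 0, preRelease: 'rc.1' }) -> true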
- * - * @returns {GitHubRelease[]} List of created GitHub releases - */ - async createReleases() { - const releasesByPullRequest = {}; - const pullRequestsByNumber = {}; - for (const release of await this.buildReleases()) { - pullRequestsByNumber[release.pullRequest.number] = release.pullRequest; - if (releasesByPullRequest[release.pullRequest.number]) { - releasesByPullRequest[release.pullRequest.number].push(release); - } - else { - releasesByPullRequest[release.pullRequest.number] = [release]; - } - } - if (this.sequentialCalls) { - const resultReleases = []; - for (const pullNumber in releasesByPullRequest) { - const releases = await this.createReleasesForPullRequest(releasesByPullRequest[pullNumber], pullRequestsByNumber[pullNumber]); - resultReleases.push(...releases); - } - return resultReleases; - } - else { - const promises = []; - for (const pullNumber in releasesByPullRequest) { - promises.push(this.createReleasesForPullRequest(releasesByPullRequest[pullNumber], pullRequestsByNumber[pullNumber])); - } - const releases = await Promise.all(promises); - return releases.reduce((collection, r) => collection.concat(r), []); - } - } - async createReleasesForPullRequest(releases, pullRequest) { - this.logger.info(`Creating ${releases.length} releases for pull #${pullRequest.number}`); - const duplicateReleases = []; - const githubReleases = []; - let error; - for (const release of releases) { - // stop releasing once we hit an error - if (error) - continue; - try { - githubReleases.push(await this.createRelease(release)); - } - catch (err) { - if (err instanceof errors_1.DuplicateReleaseError) { - this.logger.warn(`Duplicate release tag: ${release.tag.toString()}`); - duplicateReleases.push(err); - } - else { - error = err; - } - } - } - if (githubReleases.length > 0) { - // comment on pull request about the successful releases - const releaseList = githubReleases - .map(({ tagName, url }) => `- [${tagName}](${url})`) - .join('\n'); - const comment = `🤖 Created releases:\n\n${releaseList}\n\n:sunflower:`; - await this.github.commentOnIssue(comment, pullRequest.number); - } - if (error) { - throw error; - } - if (duplicateReleases.length > 0) { - if (duplicateReleases.length + githubReleases.length === - releases.length) { - // we've either tagged all releases or they were duplicates: - // adjust tags on pullRequest - await this.github.removeIssueLabels(this.labels, pullRequest.number); - await this.github.addIssueLabels(this.releaseLabels, pullRequest.number); - } - if (githubReleases.length === 0) { - // If all releases were duplicate, throw a duplicate error - throw duplicateReleases[0]; - } - } - else { - // adjust tags on pullRequest - await this.github.removeIssueLabels(this.labels, pullRequest.number); - await this.github.addIssueLabels(this.releaseLabels, pullRequest.number); - } - return githubReleases; - } - async createRelease(release) { - const githubRelease = await this.github.createRelease(release, { - draft: release.draft, - prerelease: release.prerelease, - }); - return { - ...githubRelease, - path: release.path, - version: release.tag.version.toString(), - major: release.tag.version.major, - minor: release.tag.version.minor, - patch: release.tag.version.patch, - }; - } - async getStrategiesByPath() { - if (!this._strategiesByPath) { - this.logger.info('Building strategies by path'); - this._strategiesByPath = {}; - for (const path in this.repositoryConfig) { - const config = this.repositoryConfig[path]; - this.logger.debug(`${path}: ${config.releaseType}`); - const 
strategy = await (0, factory_1.buildStrategy)({ - ...config, - github: this.github, - path, - targetBranch: this.targetBranch, - }); - this._strategiesByPath[path] = strategy; - } - } - return this._strategiesByPath; - } - async getPathsByComponent() { - if (!this._pathsByComponent) { - this._pathsByComponent = {}; - const strategiesByPath = await this.getStrategiesByPath(); - for (const path in this.repositoryConfig) { - const strategy = strategiesByPath[path]; - const component = (await strategy.getComponent()) || ''; - if (this._pathsByComponent[component]) { - this.logger.warn(`Multiple paths for ${component}: ${this._pathsByComponent[component]}, ${path}`); - } - this._pathsByComponent[component] = path; - } - } - return this._pathsByComponent; - } -} -exports.Manifest = Manifest; -/** - * Helper to convert parsed JSON releaser config into ReleaserConfig for - * the Manifest. - * - * @param {ReleaserPackageConfig} config Parsed configuration from JSON file. - * @returns {ReleaserConfig} - */ -function extractReleaserConfig(config) { - var _a, _b, _c; - return { - releaseType: config['release-type'], - bumpMinorPreMajor: config['bump-minor-pre-major'], - bumpPatchForMinorPreMajor: config['bump-patch-for-minor-pre-major'], - prereleaseType: config['prerelease-type'], - versioning: config['versioning'], - changelogSections: config['changelog-sections'], - changelogPath: config['changelog-path'], - changelogHost: config['changelog-host'], - releaseAs: config['release-as'], - skipGithubRelease: config['skip-github-release'], - draft: config.draft, - prerelease: config.prerelease, - draftPullRequest: config['draft-pull-request'], - component: config['component'], - packageName: config['package-name'], - versionFile: config['version-file'], - extraFiles: config['extra-files'], - includeComponentInTag: config['include-component-in-tag'], - includeVInTag: config['include-v-in-tag'], - changelogType: config['changelog-type'], - pullRequestTitlePattern: config['pull-request-title-pattern'], - pullRequestHeader: config['pull-request-header'], - pullRequestFooter: config['pull-request-footer'], - componentNoSpace: config['component-no-space'], - tagSeparator: config['tag-separator'], - separatePullRequests: config['separate-pull-requests'], - labels: (_a = config['label']) === null || _a === void 0 ? void 0 : _a.split(','), - releaseLabels: (_b = config['release-label']) === null || _b === void 0 ? void 0 : _b.split(','), - extraLabels: (_c = config['extra-label']) === null || _c === void 0 ? void 0 : _c.split(','), - skipSnapshot: config['skip-snapshot'], - initialVersion: config['initial-version'], - excludePaths: config['exclude-paths'], - }; -} -/** - * Helper to convert fetch the manifest config from the repository and - * parse into configuration for the Manifest. - * - * @param {GitHub} github GitHub client - * @param {string} configFile Path in the repository to the manifest config - * @param {string} branch Branch to fetch the config file from - * @param {string} onlyPath Optional. Use only the given package - * @param {string} releaseAs Optional. 
Override release-as and use the given version - */ -async function parseConfig(github, configFile, branch, onlyPath, releaseAs) { - const config = await fetchManifestConfig(github, configFile, branch); - const defaultConfig = extractReleaserConfig(config); - const repositoryConfig = {}; - for (const path in config.packages) { - if (onlyPath && onlyPath !== path) - continue; - repositoryConfig[path] = mergeReleaserConfig(defaultConfig, extractReleaserConfig(config.packages[path])); - if (releaseAs) { - repositoryConfig[path].releaseAs = releaseAs; - } - } - const configLabel = config['label']; - const configReleaseLabel = config['release-label']; - const configSnapshotLabel = config['snapshot-label']; - const configExtraLabel = config['extra-label']; - const manifestOptions = { - bootstrapSha: config['bootstrap-sha'], - lastReleaseSha: config['last-release-sha'], - alwaysLinkLocal: config['always-link-local'], - separatePullRequests: config['separate-pull-requests'], - groupPullRequestTitlePattern: config['group-pull-request-title-pattern'], - plugins: config['plugins'], - signoff: config['signoff'], - labels: configLabel === null || configLabel === void 0 ? void 0 : configLabel.split(','), - releaseLabels: configReleaseLabel === null || configReleaseLabel === void 0 ? void 0 : configReleaseLabel.split(','), - snapshotLabels: configSnapshotLabel === null || configSnapshotLabel === void 0 ? void 0 : configSnapshotLabel.split(','), - extraLabels: configExtraLabel === null || configExtraLabel === void 0 ? void 0 : configExtraLabel.split(','), - releaseSearchDepth: config['release-search-depth'], - commitSearchDepth: config['commit-search-depth'], - sequentialCalls: config['sequential-calls'], - }; - return { config: repositoryConfig, options: manifestOptions }; -} -/** - * Helper to fetch manifest config - * - * @param {GitHub} github - * @param {string} configFile - * @param {string} branch - * @returns {ManifestConfig} - * @throws {ConfigurationError} if missing the manifest config file - */ -async function fetchManifestConfig(github, configFile, branch) { - try { - return await github.getFileJson(configFile, branch); - } - catch (e) { - if (e instanceof errors_1.FileNotFoundError) { - throw new errors_1.ConfigurationError(`Missing required manifest config: ${configFile}`, 'base', `${github.repository.owner}/${github.repository.repo}`); - } - else if (e instanceof SyntaxError) { - throw new errors_1.ConfigurationError(`Failed to parse manifest config JSON: ${configFile}\n${e.message}`, 'base', `${github.repository.owner}/${github.repository.repo}`); - } - throw e; - } -} -/** - * Helper to parse the manifest versions file. 
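// parseConfig and extractReleaserConfig above translate the kebab-case JSON
// keys into camelCase releaser options, with per-package entries overriding
// the top-level defaults. An illustrative release-please-config.json:
const exampleConfig = {
  'release-type': 'node',                  // -> releaseType
  'bump-minor-pre-major': true,            // -> bumpMinorPreMajor
  'label': 'autorelease: pending,release', // -> labels, split on ','
  packages: {
    '.': {},                                      // inherits the defaults above
    'packages/foo': { 'release-type': 'python' }, // per-path override
  },
};
// => repositoryConfig['.'].releaseType            === 'node'
// => repositoryConfig['packages/foo'].releaseType === 'python'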
- * - * @param {GitHub} github GitHub client - * @param {string} manifestFile Path in the repository to the versions file - * @param {string} branch Branch to fetch the versions file from - * @returns {Record} - */ -async function parseReleasedVersions(github, manifestFile, branch) { - const manifestJson = await fetchReleasedVersions(github, manifestFile, branch); - const releasedVersions = {}; - for (const path in manifestJson) { - releasedVersions[path] = version_1.Version.parse(manifestJson[path]); - } - return releasedVersions; -} -/** - * Helper to fetch manifest config - * - * @param {GitHub} github - * @param {string} manifestFile - * @param {string} branch - * @throws {ConfigurationError} if missing the manifest config file - */ -async function fetchReleasedVersions(github, manifestFile, branch) { - try { - return await github.getFileJson(manifestFile, branch); - } - catch (e) { - if (e instanceof errors_1.FileNotFoundError) { - throw new errors_1.ConfigurationError(`Missing required manifest versions: ${manifestFile}`, 'base', `${github.repository.owner}/${github.repository.repo}`); - } - else if (e instanceof SyntaxError) { - throw new errors_1.ConfigurationError(`Failed to parse manifest versions JSON: ${manifestFile}\n${e.message}`, 'base', `${github.repository.owner}/${github.repository.repo}`); - } - throw e; - } -} -function isPublishedVersion(strategy, version) { - return strategy.isPublishedVersion - ? strategy.isPublishedVersion(version) - : true; -} -/** - * Find the most recent matching release tag on the branch we're - * configured for. - * - * @param github GitHub client instance. - * @param {string} targetBranch Name of the scanned branch. - * @param releaseFilter Validator function for release version. Used to filter-out SNAPSHOT releases for Java strategy. - * @param {string} prefix Limit the release to a specific component. - */ -async function latestReleaseVersion(github, targetBranch, releaseFilter, config, prefix, logger = logger_1.logger) { - const branchPrefix = prefix - ? prefix.endsWith('-') - ? prefix.replace(/-$/, '') - : prefix - : undefined; - logger.info(`Looking for latest release on branch: ${targetBranch} with prefix: ${prefix}`); - // collect set of recent commit SHAs seen to verify that the release - // is in the current branch - const commitShas = new Set(); - const candidateReleaseVersions = []; - // only look at the last 250 or so commits to find the latest tag - we - // don't want to scan the entire repository history if this repo has never - // been released - const generator = github.mergeCommitIterator(targetBranch, { - maxResults: 250, - }); - for await (const commitWithPullRequest of generator) { - commitShas.add(commitWithPullRequest.sha); - const mergedPullRequest = commitWithPullRequest.pullRequest; - if (!(mergedPullRequest === null || mergedPullRequest === void 0 ? void 0 : mergedPullRequest.mergeCommitOid)) { - logger.trace(`skipping commit: ${commitWithPullRequest.sha} missing merged pull request`); - continue; - } - const branchName = branch_name_1.BranchName.parse(mergedPullRequest.headBranchName, logger); - if (!branchName) { - logger.trace(`skipping commit: ${commitWithPullRequest.sha} unrecognized branch name: ${mergedPullRequest.headBranchName}`); - continue; - } - // If branchPrefix is specified, ensure it is found in the branch name. - // If branchPrefix is not specified, component should also be undefined. 
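// parseReleasedVersions above turns the versions manifest into a map of
// path -> Version. With an illustrative .release-please-manifest.json of
//   { ".": "16.14.1", "packages/foo": "1.2.3" }
// the loop amounts to:
const releasedVersions = {};
for (const [path, raw] of Object.entries({ '.': '16.14.1', 'packages/foo': '1.2.3' })) {
  releasedVersions[path] = Version.parse(raw); // Version as required at the top of this module
}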
- if (branchName.getComponent() !== branchPrefix) { - logger.trace(`skipping commit: ${commitWithPullRequest.sha} branch component ${branchName.getComponent()} doesn't match expected prefix: ${branchPrefix}`); - continue; - } - const pullRequestTitle = pull_request_title_1.PullRequestTitle.parse(mergedPullRequest.title, config.pullRequestTitlePattern, config.componentNoSpace, logger); - if (!pullRequestTitle) { - logger.trace(`skipping commit: ${commitWithPullRequest.sha} couldn't parse pull request title: ${mergedPullRequest.title}`); - continue; - } - const version = pullRequestTitle.getVersion(); - if (version && releaseFilter(version)) { - logger.debug(`Found latest release pull request: ${mergedPullRequest.number} version: ${version}`); - candidateReleaseVersions.push(version); - break; - } - } - // If not found from recent pull requests, look at releases. Iterate - // through releases finding valid tags, then cross reference - const releaseGenerator = github.releaseIterator(); - for await (const release of releaseGenerator) { - const tagName = tag_name_1.TagName.parse(release.tagName); - if (!tagName) { - continue; - } - if (tagMatchesConfig(tagName, branchPrefix, config.includeComponentInTag)) { - logger.debug(`found release for ${prefix}`, tagName.version); - if (!commitShas.has(release.sha)) { - logger.debug(`SHA not found in recent commits to branch ${targetBranch}, skipping`); - continue; - } - candidateReleaseVersions.push(tagName.version); - } - } - logger.debug(`found ${candidateReleaseVersions.length} possible releases.`, candidateReleaseVersions); - if (candidateReleaseVersions.length > 0) { - // Find largest release number (sort descending then return first) - return candidateReleaseVersions.sort((a, b) => b.compare(a))[0]; - } - // If not found from recent pull requests or releases, look at tags. Iterate - // through tags and cross reference against SHAs in this branch - const tagGenerator = github.tagIterator(); - const candidateTagVersion = []; - for await (const tag of tagGenerator) { - const tagName = tag_name_1.TagName.parse(tag.name); - if (!tagName) { - continue; - } - if (tagMatchesConfig(tagName, branchPrefix, config.includeComponentInTag)) { - if (!commitShas.has(tag.sha)) { - logger.debug(`SHA not found in recent commits to branch ${targetBranch}, skipping`); - continue; - } - candidateTagVersion.push(tagName.version); - } - } - logger.debug(`found ${candidateTagVersion.length} possible tags.`, candidateTagVersion); - // Find largest release number (sort descending then return first) - return candidateTagVersion.sort((a, b) => b.compare(a))[0]; -} -function mergeReleaserConfig(defaultConfig, pathConfig) { - var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _0, _1, _2, _3, _4, _5, _6; - return { - releaseType: (_b = (_a = pathConfig.releaseType) !== null && _a !== void 0 ? _a : defaultConfig.releaseType) !== null && _b !== void 0 ? _b : 'node', - bumpMinorPreMajor: (_c = pathConfig.bumpMinorPreMajor) !== null && _c !== void 0 ? _c : defaultConfig.bumpMinorPreMajor, - bumpPatchForMinorPreMajor: (_d = pathConfig.bumpPatchForMinorPreMajor) !== null && _d !== void 0 ? _d : defaultConfig.bumpPatchForMinorPreMajor, - prereleaseType: (_e = pathConfig.prereleaseType) !== null && _e !== void 0 ? _e : defaultConfig.prereleaseType, - versioning: (_f = pathConfig.versioning) !== null && _f !== void 0 ? _f : defaultConfig.versioning, - changelogSections: (_g = pathConfig.changelogSections) !== null && _g !== void 0 ? 
_g : defaultConfig.changelogSections, - changelogPath: (_h = pathConfig.changelogPath) !== null && _h !== void 0 ? _h : defaultConfig.changelogPath, - changelogHost: (_j = pathConfig.changelogHost) !== null && _j !== void 0 ? _j : defaultConfig.changelogHost, - changelogType: (_k = pathConfig.changelogType) !== null && _k !== void 0 ? _k : defaultConfig.changelogType, - releaseAs: (_l = pathConfig.releaseAs) !== null && _l !== void 0 ? _l : defaultConfig.releaseAs, - skipGithubRelease: (_m = pathConfig.skipGithubRelease) !== null && _m !== void 0 ? _m : defaultConfig.skipGithubRelease, - draft: (_o = pathConfig.draft) !== null && _o !== void 0 ? _o : defaultConfig.draft, - draftPullRequest: (_p = pathConfig.draftPullRequest) !== null && _p !== void 0 ? _p : defaultConfig.draftPullRequest, - prerelease: (_q = pathConfig.prerelease) !== null && _q !== void 0 ? _q : defaultConfig.prerelease, - component: (_r = pathConfig.component) !== null && _r !== void 0 ? _r : defaultConfig.component, - packageName: (_s = pathConfig.packageName) !== null && _s !== void 0 ? _s : defaultConfig.packageName, - versionFile: (_t = pathConfig.versionFile) !== null && _t !== void 0 ? _t : defaultConfig.versionFile, - extraFiles: (_u = pathConfig.extraFiles) !== null && _u !== void 0 ? _u : defaultConfig.extraFiles, - includeComponentInTag: (_v = pathConfig.includeComponentInTag) !== null && _v !== void 0 ? _v : defaultConfig.includeComponentInTag, - includeVInTag: (_w = pathConfig.includeVInTag) !== null && _w !== void 0 ? _w : defaultConfig.includeVInTag, - tagSeparator: (_x = pathConfig.tagSeparator) !== null && _x !== void 0 ? _x : defaultConfig.tagSeparator, - pullRequestTitlePattern: (_y = pathConfig.pullRequestTitlePattern) !== null && _y !== void 0 ? _y : defaultConfig.pullRequestTitlePattern, - pullRequestHeader: (_z = pathConfig.pullRequestHeader) !== null && _z !== void 0 ? _z : defaultConfig.pullRequestHeader, - pullRequestFooter: (_0 = pathConfig.pullRequestFooter) !== null && _0 !== void 0 ? _0 : defaultConfig.pullRequestFooter, - componentNoSpace: (_1 = pathConfig.componentNoSpace) !== null && _1 !== void 0 ? _1 : defaultConfig.componentNoSpace, - separatePullRequests: (_2 = pathConfig.separatePullRequests) !== null && _2 !== void 0 ? _2 : defaultConfig.separatePullRequests, - skipSnapshot: (_3 = pathConfig.skipSnapshot) !== null && _3 !== void 0 ? _3 : defaultConfig.skipSnapshot, - initialVersion: (_4 = pathConfig.initialVersion) !== null && _4 !== void 0 ? _4 : defaultConfig.initialVersion, - extraLabels: (_5 = pathConfig.extraLabels) !== null && _5 !== void 0 ? _5 : defaultConfig.extraLabels, - excludePaths: (_6 = pathConfig.excludePaths) !== null && _6 !== void 0 ? _6 : defaultConfig.excludePaths, - }; -} -/** - * Helper to compare if a list of labels fully contains another list of labels - * @param {string[]} expected List of labels expected to be contained - * @param {string[]} existing List of existing labels to consider - */ -function hasAllLabels(expected, existing) { - const existingSet = new Set(existing); - for (const label of expected) { - if (!existingSet.has(label)) { - return false; - } - } - return true; -} -function commitsAfterSha(commits, lastReleaseSha) { - if (!commits) { - return []; - } - const index = commits.findIndex(commit => commit.sha === lastReleaseSha); - if (index === -1) { - return commits; - } - return commits.slice(0, index); -} -/** - * Returns true if the release tag matches the configured component. 
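// The `_x !== null && _x !== void 0 ? _x : ...` chains in mergeReleaserConfig
// above are down-leveled nullish coalescing. The same merge in modern syntax,
// abridged to a few representative fields:
function mergeReleaserConfigModern(defaultConfig, pathConfig) {
  return {
    releaseType: pathConfig.releaseType ?? defaultConfig.releaseType ?? 'node',
    bumpMinorPreMajor: pathConfig.bumpMinorPreMajor ?? defaultConfig.bumpMinorPreMajor,
    changelogPath: pathConfig.changelogPath ?? defaultConfig.changelogPath,
    // ...and so on for each remaining field
  };
}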
Returns - * true if `includeComponentInTag` is false and there is no component in the - * tag, OR if the tag's component matches the release component. - */ -function tagMatchesConfig(tag, branchComponent, includeComponentInTag) { - return ((includeComponentInTag && tag.component === branchComponent) || - (!includeComponentInTag && !tag.component)); -} -//# sourceMappingURL=manifest.js.map - -/***/ }), - -/***/ 31651: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.ManifestPlugin = void 0; -const logger_1 = __nccwpck_require__(68809); -/** - * A plugin runs after a repository manifest has built candidate - * pull requests and can make updates that span across multiple - * components. A plugin *might* choose to merge pull requests or add - * or update existing files. - */ -class ManifestPlugin { - constructor(github, targetBranch, repositoryConfig, logger = logger_1.logger) { - this.github = github; - this.targetBranch = targetBranch; - this.repositoryConfig = repositoryConfig; - this.logger = logger; - } - /** - * Perform post-processing on commits, e.g, sentence casing them. - * @param {Commit[]} commits The set of commits that will feed into release pull request. - * @returns {Commit[]} The modified commit objects. - */ - processCommits(commits) { - return commits; - } - /** - * Post-process candidate pull requests. - * @param {CandidateReleasePullRequest[]} pullRequests Candidate pull requests - * @returns {CandidateReleasePullRequest[]} Updated pull requests - */ - async run(pullRequests) { - return pullRequests; - } - /** - * Pre-configure strategies. - * @param {Record} strategiesByPath Strategies indexed by path - * @returns {Record} Updated strategies indexed by path - */ - async preconfigure(strategiesByPath, _commitsByPath, _releasesByPath) { - return strategiesByPath; - } -} -exports.ManifestPlugin = ManifestPlugin; -//# sourceMappingURL=plugin.js.map - -/***/ }), - -/***/ 77430: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
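// tagMatchesConfig above gates which tags count as releases for a component.
// Worked examples (tag/component values illustrative):
//   tagMatchesConfig({ component: 'foo' }, 'foo', true)          -> true  (components match)
//   tagMatchesConfig({ component: undefined }, undefined, false) -> true  (no component expected or present)
//   tagMatchesConfig({ component: 'foo' }, 'foo', false)         -> false (unexpected component in tag)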
-Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.CargoWorkspace = void 0; -const manifest_1 = __nccwpck_require__(31999); -const workspace_1 = __nccwpck_require__(44226); -const common_1 = __nccwpck_require__(11659); -const version_1 = __nccwpck_require__(17348); -const cargo_toml_1 = __nccwpck_require__(90420); -const raw_content_1 = __nccwpck_require__(62648); -const changelog_1 = __nccwpck_require__(3325); -const pull_request_title_1 = __nccwpck_require__(1158); -const pull_request_body_1 = __nccwpck_require__(70774); -const branch_name_1 = __nccwpck_require__(16344); -const versioning_strategy_1 = __nccwpck_require__(41941); -const cargo_lock_1 = __nccwpck_require__(68875); -const errors_1 = __nccwpck_require__(93637); -/** - * The plugin analyzed a cargo workspace and will bump dependencies - * of managed packages if those dependencies are being updated. - * - * If multiple rust packages are being updated, it will merge them - * into a single rust package. - */ -class CargoWorkspace extends workspace_1.WorkspacePlugin { - constructor() { - super(...arguments); - this.strategiesByPath = {}; - this.releasesByPath = {}; - } - async buildAllPackages(candidates) { - var _a, _b, _c, _d; - const cargoManifestContent = await this.github.getFileContentsOnBranch('Cargo.toml', this.targetBranch); - const cargoManifest = (0, common_1.parseCargoManifest)(cargoManifestContent.parsedContent); - if (!((_a = cargoManifest.workspace) === null || _a === void 0 ? void 0 : _a.members)) { - this.logger.warn("cargo-workspace plugin used, but top-level Cargo.toml isn't a cargo workspace"); - return { allPackages: [], candidatesByPackage: {} }; - } - const allCrates = []; - const candidatesByPackage = {}; - const members = (await Promise.all(cargoManifest.workspace.members.map(member => this.github.findFilesByGlobAndRef(member, this.targetBranch)))).flat(); - members.push(manifest_1.ROOT_PROJECT_PATH); - for (const path of members) { - const manifestPath = (0, workspace_1.addPath)(path, 'Cargo.toml'); - this.logger.info(`looking for candidate with path: ${path}`); - const candidate = candidates.find(c => c.path === path); - // get original content of the crate - const manifestContent = ((_b = candidate === null || candidate === void 0 ? void 0 : candidate.pullRequest.updates.find(update => update.path === manifestPath)) === null || _b === void 0 ? void 0 : _b.cachedFileContents) || - (await this.github.getFileContentsOnBranch(manifestPath, this.targetBranch)); - const manifest = (0, common_1.parseCargoManifest)(manifestContent.parsedContent); - const packageName = (_c = manifest.package) === null || _c === void 0 ? void 0 : _c.name; - if (!packageName) { - this.logger.warn(`package manifest at ${manifestPath} is missing [package.name]`); - continue; - } - if (candidate) { - candidatesByPackage[packageName] = candidate; - } - const version = (_d = manifest.package) === null || _d === void 0 ? 
void 0 : _d.version; - if (!version) { - throw new errors_1.ConfigurationError(`package manifest at ${manifestPath} is missing [package.version]`, 'cargo-workspace', `${this.github.repository.owner}/${this.github.repository.repo}`); - } - else if (typeof version !== 'string') { - throw new errors_1.ConfigurationError(`package manifest at ${manifestPath} has an invalid [package.version]`, 'cargo-workspace', `${this.github.repository.owner}/${this.github.repository.repo}`); - } - allCrates.push({ - path, - name: packageName, - version, - manifest, - manifestContent: manifestContent.parsedContent, - manifestPath, - }); - } - return { - allPackages: allCrates, - candidatesByPackage, - }; - } - bumpVersion(pkg) { - const version = version_1.Version.parse(pkg.version); - return new versioning_strategy_1.PatchVersionUpdate().bump(version); - } - updateCandidate(existingCandidate, pkg, updatedVersions) { - const version = updatedVersions.get(pkg.name); - if (!version) { - throw new Error(`Didn't find updated version for ${pkg.name}`); - } - const updater = new cargo_toml_1.CargoToml({ - version, - versionsMap: updatedVersions, - }); - const updatedContent = updater.updateContent(pkg.manifestContent); - const originalManifest = (0, common_1.parseCargoManifest)(pkg.manifestContent); - const updatedManifest = (0, common_1.parseCargoManifest)(updatedContent); - const dependencyNotes = getChangelogDepsNotes(originalManifest, updatedManifest); - existingCandidate.pullRequest.updates = - existingCandidate.pullRequest.updates.map(update => { - if (update.path === (0, workspace_1.addPath)(existingCandidate.path, 'Cargo.toml')) { - update.updater = new raw_content_1.RawContent(updatedContent); - } - else if (update.updater instanceof changelog_1.Changelog && dependencyNotes) { - update.updater.changelogEntry = (0, workspace_1.appendDependenciesSectionToChangelog)(update.updater.changelogEntry, dependencyNotes, this.logger); - } - else if (update.path === (0, workspace_1.addPath)(existingCandidate.path, 'Cargo.lock')) { - update.updater = new cargo_lock_1.CargoLock(updatedVersions); - } - return update; - }); - // append dependency notes - if (dependencyNotes) { - if (existingCandidate.pullRequest.body.releaseData.length > 0) { - existingCandidate.pullRequest.body.releaseData[0].notes = - (0, workspace_1.appendDependenciesSectionToChangelog)(existingCandidate.pullRequest.body.releaseData[0].notes, dependencyNotes, this.logger); - } - else { - existingCandidate.pullRequest.body.releaseData.push({ - component: pkg.name, - version: existingCandidate.pullRequest.version, - notes: (0, workspace_1.appendDependenciesSectionToChangelog)('', dependencyNotes, this.logger), - }); - } - } - return existingCandidate; - } - async newCandidate(pkg, updatedVersions) { - const version = updatedVersions.get(pkg.name); - if (!version) { - throw new Error(`Didn't find updated version for ${pkg.name}`); - } - const updater = new cargo_toml_1.CargoToml({ - version, - versionsMap: updatedVersions, - }); - const updatedContent = updater.updateContent(pkg.manifestContent); - const originalManifest = (0, common_1.parseCargoManifest)(pkg.manifestContent); - const updatedManifest = (0, common_1.parseCargoManifest)(updatedContent); - const dependencyNotes = getChangelogDepsNotes(originalManifest, updatedManifest); - const updatedPackage = { - ...pkg, - version: version.toString(), - }; - const strategy = this.strategiesByPath[updatedPackage.path]; - const latestRelease = this.releasesByPath[updatedPackage.path]; - const basePullRequest 
= strategy - ? await strategy.buildReleasePullRequest([], latestRelease, false, [], { - newVersion: version, - }) - : undefined; - if (basePullRequest) { - return this.updateCandidate({ - path: pkg.path, - pullRequest: basePullRequest, - config: { - releaseType: 'rust', - }, - }, pkg, updatedVersions); - } - const pullRequest = { - title: pull_request_title_1.PullRequestTitle.ofTargetBranch(this.targetBranch), - body: new pull_request_body_1.PullRequestBody([ - { - component: pkg.name, - version, - notes: (0, workspace_1.appendDependenciesSectionToChangelog)('', dependencyNotes, this.logger), - }, - ]), - updates: [ - { - path: (0, workspace_1.addPath)(pkg.path, 'Cargo.toml'), - createIfMissing: false, - updater: new raw_content_1.RawContent(updatedContent), - }, - { - path: (0, workspace_1.addPath)(pkg.path, 'CHANGELOG.md'), - createIfMissing: false, - updater: new changelog_1.Changelog({ - version, - changelogEntry: dependencyNotes, - }), - }, - ], - labels: [], - headRefName: branch_name_1.BranchName.ofTargetBranch(this.targetBranch).toString(), - version, - draft: false, - }; - return { - path: pkg.path, - pullRequest, - config: { - releaseType: 'rust', - }, - }; - } - postProcessCandidates(candidates, updatedVersions) { - let rootCandidate = candidates.find(c => c.path === manifest_1.ROOT_PROJECT_PATH); - if (!rootCandidate) { - this.logger.warn('Unable to find root candidate pull request'); - rootCandidate = candidates.find(c => c.config.releaseType === 'rust'); - } - if (!rootCandidate) { - this.logger.warn('Unable to find a rust candidate pull request'); - return candidates; - } - // Update the root Cargo.lock if it exists - rootCandidate.pullRequest.updates.push({ - path: 'Cargo.lock', - createIfMissing: false, - updater: new cargo_lock_1.CargoLock(updatedVersions), - }); - return candidates; - } - async buildGraph(allPackages) { - var _a, _b, _c, _d, _e, _f; - const workspaceCrateNames = new Set(allPackages.map(crateInfo => crateInfo.name)); - const graph = new Map(); - for (const crateInfo of allPackages) { - const allDeps = Object.keys({ - ...((_a = crateInfo.manifest.dependencies) !== null && _a !== void 0 ? _a : {}), - ...((_b = crateInfo.manifest['dev-dependencies']) !== null && _b !== void 0 ? _b : {}), - ...((_c = crateInfo.manifest['build-dependencies']) !== null && _c !== void 0 ? _c : {}), - }); - const targets = crateInfo.manifest.target; - if (targets) { - for (const targetName in targets) { - const target = targets[targetName]; - allDeps.push(...Object.keys({ - ...((_d = target.dependencies) !== null && _d !== void 0 ? _d : {}), - ...((_e = target['dev-dependencies']) !== null && _e !== void 0 ? _e : {}), - ...((_f = target['build-dependencies']) !== null && _f !== void 0 ? _f : {}), - })); - } - } - const workspaceDeps = allDeps.filter(dep => workspaceCrateNames.has(dep)); - graph.set(crateInfo.name, { - deps: workspaceDeps, - value: crateInfo, - }); - } - return graph; - } - inScope(candidate) { - return candidate.config.releaseType === 'rust'; - } - packageNameFromPackage(pkg) { - return pkg.name; - } - pathFromPackage(pkg) { - return pkg.path; - } - async preconfigure(strategiesByPath, _commitsByPath, _releasesByPath) { - // Using preconfigure to siphon releases and strategies. 
- this.strategiesByPath = strategiesByPath; - this.releasesByPath = _releasesByPath; - return strategiesByPath; - } -} -exports.CargoWorkspace = CargoWorkspace; -function getChangelogDepsNotes(originalManifest, updatedManifest) { - let depUpdateNotes = ''; - const depTypes = [ - 'dependencies', - 'dev-dependencies', - 'build-dependencies', - ]; - const depVer = (s) => { - if (s === undefined) { - return undefined; - } - if (typeof s === 'string') { - return s; - } - else { - return s.version; - } - }; - const getDepMap = (cargoDeps) => { - const result = {}; - for (const [key, val] of Object.entries(cargoDeps)) { - const ver = depVer(val); - if (ver) { - result[key] = ver; - } - } - return result; - }; - const populateUpdates = (originalScope, updatedScope, updates) => { - var _a; - for (const depType of depTypes) { - const depUpdates = []; - const pkgDepTypes = updatedScope[depType]; - if (pkgDepTypes === undefined) { - continue; - } - for (const [depName, currentDepVer] of Object.entries(getDepMap(pkgDepTypes))) { - const origDepVer = depVer((_a = originalScope[depType]) === null || _a === void 0 ? void 0 : _a[depName]); - if (currentDepVer !== origDepVer) { - depUpdates.push(`\n * ${depName} bumped from ${origDepVer} to ${currentDepVer}`); - } - } - if (depUpdates.length > 0) { - const updatesForType = updates.get(depType) || new Set(); - depUpdates.forEach(update => updatesForType.add(update)); - updates.set(depType, updatesForType); - } - } - }; - const updates = new Map(); - populateUpdates(originalManifest, updatedManifest, updates); - if (updatedManifest.target && originalManifest.target) { - for (const targetName in updatedManifest.target) { - populateUpdates(originalManifest.target[targetName], updatedManifest.target[targetName], updates); - } - } - for (const [dt, notes] of updates) { - depUpdateNotes += `\n * ${dt}`; - for (const note of notes) { - depUpdateNotes += note; - } - } - if (depUpdateNotes) { - return `* The following workspace dependencies were updated${depUpdateNotes}`; - } - return ''; -} -//# sourceMappingURL=cargo-workspace.js.map - -/***/ }), - -/***/ 83172: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.GroupPriority = void 0; -const plugin_1 = __nccwpck_require__(31651); -/** - * This plugin allows configuring a priority of release groups. For example, you could - * prioritize Java snapshot pull requests over other releases. - */ -class GroupPriority extends plugin_1.ManifestPlugin { - /** - * Instantiate a new GroupPriority plugin. - * - * @param {GitHub} github GitHub client - * @param {string} targetBranch Release branch - * @param {RepositoryConfig} repositoryConfig Parsed configuration for the entire - * repository. This allows plugins to know how components interact. 
- * @param {string[]} groups List of group names ordered with highest priority first - */ - constructor(github, targetBranch, repositoryConfig, groups) { - super(github, targetBranch, repositoryConfig); - this.groups = groups; - } - /** - * Group candidate release PRs by grouping and check our list of preferred - * groups in order. If a preferred group is found, only return pull requests for - * that group. - * @param {CandidateReleasePullRequest[]} pullRequests Candidate pull requests - * @returns {CandidateReleasePullRequest[]} Possibly a subset of the candidate - * pull requests if a preferred group is found. - */ - async run(pullRequests) { - this.logger.debug(`Group priority plugin running with groups: ${this.groups}`); - const groupedCandidates = groupCandidatesByType(pullRequests); - for (const group of this.groups) { - this.logger.debug(`Considering group: ${group}`); - const groupCandidates = groupedCandidates.get(group); - if (groupCandidates) { - this.logger.debug(`Found preferred group: ${group} with ${groupCandidates.length} candidate pull requests`); - return groupCandidates; - } - } - // fallback to returning all candidates - this.logger.debug('No preferred group found, returning full set.'); - return pullRequests; - } -} -exports.GroupPriority = GroupPriority; -/** - * Helper to group candidates by their `type` field. - * @param {CandidateReleasePullRequest[]} inScopeCandidates The candidates to group. - * @returns {Map} The grouped - * pull requests. - */ -function groupCandidatesByType(inScopeCandidates) { - const groupedCandidates = new Map(); - for (const candidatePullRequest of inScopeCandidates) { - const candidates = groupedCandidates.get(candidatePullRequest.pullRequest.group); - if (candidates) { - candidates.push(candidatePullRequest); - } - else { - groupedCandidates.set(candidatePullRequest.pullRequest.group, [ - candidatePullRequest, - ]); - } - } - return groupedCandidates; -} -//# sourceMappingURL=group-priority.js.map - -/***/ }), - -/***/ 59641: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.LinkedVersions = void 0; -const plugin_1 = __nccwpck_require__(31651); -const commit_1 = __nccwpck_require__(69158); -const factory_1 = __nccwpck_require__(75695); -const merge_1 = __nccwpck_require__(90514); -const branch_name_1 = __nccwpck_require__(16344); -/** - * This plugin reconfigures strategies by linking multiple components - * together. - * - * Release notes are broken up using ``/`
` blocks. - */ -class LinkedVersions extends plugin_1.ManifestPlugin { - constructor(github, targetBranch, repositoryConfig, groupName, components, options = {}) { - var _a; - super(github, targetBranch, repositoryConfig, options.logger); - this.groupName = groupName; - this.components = new Set(components); - this.merge = (_a = options.merge) !== null && _a !== void 0 ? _a : true; - } - /** - * Pre-configure strategies. - * @param {Record} strategiesByPath Strategies indexed by path - * @returns {Record} Updated strategies indexed by path - */ - async preconfigure(strategiesByPath, commitsByPath, releasesByPath) { - // Find all strategies in the group - const groupStrategies = {}; - for (const path in strategiesByPath) { - const strategy = strategiesByPath[path]; - const component = await strategy.getComponent(); - if (!component) { - continue; - } - if (this.components.has(component)) { - groupStrategies[path] = strategy; - } - } - this.logger.info(`Found ${Object.keys(groupStrategies).length} group components for ${this.groupName}`); - const groupVersions = {}; - const missingReleasePaths = new Set(); - for (const path in groupStrategies) { - const strategy = groupStrategies[path]; - const latestRelease = releasesByPath[path]; - const releasePullRequest = await strategy.buildReleasePullRequest((0, commit_1.parseConventionalCommits)(commitsByPath[path], this.logger), latestRelease); - if (releasePullRequest === null || releasePullRequest === void 0 ? void 0 : releasePullRequest.version) { - groupVersions[path] = releasePullRequest.version; - } - else { - missingReleasePaths.add(path); - } - } - const versions = Object.values(groupVersions); - if (versions.length === 0) { - return strategiesByPath; - } - const primaryVersion = versions.reduce((collector, version) => collector.compare(version) > 0 ? collector : version, versions[0]); - const newStrategies = {}; - for (const path in strategiesByPath) { - if (path in groupStrategies) { - const component = await strategiesByPath[path].getComponent(); - this.logger.info(`Replacing strategy for path ${path} with forced version: ${primaryVersion}`); - newStrategies[path] = await (0, factory_1.buildStrategy)({ - ...this.repositoryConfig[path], - github: this.github, - path, - targetBranch: this.targetBranch, - releaseAs: primaryVersion.toString(), - }); - if (missingReleasePaths.has(path)) { - this.logger.debug(`Appending fake commit for path: ${path}`); - commitsByPath[path].push({ - sha: '', - message: `chore(${component}): Synchronize ${this.groupName} versions\n\nRelease-As: ${primaryVersion.toString()}`, - }); - } - } - else { - newStrategies[path] = strategiesByPath[path]; - } - } - return newStrategies; - } - /** - * Post-process candidate pull requests. 
- * @param {CandidateReleasePullRequest[]} pullRequests Candidate pull requests - * @returns {CandidateReleasePullRequest[]} Updated pull requests - */ - async run(candidates) { - if (!this.merge) { - return candidates; - } - const [inScopeCandidates, outOfScopeCandidates] = candidates.reduce((collection, candidate) => { - if (!candidate.pullRequest.version) { - this.logger.warn('pull request missing version', candidate); - collection[1].push(candidate); - return collection; - } - if (this.components.has(candidate.config.component || '')) { - collection[0].push(candidate); - } - else { - collection[1].push(candidate); - } - return collection; - }, [[], []]); - this.logger.info(`found ${inScopeCandidates.length} linked-versions candidates`); - // delegate to the merge plugin and add merged pull request - if (inScopeCandidates.length > 0) { - const merge = new merge_1.Merge(this.github, this.targetBranch, this.repositoryConfig, { - pullRequestTitlePattern: `chore\${scope}: release ${this.groupName} libraries`, - forceMerge: true, - headBranchName: branch_name_1.BranchName.ofGroupTargetBranch(this.groupName, this.targetBranch).toString(), - }); - const merged = await merge.run(inScopeCandidates); - outOfScopeCandidates.push(...merged); - } - return outOfScopeCandidates; - } -} -exports.LinkedVersions = LinkedVersions; -//# sourceMappingURL=linked-versions.js.map - -/***/ }), - -/***/ 66113: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
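// The LinkedVersions.preconfigure step above computes one version per linked
// component and then forces the whole group to the highest of them. A minimal
// sketch of that selection, assuming versions are plain MAJOR.MINOR.PATCH
// strings (the bundle uses its own Version class with a compare() method):
function primaryVersion(versions) {
    const parse = v => v.split('.').map(Number);
    const compare = (a, b) => {
        const [x, y] = [parse(a), parse(b)];
        for (let i = 0; i < 3; i++) {
            if (x[i] !== y[i]) return x[i] - y[i];
        }
        return 0;
    };
    // Mirrors the reduce over group versions in LinkedVersions.preconfigure.
    return versions.reduce((best, v) => (compare(best, v) >= 0 ? best : v), versions[0]);
}
// primaryVersion(['1.2.3', '1.10.0', '1.9.9']) === '1.10.0'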
-Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.MavenWorkspace = void 0; -const workspace_1 = __nccwpck_require__(44226); -const version_1 = __nccwpck_require__(17348); -const dom = __nccwpck_require__(49213); -const xpath = __nccwpck_require__(65319); -const path_1 = __nccwpck_require__(71017); -const pom_xml_1 = __nccwpck_require__(60255); -const changelog_1 = __nccwpck_require__(3325); -const pull_request_title_1 = __nccwpck_require__(1158); -const pull_request_body_1 = __nccwpck_require__(70774); -const branch_name_1 = __nccwpck_require__(16344); -const logger_1 = __nccwpck_require__(68809); -const java_snapshot_1 = __nccwpck_require__(66860); -const always_bump_patch_1 = __nccwpck_require__(82926); -const composite_1 = __nccwpck_require__(40911); -const JAVA_RELEASE_TYPES = new Set([ - 'java', - 'java-bom', - 'java-yoshi', - 'java-yoshi-mono-repo', - 'maven', -]); -const XPATH_PROJECT_GROUP = '/*[local-name()="project"]/*[local-name()="groupId"]'; -const XPATH_PROJECT_ARTIFACT = '/*[local-name()="project"]/*[local-name()="artifactId"]'; -const XPATH_PROJECT_VERSION = '/*[local-name()="project"]/*[local-name()="version"]'; -const XPATH_PROJECT_DEPENDENCIES = '/*[local-name()="project"]/*[local-name()="dependencies"]/*[local-name()="dependency"]'; -const XPATH_PROJECT_DEPENDENCY_MANAGEMENT_DEPENDENCIES = '/*[local-name()="project"]/*[local-name()="dependencyManagement"]/*[local-name()="dependencies"]/*[local-name()="dependency"]'; -class MavenWorkspace extends workspace_1.WorkspacePlugin { - constructor(github, targetBranch, repositoryConfig, options = {}) { - var _a; - super(github, targetBranch, repositoryConfig, options); - this.considerAllArtifacts = (_a = options.considerAllArtifacts) !== null && _a !== void 0 ? _a : true; - } - async fetchPom(path) { - const content = await this.github.getFileContentsOnBranch(path, this.targetBranch); - return parseMavenArtifact(content.parsedContent, path, this.logger); - } - async buildAllPackages(candidates) { - const allPackages = []; - const candidatesByPackage = {}; - // find all pom.xml files and build a dependency graph - const pomFiles = await this.github.findFilesByFilenameAndRef('pom.xml', this.targetBranch); - for (const pomFile of pomFiles) { - const path = (0, path_1.dirname)(pomFile); - const config = this.repositoryConfig[path]; - if (!config) { - if (!this.considerAllArtifacts) { - this.logger.info(`path '${path}' not configured, ignoring '${pomFile}'`); - continue; - } - this.logger.info(`path '${path}' not configured, but 'considerAllArtifacts' option enabled`); - } - const mavenArtifact = await this.fetchPom(pomFile); - if (!mavenArtifact) { - continue; - } - allPackages.push(mavenArtifact); - const candidate = candidates.find(candidate => candidate.path === path); - if (candidate) { - candidatesByPackage[this.packageNameFromPackage(mavenArtifact)] = - candidate; - } - else { - this.logger.warn(`found ${pomFile} in path ${path}, but did not find an associated candidate PR`); - } - } - return { - allPackages, - candidatesByPackage, - }; - } - /** - * Our maven components can have multiple artifacts if using - * `considerAllArtifacts`. Find the candidate release for the component - * that contains that maven artifact. - * @param {MavenArtifact} pkg The artifact to search for - * @param {Record pkg.path.startsWith(`${candidate.path}/`)); - } - /** - * Helper to determine which packages we will use to base our search - * for touched packages upon. 
These are usually the packages that - * have candidate pull requests open. - * - * If you configure `updateAllPackages`, we fill force update all - * packages as if they had a release. - * @param {DependencyGraph} graph All the packages in the repository - * @param {Record candidate.path); - // Find artifacts that are in an existing candidate release - return Array.from(graph.values()) - .filter(({ value }) => candidatePaths.find(path => value.path === path || value.path.startsWith(`${path}/`))) - .map(({ value }) => this.packageNameFromPackage(value)); - } - return super.packageNamesToUpdate(graph, candidatesByPackage); - } - /** - * Helper to build up all the versions we are modifying in this - * repository. - * @param {DependencyGraph} graph All the packages in the repository - * @param {T[]} orderedPackages A list of packages that are currently - * updated by the existing candidate pull requests - * @param {Record Version) and a - * map of all updated versions (component path => Version). - */ - async buildUpdatedVersions(_graph, orderedPackages, candidatesByPackage) { - const updatedVersions = new Map(); - const updatedPathVersions = new Map(); - // Look for updated pom.xml files - for (const [_, candidate] of Object.entries(candidatesByPackage)) { - const pomUpdates = candidate.pullRequest.updates.filter(update => update.path.endsWith('pom.xml')); - for (const pomUpdate of pomUpdates) { - if (!pomUpdate.cachedFileContents) { - pomUpdate.cachedFileContents = - await this.github.getFileContentsOnBranch(pomUpdate.path, this.targetBranch); - } - if (pomUpdate.cachedFileContents) { - // pre-run the version updater on this artifact and extract the - // new version - const updatedArtifact = parseMavenArtifact(pomUpdate.updater.updateContent(pomUpdate.cachedFileContents.parsedContent), pomUpdate.path, this.logger); - if (updatedArtifact) { - this.logger.debug(`updating ${updatedArtifact.name} to ${updatedArtifact.version}`); - updatedVersions.set(updatedArtifact.name, version_1.Version.parse(updatedArtifact.version)); - } - } - else { - this.logger.warn(`${pomUpdate.path} does not have cached contents`); - } - } - if (candidate.pullRequest.version && - this.isReleaseVersion(candidate.pullRequest.version)) { - updatedPathVersions.set(candidate.path, candidate.pullRequest.version); - } - } - for (const pkg of orderedPackages) { - const packageName = this.packageNameFromPackage(pkg); - this.logger.debug(`Looking for next version for: ${packageName}`); - const existingCandidate = candidatesByPackage[packageName]; - if (existingCandidate) { - const version = existingCandidate.pullRequest.version; - this.logger.debug(`version: ${version} from release-please`); - updatedVersions.set(packageName, version); - } - else { - const version = this.bumpVersion(pkg); - if (updatedVersions.get(packageName)) { - this.logger.debug('version already set'); - } - else { - this.logger.debug(`version: ${version} forced bump`); - updatedVersions.set(packageName, version); - if (this.isReleaseVersion(version)) { - updatedPathVersions.set(this.pathFromPackage(pkg), version); - } - } - } - } - return { - updatedVersions, - updatedPathVersions, - }; - } - async buildGraph(allPackages) { - this.logger.trace('building graph', allPackages); - const artifactsByName = allPackages.reduce((collection, mavenArtifact) => { - collection[mavenArtifact.name] = mavenArtifact; - return collection; - }, {}); - this.logger.trace('artifacts by name', artifactsByName); - const graph = new Map(); - for (const mavenArtifact of allPackages) 
{ - const allDeps = [ - ...mavenArtifact.dependencies, - ...mavenArtifact.testDependencies, - ...mavenArtifact.managedDependencies, - ]; - const workspaceDeps = allDeps.filter(dep => artifactsByName[packageNameFromGav(dep)]); - graph.set(mavenArtifact.name, { - deps: workspaceDeps.map(dep => packageNameFromGav(dep)), - value: mavenArtifact, - }); - } - return graph; - } - /** - * Given a release version, determine if we should bump the manifest - * version as well. For maven artifacts, SNAPSHOT versions are not - * considered releases. - * @param {Version} version The release version - */ - isReleaseVersion(version) { - var _a; - return !((_a = version.preRelease) === null || _a === void 0 ? void 0 : _a.includes('SNAPSHOT')); - } - bumpVersion(artifact) { - const strategy = new java_snapshot_1.JavaSnapshot(new always_bump_patch_1.AlwaysBumpPatch()); - return strategy.bump(version_1.Version.parse(artifact.version), [FAKE_COMMIT]); - } - updateCandidate(existingCandidate, artifact, updatedVersions) { - const version = updatedVersions.get(artifact.name); - if (!version) { - throw new Error(`Didn't find updated version for ${artifact.name}`); - } - const updater = new pom_xml_1.PomXml(version, updatedVersions); - const dependencyNotes = getChangelogDepsNotes(artifact, updater, updatedVersions, this.logger); - existingCandidate.pullRequest.updates = - existingCandidate.pullRequest.updates.map(update => { - if (update.path === (0, workspace_1.addPath)(existingCandidate.path, 'pom.xml')) { - update.updater = new composite_1.CompositeUpdater(update.updater, updater); - } - else if (update.updater instanceof changelog_1.Changelog) { - if (dependencyNotes) { - update.updater.changelogEntry = - (0, workspace_1.appendDependenciesSectionToChangelog)(update.updater.changelogEntry, dependencyNotes, this.logger); - } - } - return update; - }); - // append dependency notes - if (dependencyNotes) { - if (existingCandidate.pullRequest.body.releaseData.length > 0) { - existingCandidate.pullRequest.body.releaseData[0].notes = - (0, workspace_1.appendDependenciesSectionToChangelog)(existingCandidate.pullRequest.body.releaseData[0].notes, dependencyNotes, this.logger); - } - else { - existingCandidate.pullRequest.body.releaseData.push({ - component: artifact.name, - version: existingCandidate.pullRequest.version, - notes: (0, workspace_1.appendDependenciesSectionToChangelog)('', dependencyNotes, this.logger), - }); - } - } - return existingCandidate; - } - async newCandidate(artifact, updatedVersions) { - const version = updatedVersions.get(artifact.name); - if (!version) { - throw new Error(`Didn't find updated version for ${artifact.name}`); - } - const updater = new pom_xml_1.PomXml(version, updatedVersions); - const dependencyNotes = getChangelogDepsNotes(artifact, updater, updatedVersions, this.logger); - const pullRequest = { - title: pull_request_title_1.PullRequestTitle.ofTargetBranch(this.targetBranch), - body: new pull_request_body_1.PullRequestBody([ - { - component: artifact.name, - version, - notes: (0, workspace_1.appendDependenciesSectionToChangelog)('', dependencyNotes, this.logger), - }, - ]), - updates: [ - { - path: (0, workspace_1.addPath)(artifact.path, 'pom.xml'), - createIfMissing: false, - updater, - }, - { - path: (0, workspace_1.addPath)(artifact.path, 'CHANGELOG.md'), - createIfMissing: false, - updater: new changelog_1.Changelog({ - version, - changelogEntry: dependencyNotes, - }), - }, - ], - labels: [], - headRefName: 
branch_name_1.BranchName.ofTargetBranch(this.targetBranch).toString(), - version, - draft: false, - }; - return { - path: artifact.path, - pullRequest, - config: { - releaseType: 'maven', - }, - }; - } - inScope(candidate) { - return JAVA_RELEASE_TYPES.has(candidate.config.releaseType); - } - packageNameFromPackage(artifact) { - return artifact.name; - } - pathFromPackage(artifact) { - return artifact.path; - } - postProcessCandidates(candidates, _updatedVersions) { - // NOP for maven workspaces - return candidates; - } -} -exports.MavenWorkspace = MavenWorkspace; -function packageNameFromGav(gav) { - return `${gav.groupId}:${gav.artifactId}`; -} -function getChangelogDepsNotes(artifact, updater, updatedVersions, logger = logger_1.logger) { - const document = new dom.DOMParser().parseFromString(artifact.pomContent); - const dependencyUpdates = updater.dependencyUpdates(document, updatedVersions); - const depUpdateNotes = []; - for (const dependencyUpdate of dependencyUpdates) { - depUpdateNotes.push(`\n * ${dependencyUpdate.name} bumped to ${dependencyUpdate.version}`); - logger.info(`bumped ${dependencyUpdate.name} to ${dependencyUpdate.version}`); - } - if (depUpdateNotes.length > 0) { - return `* The following workspace dependencies were updated${depUpdateNotes.join()}`; - } - return ''; -} -/** - * Helper to parse a pom.xml file and extract important fields - * @param {string} pomContent The XML contents as a string - * @param {string} path The path to the file in the repository including the filename. - * @param {Logger} logger Context logger - * @returns {MavenArtifact | undefined} Returns undefined if we are missing key - * attributes. We log a warning in these cases. - */ -function parseMavenArtifact(pomContent, path, logger) { - const document = new dom.DOMParser().parseFromString(pomContent); - const groupNodes = xpath.select(XPATH_PROJECT_GROUP, document); - if (groupNodes.length === 0) { - logger.warn(`Missing project.groupId in ${path}`); - return; - } - const artifactNodes = xpath.select(XPATH_PROJECT_ARTIFACT, document); - if (artifactNodes.length === 0) { - logger.warn(`Missing project.artifactId in ${path}`); - return; - } - const versionNodes = xpath.select(XPATH_PROJECT_VERSION, document); - if (versionNodes.length === 0) { - logger.warn(`Missing project.version in ${path}`); - return; - } - const dependencies = []; - const testDependencies = []; - for (const dependencyNode of xpath.select(XPATH_PROJECT_DEPENDENCIES, document)) { - const parsedNode = (0, pom_xml_1.parseDependencyNode)(dependencyNode); - if (!parsedNode.version) { - continue; - } - if (parsedNode.scope === 'test') { - testDependencies.push({ - groupId: parsedNode.groupId, - artifactId: parsedNode.artifactId, - version: parsedNode.version, - }); - } - else { - dependencies.push({ - groupId: parsedNode.groupId, - artifactId: parsedNode.artifactId, - version: parsedNode.version, - }); - } - } - const managedDependencies = []; - for (const dependencyNode of xpath.select(XPATH_PROJECT_DEPENDENCY_MANAGEMENT_DEPENDENCIES, document)) { - const parsedNode = (0, pom_xml_1.parseDependencyNode)(dependencyNode); - if (!parsedNode.version) { - continue; - } - managedDependencies.push({ - groupId: parsedNode.groupId, - artifactId: parsedNode.artifactId, - version: parsedNode.version, - }); - } - const groupId = groupNodes[0].firstChild.textContent; - const artifactId = artifactNodes[0].firstChild.textContent; - return { - path: (0, path_1.dirname)(path), - groupId, - artifactId, - name: `${groupId}:${artifactId}`, - 
version: versionNodes[0].firstChild.textContent, - dependencies, - testDependencies, - managedDependencies, - pomContent, - }; -} -// We use a fake commit to leverage the Java versioning strategy -// (it should be a patch version bump and potentially remove the -// -SNAPSHOT portion of the version) -const FAKE_COMMIT = { - message: 'fix: fake fix', - type: 'fix', - scope: null, - notes: [], - references: [], - bareMessage: 'fake fix', - breaking: false, - sha: 'abc123', - files: [], -}; -//# sourceMappingURL=maven-workspace.js.map - -/***/ }), - -/***/ 90514: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Merge = void 0; -const plugin_1 = __nccwpck_require__(31651); -const manifest_1 = __nccwpck_require__(31999); -const pull_request_title_1 = __nccwpck_require__(1158); -const pull_request_body_1 = __nccwpck_require__(70774); -const branch_name_1 = __nccwpck_require__(16344); -const composite_1 = __nccwpck_require__(40911); -/** - * This plugin merges multiple pull requests into a single - * release pull request. - * - * Release notes are broken up using ``/`
` blocks. - */ -class Merge extends plugin_1.ManifestPlugin { - constructor(github, targetBranch, repositoryConfig, options = {}) { - var _a, _b; - super(github, targetBranch, repositoryConfig); - this.pullRequestTitlePattern = - (_a = options.pullRequestTitlePattern) !== null && _a !== void 0 ? _a : manifest_1.MANIFEST_PULL_REQUEST_TITLE_PATTERN; - this.pullRequestHeader = options.pullRequestHeader; - this.pullRequestFooter = options.pullRequestFooter; - this.componentNoSpace = options.componentNoSpace; - this.headBranchName = options.headBranchName; - this.forceMerge = (_b = options.forceMerge) !== null && _b !== void 0 ? _b : false; - } - async run(candidates) { - var _a; - if (candidates.length < 1) { - return candidates; - } - this.logger.info(`Merging ${candidates.length} pull requests`); - const [inScopeCandidates, outOfScopeCandidates] = candidates.reduce((collection, candidate) => { - if (candidate.config.separatePullRequests && !this.forceMerge) { - collection[1].push(candidate); - } - else { - collection[0].push(candidate); - } - return collection; - }, [[], []]); - const releaseData = []; - const labels = new Set(); - let rawUpdates = []; - let rootRelease = null; - for (const candidate of inScopeCandidates) { - const pullRequest = candidate.pullRequest; - rawUpdates = rawUpdates.concat(...pullRequest.updates); - for (const label of pullRequest.labels) { - labels.add(label); - } - releaseData.push(...pullRequest.body.releaseData); - if (candidate.path === '.') { - rootRelease = candidate; - } - } - const updates = (0, composite_1.mergeUpdates)(rawUpdates); - const pullRequest = { - title: pull_request_title_1.PullRequestTitle.ofComponentTargetBranchVersion(rootRelease === null || rootRelease === void 0 ? void 0 : rootRelease.pullRequest.title.component, this.targetBranch, rootRelease === null || rootRelease === void 0 ? void 0 : rootRelease.pullRequest.title.version, this.pullRequestTitlePattern, this.componentNoSpace), - body: new pull_request_body_1.PullRequestBody(releaseData, { - useComponents: true, - header: this.pullRequestHeader, - footer: this.pullRequestFooter, - }), - updates, - labels: Array.from(labels), - headRefName: (_a = this.headBranchName) !== null && _a !== void 0 ? _a : branch_name_1.BranchName.ofTargetBranch(this.targetBranch).toString(), - draft: !candidates.some(candidate => !candidate.pullRequest.draft), - }; - const releaseTypes = new Set(candidates.map(candidate => candidate.config.releaseType)); - const releaseType = releaseTypes.size === 1 ? releaseTypes.values().next().value : 'simple'; - return [ - { - path: manifest_1.ROOT_PROJECT_PATH, - pullRequest, - config: { - releaseType, - }, - }, - ...outOfScopeCandidates, - ]; - } -} -exports.Merge = Merge; -//# sourceMappingURL=merge.js.map - -/***/ }), - -/***/ 23256: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
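// The Merge plugin above folds all in-scope candidates into a single pull
// request: file updates are combined, labels are unioned, and the release
// type collapses to 'simple' unless every candidate agrees. A simplified
// sketch of that fold; unlike the bundle's mergeUpdates/CompositeUpdater,
// this stand-in just keeps the last update seen for each path:
function foldCandidates(candidates) {
    const updatesByPath = new Map();
    const labels = new Set();
    for (const { pullRequest } of candidates) {
        for (const update of pullRequest.updates) {
            updatesByPath.set(update.path, update);
        }
        for (const label of pullRequest.labels) {
            labels.add(label);
        }
    }
    const types = new Set(candidates.map(c => c.config.releaseType));
    return {
        updates: [...updatesByPath.values()],
        labels: [...labels],
        // Mixed release types degrade to 'simple', as in Merge.run above.
        releaseType: types.size === 1 ? [...types][0] : 'simple',
    };
}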
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.NodeWorkspace = void 0;
-const package_lock_json_1 = __nccwpck_require__(23443);
-const version_1 = __nccwpck_require__(17348);
-const pull_request_title_1 = __nccwpck_require__(1158);
-const pull_request_body_1 = __nccwpck_require__(70774);
-const branch_name_1 = __nccwpck_require__(16344);
-const changelog_1 = __nccwpck_require__(3325);
-const workspace_1 = __nccwpck_require__(44226);
-const composite_1 = __nccwpck_require__(40911);
-const package_json_1 = __nccwpck_require__(26588);
-const versioning_strategy_1 = __nccwpck_require__(41941);
-/**
- * The plugin analyzes a node workspace and will bump dependencies
- * of managed packages if those dependencies are being updated.
- *
- * If multiple node packages are being updated, it will merge them
- * into a single node package.
- */
-class NodeWorkspace extends workspace_1.WorkspacePlugin {
-    constructor(github, targetBranch, repositoryConfig, options = {}) {
-        super(github, targetBranch, repositoryConfig, options);
-        this.strategiesByPath = {};
-        this.releasesByPath = {};
-        this.alwaysLinkLocal = options.alwaysLinkLocal === false ? false : true;
-        this.updatePeerDependencies = options.updatePeerDependencies === true;
-    }
-    async buildAllPackages(candidates) {
-        var _a;
-        const candidatesByPath = new Map();
-        for (const candidate of candidates) {
-            candidatesByPath.set(candidate.path, candidate);
-        }
-        const candidatesByPackage = {};
-        const packagesByPath = new Map();
-        for (const path in this.repositoryConfig) {
-            const config = this.repositoryConfig[path];
-            if (config.releaseType !== 'node') {
-                continue;
-            }
-            const candidate = candidatesByPath.get(path);
-            if (candidate) {
-                this.logger.debug(`Found candidate pull request for path: ${candidate.path}`);
-                const packagePath = (0, workspace_1.addPath)(candidate.path, 'package.json');
-                const packageUpdate = candidate.pullRequest.updates.find(update => update.path === packagePath);
-                const contents = (_a = packageUpdate === null || packageUpdate === void 0 ? void 0 : packageUpdate.cachedFileContents) !== null && _a !== void 0 ?
_a : (await this.github.getFileContentsOnBranch(packagePath, this.targetBranch)); - const packageJson = JSON.parse(contents.parsedContent); - const pkg = { - name: packageJson.name, - path, - version: packageJson.version, - dependencies: packageJson.dependencies || {}, - devDependencies: packageJson.devDependencies || {}, - peerDependencies: packageJson.peerDependencies || {}, - optionalDependencies: packageJson.optionalDependencies || {}, - jsonContent: contents.parsedContent, - }; - packagesByPath.set(candidate.path, pkg); - candidatesByPackage[pkg.name] = candidate; - // } - } - else { - const packagePath = (0, workspace_1.addPath)(path, 'package.json'); - this.logger.debug(`No candidate pull request for path: ${path} - inspect package from ${packagePath}`); - const contents = await this.github.getFileContentsOnBranch(packagePath, this.targetBranch); - const packageJson = JSON.parse(contents.parsedContent); - const pkg = { - name: packageJson.name, - path, - version: packageJson.version, - dependencies: packageJson.dependencies || {}, - devDependencies: packageJson.devDependencies || {}, - peerDependencies: packageJson.peerDependencies || {}, - optionalDependencies: packageJson.optionalDependencies || {}, - jsonContent: contents.parsedContent, - }; - packagesByPath.set(path, pkg); - } - } - const allPackages = Array.from(packagesByPath.values()); - return { - allPackages, - candidatesByPackage, - }; - } - bumpVersion(pkg) { - const version = version_1.Version.parse(pkg.version); - const strategy = this.strategiesByPath[pkg.path]; - if (strategy) - return strategy.versioningStrategy.bump(version, []); - return new versioning_strategy_1.PatchVersionUpdate().bump(version); - } - updateCandidate(existingCandidate, pkg, updatedVersions) { - // Update version of the package - const newVersion = updatedVersions.get(pkg.name); - if (!newVersion) { - throw new Error(`Didn't find updated version for ${pkg.name}`); - } - const updatedPackage = { - ...pkg, - version: newVersion.toString(), - }; - const updater = new package_json_1.PackageJson({ - version: newVersion, - versionsMap: updatedVersions, - updatePeerDependencies: this.updatePeerDependencies, - }); - const dependencyNotes = getChangelogDepsNotes(pkg, updatedPackage, updatedVersions, this.logger); - existingCandidate.pullRequest.updates = - existingCandidate.pullRequest.updates.map(update => { - if (update.path === (0, workspace_1.addPath)(existingCandidate.path, 'package.json')) { - update.updater = new composite_1.CompositeUpdater(update.updater, updater); - } - else if (update.path === (0, workspace_1.addPath)(existingCandidate.path, 'package-lock.json')) { - update.updater = new package_lock_json_1.PackageLockJson({ - version: newVersion, - versionsMap: updatedVersions, - }); - } - else if (update.updater instanceof changelog_1.Changelog) { - if (dependencyNotes) { - update.updater.changelogEntry = - (0, workspace_1.appendDependenciesSectionToChangelog)(update.updater.changelogEntry, dependencyNotes, this.logger); - } - } - return update; - }); - // append dependency notes - if (dependencyNotes) { - if (existingCandidate.pullRequest.body.releaseData.length > 0) { - existingCandidate.pullRequest.body.releaseData[0].notes = - (0, workspace_1.appendDependenciesSectionToChangelog)(existingCandidate.pullRequest.body.releaseData[0].notes, dependencyNotes, this.logger); - } - else { - existingCandidate.pullRequest.body.releaseData.push({ - component: updatedPackage.name, - version: existingCandidate.pullRequest.version, - notes: (0, 
workspace_1.appendDependenciesSectionToChangelog)('', dependencyNotes, this.logger), - }); - } - } - return existingCandidate; - } - async newCandidate(pkg, updatedVersions) { - // Update version of the package - const newVersion = updatedVersions.get(pkg.name); - if (!newVersion) { - throw new Error(`Didn't find updated version for ${pkg.name}`); - } - const updatedPackage = { - ...pkg, - version: newVersion.toString(), - }; - const dependencyNotes = getChangelogDepsNotes(pkg, updatedPackage, updatedVersions, this.logger); - const strategy = this.strategiesByPath[updatedPackage.path]; - const latestRelease = this.releasesByPath[updatedPackage.path]; - const basePullRequest = strategy - ? await strategy.buildReleasePullRequest([], latestRelease, false, [], { - newVersion, - }) - : undefined; - if (basePullRequest) { - return this.updateCandidate({ - path: pkg.path, - pullRequest: basePullRequest, - config: { - releaseType: 'node', - }, - }, pkg, updatedVersions); - } - const pullRequest = { - title: pull_request_title_1.PullRequestTitle.ofTargetBranch(this.targetBranch), - body: new pull_request_body_1.PullRequestBody([ - { - component: updatedPackage.name, - version: newVersion, - notes: (0, workspace_1.appendDependenciesSectionToChangelog)('', dependencyNotes, this.logger), - }, - ]), - updates: [ - { - path: (0, workspace_1.addPath)(updatedPackage.path, 'package.json'), - createIfMissing: false, - updater: new package_json_1.PackageJson({ - version: newVersion, - versionsMap: updatedVersions, - updatePeerDependencies: this.updatePeerDependencies, - }), - }, - { - path: (0, workspace_1.addPath)(updatedPackage.path, 'package-lock.json'), - createIfMissing: false, - updater: new package_json_1.PackageJson({ - version: newVersion, - versionsMap: updatedVersions, - updatePeerDependencies: this.updatePeerDependencies, - }), - }, - { - path: (0, workspace_1.addPath)(updatedPackage.path, 'CHANGELOG.md'), - createIfMissing: false, - updater: new changelog_1.Changelog({ - version: newVersion, - changelogEntry: (0, workspace_1.appendDependenciesSectionToChangelog)('', dependencyNotes, this.logger), - }), - }, - ], - labels: [], - headRefName: branch_name_1.BranchName.ofTargetBranch(this.targetBranch).toString(), - version: newVersion, - draft: false, - }; - return { - path: updatedPackage.path, - pullRequest, - config: { - releaseType: 'node', - }, - }; - } - postProcessCandidates(candidates, _updatedVersions) { - if (candidates.length === 0) { - return candidates; - } - const [candidate] = candidates; - // check for root lock file in pull request - let hasRootLockFile; - for (let i = 0; i < candidate.pullRequest.updates.length; i++) { - if (candidate.pullRequest.updates[i].path === '.package-lock.json' || - candidate.pullRequest.updates[i].path === './package-lock.json' || - candidate.pullRequest.updates[i].path === 'package-lock.json' || - candidate.pullRequest.updates[i].path === '/package-lock.json') { - hasRootLockFile = true; - break; - } - } - // if there is a root lock file, then there is no additional pull request update necessary. 
- if (hasRootLockFile) { - return candidates; - } - candidate.pullRequest.updates.push({ - path: 'package-lock.json', - createIfMissing: false, - updater: new package_lock_json_1.PackageLockJson({ - versionsMap: _updatedVersions, - }), - }); - return candidates; - } - async buildGraph(allPackages) { - const graph = new Map(); - const workspacePackageNames = new Set(allPackages.map(packageJson => packageJson.name)); - for (const packageJson of allPackages) { - const allDeps = Object.keys(this.combineDeps(packageJson)); - const workspaceDeps = allDeps.filter(dep => workspacePackageNames.has(dep)); - graph.set(packageJson.name, { - deps: workspaceDeps, - value: packageJson, - }); - } - return graph; - } - inScope(candidate) { - return candidate.config.releaseType === 'node'; - } - packageNameFromPackage(pkg) { - return pkg.name; - } - pathFromPackage(pkg) { - return pkg.path; - } - combineDeps(packageJson) { - var _a, _b, _c, _d; - return { - ...((_a = packageJson.dependencies) !== null && _a !== void 0 ? _a : {}), - ...((_b = packageJson.devDependencies) !== null && _b !== void 0 ? _b : {}), - ...((_c = packageJson.optionalDependencies) !== null && _c !== void 0 ? _c : {}), - ...(this.updatePeerDependencies - ? (_d = packageJson.peerDependencies) !== null && _d !== void 0 ? _d : {} - : {}), - }; - } - async preconfigure(strategiesByPath, _commitsByPath, _releasesByPath) { - // Using preconfigure to siphon releases and strategies. - this.strategiesByPath = strategiesByPath; - this.releasesByPath = _releasesByPath; - return strategiesByPath; - } -} -exports.NodeWorkspace = NodeWorkspace; -function getChangelogDepsNotes(original, updated, updateVersions, logger) { - var _a; - let depUpdateNotes = ''; - const depTypes = [ - 'dependencies', - 'devDependencies', - 'peerDependencies', - 'optionalDependencies', - ]; - const updates = new Map(); - for (const depType of depTypes) { - const depUpdates = []; - const pkgDepTypes = updated[depType]; - if (pkgDepTypes === undefined) { - continue; - } - for (const [depName, currentDepVer] of Object.entries(pkgDepTypes)) { - const newVersion = updateVersions.get(depName); - if (!newVersion) { - logger.debug(`${depName} was not bumped, ignoring`); - continue; - } - const origDepVer = (_a = original[depType]) === null || _a === void 0 ? void 0 : _a[depName]; - const newVersionString = (0, package_json_1.newVersionWithRange)(origDepVer, newVersion); - if (currentDepVer.startsWith('workspace:')) { - depUpdates.push(`\n * ${depName} bumped to ${newVersionString}`); - } - else if (newVersionString !== origDepVer) { - depUpdates.push(`\n * ${depName} bumped from ${origDepVer} to ${newVersionString}`); - //handle case when "workspace:" version is used - } - } - if (depUpdates.length > 0) { - updates.set(depType, depUpdates); - } - } - for (const [dt, notes] of updates) { - depUpdateNotes += `\n * ${dt}`; - for (const note of notes) { - depUpdateNotes += note; - } - } - if (depUpdateNotes) { - return `* The following workspace dependencies were updated${depUpdateNotes}`; - } - return ''; -} -//# sourceMappingURL=node-workspace.js.map - -/***/ }), - -/***/ 36662: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.SentenceCase = void 0; -const plugin_1 = __nccwpck_require__(31651); -// A list of words that should not be converted to uppercase: -const SPECIAL_WORDS = ['gRPC', 'npm']; -/** - * This plugin converts commit messages to sentence case, for the benefit - * of the generated CHANGELOG. - */ -class SentenceCase extends plugin_1.ManifestPlugin { - constructor(github, targetBranch, repositoryConfig, specialWords) { - super(github, targetBranch, repositoryConfig); - this.specialWords = new Set(specialWords ? [...specialWords] : SPECIAL_WORDS); - } - /** - * Perform post-processing on commits, e.g, sentence casing them. - * @param {Commit[]} commits The set of commits that will feed into release pull request. - * @returns {Commit[]} The modified commit objects. - */ - processCommits(commits) { - this.logger.info(`SentenceCase processing ${commits.length} commits`); - for (const commit of commits) { - // The parsed conventional commit message, without the type: - console.info(commit.bareMessage); - commit.bareMessage = this.toUpperCase(commit.bareMessage); - // Check whether commit is in conventional commit format, if it is - // we'll split the string by type and description: - if (commit.message.includes(':')) { - const splitMessage = commit.message.split(':'); - let prefix = splitMessage[0]; - prefix += ': '; - let suffix = splitMessage.slice(1).join(':').trim(); - // Extract the first word from the rest of the string: - const match = /\s|$/.exec(suffix); - if (match) { - const endFirstWord = match.index; - const firstWord = suffix.slice(0, endFirstWord); - suffix = suffix.slice(endFirstWord); - // Put the string back together again: - commit.message = `${prefix}${this.toUpperCase(firstWord)}${suffix}`; - } - } - } - return commits; - } - /* - * Convert a string to upper case, taking into account a dictionary of - * common lowercase words, e.g., gRPC, npm. - * - * @param {string} word The original word. - * @returns {string} The word, now upper case. - */ - toUpperCase(word) { - if (this.specialWords.has(word)) { - return word; - } - if (word.match(/^[a-z]/)) { - return word.charAt(0).toUpperCase() + word.slice(1); - } - else { - return word; - } - } -} -exports.SentenceCase = SentenceCase; -//# sourceMappingURL=sentence-case.js.map - -/***/ }), - -/***/ 44226: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
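// A self-contained sketch of the sentence-casing rule implemented above:
// split a conventional commit header on the first ':', uppercase the first
// word of the description, and leave dictionary words (gRPC, npm) alone.
// sentenceCaseHeader is an invented helper name for illustration.
const SPECIAL = new Set(['gRPC', 'npm']);
function sentenceCaseHeader(message) {
    const idx = message.indexOf(':');
    if (idx === -1) return message;
    const rest = message.slice(idx + 1).trimStart();
    // /\s|$/ always matches, so exec() never returns null here.
    const cut = /\s|$/.exec(rest).index;
    const first = rest.slice(0, cut);
    const cased = SPECIAL.has(first) || !/^[a-z]/.test(first)
        ? first
        : first.charAt(0).toUpperCase() + first.slice(1);
    return `${message.slice(0, idx)}: ${cased}${rest.slice(cut)}`;
}
// sentenceCaseHeader('fix: improve gRPC retries') === 'fix: Improve gRPC retries'
// sentenceCaseHeader('feat: gRPC keepalive') === 'feat: gRPC keepalive'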
-Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.addPath = exports.appendDependenciesSectionToChangelog = exports.WorkspacePlugin = void 0; -const plugin_1 = __nccwpck_require__(31651); -const manifest_1 = __nccwpck_require__(31999); -const logger_1 = __nccwpck_require__(68809); -const merge_1 = __nccwpck_require__(90514); -const release_please_manifest_1 = __nccwpck_require__(9817); -/** - * The plugin generalizes the logic for handling a workspace and - * will bump dependencies of managed packages if those dependencies - * are being updated. - * - * If multiple in-scope packages are being updated, it will merge them - * into a single package. - * - * This class is templatized with `T` which should be information about - * the package including the name and current version. - */ -class WorkspacePlugin extends plugin_1.ManifestPlugin { - constructor(github, targetBranch, repositoryConfig, options = {}) { - var _a, _b, _c; - super(github, targetBranch, repositoryConfig, options.logger); - this.manifestPath = (_a = options.manifestPath) !== null && _a !== void 0 ? _a : manifest_1.DEFAULT_RELEASE_PLEASE_MANIFEST; - this.updateAllPackages = (_b = options.updateAllPackages) !== null && _b !== void 0 ? _b : false; - this.merge = (_c = options.merge) !== null && _c !== void 0 ? _c : true; - } - async run(candidates) { - this.logger.info('Running workspace plugin'); - const [inScopeCandidates, outOfScopeCandidates] = candidates.reduce((collection, candidate) => { - if (!candidate.pullRequest.version) { - this.logger.warn('pull request missing version', candidate); - return collection; - } - if (this.inScope(candidate)) { - collection[0].push(candidate); - } - else { - collection[1].push(candidate); - } - return collection; - }, [[], []]); - this.logger.info(`Found ${inScopeCandidates.length} in-scope releases`); - if (inScopeCandidates.length === 0) { - return outOfScopeCandidates; - } - this.logger.info('Building list of all packages'); - const { allPackages, candidatesByPackage } = await this.buildAllPackages(inScopeCandidates); - this.logger.info(`Building dependency graph for ${allPackages.length} packages`); - const graph = await this.buildGraph(allPackages); - const packageNamesToUpdate = this.packageNamesToUpdate(graph, candidatesByPackage); - const orderedPackages = this.buildGraphOrder(graph, packageNamesToUpdate); - this.logger.info(`Updating ${orderedPackages.length} packages`); - const { updatedVersions, updatedPathVersions } = await this.buildUpdatedVersions(graph, orderedPackages, candidatesByPackage); - let newCandidates = []; - // In some cases, there are multiple packages within a single candidate. We - // only want to process each candidate package once. 
- const newCandidatePaths = new Set(); - for (const pkg of orderedPackages) { - const existingCandidate = this.findCandidateForPackage(pkg, candidatesByPackage); - if (existingCandidate) { - // if already has an pull request, update the changelog and update - this.logger.info(`Updating existing candidate pull request for ${this.packageNameFromPackage(pkg)}, path: ${existingCandidate.path}`); - if (newCandidatePaths.has(existingCandidate.path)) { - this.logger.info(`Already updated candidate for path: ${existingCandidate.path}`); - } - else { - const newCandidate = this.updateCandidate(existingCandidate, pkg, updatedVersions); - newCandidatePaths.add(newCandidate.path); - newCandidates.push(newCandidate); - } - } - else { - // otherwise, build a new pull request with changelog and entry update - this.logger.info(`Creating new candidate pull request for ${this.packageNameFromPackage(pkg)}`); - const newCandidate = await this.newCandidate(pkg, updatedVersions); - if (newCandidatePaths.has(newCandidate.path)) { - this.logger.info(`Already created new candidate for path: ${newCandidate.path}`); - } - else { - newCandidatePaths.add(newCandidate.path); - newCandidates.push(newCandidate); - } - } - } - if (this.merge) { - this.logger.info(`Merging ${newCandidates.length} in-scope candidates`); - const mergePlugin = new merge_1.Merge(this.github, this.targetBranch, this.repositoryConfig); - newCandidates = await mergePlugin.run(newCandidates); - } - const newUpdates = newCandidates[0].pullRequest.updates; - newUpdates.push({ - path: this.manifestPath, - createIfMissing: false, - updater: new release_please_manifest_1.ReleasePleaseManifest({ - version: newCandidates[0].pullRequest.version, - versionsMap: updatedPathVersions, - }), - }); - this.logger.info(`Post-processing ${newCandidates.length} in-scope candidates`); - newCandidates = this.postProcessCandidates(newCandidates, updatedVersions); - return [...outOfScopeCandidates, ...newCandidates]; - } - /** - * Helper for finding a candidate release based on the package name. - * By default, we assume that the package name matches the release - * component. - * @param {T} pkg The package being released - * @param {Record} graph All the packages in the repository - * @param {Record this.packageNameFromPackage(value)); - } - return Object.keys(candidatesByPackage); - } - /** - * Helper to build up all the versions we are modifying in this - * repository. - * @param {DependencyGraph} _graph All the packages in the repository - * @param {T[]} orderedPackages A list of packages that are currently - * updated by the existing candidate pull requests - * @param {Record Version) and a - * map of all updated versions (component path => Version). 
- */ - async buildUpdatedVersions(_graph, orderedPackages, candidatesByPackage) { - const updatedVersions = new Map(); - const updatedPathVersions = new Map(); - for (const pkg of orderedPackages) { - const packageName = this.packageNameFromPackage(pkg); - this.logger.debug(`package: ${packageName}`); - const existingCandidate = candidatesByPackage[packageName]; - if (existingCandidate) { - const version = existingCandidate.pullRequest.version; - this.logger.debug(`version: ${version} from release-please`); - updatedVersions.set(packageName, version); - } - else { - const version = this.bumpVersion(pkg); - this.logger.debug(`version: ${version} forced bump`); - updatedVersions.set(packageName, version); - if (this.isReleaseVersion(version)) { - updatedPathVersions.set(this.pathFromPackage(pkg), version); - } - } - } - return { - updatedVersions, - updatedPathVersions, - }; - } - /** - * Given a release version, determine if we should bump the manifest - * version as well. - * @param {Version} _version The release version - */ - isReleaseVersion(_version) { - return true; - } - /** - * Helper to invert the graph from package => packages that it depends on - * to package => packages that depend on it. - * @param {DependencyGraph} graph - * @returns {DependencyGraph} - */ - invertGraph(graph) { - const dependentGraph = new Map(); - for (const [packageName, node] of graph) { - dependentGraph.set(packageName, { - deps: [], - value: node.value, - }); - } - for (const [packageName, node] of graph) { - for (const depName of node.deps) { - if (dependentGraph.has(depName)) { - dependentGraph.get(depName).deps.push(packageName); - } - } - } - return dependentGraph; - } - /** - * Determine all the packages which need to be updated and sort them. - * @param {DependencyGraph} graph The graph of package => packages it depends on - * @param {string} packageNamesToUpdate Names of the packages which are already - * being updated. - */ - buildGraphOrder(graph, packageNamesToUpdate) { - this.logger.info(`building graph order, existing package names: ${packageNamesToUpdate}`); - // invert the graph so it's dependency name => packages that depend on it - const dependentGraph = this.invertGraph(graph); - const visited = new Set(); - // we're iterating the `Map` in insertion order (as per ECMA262), but - // that does not reflect any particular traversal of the graph, so we - // visit all nodes, opportunistically short-circuiting leafs when we've - // already visited them. 
-        for (const name of packageNamesToUpdate) {
-            this.visitPostOrder(dependentGraph, name, visited, []);
-        }
-        return Array.from(visited).sort((a, b) => this.packageNameFromPackage(a).localeCompare(this.packageNameFromPackage(b)));
-    }
-    visitPostOrder(graph, name, visited, path) {
-        this.logger.debug(`visiting ${name}, path: ${path}`);
-        if (path.indexOf(name) !== -1) {
-            throw new Error(`found cycle in dependency graph: ${path.join(' -> ')} -> ${name}`);
-        }
-        const node = graph.get(name);
-        if (!node) {
-            this.logger.warn(`Didn't find node: ${name} in graph`);
-            return;
-        }
-        const nextPath = [...path, name];
-        for (const depName of node.deps) {
-            const dep = graph.get(depName);
-            if (!dep) {
-                this.logger.warn(`dependency not found in graph: ${depName}`);
-                return;
-            }
-            this.logger.info(`visiting ${depName} next`);
-            this.visitPostOrder(graph, depName, visited, nextPath);
-        }
-        if (!visited.has(node.value)) {
-            this.logger.debug(`marking ${name} as visited and adding ${this.packageNameFromPackage(node.value)} to order`);
-            visited.add(node.value);
-        }
-        else {
-            this.logger.debug(`${node.value} already visited`);
-        }
-    }
-}
-exports.WorkspacePlugin = WorkspacePlugin;
-const DEPENDENCY_HEADER = new RegExp('### Dependencies');
-function appendDependenciesSectionToChangelog(changelog, notes, logger = logger_1.logger) {
-    if (!changelog) {
-        return `### Dependencies\n\n${notes}`;
-    }
-    logger.info('appending dependency notes to changelog');
-    const newLines = [];
-    let seenDependenciesSection = false;
-    let seenDependencySectionSpacer = false;
-    let injected = false;
-    for (const line of changelog.split('\n')) {
-        if (seenDependenciesSection) {
-            const trimmedLine = line.trim();
-            if (seenDependencySectionSpacer &&
-                !injected &&
-                !trimmedLine.startsWith('*')) {
-                newLines.push(notes);
-                injected = true;
-            }
-            if (trimmedLine === '') {
-                seenDependencySectionSpacer = true;
-            }
-        }
-        if (line.match(DEPENDENCY_HEADER)) {
-            seenDependenciesSection = true;
-        }
-        newLines.push(line);
-    }
-    if (injected) {
-        return newLines.join('\n');
-    }
-    if (seenDependenciesSection) {
-        return `${changelog}\n${notes}`;
-    }
-    return `${changelog}\n\n\n### Dependencies\n\n${notes}`;
-}
-exports.appendDependenciesSectionToChangelog = appendDependenciesSectionToChangelog;
-function addPath(path, file) {
-    return path === manifest_1.ROOT_PROJECT_PATH ? file : `${path}/${file}`;
-}
-exports.addPath = addPath;
-//# sourceMappingURL=workspace.js.map
-
-/***/ }),
-
-/***/ 95081:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
-
-"use strict";
-
-// Copyright 2021 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
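
// Illustrative sketch (editorial, not part of the bundled source): the
// ordering algorithm WorkspacePlugin implements above, reduced to plain data.
// Nodes are { deps, value } keyed by package name, as in the real graph; the
// package names here are made up. Invert the graph so edges point from a
// package to its dependents, then visit post-order with cycle detection, so a
// package's dependents are recorded before the package itself; the real
// buildGraphOrder then sorts the visited set by package name, as done below.
const graph = new Map([
  ['core', { deps: [], value: 'core' }],
  ['util', { deps: ['core'], value: 'util' }],
  ['app', { deps: ['core', 'util'], value: 'app' }],
]);
function invert(graph) {
  const inverted = new Map();
  for (const [name, node] of graph) {
    inverted.set(name, { deps: [], value: node.value });
  }
  for (const [name, node] of graph) {
    for (const dep of node.deps) {
      inverted.get(dep)?.deps.push(name);
    }
  }
  return inverted;
}
function visitPostOrder(graph, name, visited, path = []) {
  if (path.includes(name)) {
    throw new Error(`found cycle: ${[...path, name].join(' -> ')}`);
  }
  const node = graph.get(name);
  if (!node) return;
  for (const dep of node.deps) {
    visitPostOrder(graph, dep, visited, [...path, name]);
  }
  visited.add(node.value); // Set membership de-duplicates repeat visits
}
const dependents = invert(graph);
const visited = new Set();
visitPostOrder(dependents, 'core', visited); // updating core pulls in util and app
console.log([...visited].sort()); // [ 'app', 'core', 'util' ]
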
-Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.BaseStrategy = void 0; -const manifest_1 = __nccwpck_require__(31999); -const default_1 = __nccwpck_require__(94073); -const default_2 = __nccwpck_require__(71480); -const version_1 = __nccwpck_require__(17348); -const tag_name_1 = __nccwpck_require__(36503); -const logger_1 = __nccwpck_require__(68809); -const pull_request_title_1 = __nccwpck_require__(1158); -const branch_name_1 = __nccwpck_require__(16344); -const pull_request_body_1 = __nccwpck_require__(70774); -const composite_1 = __nccwpck_require__(40911); -const generic_1 = __nccwpck_require__(96323); -const generic_json_1 = __nccwpck_require__(15011); -const generic_xml_1 = __nccwpck_require__(15591); -const pom_xml_1 = __nccwpck_require__(60255); -const generic_yaml_1 = __nccwpck_require__(61024); -const generic_toml_1 = __nccwpck_require__(53530); -const DEFAULT_CHANGELOG_PATH = 'CHANGELOG.md'; -/** - * A strategy is responsible for determining which files are - * necessary to update in a release pull request. - */ -class BaseStrategy { - constructor(options) { - var _a, _b, _c; - this.logger = (_a = options.logger) !== null && _a !== void 0 ? _a : logger_1.logger; - this.path = options.path || manifest_1.ROOT_PROJECT_PATH; - this.github = options.github; - this.packageName = options.packageName; - this.component = - options.component || this.normalizeComponent(this.packageName); - this.versioningStrategy = - options.versioningStrategy || - new default_1.DefaultVersioningStrategy({ logger: this.logger }); - this.targetBranch = options.targetBranch; - this.repository = options.github.repository; - this.changelogPath = options.changelogPath || DEFAULT_CHANGELOG_PATH; - this.changelogHost = options.changelogHost; - this.changelogSections = options.changelogSections; - this.tagSeparator = options.tagSeparator; - this.skipGitHubRelease = options.skipGitHubRelease || false; - this.releaseAs = options.releaseAs; - this.changelogNotes = - options.changelogNotes || new default_2.DefaultChangelogNotes(options); - this.includeComponentInTag = (_b = options.includeComponentInTag) !== null && _b !== void 0 ? _b : true; - this.includeVInTag = (_c = options.includeVInTag) !== null && _c !== void 0 ? _c : true; - this.pullRequestTitlePattern = options.pullRequestTitlePattern; - this.pullRequestHeader = options.pullRequestHeader; - this.pullRequestFooter = options.pullRequestFooter; - this.componentNoSpace = options.componentNoSpace; - this.extraFiles = options.extraFiles || []; - this.initialVersion = options.initialVersion; - this.extraLabels = options.extraLabels || []; - } - /** - * Return the component for this strategy. This may be a computed field. - * @returns {string} - */ - async getComponent() { - if (!this.includeComponentInTag) { - return ''; - } - return this.component || (await this.getDefaultComponent()); - } - async getDefaultComponent() { - var _a; - return this.normalizeComponent((_a = this.packageName) !== null && _a !== void 0 ? _a : (await this.getDefaultPackageName())); - } - async getBranchComponent() { - return this.component || (await this.getDefaultComponent()); - } - async getPackageName() { - var _a; - return (_a = this.packageName) !== null && _a !== void 0 ? _a : (await this.getDefaultPackageName()); - } - async getDefaultPackageName() { - var _a; - return (_a = this.packageName) !== null && _a !== void 0 ? 
_a : ''; - } - normalizeComponent(component) { - if (!component) { - return ''; - } - return component; - } - /** - * Override this method to post process commits - * @param {ConventionalCommit[]} commits parsed commits - * @returns {ConventionalCommit[]} modified commits - */ - async postProcessCommits(commits) { - return commits; - } - async buildReleaseNotes(conventionalCommits, newVersion, newVersionTag, latestRelease, commits) { - var _a; - return await this.changelogNotes.buildNotes(conventionalCommits, { - host: this.changelogHost, - owner: this.repository.owner, - repository: this.repository.repo, - version: newVersion.toString(), - previousTag: (_a = latestRelease === null || latestRelease === void 0 ? void 0 : latestRelease.tag) === null || _a === void 0 ? void 0 : _a.toString(), - currentTag: newVersionTag.toString(), - targetBranch: this.targetBranch, - changelogSections: this.changelogSections, - commits: commits, - }); - } - async buildPullRequestBody(component, newVersion, releaseNotesBody, _conventionalCommits, _latestRelease, pullRequestHeader, pullRequestFooter) { - return new pull_request_body_1.PullRequestBody([ - { - component, - version: newVersion, - notes: releaseNotesBody, - }, - ], { - header: pullRequestHeader, - footer: pullRequestFooter, - }); - } - /** - * Builds a candidate release pull request - * @param {Commit[]} commits Raw commits to consider for this release. - * @param {Release} latestRelease Optional. The last release for this - * component if available. - * @param {boolean} draft Optional. Whether or not to create the pull - * request as a draft. Defaults to `false`. - * @returns {ReleasePullRequest | undefined} The release pull request to - * open for this path/component. Returns undefined if we should not - * open a pull request. - */ - async buildReleasePullRequest(commits, latestRelease, draft, labels = [], bumpOnlyOptions) { - var _a; - const conventionalCommits = await this.postProcessCommits(commits); - this.logger.info(`Considering: ${conventionalCommits.length} commits`); - if (!bumpOnlyOptions && conventionalCommits.length === 0) { - this.logger.info(`No commits for path: ${this.path}, skipping`); - return undefined; - } - const newVersion = (_a = bumpOnlyOptions === null || bumpOnlyOptions === void 0 ? void 0 : bumpOnlyOptions.newVersion) !== null && _a !== void 0 ? _a : (await this.buildNewVersion(conventionalCommits, latestRelease)); - const versionsMap = await this.updateVersionsMap(await this.buildVersionsMap(conventionalCommits), conventionalCommits, newVersion); - const component = await this.getComponent(); - this.logger.debug('component:', component); - const newVersionTag = new tag_name_1.TagName(newVersion, this.includeComponentInTag ? component : undefined, this.tagSeparator, this.includeVInTag); - this.logger.debug('pull request title pattern:', this.pullRequestTitlePattern); - this.logger.debug('componentNoSpace:', this.componentNoSpace); - const pullRequestTitle = pull_request_title_1.PullRequestTitle.ofComponentTargetBranchVersion(component || '', this.targetBranch, newVersion, this.pullRequestTitlePattern, this.componentNoSpace); - const branchComponent = await this.getBranchComponent(); - const branchName = branchComponent - ? 
branch_name_1.BranchName.ofComponentTargetBranch(branchComponent, this.targetBranch) - : branch_name_1.BranchName.ofTargetBranch(this.targetBranch); - const releaseNotesBody = await this.buildReleaseNotes(conventionalCommits, newVersion, newVersionTag, latestRelease, commits); - if (!bumpOnlyOptions && this.changelogEmpty(releaseNotesBody)) { - this.logger.info(`No user facing commits found since ${latestRelease ? latestRelease.sha : 'beginning of time'} - skipping`); - return undefined; - } - const updates = await this.buildUpdates({ - changelogEntry: releaseNotesBody, - newVersion, - versionsMap, - latestVersion: latestRelease === null || latestRelease === void 0 ? void 0 : latestRelease.tag.version, - commits: conventionalCommits, - }); - const updatesWithExtras = (0, composite_1.mergeUpdates)(updates.concat(...(await this.extraFileUpdates(newVersion, versionsMap)))); - const pullRequestBody = await this.buildPullRequestBody(component, newVersion, releaseNotesBody, conventionalCommits, latestRelease, this.pullRequestHeader, this.pullRequestFooter); - return { - title: pullRequestTitle, - body: pullRequestBody, - updates: updatesWithExtras, - labels: [...labels, ...this.extraLabels], - headRefName: branchName.toString(), - version: newVersion, - draft: draft !== null && draft !== void 0 ? draft : false, - }; - } - // Helper to convert extra files with globs to the file paths to add - async extraFilePaths(extraFile) { - if (typeof extraFile !== 'object') { - return [extraFile]; - } - if (!extraFile.glob) { - return [extraFile.path]; - } - if (extraFile.path.startsWith('/')) { - // glob is relative to root, strip the leading `/` for glob matching - // and re-add the leading `/` to make the file relative to the root - return (await this.github.findFilesByGlobAndRef(extraFile.path.slice(1), this.targetBranch)).map(file => `/${file}`); - } - else if (this.path === manifest_1.ROOT_PROJECT_PATH) { - // root component, ignore path prefix - return this.github.findFilesByGlobAndRef(extraFile.path, this.targetBranch); - } - else { - // glob is relative to current path - return this.github.findFilesByGlobAndRef(extraFile.path, this.targetBranch, this.path); - } - } - async extraFileUpdates(version, versionsMap) { - const extraFileUpdates = []; - for (const extraFile of this.extraFiles) { - if (typeof extraFile === 'object') { - const paths = await this.extraFilePaths(extraFile); - for (const path of paths) { - switch (extraFile.type) { - case 'generic': - extraFileUpdates.push({ - path: this.addPath(path), - createIfMissing: false, - updater: new generic_1.Generic({ version, versionsMap }), - }); - break; - case 'json': - extraFileUpdates.push({ - path: this.addPath(path), - createIfMissing: false, - updater: new generic_json_1.GenericJson(extraFile.jsonpath, version), - }); - break; - case 'yaml': - extraFileUpdates.push({ - path: this.addPath(path), - createIfMissing: false, - updater: new generic_yaml_1.GenericYaml(extraFile.jsonpath, version), - }); - break; - case 'toml': - extraFileUpdates.push({ - path: this.addPath(path), - createIfMissing: false, - updater: new generic_toml_1.GenericToml(extraFile.jsonpath, version), - }); - break; - case 'xml': - extraFileUpdates.push({ - path: this.addPath(path), - createIfMissing: false, - updater: new generic_xml_1.GenericXml(extraFile.xpath, version), - }); - break; - case 'pom': - extraFileUpdates.push({ - path: this.addPath(path), - createIfMissing: false, - updater: new pom_xml_1.PomXml(version), - }); - break; - default: - throw new 
Error(`unsupported extraFile type: ${extraFile.type}`); - } - } - } - else if (extraFile.endsWith('.json')) { - extraFileUpdates.push({ - path: this.addPath(extraFile), - createIfMissing: false, - updater: new composite_1.CompositeUpdater(new generic_json_1.GenericJson('$.version', version), new generic_1.Generic({ version, versionsMap })), - }); - } - else if (extraFile.endsWith('.yaml') || extraFile.endsWith('.yml')) { - extraFileUpdates.push({ - path: this.addPath(extraFile), - createIfMissing: false, - updater: new composite_1.CompositeUpdater(new generic_yaml_1.GenericYaml('$.version', version), new generic_1.Generic({ version, versionsMap })), - }); - } - else if (extraFile.endsWith('.toml')) { - extraFileUpdates.push({ - path: this.addPath(extraFile), - createIfMissing: false, - updater: new composite_1.CompositeUpdater(new generic_toml_1.GenericToml('$.version', version), new generic_1.Generic({ version, versionsMap })), - }); - } - else if (extraFile.endsWith('.xml')) { - extraFileUpdates.push({ - path: this.addPath(extraFile), - createIfMissing: false, - updater: new composite_1.CompositeUpdater( - // Updates "version" element that is a child of the root element. - new generic_xml_1.GenericXml('/*/version', version), new generic_1.Generic({ version, versionsMap })), - }); - } - else { - extraFileUpdates.push({ - path: this.addPath(extraFile), - createIfMissing: false, - updater: new generic_1.Generic({ version, versionsMap }), - }); - } - } - return extraFileUpdates; - } - changelogEmpty(changelogEntry) { - return changelogEntry.split('\n').length <= 1; - } - async updateVersionsMap(versionsMap, conventionalCommits, _newVersion) { - for (const [component, version] of versionsMap.entries()) { - versionsMap.set(component, await this.versioningStrategy.bump(version, conventionalCommits)); - } - return versionsMap; - } - async buildNewVersion(conventionalCommits, latestRelease) { - if (this.releaseAs) { - this.logger.warn(`Setting version for ${this.path} from release-as configuration`); - return version_1.Version.parse(this.releaseAs); - } - const releaseAsCommit = conventionalCommits.find(conventionalCommit => conventionalCommit.notes.find(note => note.title === 'RELEASE AS')); - if (releaseAsCommit) { - const note = releaseAsCommit.notes.find(note => note.title === 'RELEASE AS'); - if (note) { - return version_1.Version.parse(note.text); - } - } - if (latestRelease) { - return await this.versioningStrategy.bump(latestRelease.tag.version, conventionalCommits); - } - return this.initialReleaseVersion(); - } - async buildVersionsMap(_conventionalCommits) { - return new Map(); - } - async parsePullRequestBody(pullRequestBody) { - return pull_request_body_1.PullRequestBody.parse(pullRequestBody, this.logger); - } - /** - * Given a merged pull request, build the candidate release. - * @param {PullRequest} mergedPullRequest The merged release pull request. - * @returns {Release} The candidate release. - * @deprecated Use buildReleases() instead. - */ - async buildRelease(mergedPullRequest, options) { - var _a; - if (this.skipGitHubRelease) { - this.logger.info('Release skipped from strategy config'); - return; - } - if (!mergedPullRequest.sha) { - this.logger.error('Pull request should have been merged'); - return; - } - const mergedTitlePattern = (_a = options === null || options === void 0 ? void 0 : options.groupPullRequestTitlePattern) !== null && _a !== void 0 ? 
_a : manifest_1.MANIFEST_PULL_REQUEST_TITLE_PATTERN; - const pullRequestTitle = pull_request_title_1.PullRequestTitle.parse(mergedPullRequest.title, this.pullRequestTitlePattern, this.componentNoSpace, this.logger) || - pull_request_title_1.PullRequestTitle.parse(mergedPullRequest.title, mergedTitlePattern, this.componentNoSpace, this.logger); - if (!pullRequestTitle) { - this.logger.error(`Bad pull request title: '${mergedPullRequest.title}'`); - return; - } - const branchName = branch_name_1.BranchName.parse(mergedPullRequest.headBranchName, this.logger); - if (!branchName) { - this.logger.error(`Bad branch name: ${mergedPullRequest.headBranchName}`); - return; - } - const pullRequestBody = await this.parsePullRequestBody(mergedPullRequest.body); - if (!pullRequestBody) { - this.logger.error('Could not parse pull request body as a release PR'); - return; - } - const component = await this.getComponent(); - let releaseData; - if (pullRequestBody.releaseData.length === 1 && - !pullRequestBody.releaseData[0].component) { - const branchComponent = await this.getBranchComponent(); - // standalone release PR, ensure the components match - if (this.normalizeComponent(branchName.component) !== - this.normalizeComponent(branchComponent)) { - this.logger.warn(`PR component: ${branchName.component} does not match configured component: ${branchComponent}`); - return; - } - releaseData = pullRequestBody.releaseData[0]; - } - else { - // manifest release with multiple components - find the release notes - // for the component to see if it was included in this release (parsed - // from the release pull request body) - releaseData = pullRequestBody.releaseData.find(datum => { - return (this.normalizeComponent(datum.component) === - this.normalizeComponent(component)); - }); - if (!releaseData && pullRequestBody.releaseData.length > 0) { - this.logger.info(`Pull request contains releases, but not for component: ${component}`); - return; - } - } - const notes = releaseData === null || releaseData === void 0 ? void 0 : releaseData.notes; - if (notes === undefined) { - this.logger.warn('Failed to find release notes'); - } - let version = pullRequestTitle.getVersion(); - if (!version || - (pullRequestBody.releaseData.length > 1 && (releaseData === null || releaseData === void 0 ? void 0 : releaseData.version))) { - // prioritize pull-request body version for multi-component releases - version = releaseData === null || releaseData === void 0 ? void 0 : releaseData.version; - } - if (!version) { - this.logger.error('Pull request should have included version'); - return; - } - if (!this.isPublishedVersion(version)) { - this.logger.warn(`Skipping non-published version: ${version.toString()}`); - return; - } - const tag = new tag_name_1.TagName(version, this.includeComponentInTag ? component : undefined, this.tagSeparator, this.includeVInTag); - const releaseName = component && this.includeComponentInTag - ? `${component}: v${version.toString()}` - : `v${version.toString()}`; - return { - name: releaseName, - tag, - notes: notes || '', - sha: mergedPullRequest.sha, - }; - } - /** - * Given a merged pull request, build the candidate releases. - * @param {PullRequest} mergedPullRequest The merged release pull request. - * @returns {Release} The candidate release. 
- */ - async buildReleases(mergedPullRequest, options) { - const release = await this.buildRelease(mergedPullRequest, options); - if (release) { - return [release]; - } - return []; - } - isPublishedVersion(_version) { - return true; - } - /** - * Override this to handle the initial version of a new library. - */ - initialReleaseVersion() { - if (this.initialVersion) { - return version_1.Version.parse(this.initialVersion); - } - return version_1.Version.parse('1.0.0'); - } - /** - * Adds a given file path to the strategy path. - * @param {string} file Desired file path. - * @returns {string} The file relative to the strategy. - * @throws {Error} If the file path contains relative pathing characters, i.e. ../, ~/ - */ - addPath(file) { - // There is no strategy path to join, the strategy is at the root, or the - // file is at the root (denoted by a leading slash or tilde) - if (!this.path || this.path === manifest_1.ROOT_PROJECT_PATH || file.startsWith('/')) { - file = file.replace(/^\/+/, ''); - } - // Otherwise, the file is relative to the strategy path - else { - file = `${this.path.replace(/\/+$/, '')}/${file}`; - } - // Ensure the file path does not escape the workspace - if (/((^|\/)\.{1,2}|^~|^\/*)+\//.test(file)) { - throw new Error(`illegal pathing characters in path: ${file}`); - } - // Strip any trailing slashes and return - return file.replace(/\/+$/, ''); - } -} -exports.BaseStrategy = BaseStrategy; -//# sourceMappingURL=base.js.map - -/***/ }), - -/***/ 76082: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2024 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Bazel = void 0; -const module_bazel_1 = __nccwpck_require__(75219); -const changelog_1 = __nccwpck_require__(3325); -const base_1 = __nccwpck_require__(95081); -class Bazel extends base_1.BaseStrategy { - constructor(options) { - var _a; - super(options); - this.versionFile = (_a = options.versionFile) !== null && _a !== void 0 ? _a : 'MODULE.bazel'; - } - async buildUpdates(options) { - const updates = []; - const version = options.newVersion; - updates.push({ - path: this.addPath(this.changelogPath), - createIfMissing: true, - updater: new changelog_1.Changelog({ - version, - changelogEntry: options.changelogEntry, - }), - }); - updates.push({ - path: this.addPath(this.versionFile), - createIfMissing: false, - updater: new module_bazel_1.ModuleBazel({ - version, - }), - }); - return updates; - } -} -exports.Bazel = Bazel; -//# sourceMappingURL=bazel.js.map - -/***/ }), - -/***/ 86518: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Dart = void 0; -// Generic -const changelog_1 = __nccwpck_require__(3325); -const yaml = __nccwpck_require__(21917); -// pubspec -const pubspec_yaml_1 = __nccwpck_require__(62861); -const base_1 = __nccwpck_require__(95081); -const errors_1 = __nccwpck_require__(93637); -class Dart extends base_1.BaseStrategy { - async buildUpdates(options) { - const updates = []; - const version = options.newVersion; - updates.push({ - path: this.addPath(this.changelogPath), - createIfMissing: true, - updater: new changelog_1.Changelog({ - version, - changelogEntry: options.changelogEntry, - }), - }); - updates.push({ - path: this.addPath('pubspec.yaml'), - createIfMissing: false, - cachedFileContents: this.pubspecYmlContents, - updater: new pubspec_yaml_1.PubspecYaml({ - version, - }), - }); - return updates; - } - async getDefaultPackageName() { - const pubspecYmlContents = await this.getPubspecYmlContents(); - const pubspec = yaml.load(pubspecYmlContents.parsedContent, { json: true }); - if (typeof pubspec === 'object') { - return pubspec.name; - } - else { - return undefined; - } - } - async getPubspecYmlContents() { - if (!this.pubspecYmlContents) { - try { - this.pubspecYmlContents = await this.github.getFileContentsOnBranch(this.addPath('pubspec.yaml'), this.targetBranch); - } - catch (e) { - if (e instanceof errors_1.FileNotFoundError) { - throw new errors_1.MissingRequiredFileError(this.addPath('pubspec.yaml'), Dart.name, `${this.repository.owner}/${this.repository.repo}`); - } - throw e; - } - } - return this.pubspecYmlContents; - } -} -exports.Dart = Dart; -//# sourceMappingURL=dart.js.map - -/***/ }), - -/***/ 98175: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
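
// Illustrative sketch (editorial, not part of the bundled source): how the
// Dart strategy above derives its default package name: parse pubspec.yaml
// and read `name`. Assumes the standalone js-yaml package (the same library
// this bundle vendors); the pubspec content here is made up for the example.
const yaml = require('js-yaml');
const pubspecYaml = [
  'name: my_dart_package',
  'version: 1.2.3',
  'environment:',
  "  sdk: '>=2.17.0 <4.0.0'",
].join('\n');
// Mirrors the guard in getDefaultPackageName: only trust object-shaped YAML.
const pubspec = yaml.load(pubspecYaml, { json: true });
console.log(typeof pubspec === 'object' ? pubspec.name : undefined); // 'my_dart_package'
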
-Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.DotnetYoshi = void 0; -const base_1 = __nccwpck_require__(95081); -const changelog_1 = __nccwpck_require__(3325); -const apis_1 = __nccwpck_require__(4856); -const errors_1 = __nccwpck_require__(93637); -const CHANGELOG_SECTIONS = [ - { type: 'feat', section: 'New features' }, - { type: 'fix', section: 'Bug fixes' }, - { type: 'perf', section: 'Performance improvements' }, - { type: 'revert', section: 'Reverts' }, - { type: 'chore', section: 'Miscellaneous chores', hidden: true }, - { type: 'docs', section: 'Documentation improvements' }, - { type: 'style', section: 'Styles', hidden: true }, - { type: 'refactor', section: 'Code Refactoring', hidden: true }, - { type: 'test', section: 'Tests', hidden: true }, - { type: 'build', section: 'Build System', hidden: true }, - { type: 'ci', section: 'Continuous Integration', hidden: true }, -]; -const DEFAULT_CHANGELOG_PATH = 'docs/history.md'; -const DEFAULT_PULL_REQUEST_TITLE_PATTERN = 'Release${component} version ${version}'; -const DEFAULT_PULL_REQUEST_HEADER = ':robot: I have created a release *beep* *boop*'; -const DEFAULT_PULL_REQUEST_FOOTER = 'This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please).'; -const RELEASE_NOTES_HEADER_PATTERN = /#{2,3} \[?(\d+\.\d+\.\d+-?[^\]]*)\]?.* \((\d{4}-\d{2}-\d{2})\)/; -class DotnetYoshi extends base_1.BaseStrategy { - constructor(options) { - var _a, _b, _c, _d, _e, _f; - options.changelogSections = (_a = options.changelogSections) !== null && _a !== void 0 ? _a : CHANGELOG_SECTIONS; - options.changelogPath = (_b = options.changelogPath) !== null && _b !== void 0 ? _b : DEFAULT_CHANGELOG_PATH; - options.pullRequestTitlePattern = - (_c = options.pullRequestTitlePattern) !== null && _c !== void 0 ? _c : DEFAULT_PULL_REQUEST_TITLE_PATTERN; - options.pullRequestHeader = - (_d = options.pullRequestHeader) !== null && _d !== void 0 ? _d : DEFAULT_PULL_REQUEST_HEADER; - options.pullRequestFooter = - (_e = options.pullRequestFooter) !== null && _e !== void 0 ? _e : DEFAULT_PULL_REQUEST_FOOTER; - options.includeVInTag = (_f = options.includeVInTag) !== null && _f !== void 0 ? _f : false; - super(options); - } - async buildReleaseNotes(conventionalCommits, newVersion, newVersionTag, latestRelease) { - const notes = await super.buildReleaseNotes(conventionalCommits, newVersion, newVersionTag, latestRelease); - return notes.replace(RELEASE_NOTES_HEADER_PATTERN, '## Version $1, released $2'); - } - async getApi() { - try { - const contents = await this.github.getFileContentsOnBranch('apis/apis.json', this.targetBranch); - const apis = JSON.parse(contents.parsedContent); - const component = await this.getComponent(); - return apis.apis.find(api => api.id === component); - } - catch (e) { - if (e instanceof errors_1.FileNotFoundError) { - throw new errors_1.MissingRequiredFileError('apis/apis.json', DotnetYoshi.name, `${this.repository.owner}/${this.repository.repo}`); - } - throw e; - } - } - async getDefaultComponent() { - // default component is based on the path - const pathParts = this.path.split('/'); - return pathParts[pathParts.length - 1]; - } - async buildUpdates(options) { - const updates = []; - const version = options.newVersion; - const component = await this.getComponent(); - const api = await this.getApi(); - if (api === null || api === void 0 ? 
void 0 : api.noVersionHistory) { - this.logger.info(`Skipping changelog for ${component} via noVersionHistory configuration`); - } - else { - updates.push({ - path: this.addPath(this.changelogPath), - createIfMissing: true, - updater: new changelog_1.Changelog({ - version, - changelogEntry: options.changelogEntry, - versionHeaderRegex: '\n## Version [0-9[]+', - }), - }); - } - if (!component) { - this.logger.warn('Dotnet strategy expects to use components, could not update all files'); - return updates; - } - updates.push({ - path: 'apis/apis.json', - createIfMissing: false, - updater: new apis_1.Apis(component, version), - }); - return updates; - } -} -exports.DotnetYoshi = DotnetYoshi; -//# sourceMappingURL=dotnet-yoshi.js.map - -/***/ }), - -/***/ 91731: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Elixir = void 0; -// Generic -const changelog_1 = __nccwpck_require__(3325); -// mix.exs support -const elixir_mix_exs_1 = __nccwpck_require__(31612); -const base_1 = __nccwpck_require__(95081); -class Elixir extends base_1.BaseStrategy { - async buildUpdates(options) { - const updates = []; - const version = options.newVersion; - updates.push({ - path: this.addPath(this.changelogPath), - createIfMissing: true, - updater: new changelog_1.Changelog({ - version, - changelogEntry: options.changelogEntry, - }), - }); - updates.push({ - path: this.addPath('mix.exs'), - createIfMissing: false, - updater: new elixir_mix_exs_1.ElixirMixExs({ - version, - }), - }); - return updates; - } -} -exports.Elixir = Elixir; -//# sourceMappingURL=elixir.js.map - -/***/ }), - -/***/ 181: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Expo = void 0; -const node_1 = __nccwpck_require__(78957); -const app_json_1 = __nccwpck_require__(20059); -const version_1 = __nccwpck_require__(17348); -/** - * Strategy for building Expo based React Native projects. This strategy extends - * the Node strategy to additionally update the `app.json` file of a project. 
- */
-class Expo extends node_1.Node {
-    async buildUpdates(options) {
-        const version = options.newVersion;
-        const updates = await super.buildUpdates(options);
-        const expoSDKVersion = await this.getExpoSDKVersion();
-        updates.push({
-            path: this.addPath('app.json'),
-            createIfMissing: false,
-            updater: new app_json_1.AppJson({ version, expoSDKVersion }),
-        });
-        return updates;
-    }
-    /**
-     * Determine the Expo SDK version by parsing the package.json dependencies.
-     */
-    async getExpoSDKVersion() {
-        var _a, _b, _c, _d;
-        const pkgJsonContents = await this.getPkgJsonContents();
-        const pkg = JSON.parse(pkgJsonContents.parsedContent);
-        return version_1.Version.parse(((_a = pkg.dependencies) === null || _a === void 0 ? void 0 : _a.expo) ||
-            ((_b = pkg.devDependencies) === null || _b === void 0 ? void 0 : _b.expo) ||
-            ((_c = pkg.peerDependencies) === null || _c === void 0 ? void 0 : _c.expo) ||
-            ((_d = pkg.optionalDependencies) === null || _d === void 0 ? void 0 : _d.expo) ||
-            '0.0.0');
-    }
-}
-exports.Expo = Expo;
-//# sourceMappingURL=expo.js.map
-
-/***/ }),
-
-/***/ 6492:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
-
-"use strict";
-
-// Copyright 2021 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.GoYoshi = void 0;
-const base_1 = __nccwpck_require__(95081);
-const changelog_1 = __nccwpck_require__(3325);
-const version_1 = __nccwpck_require__(17348);
-const version_go_1 = __nccwpck_require__(54988);
-const path_1 = __nccwpck_require__(71017);
-const CHANGELOG_SECTIONS = [
-    { type: 'feat', section: 'Features' },
-    { type: 'fix', section: 'Bug Fixes' },
-    { type: 'perf', section: 'Performance Improvements' },
-    { type: 'revert', section: 'Reverts' },
-    { type: 'docs', section: 'Documentation' },
-    { type: 'style', section: 'Styles', hidden: true },
-    { type: 'chore', section: 'Miscellaneous Chores', hidden: true },
-    { type: 'refactor', section: 'Code Refactoring', hidden: true },
-    { type: 'test', section: 'Tests', hidden: true },
-    { type: 'build', section: 'Build System', hidden: true },
-    { type: 'ci', section: 'Continuous Integration', hidden: true },
-];
-const REGEN_PR_REGEX = /.*auto-regenerate.*/;
-const REGEN_ISSUE_REGEX = /(?<prefix>.*)\(#(?<pr>.*)\)(\n|$)/;
-class GoYoshi extends base_1.BaseStrategy {
-    constructor(options) {
-        var _a;
-        options.changelogPath = (_a = options.changelogPath) !== null && _a !== void 0 ?
_a : 'CHANGES.md'; - super({ - ...options, - changelogSections: CHANGELOG_SECTIONS, - }); - } - async buildUpdates(options) { - const updates = []; - const version = options.newVersion; - updates.push({ - path: this.addPath(this.changelogPath), - createIfMissing: true, - updater: new changelog_1.Changelog({ - version, - changelogEntry: options.changelogEntry, - }), - }); - updates.push({ - path: this.addPath('internal/version.go'), - createIfMissing: false, - updater: new version_go_1.VersionGo({ - version, - }), - }); - return updates; - } - async postProcessCommits(commits) { - let regenCommit; - const component = await this.getComponent(); - this.logger.debug('Filtering commits'); - const ignoredSubmodules = await this.getIgnoredSubModules(); - return commits.filter(commit => { - var _a, _b; - // Only have a single entry of the nightly regen listed in the changelog. - // If there are more than one of these commits, append associated PR. - if (this.repository.owner === 'googleapis' && - this.repository.repo === 'google-api-go-client' && - REGEN_PR_REGEX.test(commit.message)) { - if (regenCommit) { - const match = commit.message.match(REGEN_ISSUE_REGEX); - if ((_a = match === null || match === void 0 ? void 0 : match.groups) === null || _a === void 0 ? void 0 : _a.pr) { - regenCommit.references.push({ - action: 'refs', - issue: match.groups.pr, - prefix: '#', - }); - } - return false; - } - else { - commit.sha = ''; - regenCommit = commit; - const match = commit.bareMessage.match(REGEN_ISSUE_REGEX); - if ((_b = match === null || match === void 0 ? void 0 : match.groups) === null || _b === void 0 ? void 0 : _b.pr) { - regenCommit.references.push({ - action: 'refs', - issue: match.groups.pr, - prefix: '#', - }); - regenCommit.bareMessage = match.groups.prefix.trim(); - } - } - } - // For google-cloud-go, filter into 2 cases, a subset of modules - // released independently, and the remainder - if (this.repository.owner === 'googleapis' && - this.repository.repo === 'google-cloud-go') { - // Skip commits that don't have a scope as we don't know where to - // put them - if (!commit.scope) { - this.logger.debug(`Skipping commit without scope: ${commit.message}`); - return false; - } - // Skip commits related to sub-modules as they are not part of - // the parent module. 
- if (this.includeComponentInTag) { - // This is a submodule release, so only include commits in this - // scope - if (!commitMatchesScope(commit.scope, component)) { - this.logger.debug(`Skipping commit scope: ${commit.scope} != ${component}`); - return false; - } - } - else { - // This is the main module release, so ignore sub modules that - // are released independently - for (const submodule of ignoredSubmodules) { - if (commitMatchesScope(commit.scope, submodule)) { - this.logger.debug(`Skipping ignored commit scope: ${commit.scope}`); - return false; - } - } - } - } - return true; - }); - } - async getIgnoredSubModules() { - // ignored submodules only applies to the root component of - // googleapis/google-cloud-go - if (this.repository.owner !== 'googleapis' || - this.repository.repo !== 'google-cloud-go' || - this.includeComponentInTag) { - return new Set(); - } - this.logger.info('Looking for go.mod files'); - const paths = (await this.github.findFilesByFilenameAndRef('go.mod', this.targetBranch)) - .filter(path => !path.includes('internal') && path !== 'go.mod') - .map(path => (0, path_1.dirname)(path)); - this.logger.info(`Found ${paths.length} submodules`); - this.logger.debug(JSON.stringify(paths)); - return new Set(paths); - } - // "closes" is a little presumptuous, let's just indicate that the - // PR references these other commits: - async buildReleaseNotes(conventionalCommits, newVersion, newVersionTag, latestRelease, commits) { - const releaseNotes = await super.buildReleaseNotes(conventionalCommits, newVersion, newVersionTag, latestRelease, commits); - return releaseNotes.replace(/, closes /g, ', refs '); - } - initialReleaseVersion() { - return version_1.Version.parse('0.1.0'); - } -} -exports.GoYoshi = GoYoshi; -function commitMatchesScope(commitScope, scope) { - return commitScope === scope || commitScope.startsWith(`${scope}/`); -} -//# sourceMappingURL=go-yoshi.js.map - -/***/ }), - -/***/ 45953: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Go = void 0; -// Generic -const changelog_1 = __nccwpck_require__(3325); -const base_1 = __nccwpck_require__(95081); -class Go extends base_1.BaseStrategy { - async buildUpdates(options) { - const updates = []; - const version = options.newVersion; - updates.push({ - path: this.addPath(this.changelogPath), - createIfMissing: true, - updater: new changelog_1.Changelog({ - version, - changelogEntry: options.changelogEntry, - }), - }); - return updates; - } -} -exports.Go = Go; -//# sourceMappingURL=go.js.map - -/***/ }), - -/***/ 97687: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Helm = void 0; -// Generic -const changelog_1 = __nccwpck_require__(3325); -const yaml = __nccwpck_require__(21917); -// helm -const chart_yaml_1 = __nccwpck_require__(88368); -const base_1 = __nccwpck_require__(95081); -const errors_1 = __nccwpck_require__(93637); -class Helm extends base_1.BaseStrategy { - async buildUpdates(options) { - const updates = []; - const version = options.newVersion; - updates.push({ - path: this.addPath(this.changelogPath), - createIfMissing: true, - updater: new changelog_1.Changelog({ - version, - changelogEntry: options.changelogEntry, - }), - }); - updates.push({ - path: this.addPath('Chart.yaml'), - createIfMissing: false, - cachedFileContents: this.chartYmlContents, - updater: new chart_yaml_1.ChartYaml({ - version, - }), - }); - return updates; - } - async getDefaultPackageName() { - const chartYmlContents = await this.getChartYmlContents(); - const chart = yaml.load(chartYmlContents.parsedContent, { json: true }); - if (typeof chart === 'object') { - return chart.name; - } - else { - return undefined; - } - } - async getChartYmlContents() { - if (!this.chartYmlContents) { - try { - this.chartYmlContents = await this.github.getFileContents(this.addPath('Chart.yaml')); - } - catch (e) { - if (e instanceof errors_1.FileNotFoundError) { - throw new errors_1.MissingRequiredFileError(this.addPath('Chart.yaml'), Helm.name, `${this.repository.owner}/${this.repository.repo}`); - } - throw e; - } - } - return this.chartYmlContents; - } -} -exports.Helm = Helm; -//# sourceMappingURL=helm.js.map - -/***/ }), - -/***/ 77664: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2023 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
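
// Illustrative sketch (editorial, not part of the bundled source): the
// fetch-once caching that Helm.getChartYmlContents above relies on, so the
// same Chart.yaml contents back both getDefaultPackageName and the updater's
// cachedFileContents. The fetcher below is a stand-in for github.getFileContents.
class ChartSource {
  constructor(fetcher) {
    this.fetcher = fetcher; // async (path) => file contents
  }
  async getChartYmlContents() {
    if (!this.chartYmlContents) {
      this.chartYmlContents = await this.fetcher('Chart.yaml');
    }
    return this.chartYmlContents;
  }
}
(async () => {
  let fetches = 0;
  const source = new ChartSource(async () => {
    fetches += 1;
    return 'name: my-chart\nversion: 0.1.0';
  });
  await source.getChartYmlContents();
  await source.getChartYmlContents();
  console.log(fetches); // 1 -- the second call is served from the cache
})();
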
-Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.JavaYoshiMonoRepo = void 0; -const versions_manifest_1 = __nccwpck_require__(78345); -const version_1 = __nccwpck_require__(17348); -const changelog_1 = __nccwpck_require__(3325); -const changelog_json_1 = __nccwpck_require__(23719); -const commit_split_1 = __nccwpck_require__(6941); -const composite_1 = __nccwpck_require__(40911); -const errors_1 = __nccwpck_require__(93637); -const java_1 = __nccwpck_require__(46892); -const java_update_1 = __nccwpck_require__(90276); -const filter_commits_1 = __nccwpck_require__(26498); -class JavaYoshiMonoRepo extends java_1.Java { - /** - * Override this method to post process commits - * @param {ConventionalCommit[]} commits parsed commits - * @returns {ConventionalCommit[]} modified commits - */ - async postProcessCommits(commits) { - if (commits.length === 0) { - // For Java commits, push a fake commit so we force a - // SNAPSHOT release - commits.push({ - type: 'fake', - bareMessage: 'fake commit', - message: 'fake commit', - breaking: false, - scope: null, - notes: [], - files: [], - references: [], - sha: 'fake', - }); - } - return commits; - } - async needsSnapshot() { - return versions_manifest_1.VersionsManifest.needsSnapshot((await this.getVersionsContent()).parsedContent); - } - async buildVersionsMap() { - this.versionsContent = await this.getVersionsContent(); - return versions_manifest_1.VersionsManifest.parseVersions(this.versionsContent.parsedContent); - } - async getVersionsContent() { - if (!this.versionsContent) { - try { - this.versionsContent = await this.github.getFileContentsOnBranch(this.addPath('versions.txt'), this.targetBranch); - } - catch (err) { - if (err instanceof errors_1.GitHubAPIError) { - throw new errors_1.MissingRequiredFileError(this.addPath('versions.txt'), JavaYoshiMonoRepo.name, `${this.repository.owner}/${this.repository.repo}`); - } - throw err; - } - } - return this.versionsContent; - } - async buildUpdates(options) { - const updates = []; - const version = options.newVersion; - const versionsMap = options.versionsMap; - updates.push({ - path: this.addPath('versions.txt'), - createIfMissing: false, - cachedFileContents: this.versionsContent, - updater: new versions_manifest_1.VersionsManifest({ - version, - versionsMap, - }), - }); - const pomFilesSearch = this.github.findFilesByFilenameAndRef('pom.xml', this.targetBranch, this.path); - const buildFilesSearch = this.github.findFilesByFilenameAndRef('build.gradle', this.targetBranch, this.path); - const dependenciesSearch = this.github.findFilesByFilenameAndRef('dependencies.properties', this.targetBranch, this.path); - const readmeFilesSearch = this.github.findFilesByFilenameAndRef('README.md', this.targetBranch, this.path); - const pomFiles = await pomFilesSearch; - pomFiles.forEach(path => { - updates.push({ - path: this.addPath(path), - createIfMissing: false, - updater: new java_update_1.JavaUpdate({ - version, - versionsMap, - isSnapshot: options.isSnapshot, - }), - }); - }); - const buildFiles = await buildFilesSearch; - buildFiles.forEach(path => { - updates.push({ - path: this.addPath(path), - createIfMissing: false, - updater: new java_update_1.JavaUpdate({ - version, - versionsMap, - isSnapshot: options.isSnapshot, - }), - }); - }); - const dependenciesFiles = await dependenciesSearch; - dependenciesFiles.forEach(path => { - updates.push({ - path: this.addPath(path), - createIfMissing: false, - updater: new java_update_1.JavaUpdate({ - version, - versionsMap, - isSnapshot: 
options.isSnapshot, - }), - }); - }); - const readmeFiles = await readmeFilesSearch; - readmeFiles.forEach(path => { - updates.push({ - path: this.addPath(path), - createIfMissing: false, - updater: new java_update_1.JavaUpdate({ - version, - versionsMap, - isSnapshot: options.isSnapshot, - }), - }); - }); - this.extraFiles.forEach(extraFile => { - if (typeof extraFile === 'object') { - return; - } - updates.push({ - path: extraFile, - createIfMissing: false, - updater: new java_update_1.JavaUpdate({ - version, - versionsMap, - isSnapshot: options.isSnapshot, - }), - }); - }); - if (!options.isSnapshot) { - updates.push({ - path: this.addPath(this.changelogPath), - createIfMissing: true, - updater: new changelog_1.Changelog({ - version, - changelogEntry: options.changelogEntry, - }), - }); - // Bail early if the repository has no root changelog.json. - // This file is used to opt into machine readable commits. - const hasChangelogJson = await this.hasChangelogJson(); - if (hasChangelogJson && options.commits) { - const changelogUpdates = []; - const cs = new commit_split_1.CommitSplit({ - includeEmpty: false, - }); - const splitCommits = cs.split((0, filter_commits_1.filterCommits)(options.commits, this.changelogSections)); - for (const path of Object.keys(splitCommits)) { - const repoMetadata = await this.getRepoMetadata(path); - const artifactName = repoMetadata - ? repoMetadata['distribution_name'] - : null; - if (repoMetadata && artifactName) { - this.logger.info(`Found artifact ${artifactName} for ${path}`); - changelogUpdates.push(new changelog_json_1.ChangelogJson({ - artifactName, - version, - // We filter out "chore:" commits, to reduce noise in the upstream - // release notes. We will only show a product release note entry - // if there has been a substantial change, such as a fix or feature. - commits: splitCommits[path], - language: 'JAVA', - })); - } - } - updates.push({ - path: 'changelog.json', - createIfMissing: false, - updater: new composite_1.CompositeUpdater(...changelogUpdates), - }); - } - } - return updates; - } - async hasChangelogJson() { - try { - const content = await this.github.getFileContentsOnBranch('changelog.json', this.targetBranch); - return !!content; - } - catch (e) { - if (e instanceof errors_1.FileNotFoundError) - return false; - else - throw e; - } - } - async getRepoMetadata(path) { - try { - const content = await this.github.getFileContentsOnBranch(this.addPath(`${path}/.repo-metadata.json`), this.targetBranch); - return content ? 
JSON.parse(content.parsedContent) : null; - } - catch (e) { - if (e instanceof errors_1.FileNotFoundError) - return null; - else - throw e; - } - } - async updateVersionsMap(versionsMap, conventionalCommits) { - let isPromotion = false; - const modifiedCommits = []; - for (const commit of conventionalCommits) { - if (isPromotionCommit(commit)) { - isPromotion = true; - modifiedCommits.push({ - ...commit, - notes: commit.notes.filter(note => !isPromotionNote(note)), - }); - } - else { - modifiedCommits.push(commit); - } - } - for (const versionKey of versionsMap.keys()) { - const version = versionsMap.get(versionKey); - if (!version) { - this.logger.warn(`didn't find version for ${versionKey}`); - continue; - } - if (isPromotion && isStableArtifact(versionKey)) { - versionsMap.set(versionKey, version_1.Version.parse('1.0.0')); - } - else { - const newVersion = await this.versioningStrategy.bump(version, modifiedCommits); - versionsMap.set(versionKey, newVersion); - } - } - return versionsMap; - } - initialReleaseVersion() { - return version_1.Version.parse('0.1.0'); - } -} -exports.JavaYoshiMonoRepo = JavaYoshiMonoRepo; -const VERSIONED_ARTIFACT_REGEX = /^.*-(v\d+[^-]*)$/; -const VERSION_REGEX = /^v\d+(.*)$/; -/** - * Returns true if the artifact should be considered stable - * @param artifact name of the artifact to check - */ -function isStableArtifact(artifact) { - const match = artifact.match(VERSIONED_ARTIFACT_REGEX); - if (!match) { - // The artifact does not have a version qualifier at the end - return true; - } - const versionMatch = match[1].match(VERSION_REGEX); - if (versionMatch && versionMatch[1]) { - // The version is not stable (probably alpha/beta/rc) - return false; - } - return true; -} -function isPromotionCommit(commit) { - return commit.notes.some(isPromotionNote); -} -function isPromotionNote(note) { - return note.title === 'RELEASE AS' && note.text === '1.0.0'; -} -//# sourceMappingURL=java-yoshi-mono-repo.js.map - -/***/ }), - -/***/ 35330: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
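
// Usage sketch (editorial, not part of the bundled source) for the
// isStableArtifact helper defined above; the artifact names are hypothetical.
// An artifact is treated as stable unless it ends in a versioned qualifier
// whose version carries a pre-release remainder (alpha/beta/rc and the like).
const VERSIONED_ARTIFACT_REGEX = /^.*-(v\d+[^-]*)$/;
const VERSION_REGEX = /^v\d+(.*)$/;
function isStableArtifact(artifact) {
  const match = artifact.match(VERSIONED_ARTIFACT_REGEX);
  if (!match) {
    return true; // no version qualifier at the end of the name
  }
  const versionMatch = match[1].match(VERSION_REGEX);
  return !(versionMatch && versionMatch[1]); // 'v1' => stable, 'v1beta1' => not
}
console.log(isStableArtifact('google-cloud-storage')); // true
console.log(isStableArtifact('google-cloud-speech-v1')); // true
console.log(isStableArtifact('google-cloud-speech-v1p1beta1')); // false
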
-Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.JavaYoshi = void 0; -const versions_manifest_1 = __nccwpck_require__(78345); -const version_1 = __nccwpck_require__(17348); -const changelog_1 = __nccwpck_require__(3325); -const errors_1 = __nccwpck_require__(93637); -const java_1 = __nccwpck_require__(46892); -const java_update_1 = __nccwpck_require__(90276); -class JavaYoshi extends java_1.Java { - /** - * Override this method to post process commits - * @param {ConventionalCommit[]} commits parsed commits - * @returns {ConventionalCommit[]} modified commits - */ - async postProcessCommits(commits) { - if (commits.length === 0) { - // For Java commits, push a fake commit so we force a - // SNAPSHOT release - commits.push({ - type: 'fake', - bareMessage: 'fake commit', - message: 'fake commit', - breaking: false, - scope: null, - notes: [], - files: [], - references: [], - sha: 'fake', - }); - } - return commits; - } - async needsSnapshot() { - return versions_manifest_1.VersionsManifest.needsSnapshot((await this.getVersionsContent()).parsedContent); - } - async buildVersionsMap() { - this.versionsContent = await this.getVersionsContent(); - return versions_manifest_1.VersionsManifest.parseVersions(this.versionsContent.parsedContent); - } - async getVersionsContent() { - if (!this.versionsContent) { - try { - this.versionsContent = await this.github.getFileContentsOnBranch(this.addPath('versions.txt'), this.targetBranch); - } - catch (err) { - if (err instanceof errors_1.FileNotFoundError) { - throw new errors_1.MissingRequiredFileError(this.addPath('versions.txt'), JavaYoshi.name, `${this.repository.owner}/${this.repository.repo}`); - } - throw err; - } - } - return this.versionsContent; - } - async buildUpdates(options) { - const updates = []; - const version = options.newVersion; - const versionsMap = options.versionsMap; - updates.push({ - path: this.addPath('versions.txt'), - createIfMissing: false, - cachedFileContents: this.versionsContent, - updater: new versions_manifest_1.VersionsManifest({ - version, - versionsMap, - }), - }); - const pomFilesSearch = this.github.findFilesByFilenameAndRef('pom.xml', this.targetBranch, this.path); - const buildFilesSearch = this.github.findFilesByFilenameAndRef('build.gradle', this.targetBranch, this.path); - const dependenciesSearch = this.github.findFilesByFilenameAndRef('dependencies.properties', this.targetBranch, this.path); - const pomFiles = await pomFilesSearch; - pomFiles.forEach(path => { - updates.push({ - path: this.addPath(path), - createIfMissing: false, - updater: new java_update_1.JavaUpdate({ - version, - versionsMap, - isSnapshot: options.isSnapshot, - }), - }); - }); - const buildFiles = await buildFilesSearch; - buildFiles.forEach(path => { - updates.push({ - path: this.addPath(path), - createIfMissing: false, - updater: new java_update_1.JavaUpdate({ - version, - versionsMap, - isSnapshot: options.isSnapshot, - }), - }); - }); - const dependenciesFiles = await dependenciesSearch; - dependenciesFiles.forEach(path => { - updates.push({ - path: this.addPath(path), - createIfMissing: false, - updater: new java_update_1.JavaUpdate({ - version, - versionsMap, - isSnapshot: options.isSnapshot, - }), - }); - }); - this.extraFiles.forEach(extraFile => { - if (typeof extraFile === 'object') { - return; - } - updates.push({ - path: extraFile, - createIfMissing: false, - updater: new java_update_1.JavaUpdate({ - version, - versionsMap, - isSnapshot: options.isSnapshot, - }), - }); - }); - if 
(!options.isSnapshot) { - updates.push({ - path: this.addPath(this.changelogPath), - createIfMissing: true, - updater: new changelog_1.Changelog({ - version, - changelogEntry: options.changelogEntry, - }), - }); - } - return updates; - } - async updateVersionsMap(versionsMap, conventionalCommits) { - let isPromotion = false; - const modifiedCommits = []; - for (const commit of conventionalCommits) { - if (isPromotionCommit(commit)) { - isPromotion = true; - modifiedCommits.push({ - ...commit, - notes: commit.notes.filter(note => !isPromotionNote(note)), - }); - } - else { - modifiedCommits.push(commit); - } - } - for (const versionKey of versionsMap.keys()) { - const version = versionsMap.get(versionKey); - if (!version) { - this.logger.warn(`didn't find version for ${versionKey}`); - continue; - } - if (isPromotion && isStableArtifact(versionKey)) { - versionsMap.set(versionKey, version_1.Version.parse('1.0.0')); - } - else { - const newVersion = await this.versioningStrategy.bump(version, modifiedCommits); - versionsMap.set(versionKey, newVersion); - } - } - return versionsMap; - } - initialReleaseVersion() { - return version_1.Version.parse('0.1.0'); - } -} -exports.JavaYoshi = JavaYoshi; -const VERSIONED_ARTIFACT_REGEX = /^.*-(v\d+[^-]*)$/; -const VERSION_REGEX = /^v\d+(.*)$/; -/** - * Returns true if the artifact should be considered stable - * @param artifact name of the artifact to check - */ -function isStableArtifact(artifact) { - const match = artifact.match(VERSIONED_ARTIFACT_REGEX); - if (!match) { - // The artifact does not have a version qualifier at the end - return true; - } - const versionMatch = match[1].match(VERSION_REGEX); - if (versionMatch && versionMatch[1]) { - // The version is not stable (probably alpha/beta/rc) - return false; - } - return true; -} -function isPromotionCommit(commit) { - return commit.notes.some(isPromotionNote); -} -function isPromotionNote(note) { - return note.title === 'RELEASE AS' && note.text === '1.0.0'; -} -//# sourceMappingURL=java-yoshi.js.map - -/***/ }), - -/***/ 46892: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
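// [Editor's note] An illustrative commit shape (assumed, mirroring the fake
// commit pushed by postProcessCommits above) showing what the promotion logic
// in updateVersionsMap reacts to: a 'RELEASE AS: 1.0.0' note marks a promotion,
// after which every stable artifact in the versions map is pinned to 1.0.0.
//   const commit = {
//     type: 'feat', scope: null, breaking: false, sha: 'abc123',
//     bareMessage: 'promote to GA', message: 'feat: promote to GA',
//     files: [], references: [],
//     notes: [{ title: 'RELEASE AS', text: '1.0.0' }],
//   };
//   isPromotionCommit(commit); // true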
-Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Java = void 0; -const base_1 = __nccwpck_require__(95081); -const changelog_1 = __nccwpck_require__(3325); -const java_snapshot_1 = __nccwpck_require__(66860); -const pull_request_title_1 = __nccwpck_require__(1158); -const branch_name_1 = __nccwpck_require__(16344); -const pull_request_body_1 = __nccwpck_require__(70774); -const default_1 = __nccwpck_require__(94073); -const java_add_snapshot_1 = __nccwpck_require__(87719); -const manifest_1 = __nccwpck_require__(31999); -const java_released_1 = __nccwpck_require__(16255); -const composite_1 = __nccwpck_require__(40911); -const logger_1 = __nccwpck_require__(68809); -const CHANGELOG_SECTIONS = [ - { type: 'feat', section: 'Features' }, - { type: 'fix', section: 'Bug Fixes' }, - { type: 'perf', section: 'Performance Improvements' }, - { type: 'deps', section: 'Dependencies' }, - { type: 'revert', section: 'Reverts' }, - { type: 'docs', section: 'Documentation' }, - { type: 'style', section: 'Styles', hidden: true }, - { type: 'chore', section: 'Miscellaneous Chores', hidden: true }, - { type: 'refactor', section: 'Code Refactoring', hidden: true }, - { type: 'test', section: 'Tests', hidden: true }, - { type: 'build', section: 'Build System', hidden: true }, - { type: 'ci', section: 'Continuous Integration', hidden: true }, -]; -/** - * A strategy that generates SNAPSHOT version after each release, which is standard especially in Maven projects. - * - * This is universal strategy that does not update any files on its own. Use maven strategy for Maven projects. - */ -class Java extends base_1.BaseStrategy { - constructor(options) { - var _a, _b, _c; - options.changelogSections = (_a = options.changelogSections) !== null && _a !== void 0 ? _a : CHANGELOG_SECTIONS; - // wrap the configured versioning strategy with snapshotting - const parentVersioningStrategy = options.versioningStrategy || - new default_1.DefaultVersioningStrategy({ logger: (_b = options.logger) !== null && _b !== void 0 ? _b : logger_1.logger }); - options.versioningStrategy = new java_snapshot_1.JavaSnapshot(parentVersioningStrategy); - super(options); - this.snapshotVersioning = new java_add_snapshot_1.JavaAddSnapshot(parentVersioningStrategy); - this.snapshotLabels = options.snapshotLabels || manifest_1.DEFAULT_SNAPSHOT_LABELS; - this.skipSnapshot = (_c = options.skipSnapshot) !== null && _c !== void 0 ? _c : false; - } - async buildReleasePullRequest(commits, latestRelease, draft, labels = [], _bumpOnlyOptions) { - if (await this.needsSnapshot(commits, latestRelease)) { - this.logger.info('Repository needs a snapshot bump.'); - return await this.buildSnapshotPullRequest(latestRelease, draft, this.snapshotLabels); - } - this.logger.info('No Java snapshot needed'); - return await super.buildReleasePullRequest(commits, latestRelease, draft, labels); - } - async buildSnapshotPullRequest(latestRelease, draft, labels = []) { - const component = await this.getComponent(); - const newVersion = latestRelease - ? await this.snapshotVersioning.bump(latestRelease.tag.version, []) - : this.initialReleaseVersion(); - const versionsMap = await this.buildVersionsMap([]); - for (const [component, version] of versionsMap.entries()) { - versionsMap.set(component, await this.snapshotVersioning.bump(version, [])); - } - const pullRequestTitle = pull_request_title_1.PullRequestTitle.ofComponentTargetBranchVersion(component || '', this.targetBranch, newVersion); - const branchName = component - ? 
branch_name_1.BranchName.ofComponentTargetBranch(component, this.targetBranch) - : branch_name_1.BranchName.ofTargetBranch(this.targetBranch); - const notes = '### Updating meta-information for bleeding-edge SNAPSHOT release.'; - // TODO use pullrequest header here? - const pullRequestBody = new pull_request_body_1.PullRequestBody([ - { - component, - version: newVersion, - notes, - }, - ]); - const updates = await this.buildUpdates({ - newVersion, - versionsMap, - changelogEntry: notes, - isSnapshot: true, - commits: [], - }); - const updatesWithExtras = (0, composite_1.mergeUpdates)(updates.concat(...(await this.extraFileUpdates(newVersion, versionsMap)))); - return { - title: pullRequestTitle, - body: pullRequestBody, - updates: updatesWithExtras, - labels: [...labels, ...this.extraLabels], - headRefName: branchName.toString(), - version: newVersion, - draft: draft !== null && draft !== void 0 ? draft : false, - group: 'snapshot', - }; - } - isPublishedVersion(version) { - return !version.preRelease || version.preRelease.indexOf('SNAPSHOT') < 0; - } - async needsSnapshot(commits, latestRelease) { - var _a; - if (this.skipSnapshot) { - return false; - } - const component = await this.getComponent(); - this.logger.debug('component:', component); - const version = (_a = latestRelease === null || latestRelease === void 0 ? void 0 : latestRelease.tag) === null || _a === void 0 ? void 0 : _a.version; - if (!version) { - // Don't bump snapshots for the first release ever - return false; - } - // Found snapshot as a release, this is unexpected, but use it - if (!this.isPublishedVersion(version)) { - return false; - } - // Search commits for snapshot bump - const pullRequests = commits - .map(commit => { - var _a; - return pull_request_title_1.PullRequestTitle.parse(((_a = commit.pullRequest) === null || _a === void 0 ? void 0 : _a.title) || commit.message, this.pullRequestTitlePattern, this.componentNoSpace, this.logger); - }) - .filter(pullRequest => pullRequest); - const snapshotCommits = pullRequests - .filter(pullRequest => ((pullRequest === null || pullRequest === void 0 ? void 0 : pullRequest.component) || '') === component) - .map(pullRequest => pullRequest === null || pullRequest === void 0 ? void 0 : pullRequest.getVersion()) - .filter(version => version && !this.isPublishedVersion(version)); - return snapshotCommits.length === 0; - } - async buildUpdates(options) { - const version = options.newVersion; - const versionsMap = options.versionsMap; - const updates = []; - if (!options.isSnapshot) { - // Append java-specific updater for extraFiles - this.extraFiles.forEach(extraFile => { - if (typeof extraFile === 'string') { - updates.push({ - path: this.addPath(extraFile), - createIfMissing: false, - updater: new java_released_1.JavaReleased({ version, versionsMap }), - }); - } - }); - // Update changelog - updates.push({ - path: this.addPath(this.changelogPath), - createIfMissing: true, - updater: new changelog_1.Changelog({ - version, - changelogEntry: options.changelogEntry, - }), - }); - } - return updates; - } -} -exports.Java = Java; -//# sourceMappingURL=java.js.map - -/***/ }), - -/***/ 76397: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.KRMBlueprint = void 0; -// Generic -const changelog_1 = __nccwpck_require__(3325); -// KRM specific. -const krm_blueprint_version_1 = __nccwpck_require__(17490); -const base_1 = __nccwpck_require__(95081); -const version_1 = __nccwpck_require__(17348); -const KRMBlueprintAttribAnnotation = 'cnrm.cloud.google.com/blueprint'; -const hasKRMBlueprintAttrib = (content) => content.includes(KRMBlueprintAttribAnnotation); -class KRMBlueprint extends base_1.BaseStrategy { - async buildUpdates(options) { - const updates = []; - const version = options.newVersion; - updates.push({ - path: this.addPath(this.changelogPath), - createIfMissing: true, - updater: new changelog_1.Changelog({ - version, - changelogEntry: options.changelogEntry, - }), - }); - const versionsMap = new Map(); - if (options.latestVersion) { - versionsMap.set('previousVersion', options.latestVersion); - } - // Update version in all yaml files with attribution annotation - const yamlPaths = await this.github.findFilesByExtensionAndRef('yaml', this.targetBranch, this.path); - for (const yamlPath of yamlPaths) { - const contents = await this.github.getFileContents(this.addPath(yamlPath)); - if (hasKRMBlueprintAttrib(contents.parsedContent)) { - updates.push({ - path: this.addPath(yamlPath), - createIfMissing: false, - cachedFileContents: contents, - updater: new krm_blueprint_version_1.KRMBlueprintVersion({ - version, - versionsMap, - }), - }); - } - } - return updates; - } - initialReleaseVersion() { - return version_1.Version.parse('0.1.0'); - } -} -exports.KRMBlueprint = KRMBlueprint; -//# sourceMappingURL=krm-blueprint.js.map - -/***/ }), - -/***/ 60899: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Maven = void 0; -const java_1 = __nccwpck_require__(46892); -const java_released_1 = __nccwpck_require__(16255); -const generic_1 = __nccwpck_require__(96323); -const pom_xml_1 = __nccwpck_require__(60255); -/** - * Strategy for Maven projects. It generates SNAPSHOT version after each release, and updates all found - * pom.xml files automatically. 
- */ -class Maven extends java_1.Java { - async buildUpdates(options) { - const version = options.newVersion; - const versionsMap = options.versionsMap; - // Use generic Java updates - const updates = await super.buildUpdates(options); - // Update pom.xml files - const pomFiles = await this.github.findFilesByFilenameAndRef('pom.xml', this.targetBranch, this.path); - pomFiles.forEach(path => { - updates.push({ - path: this.addPath(path), - createIfMissing: false, - updater: new pom_xml_1.PomXml(version), - }); - if (!options.isSnapshot) { - updates.push({ - path: this.addPath(path), - createIfMissing: false, - updater: new java_released_1.JavaReleased({ version, versionsMap }), - }); - } - updates.push({ - path: this.addPath(path), - createIfMissing: false, - updater: new generic_1.Generic({ version, versionsMap }), - }); - }); - return updates; - } -} -exports.Maven = Maven; -//# sourceMappingURL=maven.js.map - -/***/ }), - -/***/ 78957: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2019 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Node = void 0; -const base_1 = __nccwpck_require__(95081); -const changelog_json_1 = __nccwpck_require__(23719); -const package_lock_json_1 = __nccwpck_require__(23443); -const samples_package_json_1 = __nccwpck_require__(68530); -const changelog_1 = __nccwpck_require__(3325); -const package_json_1 = __nccwpck_require__(26588); -const errors_1 = __nccwpck_require__(93637); -const filter_commits_1 = __nccwpck_require__(26498); -class Node extends base_1.BaseStrategy { - async buildUpdates(options) { - var _a; - const updates = []; - const version = options.newVersion; - const versionsMap = options.versionsMap; - const packageName = (_a = (await this.getPackageName())) !== null && _a !== void 0 ? 
_a : ''; - const lockFiles = ['package-lock.json', 'npm-shrinkwrap.json']; - lockFiles.forEach(lockFile => { - updates.push({ - path: this.addPath(lockFile), - createIfMissing: false, - updater: new package_lock_json_1.PackageLockJson({ - version, - versionsMap, - }), - }); - }); - updates.push({ - path: this.addPath('samples/package.json'), - createIfMissing: false, - updater: new samples_package_json_1.SamplesPackageJson({ - version, - packageName, - }), - }); - updates.push({ - path: this.addPath(this.changelogPath), - createIfMissing: true, - updater: new changelog_1.Changelog({ - version, - changelogEntry: options.changelogEntry, - }), - }); - updates.push({ - path: this.addPath('package.json'), - createIfMissing: false, - cachedFileContents: this.pkgJsonContents, - updater: new package_json_1.PackageJson({ - version, - }), - }); - // If a machine readable changelog.json exists update it: - if (options.commits && packageName) { - const commits = (0, filter_commits_1.filterCommits)(options.commits, this.changelogSections); - updates.push({ - path: 'changelog.json', - createIfMissing: false, - updater: new changelog_json_1.ChangelogJson({ - artifactName: packageName, - version, - commits, - language: 'JAVASCRIPT', - }), - }); - } - return updates; - } - async getDefaultPackageName() { - const pkgJsonContents = await this.getPkgJsonContents(); - const pkg = JSON.parse(pkgJsonContents.parsedContent); - return pkg.name; - } - normalizeComponent(component) { - if (!component) { - return ''; - } - return component.match(/^@[\w-]+\//) ? component.split('/')[1] : component; - } - async getPkgJsonContents() { - if (!this.pkgJsonContents) { - try { - this.pkgJsonContents = await this.github.getFileContentsOnBranch(this.addPath('package.json'), this.targetBranch); - } - catch (e) { - if (e instanceof errors_1.FileNotFoundError) { - throw new errors_1.MissingRequiredFileError(this.addPath('package.json'), 'node', `${this.repository.owner}/${this.repository.repo}`); - } - throw e; - } - } - return this.pkgJsonContents; - } -} -exports.Node = Node; -//# sourceMappingURL=node.js.map - -/***/ }), - -/***/ 72064: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
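// [Editor's note] A quick illustration of Node.normalizeComponent above, which
// strips an npm scope from the component name (example names are hypothetical):
//   normalizeComponent('@google-cloud/storage'); // => 'storage'
//   normalizeComponent('release-please');        // => 'release-please'
//   normalizeComponent(undefined);               // => ''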
-Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.OCaml = void 0; -// Generic -const changelog_1 = __nccwpck_require__(3325); -// OCaml -const opam_1 = __nccwpck_require__(4401); -const esy_json_1 = __nccwpck_require__(38500); -const dune_project_1 = __nccwpck_require__(36275); -const base_1 = __nccwpck_require__(95081); -const notEsyLock = (path) => !path.startsWith('esy.lock'); -class OCaml extends base_1.BaseStrategy { - async buildUpdates(options) { - const updates = []; - const version = options.newVersion; - updates.push({ - path: this.addPath(this.changelogPath), - createIfMissing: true, - updater: new changelog_1.Changelog({ - version, - changelogEntry: options.changelogEntry, - }), - }); - const jsonPaths = await this.github.findFilesByExtension('json', this.path); - for (const path of jsonPaths) { - if (notEsyLock(path)) { - const contents = await this.github.getFileContents(this.addPath(path)); - const pkg = JSON.parse(contents.parsedContent); - if (pkg.version !== undefined) { - updates.push({ - path: this.addPath(path), - createIfMissing: false, - cachedFileContents: contents, - updater: new esy_json_1.EsyJson({ - version, - }), - }); - } - } - } - const opamPaths = await this.github.findFilesByExtension('opam', this.path); - opamPaths.filter(notEsyLock).forEach(path => { - updates.push({ - path: this.addPath(path), - createIfMissing: false, - updater: new opam_1.Opam({ - version, - }), - }); - }); - const opamLockedPaths = await this.github.findFilesByExtension('opam.locked', this.path); - opamLockedPaths.filter(notEsyLock).forEach(path => { - updates.push({ - path: this.addPath(path), - createIfMissing: false, - updater: new opam_1.Opam({ - version, - }), - }); - }); - updates.push({ - path: this.addPath('dune-project'), - createIfMissing: false, - updater: new dune_project_1.DuneProject({ - version, - }), - }); - return updates; - } -} -exports.OCaml = OCaml; -//# sourceMappingURL=ocaml.js.map - -/***/ }), - -/***/ 88460: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
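// [Editor's note] A small sketch (not part of the bundle) of the OCaml
// strategy's file selection above: anything under esy.lock is skipped, and a
// JSON file only receives the EsyJson updater when it declares a version field.
//   notEsyLock('esy.lock/index.json'); // false - ignored
//   notEsyLock('esy.json');            // true  - considered
//   JSON.parse('{"name": "demo", "version": "0.1.0"}').version !== undefined; // true - would be updated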
-Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.PHPYoshi = void 0; -const base_1 = __nccwpck_require__(95081); -const changelog_1 = __nccwpck_require__(3325); -const root_composer_update_packages_1 = __nccwpck_require__(45175); -const php_client_version_1 = __nccwpck_require__(50005); -const version_1 = __nccwpck_require__(17348); -const commit_1 = __nccwpck_require__(69158); -const commit_split_1 = __nccwpck_require__(6941); -const default_1 = __nccwpck_require__(69995); -const tag_name_1 = __nccwpck_require__(36503); -const pull_request_title_1 = __nccwpck_require__(1158); -const branch_name_1 = __nccwpck_require__(16344); -const pull_request_body_1 = __nccwpck_require__(70774); -const errors_1 = __nccwpck_require__(93637); -const CHANGELOG_SECTIONS = [ - { type: 'feat', section: 'Features' }, - { type: 'fix', section: 'Bug Fixes' }, - { type: 'perf', section: 'Performance Improvements' }, - { type: 'revert', section: 'Reverts' }, - { type: 'docs', section: 'Documentation' }, - { type: 'misc', section: 'Miscellaneous' }, - { type: 'chore', section: 'Chores', hidden: true }, - { type: 'style', section: 'Styles', hidden: true }, - { type: 'refactor', section: 'Code Refactoring', hidden: true }, - { type: 'test', section: 'Tests', hidden: true }, - { type: 'build', section: 'Build System', hidden: true }, - { type: 'ci', section: 'Continuous Integration', hidden: true }, -]; -class PHPYoshi extends base_1.BaseStrategy { - constructor(options) { - super({ - ...options, - changelogSections: CHANGELOG_SECTIONS, - }); - } - async buildReleasePullRequest(commits, latestRelease, draft, labels = [], bumpOnlyOptions) { - var _a, _b, _c; - const conventionalCommits = await this.postProcessCommits((0, commit_1.parseConventionalCommits)(commits, this.logger)); - if (!bumpOnlyOptions && conventionalCommits.length === 0) { - this.logger.info(`No commits for path: ${this.path}, skipping`); - return undefined; - } - const versionOverrides = {}; - commits.forEach(commit => { - var _a; - Object.entries(parseVersionOverrides(((_a = commit.pullRequest) === null || _a === void 0 ? void 0 : _a.body) || '')).forEach(([directory, version]) => { - versionOverrides[directory] = version; - }); - }); - const newVersion = latestRelease - ? await this.versioningStrategy.bump(latestRelease.tag.version, conventionalCommits) - : this.initialReleaseVersion(); - const cs = new commit_split_1.CommitSplit(); - const splitCommits = cs.split(conventionalCommits); - const topLevelDirectories = Object.keys(splitCommits).sort(); - const versionsMap = new Map(); - const directoryVersionContents = {}; - const component = await this.getComponent(); - const newVersionTag = new tag_name_1.TagName(newVersion, component, this.tagSeparator, this.includeVInTag); - let releaseNotesBody = `## ${newVersion.toString()}`; - for (const directory of topLevelDirectories) { - try { - const contents = await this.github.getFileContentsOnBranch(this.addPath(`${directory}/VERSION`), this.targetBranch); - const composer = await this.github.getFileJson(this.addPath(`${directory}/composer.json`), this.targetBranch); - directoryVersionContents[directory] = { - versionContents: contents, - composer, - }; - const newVersion = versionOverrides[directory] - ? 
version_1.Version.parse(versionOverrides[directory]) - : await this.versioningStrategy.bump(version_1.Version.parse(contents.parsedContent), splitCommits[directory]); - versionsMap.set(composer.name, newVersion); - const partialReleaseNotes = await this.changelogNotes.buildNotes(splitCommits[directory], { - host: this.changelogHost, - owner: this.repository.owner, - repository: this.repository.repo, - version: newVersion.toString(), - previousTag: (_a = latestRelease === null || latestRelease === void 0 ? void 0 : latestRelease.tag) === null || _a === void 0 ? void 0 : _a.toString(), - currentTag: newVersionTag.toString(), - targetBranch: this.targetBranch, - changelogSections: this.changelogSections, - }); - releaseNotesBody = updatePHPChangelogEntry(`${composer.name} ${newVersion.toString()}`, releaseNotesBody, partialReleaseNotes); - } - catch (err) { - if (err instanceof errors_1.FileNotFoundError) { - // if the updated path has no VERSION, assume this isn't a - // module that needs updating. - continue; - } - else { - throw err; - } - } - } - const pullRequestTitle = pull_request_title_1.PullRequestTitle.ofComponentTargetBranchVersion(component || '', this.targetBranch, newVersion); - const branchName = component - ? branch_name_1.BranchName.ofComponentTargetBranch(component, this.targetBranch) - : branch_name_1.BranchName.ofTargetBranch(this.targetBranch); - const updates = await this.buildUpdates({ - changelogEntry: releaseNotesBody, - newVersion, - versionsMap, - latestVersion: latestRelease === null || latestRelease === void 0 ? void 0 : latestRelease.tag.version, - commits: conventionalCommits, // TODO(@bcoe): these commits will need to be divided into multiple changelog.json updates. - }); - for (const directory in directoryVersionContents) { - const componentInfo = directoryVersionContents[directory]; - const version = versionsMap.get(componentInfo.composer.name); - if (!version) { - this.logger.warn(`No version found for ${componentInfo.composer.name}`); - continue; - } - updates.push({ - path: this.addPath(`${directory}/VERSION`), - createIfMissing: false, - cachedFileContents: componentInfo.versionContents, - updater: new default_1.DefaultUpdater({ - version, - }), - }); - updates.push({ - path: this.addPath(`${directory}/composer.json`), - createIfMissing: false, - updater: new root_composer_update_packages_1.RootComposerUpdatePackages({ - version, - }), - }); - if ((_c = (_b = componentInfo.composer.extra) === null || _b === void 0 ? void 0 : _b.component) === null || _c === void 0 ? void 0 : _c.entry) { - updates.push({ - path: this.addPath(`${directory}/${componentInfo.composer.extra.component.entry}`), - createIfMissing: false, - updater: new php_client_version_1.PHPClientVersion({ - version, - }), - }); - } - } - // TODO use pullrequest header here? - const pullRequestBody = new pull_request_body_1.PullRequestBody([ - { - component, - version: newVersion, - notes: releaseNotesBody, - }, - ]); - return { - title: pullRequestTitle, - body: pullRequestBody, - updates, - labels: [...labels, ...this.extraLabels], - headRefName: branchName.toString(), - version: newVersion, - draft: draft !== null && draft !== void 0 ? draft : false, - }; - } - async parsePullRequestBody(pullRequestBody) { - const body = pull_request_body_1.PullRequestBody.parse(pullRequestBody, this.logger); - if (!body) { - return undefined; - } - const component = await this.getComponent(); - const notes = body.releaseData - .map(release => { - var _a; - return `
<details><summary>${release.component}: ${(_a = release.version) === null || _a === void 0 ? void 0 : _a.toString()}</summary>\n\n${release.notes}\n</details>
`; - }) - .join('\n\n'); - return new pull_request_body_1.PullRequestBody([{ component, notes }], { - footer: body.footer, - header: body.header, - }); - } - async buildUpdates(options) { - const updates = []; - const version = options.newVersion; - const versionsMap = options.versionsMap; - updates.push({ - path: this.addPath(this.changelogPath), - createIfMissing: true, - updater: new changelog_1.Changelog({ - version, - changelogEntry: options.changelogEntry, - }), - }); - // update VERSION file - updates.push({ - path: this.addPath('VERSION'), - createIfMissing: false, - updater: new default_1.DefaultUpdater({ - version, - }), - }); - // update the aggregate package information in the root composer.json - updates.push({ - path: this.addPath('composer.json'), - createIfMissing: false, - updater: new root_composer_update_packages_1.RootComposerUpdatePackages({ - version, - versionsMap, - }), - }); - return updates; - } -} -exports.PHPYoshi = PHPYoshi; -function parseVersionOverrides(body) { - // look for 'BEGIN_VERSION_OVERRIDE' section of pull request body - const versionOverrides = {}; - if (body) { - const overrideMessage = (body.split('BEGIN_VERSION_OVERRIDE')[1] || '') - .split('END_VERSION_OVERRIDE')[0] - .trim(); - if (overrideMessage) { - overrideMessage.split('\n').forEach(line => { - const [directory, version] = line.split(':'); - versionOverrides[directory.trim()] = version.trim(); - }); - } - } - return versionOverrides; -} -function updatePHPChangelogEntry(pkgKey, changelogEntry, entryUpdate) { - // Remove the first line of the entry, in favor of <summary>. - // This also allows us to use the same regex for extracting release - // notes (since the string "## v0.0.0" doesn't show up multiple times). - const entryUpdateSplit = entryUpdate.split(/\r?\n/); - entryUpdateSplit.shift(); - entryUpdate = entryUpdateSplit.join('\n'); - return `${changelogEntry} - -
<details><summary>${pkgKey}</summary> - -${entryUpdate} - -</details>
`; -} -//# sourceMappingURL=php-yoshi.js.map - -/***/ }), - -/***/ 57658: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.PHP = void 0; -// Generic -const changelog_1 = __nccwpck_require__(3325); -// PHP Specific. -const root_composer_update_packages_1 = __nccwpck_require__(45175); -const base_1 = __nccwpck_require__(95081); -const default_1 = __nccwpck_require__(69995); -const CHANGELOG_SECTIONS = [ - { type: 'feat', section: 'Features' }, - { type: 'fix', section: 'Bug Fixes' }, - { type: 'perf', section: 'Performance Improvements' }, - { type: 'revert', section: 'Reverts' }, - { type: 'chore', section: 'Miscellaneous Chores' }, - { type: 'docs', section: 'Documentation', hidden: true }, - { type: 'style', section: 'Styles', hidden: true }, - { type: 'refactor', section: 'Code Refactoring', hidden: true }, - { type: 'test', section: 'Tests', hidden: true }, - { type: 'build', section: 'Build System', hidden: true }, - { type: 'ci', section: 'Continuous Integration', hidden: true }, -]; -class PHP extends base_1.BaseStrategy { - constructor(options) { - var _a; - options.changelogSections = (_a = options.changelogSections) !== null && _a !== void 0 ? _a : CHANGELOG_SECTIONS; - super(options); - } - async buildUpdates(options) { - const updates = []; - const version = options.newVersion; - const versionsMap = new Map(); - updates.push({ - path: this.addPath(this.changelogPath), - createIfMissing: true, - updater: new changelog_1.Changelog({ - version, - changelogEntry: options.changelogEntry, - }), - }); - // update composer.json - updates.push({ - path: this.addPath('composer.json'), - createIfMissing: false, - updater: new root_composer_update_packages_1.RootComposerUpdatePackages({ - version, - versionsMap, - }), - }); - // update VERSION file - updates.push({ - path: this.addPath('VERSION'), - createIfMissing: false, - updater: new default_1.DefaultUpdater({ - version, - }), - }); - return updates; - } -} -exports.PHP = PHP; -//# sourceMappingURL=php.js.map - -/***/ }), - -/***/ 32109: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2019 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
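// [Editor's note] A worked example (hypothetical pull request body) of the
// version-override parsing in parseVersionOverrides above:
//   const body = 'BEGIN_VERSION_OVERRIDE\nStorage: 2.0.0\nPubSub: 1.5.1\nEND_VERSION_OVERRIDE';
//   parseVersionOverrides(body); // => { Storage: '2.0.0', PubSub: '1.5.1' }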
-Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Python = void 0; -const base_1 = __nccwpck_require__(95081); -const changelog_1 = __nccwpck_require__(3325); -const changelog_json_1 = __nccwpck_require__(23719); -const version_1 = __nccwpck_require__(17348); -const setup_cfg_1 = __nccwpck_require__(40483); -const setup_py_1 = __nccwpck_require__(11519); -const pyproject_toml_1 = __nccwpck_require__(89290); -const python_file_with_version_1 = __nccwpck_require__(70464); -const errors_1 = __nccwpck_require__(93637); -const filter_commits_1 = __nccwpck_require__(26498); -const CHANGELOG_SECTIONS = [ - { type: 'feat', section: 'Features' }, - { type: 'fix', section: 'Bug Fixes' }, - { type: 'perf', section: 'Performance Improvements' }, - { type: 'deps', section: 'Dependencies' }, - { type: 'revert', section: 'Reverts' }, - { type: 'docs', section: 'Documentation' }, - { type: 'style', section: 'Styles', hidden: true }, - { type: 'chore', section: 'Miscellaneous Chores', hidden: true }, - { type: 'refactor', section: 'Code Refactoring', hidden: true }, - { type: 'test', section: 'Tests', hidden: true }, - { type: 'build', section: 'Build System', hidden: true }, - { type: 'ci', section: 'Continuous Integration', hidden: true }, -]; -class Python extends base_1.BaseStrategy { - constructor(options) { - var _a; - options.changelogSections = (_a = options.changelogSections) !== null && _a !== void 0 ? _a : CHANGELOG_SECTIONS; - super(options); - } - async buildUpdates(options) { - var _a; - const updates = []; - const version = options.newVersion; - updates.push({ - path: this.addPath(this.changelogPath), - createIfMissing: true, - updater: new changelog_1.Changelog({ - version, - changelogEntry: options.changelogEntry, - }), - }); - updates.push({ - path: this.addPath('setup.cfg'), - createIfMissing: false, - updater: new setup_cfg_1.SetupCfg({ - version, - }), - }); - updates.push({ - path: this.addPath('setup.py'), - createIfMissing: false, - updater: new setup_py_1.SetupPy({ - version, - }), - }); - const parsedPyProject = await this.getPyProject(this.addPath('pyproject.toml')); - const pyProject = (parsedPyProject === null || parsedPyProject === void 0 ? void 0 : parsedPyProject.project) || ((_a = parsedPyProject === null || parsedPyProject === void 0 ? void 0 : parsedPyProject.tool) === null || _a === void 0 ? void 0 : _a.poetry); - let projectName = this.component; - if (pyProject) { - updates.push({ - path: this.addPath('pyproject.toml'), - createIfMissing: false, - updater: new pyproject_toml_1.PyProjectToml({ - version, - }), - }); - projectName = pyProject.name; - } - else { - this.logger.warn(parsedPyProject - ? 
'invalid pyproject.toml' - : `file ${this.addPath('pyproject.toml')} did not exist`); - } - if (!projectName) { - this.logger.warn('No project/component found.'); - } - else { - [projectName, projectName.replace(/-/g, '_')] - .flatMap(packageName => [ - `${packageName}/__init__.py`, - `src/${packageName}/__init__.py`, - ]) - .forEach(packagePath => updates.push({ - path: this.addPath(packagePath), - createIfMissing: false, - updater: new python_file_with_version_1.PythonFileWithVersion({ version }), - })); - } - // There should be only one version.py, but forEach in case that is incorrect - const versionPyFilesSearch = this.github.findFilesByFilenameAndRef('version.py', this.targetBranch, this.path); - const versionPyFiles = await versionPyFilesSearch; - versionPyFiles.forEach(path => { - updates.push({ - path: this.addPath(path), - createIfMissing: false, - updater: new python_file_with_version_1.PythonFileWithVersion({ - version, - }), - }); - }); - // If a machine readable changelog.json exists, update it: - const artifactName = projectName !== null && projectName !== void 0 ? projectName : (await this.getNameFromSetupPy()); - if (options.commits && artifactName) { - const commits = (0, filter_commits_1.filterCommits)(options.commits, this.changelogSections); - updates.push({ - path: 'changelog.json', - createIfMissing: false, - updater: new changelog_json_1.ChangelogJson({ - artifactName, - version, - commits, - language: 'PYTHON', - }), - }); - } - return updates; - } - async getPyProject(path) { - try { - const content = await this.github.getFileContentsOnBranch(path, this.targetBranch); - return (0, pyproject_toml_1.parsePyProject)(content.parsedContent); - } - catch (e) { - return null; - } - } - async getNameFromSetupPy() { - var _a; - const ARTIFACT_NAME_REGEX = /name *= *['"](?<name>.*)['"](\r|\n|$)/; - const setupPyContents = await this.getSetupPyContents(); - if (setupPyContents) { - const match = setupPyContents.match(ARTIFACT_NAME_REGEX); - if (match && ((_a = match === null || match === void 0 ? void 0 : match.groups) === null || _a === void 0 ? void 0 : _a.name)) { - return match.groups.name; - } - } - return null; - } - async getSetupPyContents() { - try { - return (await this.github.getFileContentsOnBranch(this.addPath('setup.py'), this.targetBranch)).parsedContent; - } - catch (e) { - if (e instanceof errors_1.FileNotFoundError) { - return null; - } - else { - throw e; - } - } - } - initialReleaseVersion() { - return version_1.Version.parse('0.1.0'); - } -} -exports.Python = Python; -//# sourceMappingURL=python.js.map - -/***/ }), - -/***/ 72294: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2019 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License.
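// [Editor's note] A short illustration (hypothetical project name) of the
// __init__.py path expansion in Python.buildUpdates above; both the dashed and
// underscored package layouts are tried, in flat and src/ form:
//   ['my-project', 'my_project'].flatMap(p => [`${p}/__init__.py`, `src/${p}/__init__.py`]);
//   // => ['my-project/__init__.py', 'src/my-project/__init__.py',
//   //     'my_project/__init__.py', 'src/my_project/__init__.py']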
-Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.RubyYoshi = void 0; -const indent_commit_1 = __nccwpck_require__(13170); -// Generic -const changelog_1 = __nccwpck_require__(3325); -// RubyYoshi -const version_rb_1 = __nccwpck_require__(46979); -const base_1 = __nccwpck_require__(95081); -const fs_1 = __nccwpck_require__(57147); -const path_1 = __nccwpck_require__(71017); -const CHANGELOG_SECTIONS = [ - { type: 'feat', section: 'Features' }, - { type: 'fix', section: 'Bug Fixes' }, - { type: 'perf', section: 'Performance Improvements' }, - { type: 'revert', section: 'Reverts' }, - { type: 'docs', section: 'Documentation' }, - { type: 'style', section: 'Styles', hidden: true }, - { type: 'chore', section: 'Miscellaneous Chores', hidden: true }, - { type: 'refactor', section: 'Code Refactoring', hidden: true }, - { type: 'test', section: 'Tests', hidden: true }, - { type: 'build', section: 'Build System', hidden: true }, - { type: 'ci', section: 'Continuous Integration', hidden: true }, -]; -class RubyYoshi extends base_1.BaseStrategy { - constructor(options) { - var _a; - super({ - ...options, - changelogSections: CHANGELOG_SECTIONS, - commitPartial: (0, fs_1.readFileSync)((0, path_1.resolve)(__dirname, '../../../templates/commit.hbs'), 'utf8'), - headerPartial: (0, fs_1.readFileSync)((0, path_1.resolve)(__dirname, '../../../templates/header.hbs'), 'utf8'), - mainTemplate: (0, fs_1.readFileSync)((0, path_1.resolve)(__dirname, '../../../templates/template.hbs'), 'utf8'), - tagSeparator: '/', - }); - this.versionFile = (_a = options.versionFile) !== null && _a !== void 0 ? _a : ''; - } - async buildUpdates(options) { - const updates = []; - const version = options.newVersion; - updates.push({ - path: this.addPath(this.changelogPath), - createIfMissing: true, - updater: new changelog_1.Changelog({ - version, - changelogEntry: options.changelogEntry, - }), - }); - const versionFile = this.versionFile - ? this.versionFile - : `lib/${(this.component || '').replace(/-/g, '/')}/version.rb`; - updates.push({ - path: this.addPath(versionFile), - createIfMissing: false, - updater: new version_rb_1.VersionRB({ - version, - }), - }); - return updates; - } - async postProcessCommits(commits) { - commits.forEach(commit => { - commit.message = (0, indent_commit_1.indentCommit)(commit); - }); - return commits; - } - async buildReleaseNotes(conventionalCommits, newVersion, newVersionTag, latestRelease, commits) { - const releaseNotes = await super.buildReleaseNotes(conventionalCommits, newVersion, newVersionTag, latestRelease, commits); - return (releaseNotes - // Remove links in version title line and standardize on h3 - .replace(/^###? \[([\d.]+)\]\([^)]*\)/gm, '### $1') - // Remove bolded scope from change lines - .replace(/^\* \*\*[\w-]+:\*\* /gm, '* ') - // Remove PR and commit links from pull request title suffixes - .replace(/(\(\[(\w+)\]\(https:\/\/github\.com\/[^)]*\)\))+\s*$/gm, '') - // Standardize on h4 for change type subheaders - .replace(/^### (Features|Bug Fixes|Documentation)$/gm, '#### $1') - // Collapse 2 or more blank lines - .replace(/\n{3,}/g, '\n\n')); - } -} -exports.RubyYoshi = RubyYoshi; -//# sourceMappingURL=ruby-yoshi.js.map - -/***/ }), - -/***/ 68142: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2019 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Ruby = void 0; -const indent_commit_1 = __nccwpck_require__(13170); -// Generic -const changelog_1 = __nccwpck_require__(3325); -// Ruby -const version_rb_1 = __nccwpck_require__(46979); -const gemfile_lock_1 = __nccwpck_require__(72221); -const base_1 = __nccwpck_require__(95081); -class Ruby extends base_1.BaseStrategy { - constructor(options) { - var _a; - super(options); - this.versionFile = (_a = options.versionFile) !== null && _a !== void 0 ? _a : ''; - this.tagSeparator = '/'; - } - async buildUpdates(options) { - const updates = []; - const version = options.newVersion; - updates.push({ - path: this.addPath(this.changelogPath), - createIfMissing: true, - updater: new changelog_1.Changelog({ - version, - changelogEntry: options.changelogEntry, - }), - }); - const versionFile = this.versionFile - ? this.versionFile - : `lib/${(this.component || '').replace(/-/g, '/')}/version.rb`; - updates.push({ - path: this.addPath(versionFile), - createIfMissing: false, - updater: new version_rb_1.VersionRB({ - version, - }), - }); - updates.push({ - path: this.addPath('Gemfile.lock'), - createIfMissing: false, - updater: new gemfile_lock_1.GemfileLock({ - version, - gemName: this.component || '', - }), - }); - return updates; - } - async postProcessCommits(commits) { - commits.forEach(commit => { - commit.message = (0, indent_commit_1.indentCommit)(commit); - }); - return commits; - } -} -exports.Ruby = Ruby; -//# sourceMappingURL=ruby.js.map - -/***/ }), - -/***/ 43066: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
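// [Editor's note] A minimal sketch (hypothetical gem name) of the default
// version-file path computed in Ruby.buildUpdates above when no versionFile
// option is supplied:
//   const component = 'google-cloud-storage';
//   `lib/${component.replace(/-/g, '/')}/version.rb`; // => 'lib/google/cloud/storage/version.rb'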
-Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Rust = void 0; -// Generic -const changelog_1 = __nccwpck_require__(3325); -// Cargo.toml support -const cargo_toml_1 = __nccwpck_require__(90420); -const cargo_lock_1 = __nccwpck_require__(68875); -const common_1 = __nccwpck_require__(11659); -const base_1 = __nccwpck_require__(95081); -const version_1 = __nccwpck_require__(17348); -class Rust extends base_1.BaseStrategy { - async buildUpdates(options) { - var _a, _b, _c; - const updates = []; - const version = options.newVersion; - updates.push({ - path: this.addPath(this.changelogPath), - createIfMissing: true, - updater: new changelog_1.Changelog({ - version, - changelogEntry: options.changelogEntry, - }), - }); - const workspaceManifest = await this.getPackageManifest(); - const versionsMap = new Map(); - if ((_a = workspaceManifest === null || workspaceManifest === void 0 ? void 0 : workspaceManifest.workspace) === null || _a === void 0 ? void 0 : _a.members) { - const members = workspaceManifest.workspace.members; - if ((_b = workspaceManifest.package) === null || _b === void 0 ? void 0 : _b.name) { - versionsMap.set(workspaceManifest.package.name, version); - } - else { - this.logger.warn('No workspace manifest package name found'); - } - this.logger.info(`found workspace with ${members.length} members, upgrading all`); - // Collect submodule names to update - const manifestsByPath = new Map(); - for (const member of members) { - const manifestPath = `${member}/Cargo.toml`; - const manifestContent = await this.getContent(manifestPath); - if (!manifestContent) { - this.logger.warn(`member ${member} declared but did not find Cargo.toml`); - continue; - } - const manifest = (0, common_1.parseCargoManifest)(manifestContent.parsedContent); - manifestsByPath.set(manifestPath, manifestContent); - if (!((_c = manifest.package) === null || _c === void 0 ? void 0 : _c.name)) { - this.logger.warn(`member ${member} has no package name`); - continue; - } - versionsMap.set(manifest.package.name, version); - } - this.logger.info(`updating ${manifestsByPath.size} submodules`); - this.logger.debug('versions map:', versionsMap); - for (const [manifestPath, manifestContent] of manifestsByPath) { - updates.push({ - path: this.addPath(manifestPath), - createIfMissing: false, - cachedFileContents: manifestContent, - updater: new cargo_toml_1.CargoToml({ - version, - versionsMap, - }), - }); - } - // Update root Cargo.toml - updates.push({ - path: this.addPath('Cargo.toml'), - createIfMissing: false, - updater: new cargo_toml_1.CargoToml({ - version, - versionsMap, - }), - }); - } - else { - this.logger.info('single crate found, updating Cargo.toml'); - const packageName = await this.getDefaultPackageName(); - if (packageName) { - versionsMap.set(packageName, version); - } - else { - this.logger.warn('No crate package name found'); - } - updates.push({ - path: this.addPath('Cargo.toml'), - createIfMissing: false, - updater: new cargo_toml_1.CargoToml({ - version, - versionsMap, - }), - }); - } - updates.push({ - path: this.addPath('Cargo.lock'), - createIfMissing: false, - updater: new cargo_lock_1.CargoLock(versionsMap), - }); - return updates; - } - initialReleaseVersion() { - return version_1.Version.parse('0.1.0'); - } - async getDefaultPackageName() { - var _a; - const packageManifest = await this.getPackageManifest(); - if (packageManifest) { - return (_a = packageManifest.package) === null || _a === void 0 ? 
void 0 : _a.name; - } - return undefined; - } - /** - * @returns the package's manifest, ie. `crates/foobar/Cargo.toml` - */ - async getPackageManifest() { - if (this.packageManifest === undefined) { - this.packageManifest = await this.getManifest('Cargo.toml'); - } - return this.packageManifest; - } - async getContent(path) { - try { - return await this.github.getFileContentsOnBranch(this.addPath(path), this.targetBranch); - } - catch (e) { - return null; - } - } - async getManifest(path) { - const content = await this.getContent(path); - return content ? (0, common_1.parseCargoManifest)(content.parsedContent) : null; - } -} -exports.Rust = Rust; -//# sourceMappingURL=rust.js.map - -/***/ }), - -/***/ 87648: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2023 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Sfdx = void 0; -const base_1 = __nccwpck_require__(95081); -const changelog_1 = __nccwpck_require__(3325); -const errors_1 = __nccwpck_require__(93637); -const sfdx_project_json_1 = __nccwpck_require__(15688); -const sfdxProjectJsonFileName = 'sfdx-project.json'; -class Sfdx extends base_1.BaseStrategy { - async buildUpdates(options) { - const updates = []; - const version = options.newVersion; - updates.push({ - path: this.addPath(this.changelogPath), - createIfMissing: true, - updater: new changelog_1.Changelog({ - version, - changelogEntry: options.changelogEntry, - }), - }); - updates.push({ - path: this.addPath(sfdxProjectJsonFileName), - createIfMissing: false, - cachedFileContents: this.sfdxProjectJsonContents, - updater: new sfdx_project_json_1.SfdxProjectJson({ - version, - }), - }); - return updates; - } - async getDefaultPackageName() { - const pkgJsonContents = await this.getSfdxProjectJsonContents(); - const pkg = JSON.parse(pkgJsonContents.parsedContent); - return pkg.name; - } - async getSfdxProjectJsonContents() { - if (!this.sfdxProjectJsonContents) { - try { - this.sfdxProjectJsonContents = - await this.github.getFileContentsOnBranch(this.addPath(sfdxProjectJsonFileName), this.targetBranch); - } - catch (e) { - if (e instanceof errors_1.FileNotFoundError) { - throw new errors_1.MissingRequiredFileError(this.addPath(sfdxProjectJsonFileName), 'sfdx', `${this.repository.owner}/${this.repository.repo}`); - } - throw e; - } - } - return this.sfdxProjectJsonContents; - } -} -exports.Sfdx = Sfdx; -//# sourceMappingURL=sfdx.js.map - -/***/ }), - -/***/ 10591: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2020 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Simple = void 0; -// Generic -const changelog_1 = __nccwpck_require__(3325); -// version.txt support -const base_1 = __nccwpck_require__(95081); -const default_1 = __nccwpck_require__(69995); -class Simple extends base_1.BaseStrategy { - constructor(options) { - var _a; - super(options); - this.versionFile = (_a = options.versionFile) !== null && _a !== void 0 ? _a : 'version.txt'; - } - async buildUpdates(options) { - const updates = []; - const version = options.newVersion; - updates.push({ - path: this.addPath(this.changelogPath), - createIfMissing: true, - updater: new changelog_1.Changelog({ - version, - changelogEntry: options.changelogEntry, - }), - }); - updates.push({ - path: this.addPath(this.versionFile), - createIfMissing: false, - updater: new default_1.DefaultUpdater({ - version, - }), - }); - return updates; - } -} -exports.Simple = Simple; -//# sourceMappingURL=simple.js.map - -/***/ }), - -/***/ 80908: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2020 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.TerraformModule = void 0; -// Generic -const changelog_1 = __nccwpck_require__(3325); -// Terraform specific. -const readme_1 = __nccwpck_require__(4996); -const module_version_1 = __nccwpck_require__(19696); -const metadata_version_1 = __nccwpck_require__(54957); -const base_1 = __nccwpck_require__(95081); -const version_1 = __nccwpck_require__(17348); -class TerraformModule extends base_1.BaseStrategy { - async buildUpdates(options) { - const updates = []; - const version = options.newVersion; - updates.push({ - path: this.addPath(this.changelogPath), - createIfMissing: true, - updater: new changelog_1.Changelog({ - version, - changelogEntry: options.changelogEntry, - }), - }); - // Update version in README to current candidate version. - // A module may have submodules, so find all submodules. - const readmeFiles = await Promise.all([ - this.github.findFilesByFilenameAndRef('readme.md', this.targetBranch, this.path), - this.github.findFilesByFilenameAndRef('README.md', this.targetBranch, this.path), - ]).then(([v, vt]) => { - return v.concat(vt); - }); - readmeFiles.forEach(path => { - updates.push({ - path: this.addPath(path), - createIfMissing: false, - updater: new readme_1.ReadMe({ - version, - }), - }); - }); - // Update versions.tf to current candidate version. 
- // A module may have submodules, so find all versions.tf and versions.tf.tmpl to update. - const versionFiles = await Promise.all([ - this.github.findFilesByFilenameAndRef('versions.tf', this.targetBranch, this.path), - this.github.findFilesByFilenameAndRef('versions.tf.tmpl', this.targetBranch, this.path), - ]).then(([v, vt]) => { - return v.concat(vt); - }); - versionFiles.forEach(path => { - updates.push({ - path: this.addPath(path), - createIfMissing: false, - updater: new module_version_1.ModuleVersion({ - version, - }), - }); - }); - // Update metadata.yaml to current candidate version. - const metadataFiles = await this.github.findFilesByFilenameAndRef('metadata.yaml', this.targetBranch, this.path); - metadataFiles.forEach(path => { - updates.push({ - path: this.addPath(path), - createIfMissing: false, - updater: new metadata_version_1.MetadataVersion({ - version, - }), - }); - }); - return updates; - } - initialReleaseVersion() { - return version_1.Version.parse('0.1.0'); - } -} -exports.TerraformModule = TerraformModule; -//# sourceMappingURL=terraform-module.js.map - -/***/ }), - -/***/ 29481: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.BaseXml = void 0; -const dom = __nccwpck_require__(49213); -/** - * Base class for all updaters working with XML files. - */ -class BaseXml { - /** - * Given initial file contents, return updated contents. - * @param {string} content The initial content - * @returns {string} The updated content - */ - updateContent(content) { - const document = new dom.DOMParser().parseFromString(content); - const updated = this.updateDocument(document); - if (updated) { - const newContent = new dom.XMLSerializer().serializeToString(document); - if (content.endsWith('\n') && !newContent.endsWith('\n')) { - return `${newContent}\n`; - } - return newContent; - } - else { - return content; - } - } -} -exports.BaseXml = BaseXml; -//# sourceMappingURL=base-xml.js.map - -/***/ }), - -/***/ 75219: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2024 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.ModuleBazel = void 0; -const default_1 = __nccwpck_require__(69995); -/** - * Updates a Bazel Module file.
- */
-class ModuleBazel extends default_1.DefaultUpdater {
-    updateContent(content) {
-        const match = content.match(/module[\s\S]*?\([\s\S]*?version\s*=\s*(['"])(.*?)\1/m);
-        if (!match) {
-            return content;
-        }
-        const [fullMatch, , version] = match;
-        const module = fullMatch.replace(version, this.version.toString());
-        return content.replace(fullMatch, module);
-    }
-}
-exports.ModuleBazel = ModuleBazel;
-//# sourceMappingURL=module-bazel.js.map
-
-/***/ }),
-
-/***/ 23719:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
-
-"use strict";
-
-// Copyright 2023 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.ChangelogJson = void 0;
-const logger_1 = __nccwpck_require__(68809);
-const default_1 = __nccwpck_require__(69995);
-const crypto_1 = __nccwpck_require__(6113);
-const BREAKING_CHANGE_TITLE = 'BREAKING CHANGE';
-const COMMIT_PREFIX = /^[^:]+: ?/;
-const PR_SUFFIX_REGEX = / ?\(#(?<pr>[0-9]+)\)$/;
-/**
- * Maintains a machine-readable CHANGELOG in changelog.json.
- * See: https://gist.github.com/bcoe/50ef0a0024bbf107cd5bc0adbdc04758
- */
-class ChangelogJson extends default_1.DefaultUpdater {
-    /**
-     * Instantiate a new ChangelogJson updater
-     * @param options
-     */
-    constructor(options) {
-        super(options);
-        this.language = options.language;
-        this.artifactName = options.artifactName;
-        this.commits = options.commits;
-    }
-    /**
-     * Given initial file contents, return updated contents.
-     * @param {string} content The initial content
-     * @returns {string} The updated content
-     */
-    updateContent(content, logger = logger_1.logger) {
-        var _a;
-        const parsed = JSON.parse(content);
-        logger.info(`adding release ${this.version} for ${this.artifactName}`);
-        const changes = [];
-        for (const commit of this.commits) {
-            const issues = new Set();
-            // The commit.message field contains the type/scope prefix.
-            let message = commit.message.replace(COMMIT_PREFIX, '');
-            // When squashing commits, GitHub adds a suffix referencing
-            // the # of the PR, e.g., chore(main): release 15.5.1 (#1838)
-            // this logic removes this suffix and prepends it to the
-            // issues array.
-            const match = message.match(PR_SUFFIX_REGEX);
-            if (match && ((_a = match.groups) === null || _a === void 0 ? void 0 : _a.pr)) {
-                message = message.replace(match[0], '');
-                issues.add(match.groups.pr);
-            }
-            // Array.from(someSet) will maintain elements in insertion
-            // order, given this we add references after the pr suffix.
-            for (const ref of commit.references) {
-                issues.add(ref.issue);
-            }
-            const change = {
-                type: commit.type,
-                sha: commit.sha,
-                message: message,
-                issues: Array.from(issues),
-            };
-            if (commit.scope)
-                change.scope = commit.scope;
-            for (const note of commit.notes) {
-                if (note.title === BREAKING_CHANGE_TITLE) {
-                    change.breakingChangeNote = note.text;
-                }
-            }
-            changes.push(change);
-        }
-        // If all commits were ignored, simply return the original changelog.json.
- if (changes.length === 0) { - return content; - } - const time = new Date().toISOString(); - const release = { - changes, - version: this.version.toString(), - language: this.language, - artifactName: this.artifactName, - id: (0, crypto_1.randomUUID)(), - createTime: time, - }; - parsed.entries.unshift(release); - parsed.updateTime = time; - return JSON.stringify(parsed, null, 2); - } -} -exports.ChangelogJson = ChangelogJson; -//# sourceMappingURL=changelog-json.js.map - -/***/ }), - -/***/ 3325: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2019 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Changelog = void 0; -const default_1 = __nccwpck_require__(69995); -const DEFAULT_VERSION_HEADER_REGEX = '\n###? v?[0-9[]'; -class Changelog extends default_1.DefaultUpdater { - constructor(options) { - var _a; - super(options); - this.changelogEntry = options.changelogEntry; - this.versionHeaderRegex = new RegExp((_a = options.versionHeaderRegex) !== null && _a !== void 0 ? _a : DEFAULT_VERSION_HEADER_REGEX, 's'); - } - updateContent(content) { - content = content || ''; - // Handle both H2 (features/BREAKING CHANGES) and H3 (fixes). - const lastEntryIndex = content.search(this.versionHeaderRegex); - if (lastEntryIndex === -1) { - if (content) { - return `${this.header()}\n${this.changelogEntry}\n\n${adjustHeaders(content).trim()}\n`; - } - else { - return `${this.header()}\n${this.changelogEntry}\n`; - } - } - else { - const before = content.slice(0, lastEntryIndex); - const after = content.slice(lastEntryIndex); - return `${before}\n${this.changelogEntry}\n${after}`.trim() + '\n'; - } - } - header() { - return `\ -# Changelog -`; - } -} -exports.Changelog = Changelog; -// Helper to increase markdown H1 headers to H2 -function adjustHeaders(content) { - return content.replace(/^#(\s)/gm, '##$1'); -} -//# sourceMappingURL=changelog.js.map - -/***/ }), - -/***/ 40911: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.mergeUpdates = exports.CompositeUpdater = void 0; -/** - * The CompositeUpdater chains 0...n updaters and updates - * the content in order. 
- */ -class CompositeUpdater { - /** - * Instantiate a new CompositeUpdater - * @param {Updater[]} updaters The updaters to chain together - */ - constructor(...updaters) { - this.updaters = updaters; - } - /** - * Given initial file contents, return updated contents. - * @param {string} content The initial content - * @returns {string} The updated content - */ - updateContent(content) { - for (const updater of this.updaters) { - content = updater.updateContent(content); - } - return content || ''; - } -} -exports.CompositeUpdater = CompositeUpdater; -function mergeUpdates(updates) { - const updatesByPath = {}; - for (const update of updates) { - if (updatesByPath[update.path]) { - updatesByPath[update.path].push(update); - } - else { - updatesByPath[update.path] = [update]; - } - } - const newUpdates = []; - for (const path in updatesByPath) { - const update = updatesByPath[path]; - const updaters = update.map(u => u.updater); - newUpdates.push({ - path, - createIfMissing: update[0].createIfMissing, - updater: updaters.length === 1 ? updaters[0] : new CompositeUpdater(...updaters), - }); - } - return newUpdates; -} -exports.mergeUpdates = mergeUpdates; -//# sourceMappingURL=composite.js.map - -/***/ }), - -/***/ 62861: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.PubspecYaml = void 0; -const logger_1 = __nccwpck_require__(68809); -const default_1 = __nccwpck_require__(69995); -/** - * Updates a Dart pubspec.yaml file. - */ -class PubspecYaml extends default_1.DefaultUpdater { - /** - * Given initial file contents, return updated contents. - * @param {string} content The initial content - * @returns {string} The updated content - */ - updateContent(content, logger = logger_1.logger) { - const oldVersion = content.match(/^version: ([0-9.]+)\+?(.*$)/m); - let buildNumber = ''; - if (oldVersion) { - buildNumber = oldVersion[2]; - const parsedBuild = parseInt(buildNumber); - if (!isNaN(parsedBuild)) { - buildNumber = `+${parsedBuild + 1}`; - logger.info(`updating from ${oldVersion[1]}+${oldVersion[2]} to ${this.version}${buildNumber}`); - } - else if (buildNumber.length > 0) { - buildNumber = `+${buildNumber}`; - logger.info(`updating from ${oldVersion[1]}+${oldVersion[2]} to ${this.version}${buildNumber}`); - } - else { - logger.info(`updating from ${oldVersion[1]} to ${this.version}`); - } - } - return content.replace(/^version: .*$/m, `version: ${this.version}${buildNumber}`); - } -} -exports.PubspecYaml = PubspecYaml; -//# sourceMappingURL=pubspec-yaml.js.map - -/***/ }), - -/***/ 69995: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.DefaultUpdater = void 0; -/** - * This updater writes a plain file with the version string as the - * only content. - */ -class DefaultUpdater { - constructor(options) { - this.version = options.version; - this.versionsMap = options.versionsMap; - } - /** - * Given initial file contents, return updated contents. - * @param {string} content The initial content - * @returns {string} The updated content - */ - updateContent(_content) { - return this.version + '\n'; - } -} -exports.DefaultUpdater = DefaultUpdater; -//# sourceMappingURL=default.js.map - -/***/ }), - -/***/ 4856: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Apis = void 0; -const logger_1 = __nccwpck_require__(68809); -const json_stringify_1 = __nccwpck_require__(69227); -/** - * Updates the apis.json format. See - * https://github.com/googleapis/google-cloud-dotnet/blob/main/apis/README.md. - */ -class Apis { - constructor(component, version) { - this.component = component; - this.version = version; - } - /** - * Given initial file contents, return updated contents. - * @param {string} content The initial content - * @returns {string} The updated content - */ - updateContent(content, logger = logger_1.logger) { - const data = JSON.parse(content); - const api = data.apis.find(api => api.id === this.component); - if (!api) { - logger.warn(`Failed to find component: ${this.component} in apis.json`); - return content; - } - api.version = this.version.toString(); - return (0, json_stringify_1.jsonStringify)(data, content); - } -} -exports.Apis = Apis; -//# sourceMappingURL=apis.js.map - -/***/ }), - -/***/ 31612: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
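-// A minimal usage sketch of the DefaultUpdater above (inputs assumed for
-// illustration, not part of the bundled code): whatever the existing file
-// contents are, the result is just the new version string.
-//   const updater = new DefaultUpdater({ version: Version.parse('1.2.3') });
-//   updater.updateContent('0.9.0\n'); // -> '1.2.3\n'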
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.ElixirMixExs = void 0;
-const logger_1 = __nccwpck_require__(68809);
-const default_1 = __nccwpck_require__(69995);
-/**
- * Updates an Elixir mix.exs file and looks for a version string.
- */
-class ElixirMixExs extends default_1.DefaultUpdater {
-    /**
-     * Given initial file contents, return updated contents.
-     * @param {string} content The initial content
-     * @returns {string} The updated content
-     */
-    updateContent(content, logger = logger_1.logger) {
-        const oldModuleAttributeVersion = content.match(/@version "([A-Za-z0-9_\-+.~]+)"/);
-        if (oldModuleAttributeVersion) {
-            logger.info(`updating module attribute version from ${oldModuleAttributeVersion[1]} to ${this.version}`);
-            return content.replace(/@version "[A-Za-z0-9_\-+.~]+"/, `@version "${this.version}"`);
-        }
-        const oldInlineVersion = content.match(/version: "([A-Za-z0-9_\-+.~]+)"/);
-        if (oldInlineVersion) {
-            logger.info(`updating inline version from ${oldInlineVersion[1]} to ${this.version}`);
-        }
-        return content.replace(/version: "[A-Za-z0-9_\-+.~]+",/, `version: "${this.version}",`);
-    }
-}
-exports.ElixirMixExs = ElixirMixExs;
-//# sourceMappingURL=elixir-mix-exs.js.map
-
-/***/ }),
-
-/***/ 20059:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
-
-"use strict";
-
-// Copyright 2022 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.AppJson = void 0;
-const json_stringify_1 = __nccwpck_require__(69227);
-const logger_1 = __nccwpck_require__(68809);
-const default_1 = __nccwpck_require__(69995);
-/**
- * This updates a React Native Expo project app.json file's main, ios and android
- * versions. All values except the `android.versionCode` are standard semver
- * version numbers. For the `android.versionCode`, the semver number is used as
- * the basis for the `versionCode`.
- */
-class AppJson extends default_1.DefaultUpdater {
-    constructor(options) {
-        super(options);
-        this.expoSDKVersion = options.expoSDKVersion;
-    }
-    /**
-     * Given initial file contents, return updated contents.
-     */
-    updateContent(content, logger = logger_1.logger) {
-        var _a, _b;
-        const parsed = JSON.parse(content);
-        logger.info(`updating Expo version from ${parsed.expo.version} to ${this.version}`);
-        parsed.expo.version = this.version.toString();
-        if ((_a = parsed.expo.ios) === null || _a === void 0 ? void 0 : _a.buildNumber) {
-            logger.info(`updating iOS version from ${parsed.expo.ios.buildNumber} to ${this.version}`);
-            parsed.expo.ios.buildNumber = this.version.toString();
-        }
-        if ((_b = parsed.expo.android) === null || _b === void 0 ? void 0 : _b.versionCode) {
-            // Android versionCode
-            // https://developer.android.com/studio/publish/versioning#appversioning
-            let expoMajorVersion = 0;
-            try {
-                expoMajorVersion = this.expoSDKVersion.major;
-            }
-            catch (e) {
-                // Rethrow with a nice error message.
-                throw new Error('Unable to determine the Expo SDK version for this project. Make sure that the expo package is installed for your project.');
-            }
-            // Implements the `versionCode` strategy described by Maxi Rosson
-            // @see https://medium.com/@maxirosson/versioning-android-apps-d6ec171cfd82
-            const versionCode = expoMajorVersion * 10000000 +
-                this.version.major * 10000 +
-                this.version.minor * 100 +
-                this.version.patch;
-            logger.info(`updating Android version from ${parsed.expo.android.versionCode} to ${versionCode}`);
-            parsed.expo.android.versionCode = versionCode;
-        }
-        return (0, json_stringify_1.jsonStringify)(parsed, content);
-    }
-}
-exports.AppJson = AppJson;
-//# sourceMappingURL=app-json.js.map
-
-/***/ }),
-
-/***/ 15011:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
-
-"use strict";
-
-// Copyright 2022 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.GenericJson = void 0;
-const jsonpath_plus_1 = __nccwpck_require__(24697);
-const json_stringify_1 = __nccwpck_require__(69227);
-const logger_1 = __nccwpck_require__(68809);
-const VERSION_REGEX = /(?<major>\d+)\.(?<minor>\d+)\.(?<patch>\d+)(-(?<preRelease>[\w.]+))?(\+(?<build>[-\w.]+))?/;
-class GenericJson {
-    constructor(jsonpath, version) {
-        this.jsonpath = jsonpath;
-        this.version = version;
-    }
-    /**
-     * Given initial file contents, return updated contents.
-     * @param {string} content The initial content
-     * @returns {string} The updated content
-     */
-    updateContent(content, logger = logger_1.logger) {
-        const data = JSON.parse(content);
-        (0, jsonpath_plus_1.JSONPath)({
-            resultType: 'all',
-            path: this.jsonpath,
-            json: data,
-            callback: (payload, _payloadType, _fullPayload) => {
-                if (typeof payload.value !== 'string') {
-                    logger.warn(`No string in ${this.jsonpath}. Skipping.`);
-                    return payload;
-                }
-                if (!payload.value.match(VERSION_REGEX)) {
-                    logger.warn(`No version found in ${this.jsonpath}. Skipping.`);
-                    return payload;
-                }
-                payload.parent[payload.parentProperty] = payload.parent[payload.parentProperty].replace(VERSION_REGEX, this.version.toString());
-                return payload;
-            },
-        });
-        return (0, json_stringify_1.jsonStringify)(data, content);
-    }
-}
-exports.GenericJson = GenericJson;
-//# sourceMappingURL=generic-json.js.map
-
-/***/ }),
-
-/***/ 53530:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
-
-"use strict";
-
-// Copyright 2023 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
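-// A worked example of the `versionCode` strategy in the AppJson updater above,
-// with inputs assumed for illustration (Expo SDK major 44, app version 2.3.4):
-//   const versionCode = 44 * 10000000 + 2 * 10000 + 3 * 100 + 4; // -> 440020304
-// i.e. the Expo SDK major (44), three digits for the app major (002), and two
-// digits each for minor (03) and patch (04).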
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.GenericToml = void 0;
-const jsonpath_plus_1 = __nccwpck_require__(24697);
-const toml_edit_1 = __nccwpck_require__(30567);
-const logger_1 = __nccwpck_require__(68809);
-/**
- * Updates a TOML document according to the given JSONPath.
- *
- * Note that the parser used reformats the document, removes all comments,
- * and converts everything to pure TOML.
- * If you want to retain formatting, use the generic updater with comment hints.
- */
-class GenericToml {
-    constructor(jsonpath, version) {
-        this.jsonpath = jsonpath;
-        this.version = version;
-    }
-    /**
-     * Given initial file contents, return updated contents.
-     * @param {string} content The initial content
-     * @returns {string} The updated content
-     */
-    updateContent(content, logger = logger_1.logger) {
-        let data;
-        try {
-            data = (0, toml_edit_1.parseWith)(content);
-        }
-        catch (e) {
-            logger.warn('Invalid toml, cannot be parsed', e);
-            return content;
-        }
-        const pointers = (0, jsonpath_plus_1.JSONPath)({
-            path: this.jsonpath,
-            json: data,
-            resultType: 'pointer',
-        });
-        const paths = pointers.map(pointer => pointer.split('/').filter(Boolean));
-        if (!paths || paths.length === 0) {
-            logger.warn(`No entries modified in ${this.jsonpath}`);
-            return content;
-        }
-        let processed = content;
-        paths.forEach(path => {
-            if (path[0] === '$')
-                path = path.slice(1);
-            processed = (0, toml_edit_1.replaceTomlValue)(processed, path, this.version.toString());
-        });
-        return processed;
-    }
-}
-exports.GenericToml = GenericToml;
-//# sourceMappingURL=generic-toml.js.map
-
-/***/ }),
-
-/***/ 15591:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
-
-"use strict";
-
-// Copyright 2022 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.GenericXml = void 0;
-const base_xml_1 = __nccwpck_require__(29481);
-const xpath = __nccwpck_require__(65319);
-class GenericXml extends base_xml_1.BaseXml {
-    constructor(xpath, version) {
-        super();
-        this.xpath = xpath;
-        this.version = version;
-    }
-    updateDocument(document) {
-        const version = this.version.toString();
-        let updated = false;
-        for (const node of xpath.select(this.xpath, document)) {
-            if (node.textContent !== version) {
-                node.textContent = version;
-                updated = true;
-            }
-        }
-        return updated;
-    }
-}
-exports.GenericXml = GenericXml;
-//# sourceMappingURL=generic-xml.js.map
-
-/***/ }),
-
-/***/ 61024:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
-
-"use strict";
-
-// Copyright 2022 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.GenericYaml = void 0;
-const jsonpath_plus_1 = __nccwpck_require__(24697);
-const yaml = __nccwpck_require__(21917);
-const logger_1 = __nccwpck_require__(68809);
-const DOCUMENT_SEPARATOR = '---\n';
-/**
- * Updates a YAML document according to the given JSONPath.
- *
- * Note that the parser used reformats the document, removes all comments,
- * and converts everything to pure YAML (even JSON source).
- * If you want to retain formatting, use the generic updater with comment hints.
- *
- * When applied to a multi-document file, it updates all documents.
- */
-class GenericYaml {
-    constructor(jsonpath, version) {
-        this.jsonpath = jsonpath;
-        this.version = version;
-    }
-    /**
-     * Given initial file contents, return updated contents.
-     * @param {string} content The initial content
-     * @returns {string} The updated content
-     */
-    updateContent(content, logger = logger_1.logger) {
-        // Parse possibly multi-document file
-        let docs;
-        try {
-            docs = yaml.loadAll(content, null, { json: true });
-        }
-        catch (e) {
-            logger.warn('Invalid yaml, cannot be parsed', e);
-            return content;
-        }
-        // Update each document
-        let modified = false;
-        docs.forEach(data => {
-            (0, jsonpath_plus_1.JSONPath)({
-                resultType: 'all',
-                path: this.jsonpath,
-                json: data,
-                callback: (payload, _payloadType, _fullPayload) => {
-                    if (typeof payload.value !== 'string') {
-                        logger.warn(`No string in ${this.jsonpath}. Skipping.`);
-                        return payload;
-                    }
-                    modified = true;
-                    payload.parent[payload.parentProperty] = this.version.toString();
-                    return payload;
-                },
-            });
-        });
-        // If nothing was modified, return original content
-        if (!modified) {
-            logger.warn(`No entries modified in ${this.jsonpath}`);
-            return content;
-        }
-        // Stringify documents
-        if (docs.length === 1) {
-            // Single doc
-            return yaml.dump(docs[0]);
-        }
-        else {
-            // Multi-document, each document starts with separator
-            return docs.map(data => DOCUMENT_SEPARATOR + yaml.dump(data)).join('');
-        }
-    }
-}
-exports.GenericYaml = GenericYaml;
-//# sourceMappingURL=generic-yaml.js.map
-
-/***/ }),
-
-/***/ 96323:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
-
-"use strict";
-
-// Copyright 2021 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
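-// A minimal usage sketch of the GenericYaml updater above (JSONPath and file
-// contents assumed for illustration; the result is re-dumped by js-yaml, so
-// comments and original formatting are not preserved):
-//   const updater = new GenericYaml('$.version', Version.parse('2.0.0'));
-//   updater.updateContent('version: 1.0.0\nname: demo\n');
-//   // -> 'version: 2.0.0\nname: demo\n'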
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.Generic = void 0;
-const default_1 = __nccwpck_require__(69995);
-const logger_1 = __nccwpck_require__(68809);
-const VERSION_REGEX = /(?<major>\d+)\.(?<minor>\d+)\.(?<patch>\d+)(-(?<preRelease>[\w.]+))?(\+(?<build>[-\w.]+))?/;
-const SINGLE_VERSION_REGEX = /\b\d+\b/;
-const INLINE_UPDATE_REGEX = /x-release-please-(?<scope>major|minor|patch|version)/;
-const BLOCK_START_REGEX = /x-release-please-start-(?<scope>major|minor|patch|version)/;
-const BLOCK_END_REGEX = /x-release-please-end/;
-/**
- * The Generic updater looks for well known patterns and replaces
- * content. The well known patterns are:
- *
- * 1. `x-release-please-version` if this string is found on the line,
- *    then replace a semver-looking string on that line with the next
- *    version
- * 2. `x-release-please-major` if this string is found on the line,
- *    then replace an integer looking value with the next version's
- *    major
- * 3. `x-release-please-minor` if this string is found on the line,
- *    then replace an integer looking value with the next version's
- *    minor
- * 4. `x-release-please-patch` if this string is found on the line,
- *    then replace an integer looking value with the next version's
- *    patch
- *
- * You can also use a block-based replacement. Content between the
- * opening `x-release-please-start-version` and `x-release-please-end` will
- * be considered for version replacement. You can also open these blocks
- * with `x-release-please-start-<major|minor|patch>` to replace single
- * numbers
- */
-class Generic extends default_1.DefaultUpdater {
-    constructor(options) {
-        var _a, _b, _c;
-        super(options);
-        this.inlineUpdateRegex = (_a = options.inlineUpdateRegex) !== null && _a !== void 0 ? _a : INLINE_UPDATE_REGEX;
-        this.blockStartRegex = (_b = options.blockStartRegex) !== null && _b !== void 0 ? _b : BLOCK_START_REGEX;
-        this.blockEndRegex = (_c = options.blockEndRegex) !== null && _c !== void 0 ? _c : BLOCK_END_REGEX;
-    }
-    /**
-     * Given initial file contents, return updated contents.
-     * @param {string} content The initial content
-     * @returns {string} The updated content
-     */
-    updateContent(content, logger = logger_1.logger) {
-        if (!content) {
-            return '';
-        }
-        const newLines = [];
-        let blockScope;
-        function replaceVersion(line, scope, version) {
-            switch (scope) {
-                case 'major':
-                    newLines.push(line.replace(SINGLE_VERSION_REGEX, `${version.major}`));
-                    return;
-                case 'minor':
-                    newLines.push(line.replace(SINGLE_VERSION_REGEX, `${version.minor}`));
-                    return;
-                case 'patch':
-                    newLines.push(line.replace(SINGLE_VERSION_REGEX, `${version.patch}`));
-                    return;
-                case 'version':
-                    newLines.push(line.replace(VERSION_REGEX, version.toString()));
-                    return;
-                default:
-                    logger.warn(`unknown block scope: ${scope}`);
-                    newLines.push(line);
-            }
-        }
-        content.split(/\r?\n/).forEach(line => {
-            var _a, _b;
-            let match = line.match(this.inlineUpdateRegex);
-            if (match) {
-                // replace inline versions
-                replaceVersion(line, (((_a = match.groups) === null || _a === void 0 ? void 0 : _a.scope) || 'version'), this.version);
-            }
-            else if (blockScope) {
-                // in a block, so try to replace versions
-                replaceVersion(line, blockScope, this.version);
-                if (line.match(this.blockEndRegex)) {
-                    blockScope = undefined;
-                }
-            }
-            else {
-                // look for block start line
-                match = line.match(this.blockStartRegex);
-                if (match) {
-                    if ((_b = match.groups) === null || _b === void 0 ?
void 0 : _b.scope) { - blockScope = match.groups.scope; - } - else { - blockScope = 'version'; - } - } - newLines.push(line); - } - }); - return newLines.join('\n'); - } -} -exports.Generic = Generic; -//# sourceMappingURL=generic.js.map - -/***/ }), - -/***/ 54988: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.VersionGo = void 0; -const default_1 = __nccwpck_require__(69995); -class VersionGo extends default_1.DefaultUpdater { - updateContent(content) { - return content.replace(/const Version = "[0-9]+\.[0-9]+\.[0-9](-\w+)?"/, `const Version = "${this.version.toString()}"`); - } -} -exports.VersionGo = VersionGo; -//# sourceMappingURL=version-go.js.map - -/***/ }), - -/***/ 88368: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.ChartYaml = void 0; -const yaml = __nccwpck_require__(44083); -const logger_1 = __nccwpck_require__(68809); -const default_1 = __nccwpck_require__(69995); -/** - * Updates a Helm chart.yaml file. - */ -class ChartYaml extends default_1.DefaultUpdater { - /** - * Given initial file contents, return updated contents. - * @param {string} content The initial content - * @returns {string} The updated content - */ - updateContent(content, logger = logger_1.logger) { - const chart = yaml.parseDocument(content); - if (chart === null || chart === undefined) { - return ''; - } - const oldVersion = chart.get('version'); - logger.info(`updating from ${oldVersion} to ${this.version}`); - chart.set('version', this.version.toString()); - return chart.toString(); - } -} -exports.ChartYaml = ChartYaml; -//# sourceMappingURL=chart-yaml.js.map - -/***/ }), - -/***/ 16255: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.JavaReleased = void 0;
-const generic_1 = __nccwpck_require__(96323);
-const INLINE_UPDATE_REGEX = /x-release-please-released-(?<scope>major|minor|patch|version)/;
-const BLOCK_START_REGEX = /x-release-please-released-start-(?<scope>major|minor|patch|version)/;
-const BLOCK_END_REGEX = /x-release-please-released-end/;
-const REGEX_OPTIONS = {
-    inlineUpdateRegex: INLINE_UPDATE_REGEX,
-    blockStartRegex: BLOCK_START_REGEX,
-    blockEndRegex: BLOCK_END_REGEX,
-};
-/**
- * The JavaReleased updater is used only when updating to stable (not SNAPSHOT)
- * versions. It looks for well known patterns and replaces content.
- * The well known patterns are:
- *
- * 1. `x-release-please-released-version` if this string is found on the line,
- *    then replace a semver-looking string on that line with the next
- *    version
- * 2. `x-release-please-released-major` if this string is found on the line,
- *    then replace an integer looking value with the next version's
- *    major
- * 3. `x-release-please-released-minor` if this string is found on the line,
- *    then replace an integer looking value with the next version's
- *    minor
- * 4. `x-release-please-released-patch` if this string is found on the line,
- *    then replace an integer looking value with the next version's
- *    patch
- *
- * You can also use a block-based replacement. Content between the
- * opening `x-release-please-released-start-version` and `x-release-please-released-end` will
- * be considered for version replacement. You can also open these blocks
- * with `x-release-please-released-start-<major|minor|patch>` to replace single
- * numbers
- */
-class JavaReleased extends generic_1.Generic {
-    constructor(options) {
-        super({
-            ...REGEX_OPTIONS,
-            ...options,
-        });
-    }
-}
-exports.JavaReleased = JavaReleased;
-//# sourceMappingURL=java-released.js.map
-
-/***/ }),
-
-/***/ 90276:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
-
-"use strict";
-
-// Copyright 2019 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.JavaUpdate = void 0;
-const default_1 = __nccwpck_require__(69995);
-const logger_1 = __nccwpck_require__(68809);
-const INLINE_UPDATE_REGEX = /{x-version-update:([\w\-_]+):(current|released)}/;
-const BLOCK_START_REGEX = /{x-version-update-start:([\w\-_]+):(current|released)}/;
-const BLOCK_END_REGEX = /{x-version-update-end}/;
-const VERSION_REGEX = /\d+\.\d+\.\d+(-\w+(\.\d+)?)?(-SNAPSHOT)?/;
-/**
- * Updates a file annotated with region markers. These region markers are
- * either denoted inline with `{x-version-update:<component-name>:current|released}`
- * or with a `{x-version-update-start:<component-name>}` and `{x-version-update-end}`.
- */
-class JavaUpdate extends default_1.DefaultUpdater {
-    constructor(options) {
-        super(options);
-        this.isSnapshot = !!options.isSnapshot;
-    }
-    /**
-     * Given initial file contents, return updated contents.
-     * @param {string} content The initial content
-     * @returns {string} The updated content
-     */
-    updateContent(content, logger = logger_1.logger) {
-        if (!this.versionsMap) {
-            logger.warn('missing versions map');
-            return content;
-        }
-        const newLines = [];
-        let blockPackageName = null;
-        content.split(/\r?\n/).forEach(line => {
-            let match = line.match(INLINE_UPDATE_REGEX);
-            if (match && (!this.isSnapshot || match[2] === 'current')) {
-                const newVersion = this.versionsMap.get(match[1]);
-                if (newVersion) {
-                    newLines.push(line.replace(VERSION_REGEX, newVersion.toString()));
-                }
-                else {
-                    newLines.push(line);
-                }
-            }
-            else if (blockPackageName) {
-                const newVersion = this.versionsMap.get(blockPackageName);
-                if (newVersion) {
-                    newLines.push(line.replace(VERSION_REGEX, newVersion.toString()));
-                }
-                else {
-                    newLines.push(line);
-                }
-                if (line.match(BLOCK_END_REGEX)) {
-                    blockPackageName = null;
-                }
-            }
-            else {
-                match = line.match(BLOCK_START_REGEX);
-                if (match && (!this.isSnapshot || match[2] === 'current')) {
-                    blockPackageName = match[1];
-                }
-                newLines.push(line);
-            }
-        });
-        return newLines.join('\n');
-    }
-}
-exports.JavaUpdate = JavaUpdate;
-//# sourceMappingURL=java-update.js.map
-
-/***/ }),
-
-/***/ 60255:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
-
-"use strict";
-
-// Copyright 2022 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.parseDependencyNode = exports.PomXml = void 0;
-const base_xml_1 = __nccwpck_require__(29481);
-const xpath = __nccwpck_require__(65319);
-const XPATH_PROJECT_VERSION = '/*[local-name()="project"]/*[local-name()="version"]';
-const XPATH_PROJECT_PARENT_VERSION = '/*[local-name()="project"]/*[local-name()="parent"]/*[local-name()="version"]';
-const XPATH_PROJECT_DEPENDENCIES = '/*[local-name()="project"]/*[local-name()="dependencies"]/*[local-name()="dependency"]';
-const XPATH_PROJECT_DEPENDENCY_MANAGEMENT_DEPENDENCIES = '/*[local-name()="project"]/*[local-name()="dependencyManagement"]/*[local-name()="dependencies"]/*[local-name()="dependency"]';
-/**
- * Updates versions in pom.xml files.
- *
- * If present, it updates the project.version element.
- * If project.version is not present, it updates project.parent.version.
- */
-class PomXml extends base_xml_1.BaseXml {
-    constructor(version, dependencyVersions) {
-        super();
-        this.version = version;
-        this.dependencyVersions = dependencyVersions;
-    }
-    updateDocument(document) {
-        // NOTE: this intentionally ignores namespaces; let Maven decide what's valid and what's not
-        const updates = [];
-        // Update project.version
-        const projectVersionNodes = xpath.select(XPATH_PROJECT_VERSION, document);
-        if (projectVersionNodes.length) {
-            // If found, update it and detect the actual change
-            updates.push({
-                nodes: projectVersionNodes,
-                version: this.version,
-            });
-        }
-        else {
-            // Try updating project.parent.version
-            const parentVersionNodes = xpath.select(XPATH_PROJECT_PARENT_VERSION, document);
-            updates.push({
-                nodes: parentVersionNodes,
-                version: this.version,
-            });
-        }
-        if (this.dependencyVersions) {
-            updates.push(...this.dependencyUpdates(document, this.dependencyVersions));
-        }
-        let updated = false;
-        for (const { nodes, version } of updates) {
-            updated = PomXml.updateNodes(nodes, version.toString()) || updated;
-        }
-        return updated;
-    }
-    dependencyUpdates(document, updatedVersions) {
-        const updates = [];
-        const dependencyNodes = xpath.select(XPATH_PROJECT_DEPENDENCIES, document);
-        const dependencyManagementNodes = xpath.select(XPATH_PROJECT_DEPENDENCY_MANAGEMENT_DEPENDENCIES, document);
-        // try to update dependency versions
-        for (const [name, version] of updatedVersions.entries()) {
-            // look under:
-            // - project/dependencies
-            // - project/dependencyManagement/dependencies
-            const [groupId, artifactId] = name.split(':');
-            for (const nodeGroup of [dependencyNodes, dependencyManagementNodes]) {
-                const nodes = nodeGroup.reduce((collection, node) => {
-                    const dependencyNode = parseDependencyNode(node);
-                    if (dependencyNode.groupId === groupId &&
-                        dependencyNode.artifactId === artifactId &&
-                        dependencyNode.version !== version.toString() &&
-                        dependencyNode.versionNode) {
-                        collection.push(dependencyNode.versionNode);
-                    }
-                    return collection;
-                }, []);
-                if (nodes.length) {
-                    updates.push({
-                        name,
-                        nodes,
-                        version,
-                    });
-                }
-            }
-        }
-        return updates;
-    }
-    static updateNodes(nodes, value) {
-        const toUpdate = nodes.filter(node => node.textContent !== value);
-        toUpdate.forEach(node => (node.textContent = value));
-        return toUpdate.length > 0;
-    }
-}
-exports.PomXml = PomXml;
-function parseDependencyNode(node) {
-    var _a, _b, _c, _d;
-    let groupId = '';
-    let artifactId = '';
-    let scope;
-    let version;
-    let versionNode;
-    for (let i = 0; i < node.childNodes.length; i++) {
-        const childNode = node.childNodes.item(i);
-        if (childNode.nodeName === 'groupId') {
-            groupId = ((_a = childNode.firstChild) === null || _a === void 0 ? void 0 : _a.textContent) || '';
-        }
-        else if (childNode.nodeName === 'artifactId') {
-            artifactId = ((_b = childNode.firstChild) === null || _b === void 0 ? void 0 : _b.textContent) || '';
-        }
-        else if (childNode.nodeName === 'scope') {
-            scope = ((_c = childNode.firstChild) === null || _c === void 0 ? void 0 : _c.textContent) || '';
-        }
-        else if (childNode.nodeName === 'version') {
-            version = ((_d = childNode.firstChild) === null || _d === void 0 ?
void 0 : _d.textContent) || ''; - versionNode = childNode; - } - } - return { - groupId, - artifactId, - scope, - version, - versionNode, - }; -} -exports.parseDependencyNode = parseDependencyNode; -//# sourceMappingURL=pom-xml.js.map - -/***/ }), - -/***/ 78345: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2019 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.VersionsManifest = void 0; -const java_update_1 = __nccwpck_require__(90276); -const version_1 = __nccwpck_require__(17348); -const logger_1 = __nccwpck_require__(68809); -/** - * Updates a versions.txt file which contains current versions of - * components within a Java repo. - * @see https://github.com/googleapis/java-asset/blob/main/versions.txt - */ -class VersionsManifest extends java_update_1.JavaUpdate { - /** - * Given initial file contents, return updated contents. - * @param {string} content The initial content - * @returns {string} The updated content - */ - updateContent(content, logger = logger_1.logger) { - if (!this.versionsMap) { - logger.warn('missing versions map'); - return content; - } - let newContent = content; - this.versionsMap.forEach((version, packageName) => { - newContent = this.updateSingleVersion(newContent, packageName, version.toString()); - }); - return newContent; - } - updateSingleVersion(content, packageName, version) { - const newLines = []; - content.split(/\r?\n/).forEach(line => { - if (version.includes('SNAPSHOT')) { - newLines.push(line.replace(new RegExp(`${packageName}:(.*):(.*)`, 'g'), `${packageName}:$1:${version}`)); - } - else { - newLines.push(line.replace(new RegExp(`${packageName}:(.*):(.*)`, 'g'), `${packageName}:${version}:${version}`)); - } - }); - return newLines.join('\n'); - } - static parseVersions(content) { - const versions = new Map(); - content.split(/\r?\n/).forEach(line => { - const match = line.match(/^([\w\-_]+):([^:]+):([^:]+)/); - if (match) { - versions.set(match[1], version_1.Version.parse(match[2])); - } - }); - return versions; - } - static needsSnapshot(content) { - return !content.split(/\r?\n/).some(line => { - return !!line.match(/^[\w\-_]+:.+:.+-SNAPSHOT/); - }); - } -} -exports.VersionsManifest = VersionsManifest; -//# sourceMappingURL=versions-manifest.js.map - -/***/ }), - -/***/ 17490: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and
-// limitations under the License.
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.KRMBlueprintVersion = void 0;
-const logger_1 = __nccwpck_require__(68809);
-const default_1 = __nccwpck_require__(69995);
-/**
- * Updates a KRM blueprint yaml file.
- */
-class KRMBlueprintVersion extends default_1.DefaultUpdater {
-    /**
-     * Given initial file contents, return updated contents.
-     * @param {string} content The initial content
-     * @returns {string} The updated content
-     */
-    updateContent(content, logger = logger_1.logger) {
-        var _a;
-        // js-yaml (and the kpt TS SDK) does not preserve comments, hence the regex match.
-        // Match strings starting with cnrm/ and ending with semver, to prevent wrong updates like a pinned config.kubernetes.io/function
-        let matchRegex = '(cnrm/.*/)(v[0-9]+.[0-9]+.[0-9]+)+(-w+)?';
-        // if there is an explicit previous version, match only that version
-        if ((_a = this.versionsMap) === null || _a === void 0 ? void 0 : _a.has('previousVersion')) {
-            matchRegex = `(cnrm/.*/)(v${this.versionsMap.get('previousVersion')})+(-w+)?`;
-        }
-        const oldVersion = content.match(new RegExp(matchRegex));
-        if (oldVersion) {
-            logger.info(`updating from ${oldVersion[2]} to v${this.version}`);
-        }
-        const newVersion = content.replace(new RegExp(matchRegex, 'g'), `$1v${this.version}`);
-        return newVersion;
-    }
-}
-exports.KRMBlueprintVersion = KRMBlueprintVersion;
-//# sourceMappingURL=krm-blueprint-version.js.map
-
-/***/ }),
-
-/***/ 26588:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
-
-"use strict";
-
-// Copyright 2019 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.updateDependencies = exports.NPM_PROTOCOL_REGEXP = exports.newVersionWithRange = exports.PackageJson = void 0;
-const json_stringify_1 = __nccwpck_require__(69227);
-const logger_1 = __nccwpck_require__(68809);
-const default_1 = __nccwpck_require__(69995);
-/**
- * This updates a Node.js package.json file's main version.
- */
-class PackageJson extends default_1.DefaultUpdater {
-    constructor(options) {
-        super(options);
-        this.updatePeerDependencies = false;
-        this.updatePeerDependencies = options.updatePeerDependencies || false;
-    }
-    /**
-     * Given initial file contents, return updated contents.
-     * @param {string} content The initial content
-     * @param logger
-     * @returns {string} The updated content
-     */
-    updateContent(content, logger = logger_1.logger) {
-        const parsed = JSON.parse(content);
-        logger.info(`updating from ${parsed.version} to ${this.version}`);
-        parsed.version = this.version.toString();
-        // If additional dependency versions are specified, update dependency versions
-        // while preserving any valid version range prefixes.
-        if (this.versionsMap) {
-            if (parsed.dependencies) {
-                updateDependencies(parsed.dependencies, this.versionsMap);
-            }
-            if (parsed.devDependencies) {
-                updateDependencies(parsed.devDependencies, this.versionsMap);
-            }
-            if (parsed.peerDependencies && this.updatePeerDependencies) {
-                updateDependencies(parsed.peerDependencies, this.versionsMap);
-            }
-            if (parsed.optionalDependencies) {
-                updateDependencies(parsed.optionalDependencies, this.versionsMap);
-            }
-        }
-        return (0, json_stringify_1.jsonStringify)(parsed, content);
-    }
-}
-exports.PackageJson = PackageJson;
-var SUPPORTED_RANGE_PREFIXES;
-(function (SUPPORTED_RANGE_PREFIXES) {
-    SUPPORTED_RANGE_PREFIXES["CARET"] = "^";
-    SUPPORTED_RANGE_PREFIXES["TILDE"] = "~";
-    SUPPORTED_RANGE_PREFIXES["EQUAL_OR_GREATER_THAN"] = ">=";
-    SUPPORTED_RANGE_PREFIXES["EQUAL_OR_LESS_THAN"] = "<=";
-    SUPPORTED_RANGE_PREFIXES["GREATER_THAN"] = ">";
-    SUPPORTED_RANGE_PREFIXES["LESS_THAN"] = "<";
-})(SUPPORTED_RANGE_PREFIXES || (SUPPORTED_RANGE_PREFIXES = {}));
-function detectRangePrefix(version) {
-    return (Object.values(SUPPORTED_RANGE_PREFIXES).find(supportedRangePrefix => version.startsWith(supportedRangePrefix)) || '');
-}
-/**
- * Helper to coerce a new version value into a version range that preserves the
- * version range prefix of the original version.
- * @param {string} oldVersion Old semver with range
- * @param {Version} newVersion The new version to update with
- */
-function newVersionWithRange(oldVersion, newVersion) {
-    const prefix = detectRangePrefix(oldVersion);
-    if (prefix) {
-        return `${prefix}${newVersion}`;
-    }
-    return newVersion.toString();
-}
-exports.newVersionWithRange = newVersionWithRange;
-exports.NPM_PROTOCOL_REGEXP = /^[a-z]+:/;
-/**
- * Helper function to update dependency versions for all new versions specified
- * in the updated versions map. Note that this mutates the existing input.
- * @param {Record<string, string>} dependencies Entries in package.json dependencies
- * where the key is the dependency name and the value is the dependency range
- * @param {VersionsMap} updatedVersions Map of new versions (without dependency range prefixes)
- */
-function updateDependencies(dependencies, updatedVersions) {
-    for (const depName of Object.keys(dependencies)) {
-        const oldVersion = dependencies[depName];
-        if (exports.NPM_PROTOCOL_REGEXP.test(oldVersion)) {
-            continue;
-        }
-        const newVersion = updatedVersions.get(depName);
-        if (newVersion) {
-            dependencies[depName] = newVersionWithRange(oldVersion, newVersion);
-        }
-    }
-}
-exports.updateDependencies = updateDependencies;
-//# sourceMappingURL=package-json.js.map
-
-/***/ }),
-
-/***/ 23443:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
-
-"use strict";
-
-// Copyright 2021 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
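-// A quick worked example of newVersionWithRange above (inputs assumed for
-// illustration): the range prefix of the old requirement is detected and
-// re-applied to the new version.
-//   newVersionWithRange('^1.2.3', Version.parse('2.0.0')); // -> '^2.0.0'
-//   newVersionWithRange('1.2.3', Version.parse('2.0.0'));  // -> '2.0.0'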
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.PackageLockJson = void 0;
-const json_stringify_1 = __nccwpck_require__(69227);
-const logger_1 = __nccwpck_require__(68809);
-const package_json_1 = __nccwpck_require__(26588);
-/**
- * Updates a Node.js package-lock.json file's version and '' package
- * version (for a v2 lock file).
- */
-class PackageLockJson {
-    constructor(options) {
-        this.version = options.version;
-        this.versionsMap = options.versionsMap;
-    }
-    updateContent(content, logger = logger_1.logger) {
-        const parsed = JSON.parse(content);
-        if (this.version) {
-            logger.info(`updating from ${parsed.version} to ${this.version}`);
-            parsed.version = this.version.toString();
-        }
-        if (parsed.lockfileVersion === 2 || parsed.lockfileVersion === 3) {
-            if (this.version) {
-                parsed.packages[''].version = this.version.toString();
-            }
-            if (this.versionsMap) {
-                this.versionsMap.forEach((version, name) => {
-                    let pkg = parsed.packages['node_modules/' + name];
-                    if (!pkg) {
-                        return;
-                    }
-                    // @see https://docs.npmjs.com/cli/v10/configuring-npm/package-lock-json#packages
-                    if (pkg.link && pkg.resolved) {
-                        pkg = parsed.packages[pkg.resolved];
-                        if (!pkg) {
-                            return;
-                        }
-                    }
-                    pkg.version = version.toString();
-                    if (pkg.dependencies) {
-                        (0, package_json_1.updateDependencies)(pkg.dependencies, this.versionsMap);
-                    }
-                    if (pkg.devDependencies) {
-                        (0, package_json_1.updateDependencies)(pkg.devDependencies, this.versionsMap);
-                    }
-                    if (pkg.peerDependencies) {
-                        (0, package_json_1.updateDependencies)(pkg.peerDependencies, this.versionsMap);
-                    }
-                    if (pkg.optionalDependencies) {
-                        (0, package_json_1.updateDependencies)(pkg.optionalDependencies, this.versionsMap);
-                    }
-                });
-            }
-        }
-        if (this.versionsMap) {
-            for (const [, obj] of Object.entries(parsed.packages)) {
-                if (!obj.name) {
-                    continue;
-                }
-                const ver = this.versionsMap.get(obj.name);
-                if (ver) {
-                    obj.version = ver.toString();
-                }
-            }
-        }
-        return (0, json_stringify_1.jsonStringify)(parsed, content);
-    }
-}
-exports.PackageLockJson = PackageLockJson;
-//# sourceMappingURL=package-lock-json.js.map
-
-/***/ }),
-
-/***/ 68530:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
-
-"use strict";
-
-// Copyright 2019 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.SamplesPackageJson = void 0;
-const logger_1 = __nccwpck_require__(68809);
-const json_stringify_1 = __nccwpck_require__(69227);
-const default_1 = __nccwpck_require__(69995);
-/**
- * Updates a Node.js package.json file with the library version in the
- * dependencies section.
- */
-class SamplesPackageJson extends default_1.DefaultUpdater {
-    /**
-     * Instantiate a new SamplesPackageJson updater
-     * @param options
-     */
-    constructor(options) {
-        super(options);
-        this.packageName = options.packageName;
-    }
-    /**
-     * Given initial file contents, return updated contents.
- * @param {string} content The initial content - * @returns {string} The updated content - */ - updateContent(content, logger = logger_1.logger) { - const parsed = JSON.parse(content); - if (!parsed.dependencies || !parsed.dependencies[this.packageName]) { - return content; - } - logger.info(`updating ${this.packageName} dependency from ${parsed.dependencies[this.packageName]} to ^${this.version}`); - parsed.dependencies[this.packageName] = `^${this.version}`; - return (0, json_stringify_1.jsonStringify)(parsed, content); - } -} -exports.SamplesPackageJson = SamplesPackageJson; -//# sourceMappingURL=samples-package-json.js.map - -/***/ }), - -/***/ 36275: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.DuneProject = void 0; -const logger_1 = __nccwpck_require__(68809); -const default_1 = __nccwpck_require__(69995); -/** - * Updates an OCaml dune-project file. - */ -class DuneProject extends default_1.DefaultUpdater { - /** - * Given initial file contents, return updated contents. - * @param {string} content The initial content - * @returns {string} The updated content - */ - updateContent(content, logger = logger_1.logger) { - const oldVersion = content.match(/^\(version ([A-Za-z0-9_\-+.~]+)\)$/m); - if (oldVersion) { - logger.info(`updating from ${oldVersion[1]} to ${this.version}`); - } - return content.replace(/^\(version ([A-Za-z0-9_\-+.~]+)\)$/m, `(version ${this.version})`); - } -} -exports.DuneProject = DuneProject; -//# sourceMappingURL=dune-project.js.map - -/***/ }), - -/***/ 38500: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.EsyJson = void 0; -const logger_1 = __nccwpck_require__(68809); -const json_stringify_1 = __nccwpck_require__(69227); -const default_1 = __nccwpck_require__(69995); -/** - * Updates an OCaml esy.json file. - */ -class EsyJson extends default_1.DefaultUpdater { - /** - * Given initial file contents, return updated contents. 
- * @param {string} content The initial content - * @returns {string} The updated content - */ - updateContent(content, logger = logger_1.logger) { - const parsed = JSON.parse(content); - logger.info(`updating from ${parsed.version} to ${this.version}`); - parsed.version = this.version.toString(); - return (0, json_stringify_1.jsonStringify)(parsed, content); - } -} -exports.EsyJson = EsyJson; -//# sourceMappingURL=esy-json.js.map - -/***/ }), - -/***/ 4401: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Opam = void 0; -const logger_1 = __nccwpck_require__(68809); -const default_1 = __nccwpck_require__(69995); -/** - * Updates an OCaml .opam file - */ -class Opam extends default_1.DefaultUpdater { - /** - * Given initial file contents, return updated contents. - * @param {string} content The initial content - * @returns {string} The updated content - */ - updateContent(content, logger = logger_1.logger) { - const oldVersion = content.match(/^version: "([A-Za-z0-9_\-+.~]+)"$/m); - if (oldVersion) { - logger.info(`updating from ${oldVersion[1]} to ${this.version}`); - } - return content.replace(/^version: "[A-Za-z0-9_\-+.~]+"$/m, `version: "${this.version}"`); - } -} -exports.Opam = Opam; -//# sourceMappingURL=opam.js.map - -/***/ }), - -/***/ 50005: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2019 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.PHPClientVersion = void 0; -const default_1 = __nccwpck_require__(69995); -/** - * Updates a php file that has a constant VERSION defined. - */ -class PHPClientVersion extends default_1.DefaultUpdater { - /** - * Given initial file contents, return updated contents. 
- * @param {string} content The initial content - * @returns {string} The updated content - */ - updateContent(content) { - return content.replace(/const VERSION = '[0-9]+\.[0-9]+\.[0-9]+'/, `const VERSION = '${this.version}'`); - } -} -exports.PHPClientVersion = PHPClientVersion; -//# sourceMappingURL=php-client-version.js.map - -/***/ }), - -/***/ 45175: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2019 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.RootComposerUpdatePackages = void 0; -const logger_1 = __nccwpck_require__(68809); -const json_stringify_1 = __nccwpck_require__(69227); -const default_1 = __nccwpck_require__(69995); -/** - * Updates a root composer.json - */ -class RootComposerUpdatePackages extends default_1.DefaultUpdater { - /** - * Given initial file contents, return updated contents. - * @param {string} content The initial content - * @returns {string} The updated content - */ - updateContent(content, logger = logger_1.logger) { - if (!this.version && (!this.versionsMap || this.versionsMap.size === 0)) { - logger.info('no updates necessary'); - return content; - } - const parsed = JSON.parse(content); - if (parsed['version']) { - const fromVersion = parsed['version']; - const toVersion = this.version.toString() || '1.0.0'; - parsed['version'] = toVersion; - logger.info(`updating "version" from ${fromVersion} to ${toVersion}`); - } - if (this.versionsMap) { - for (const [key, version] of this.versionsMap.entries()) { - const toVersion = version.toString() || '1.0.0'; - let fromVersion; - if (parsed.replace) { - fromVersion = parsed.replace[key]; - parsed.replace[key] = toVersion; - } - if (parsed[key]) { - fromVersion !== null && fromVersion !== void 0 ? fromVersion : (fromVersion = parsed[key]); - parsed[key] = toVersion; - } - logger.info(`updating ${key} from ${fromVersion} to ${toVersion}`); - } - } - return (0, json_stringify_1.jsonStringify)(parsed, content); - } -} -exports.RootComposerUpdatePackages = RootComposerUpdatePackages; -//# sourceMappingURL=root-composer-update-packages.js.map - -/***/ }), - -/***/ 89290: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
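For the composer.json updater above, a simplified standalone sketch of how the `version` field, `replace` entries, and matching top-level keys are rewritten (input values are made up for illustration; the real updater also logs each change and preserves the file's original indentation via jsonStringify):

function updateComposer(content, version, versionsMap) {
  const parsed = JSON.parse(content);
  if (parsed['version']) parsed['version'] = version;
  for (const [key, toVersion] of versionsMap.entries()) {
    if (parsed.replace && parsed.replace[key]) parsed.replace[key] = toVersion;
    if (parsed[key]) parsed[key] = toVersion;
  }
  return JSON.stringify(parsed);
}

console.log(updateComposer(
  '{"version":"2.0.0","replace":{"acme/widgets":"1.3.0"}}',
  '2.1.0',
  new Map([['acme/widgets', '1.4.0']])
));
// {"version":"2.1.0","replace":{"acme/widgets":"1.4.0"}}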
-Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.PyProjectToml = exports.parsePyProject = void 0; -const TOML = __nccwpck_require__(62901); -const logger_1 = __nccwpck_require__(68809); -const toml_edit_1 = __nccwpck_require__(30567); -const default_1 = __nccwpck_require__(69995); -function parsePyProject(content) { - return TOML.parse(content); -} -exports.parsePyProject = parsePyProject; -/** - * Updates a pyproject.toml file - */ -class PyProjectToml extends default_1.DefaultUpdater { - /** - * Given initial file contents, return updated contents. - * @param {string} content The initial content - * @returns {string} The updated content - */ - updateContent(content, logger = logger_1.logger) { - var _a; - const parsed = parsePyProject(content); - const project = parsed.project || ((_a = parsed.tool) === null || _a === void 0 ? void 0 : _a.poetry); - if (!(project === null || project === void 0 ? void 0 : project.version)) { - // Throw warning if the version is dynamically generated. - if ((project === null || project === void 0 ? void 0 : project.dynamic) && project.dynamic.includes('version')) { - const msg = "dynamic version found in 'pyproject.toml'. Skipping update."; - logger.warn(msg); - return content; - } - const msg = 'invalid file'; - logger.error(msg); - throw new Error(msg); - } - return (0, toml_edit_1.replaceTomlValue)(content, (parsed.project ? ['project'] : ['tool', 'poetry']).concat('version'), this.version.toString()); - } -} -exports.PyProjectToml = PyProjectToml; -//# sourceMappingURL=pyproject-toml.js.map - -/***/ }), - -/***/ 70464: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2019 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.PythonFileWithVersion = void 0; -const default_1 = __nccwpck_require__(69995); -/** - * Python file with a __version__ property (or attribute, or whatever). - */ -class PythonFileWithVersion extends default_1.DefaultUpdater { - /** - * Given initial file contents, return updated contents. - * @param {string} content The initial content - * @returns {string} The updated content - */ - updateContent(content) { - return content.replace(/(__version__ ?= ?["'])[0-9]+\.[0-9]+\.[0-9]+(?:-\w+)?(["'])/, `$1${this.version}$2`); - } -} -exports.PythonFileWithVersion = PythonFileWithVersion; -//# sourceMappingURL=python-file-with-version.js.map - -/***/ }), - -/***/ 40483: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2019 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.SetupCfg = void 0; -const default_1 = __nccwpck_require__(69995); -/** - * Updates a setup.cfg file - */ -class SetupCfg extends default_1.DefaultUpdater { - /** - * Given initial file contents, return updated contents. - * @param {string} content The initial content - * @returns {string} The updated content - */ - updateContent(content) { - return content.replace(/(version ?= ?)[0-9]+\.[0-9]+\.[0-9]+(?:-\w+)?/, `$1${this.version}`); - } -} -exports.SetupCfg = SetupCfg; -//# sourceMappingURL=setup-cfg.js.map - -/***/ }), - -/***/ 11519: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2019 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.SetupPy = void 0; -const default_1 = __nccwpck_require__(69995); -/** - * Updates a setup.py file. - */ -class SetupPy extends default_1.DefaultUpdater { - /** - * Given initial file contents, return updated contents. - * @param {string} content The initial content - * @returns {string} The updated content - */ - updateContent(content) { - return content.replace(/(version ?= ?["'])[0-9]+\.[0-9]+\.[0-9]+(?:-\w+)?(["'])/, `$1${this.version}$2`); - } -} -exports.SetupPy = SetupPy; -//# sourceMappingURL=setup-py.js.map - -/***/ }), - -/***/ 62648: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.RawContent = void 0; -/** - * This updater ignores previous content and writes the provided - * content verbatim. - */ -class RawContent { - /** - * Create a new RawContent instance - * @param {string} rawContent The raw content to set as the contents. - */ - constructor(rawContent) { - this.rawContent = rawContent; - } - /** - * Given initial file contents, return updated contents. 
- * @param {string} content The initial content - * @returns {string} The updated content - */ - updateContent(_content) { - return this.rawContent; - } -} -exports.RawContent = RawContent; -//# sourceMappingURL=raw-content.js.map - -/***/ }), - -/***/ 9817: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.ReleasePleaseManifest = void 0; -const json_stringify_1 = __nccwpck_require__(69227); -const default_1 = __nccwpck_require__(69995); -class ReleasePleaseManifest extends default_1.DefaultUpdater { - updateContent(content) { - const parsed = content ? JSON.parse(content) : {}; - for (const [path, version] of this.versionsMap) { - parsed[path] = version.toString(); - } - if (content) { - return (0, json_stringify_1.jsonStringify)(parsed, content); - } - else { - return JSON.stringify(parsed, null, 2); - } - } -} -exports.ReleasePleaseManifest = ReleasePleaseManifest; -//# sourceMappingURL=release-please-manifest.js.map - -/***/ }), - -/***/ 37022: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.resolveRubyGemfileLockVersion = exports.stringifyRubyVersion = exports.RUBY_VERSION_REGEX = void 0; -// Ruby gem semver strings using `.` seperator for prereleases rather then `-` -// See https://guides.rubygems.org/patterns/ -exports.RUBY_VERSION_REGEX = /((\d+).(\d)+.(\d+)(.\w+.*)?)/g; -/** - * Stringify a version to a ruby compatible version string - * - * @param version The version to stringify - * @param useDotPrePreleaseSeperator Use a `.` seperator for prereleases rather then `-` - * @returns a ruby compatible version string - */ -function stringifyRubyVersion(version, useDotPrePreleaseSeperator = false) { - if (!useDotPrePreleaseSeperator) { - return version.toString(); - } - return `${version.major}.${version.minor}.${version.patch}${version.preRelease ? 
`.${version.preRelease}` : ''}`; -} -exports.stringifyRubyVersion = stringifyRubyVersion; -/** - * This function mimics Gem::Version parsing of version semver strings - * - * @param versionString The version string to resolve - * @returns A Gem::Version compatible version string - */ -function resolveRubyGemfileLockVersion(versionString) { - // Replace `-` with `.pre.` as per ruby gem parsing - // See https://github.com/rubygems/rubygems/blob/master/lib/rubygems/version.rb#L229 - return versionString.replace(/-/g, '.pre.'); -} -exports.resolveRubyGemfileLockVersion = resolveRubyGemfileLockVersion; -//# sourceMappingURL=common.js.map - -/***/ }), - -/***/ 72221: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.GemfileLock = exports.buildGemfileLockVersionRegex = void 0; -const default_1 = __nccwpck_require__(69995); -const common_1 = __nccwpck_require__(37022); -/** - * Builds a regex matching a gem version in a Gemfile.lock file. - * @example - * rails (7.0.1) - * rails (7.0.1.alpha1) - */ -function buildGemfileLockVersionRegex(gemName) { - return new RegExp(`s*${gemName} \\(${common_1.RUBY_VERSION_REGEX.source}\\)`); -} -exports.buildGemfileLockVersionRegex = buildGemfileLockVersionRegex; -/** - * Updates a Gemfile.lock files which is expected to have a local path version string. - */ -class GemfileLock extends default_1.DefaultUpdater { - constructor(options) { - super(options); - this.gemName = options.gemName; - } - /** - * Given initial file contents, return updated contents. - * @param {string} content The initial content - * @returns {string} The updated content - */ - updateContent(content) { - if (!this.gemName) { - return content; - } - // Bundler will convert 1.0.0-alpha1 to 1.0.0.pre.alpha1, so we need to - // do the same here. - const versionString = (0, common_1.resolveRubyGemfileLockVersion)(this.version.toString()); - return content.replace(buildGemfileLockVersionRegex(this.gemName), `${this.gemName} (${versionString})`); - } -} -exports.GemfileLock = GemfileLock; -//# sourceMappingURL=gemfile-lock.js.map - -/***/ }), - -/***/ 46979: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2019 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
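The Ruby version helpers above can be exercised standalone: `resolveRubyGemfileLockVersion` mimics Gem::Version's rewrite of `-` to `.pre.`, and the dot-separated prerelease form is what `stringifyRubyVersion` produces when asked. A short check, assuming a version object shaped like release-please's Version ({major, minor, patch, preRelease}):

const resolveRubyGemfileLockVersion = s => s.replace(/-/g, '.pre.');
console.log(resolveRubyGemfileLockVersion('1.0.0-alpha1')); // '1.0.0.pre.alpha1'

// the dot-separator branch of stringifyRubyVersion, inlined:
const v = {major: 7, minor: 0, patch: 1, preRelease: 'beta2'};
console.log(`${v.major}.${v.minor}.${v.patch}${v.preRelease ? `.${v.preRelease}` : ''}`);
// '7.0.1.beta2'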
-Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.VersionRB = void 0; -const default_1 = __nccwpck_require__(69995); -const common_1 = __nccwpck_require__(37022); -const RUBY_VERSION_RB_REGEX = new RegExp(`(["'])(${common_1.RUBY_VERSION_REGEX.source})(["'])`); -/** - * Updates a versions.rb file which is expected to have a version string. - */ -class VersionRB extends default_1.DefaultUpdater { - /** - * Given initial file contents, return updated contents. - * @param {string} content The initial content - * @returns {string} The updated content - */ - updateContent(content) { - return content.replace(RUBY_VERSION_RB_REGEX, `$1${(0, common_1.stringifyRubyVersion)(this.version)}$1`); - } -} -exports.VersionRB = VersionRB; -//# sourceMappingURL=version-rb.js.map - -/***/ }), - -/***/ 68875: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.CargoLock = void 0; -const toml_edit_1 = __nccwpck_require__(30567); -const common_1 = __nccwpck_require__(11659); -const logger_1 = __nccwpck_require__(68809); -/** - * Updates `Cargo.lock` lockfiles, preserving formatting and comments. - */ -class CargoLock { - constructor(versionsMap) { - this.versionsMap = versionsMap; - } - /** - * Given initial file contents, return updated contents. - * @param {string} content The initial content - * @returns {string} The updated content - */ - updateContent(content, logger = logger_1.logger) { - let payload = content; - const parsed = (0, common_1.parseCargoLockfile)(payload); - if (!parsed.package) { - logger.error('is not a Cargo lockfile'); - throw new Error('is not a Cargo lockfile'); - } - // n.b for `replaceTomlString`, we need to keep track of the index - // (position) of the package we're considering. - for (let i = 0; i < parsed.package.length; i++) { - const pkg = parsed.package[i]; - if (!pkg.name) { - // all `[[package]]` entries should have a name, - // but if they don't, ignore them silently. - continue; // to next package - } - const nextVersion = this.versionsMap.get(pkg.name); - if (!nextVersion) { - // this package is not upgraded. - continue; // to next package - } - // note: in ECMAScript, using strings to index arrays is perfectly valid, - // which is lucky because `replaceTomlString` expect "all strings" in its - // `path` argument. 
- const packageIndex = i.toString(); - logger.info(`updating ${pkg.name} in`); - payload = (0, toml_edit_1.replaceTomlValue)(payload, ['package', packageIndex, 'version'], nextVersion.toString()); - } - return payload; - } -} -exports.CargoLock = CargoLock; -//# sourceMappingURL=cargo-lock.js.map - -/***/ }), - -/***/ 90420: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.CargoToml = void 0; -const toml_edit_1 = __nccwpck_require__(30567); -const common_1 = __nccwpck_require__(11659); -const logger_1 = __nccwpck_require__(68809); -const default_1 = __nccwpck_require__(69995); -/** - * Updates `Cargo.toml` manifests, preserving formatting and comments. - */ -class CargoToml extends default_1.DefaultUpdater { - /** - * Given initial file contents, return updated contents. - * @param {string} content The initial content - * @returns {string} The updated content - */ - updateContent(content, logger = logger_1.logger) { - let payload = content; - if (!this.versionsMap) { - throw new Error('updateContent called with no versions'); - } - const parsed = (0, common_1.parseCargoManifest)(payload); - if (!parsed.package) { - const msg = 'is not a package manifest (might be a cargo workspace)'; - logger.error(msg); - throw new Error(msg); - } - payload = (0, toml_edit_1.replaceTomlValue)(payload, ['package', 'version'], this.version.toString()); - for (const [pkgName, pkgVersion] of this.versionsMap) { - for (const depKind of common_1.DEP_KINDS) { - const deps = parsed[depKind]; - if (!deps) { - continue; // to next depKind - } - if (!deps[pkgName]) { - continue; // to next depKind - } - const dep = deps[pkgName]; - if (typeof dep === 'string' || typeof dep.path === 'undefined') { - logger.info(`skipping ${depKind}.${pkgName} (no path set)`); - continue; // to next depKind - } - if (typeof dep.version === 'undefined') { - logger.info(`skipping ${depKind}.${pkgName} (no version set)`); - continue; // to next depKind - } - logger.info(`updating ${depKind}.${pkgName} from ${dep.version} to ${pkgVersion}`); - payload = (0, toml_edit_1.replaceTomlValue)(payload, [depKind, pkgName, 'version'], pkgVersion.toString()); - } - // Update platform-specific dependencies - if (parsed.target) { - for (const targetName of Object.keys(parsed.target)) { - for (const depKind of common_1.DEP_KINDS) { - const deps = parsed.target[targetName][depKind]; - if (!deps) { - continue; // to next depKind - } - if (!deps[pkgName]) { - continue; // to next depKind - } - const dep = deps[pkgName]; - if (typeof dep === 'string' || typeof dep.path === 'undefined') { - logger.info(`skipping target.${targetName}.${depKind}.${pkgName} in`); - continue; // to next depKind - } - logger.info(`updating target.${targetName}.${depKind}.${pkgName} from ${dep.version} to ${pkgVersion}`); - payload = (0, toml_edit_1.replaceTomlValue)(payload, 
['target', targetName, depKind, pkgName, 'version'], pkgVersion.toString()); - } - } - } - } - return payload; - } -} -exports.CargoToml = CargoToml; -//# sourceMappingURL=cargo-toml.js.map - -/***/ }), - -/***/ 11659: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.parseCargoLockfile = exports.parseCargoManifest = exports.DEP_KINDS = void 0; -const TOML = __nccwpck_require__(62901); -/** - * All possible dependency kinds for `CargoManifest`, - * typed properly. - */ -exports.DEP_KINDS = [ - 'dependencies', - 'dev-dependencies', - 'build-dependencies', -]; -function parseCargoManifest(content) { - return TOML.parse(content); -} -exports.parseCargoManifest = parseCargoManifest; -function parseCargoLockfile(content) { - return TOML.parse(content); -} -exports.parseCargoLockfile = parseCargoLockfile; -//# sourceMappingURL=common.js.map - -/***/ }), - -/***/ 15688: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2023 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.SfdxProjectJson = void 0; -const json_stringify_1 = __nccwpck_require__(69227); -const logger_1 = __nccwpck_require__(68809); -const default_1 = __nccwpck_require__(69995); -/** - * This updates a sfdx sfdx-project.json file's main version. - */ -class SfdxProjectJson extends default_1.DefaultUpdater { - /** - * Given initial file contents, return updated contents. 
- * @param {string} content The initial content - * @returns {string} The updated content - */ - updateContent(content, logger = logger_1.logger) { - const parsed = JSON.parse(content); - for (const packDir of parsed.packageDirectories) { - if (packDir.default) { - logger.info(`updating from ${packDir.versionNumber} to ${this.version}`); - packDir.versionNumber = `${this.version.toString()}.NEXT`; - } - } - return (0, json_stringify_1.jsonStringify)(parsed, content); - } -} -exports.SfdxProjectJson = SfdxProjectJson; -//# sourceMappingURL=sfdx-project-json.js.map - -/***/ }), - -/***/ 54957: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2023 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.MetadataVersion = void 0; -const logger_1 = __nccwpck_require__(68809); -const default_1 = __nccwpck_require__(69995); -/** - * Updates a Terraform metadata.yaml or metadata.display.yaml file(s). - */ -class MetadataVersion extends default_1.DefaultUpdater { - /** - * Given initial file contents, return updated contents. - * @param {string} content The initial content - * @returns {string} The updated content - */ - updateContent(content, logger = logger_1.logger) { - const oldVersion = content.match(/version: [0-9]+\.[0-9]+\.[0-9]+(-\w+)?/); - if (oldVersion) { - logger.info(`updating from ${oldVersion} to v${this.version}`); - } - return content.replace(/version: [0-9]+\.[0-9]+\.[0-9]+(-\w+)?/g, `version: ${this.version}`); - } -} -exports.MetadataVersion = MetadataVersion; -//# sourceMappingURL=metadata-version.js.map - -/***/ }), - -/***/ 19696: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2020 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.ModuleVersion = void 0; -const logger_1 = __nccwpck_require__(68809); -const default_1 = __nccwpck_require__(69995); -/** - * Updates a Terraform Module versions.tf file. - */ -class ModuleVersion extends default_1.DefaultUpdater { - /** - * Given initial file contents, return updated contents. 
- * @param {string} content The initial content - * @returns {string} The updated content - */ - updateContent(content, logger = logger_1.logger) { - const oldVersion = content.match(/v[0-9]+\.[0-9]+\.[0-9]+(-\w+)?/); - if (oldVersion) { - logger.info(`updating from ${oldVersion} to v${this.version}`); - } - return content.replace(/v[0-9]+\.[0-9]+\.[0-9]+(-\w+)?/g, `v${this.version}`); - } -} -exports.ModuleVersion = ModuleVersion; -//# sourceMappingURL=module-version.js.map - -/***/ }), - -/***/ 4996: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2020 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.ReadMe = void 0; -const default_1 = __nccwpck_require__(69995); -/** - * Updates a Terraform module's README. - */ -class ReadMe extends default_1.DefaultUpdater { - /** - * Given initial file contents, return updated contents. - * @param {string} content The initial content - * @returns {string} The updated content - */ - updateContent(content) { - return content.replace(/version = "~> [\d]+.[\d]+"/, `version = "~> ${this.version.major}.${this.version.minor}"`); - } -} -exports.ReadMe = ReadMe; -//# sourceMappingURL=readme.js.map - -/***/ }), - -/***/ 16344: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.BranchName = void 0; -const version_1 = __nccwpck_require__(17348); -const logger_1 = __nccwpck_require__(68809); -// cannot import from '..' - transpiled code references to RELEASE_PLEASE -// at the script level are undefined, they are only defined inside function -// or instance methods/properties. 
-// import {RELEASE_PLEASE} from '../constants';
-const RELEASE_PLEASE = 'release-please';
-function getAllResourceNames() {
-    return [
-        AutoreleaseBranchName,
-        ComponentBranchName,
-        GroupBranchName,
-        DefaultBranchName,
-        V12ComponentBranchName,
-        V12DefaultBranchName,
-    ];
-}
-class BranchName {
-    static parse(branchName, logger = logger_1.logger) {
-        try {
-            const branchNameClass = getAllResourceNames().find(clazz => {
-                return clazz.matches(branchName);
-            });
-            if (!branchNameClass) {
-                return undefined;
-            }
-            return new branchNameClass(branchName);
-        }
-        catch (e) {
-            logger.warn(`Error parsing branch name: ${branchName}`, e);
-            return undefined;
-        }
-    }
-    static ofComponentVersion(branchPrefix, version) {
-        return new AutoreleaseBranchName(`release-${branchPrefix}-v${version}`);
-    }
-    static ofVersion(version) {
-        return new AutoreleaseBranchName(`release-v${version}`);
-    }
-    static ofTargetBranch(targetBranch) {
-        return new DefaultBranchName(`${RELEASE_PLEASE}--branches--${targetBranch}`);
-    }
-    static ofComponentTargetBranch(component, targetBranch) {
-        return new ComponentBranchName(`${RELEASE_PLEASE}--branches--${targetBranch}--components--${component}`);
-    }
-    static ofGroupTargetBranch(group, targetBranch) {
-        return new GroupBranchName(`${RELEASE_PLEASE}--branches--${targetBranch}--groups--${safeBranchName(group)}`);
-    }
-    constructor(_branchName) { }
-    static matches(_branchName) {
-        return false;
-    }
-    getTargetBranch() {
-        return this.targetBranch;
-    }
-    getComponent() {
-        return this.component;
-    }
-    getVersion() {
-        return this.version;
-    }
-    toString() {
-        return '';
-    }
-}
-exports.BranchName = BranchName;
-/**
- * This is the legacy branch pattern used by releasetool
- *
- * @see https://github.com/googleapis/releasetool
- */
-const AUTORELEASE_PATTERN = /^release-?(?<component>[\w-.]*)?-v(?<version>[0-9].*)$/;
-const RELEASE_PLEASE_BRANCH_PREFIX = 'release-please--branches';
-class AutoreleaseBranchName extends BranchName {
-    static matches(branchName) {
-        if (branchName.startsWith(RELEASE_PLEASE_BRANCH_PREFIX)) {
-            return false;
-        }
-        return !!branchName.match(AUTORELEASE_PATTERN);
-    }
-    constructor(branchName) {
-        super(branchName);
-        const match = branchName.match(AUTORELEASE_PATTERN);
-        if (match === null || match === void 0 ? void 0 : match.groups) {
-            this.component = match.groups['component'];
-            this.version = version_1.Version.parse(match.groups['version']);
-        }
-    }
-    toString() {
-        var _a, _b;
-        if (this.component) {
-            return `release-${this.component}-v${(_a = this.version) === null || _a === void 0 ? void 0 : _a.toString()}`;
-        }
-        return `release-v${(_b = this.version) === null || _b === void 0 ? void 0 : _b.toString()}`;
-    }
-}
-/**
- * This is a parsable branch pattern used by release-please v12.
- * It has potential issues due to git treating `/` like directories.
- * This should be removed at some point in the future.
- *
- * @see https://github.com/googleapis/release-please/issues/1024
- */
-const V12_DEFAULT_PATTERN = `^${RELEASE_PLEASE}/branches/(?<branch>[^/]+)$`;
-class V12DefaultBranchName extends BranchName {
-    static matches(branchName) {
-        return !!branchName.match(V12_DEFAULT_PATTERN);
-    }
-    constructor(branchName) {
-        super(branchName);
-        const match = branchName.match(V12_DEFAULT_PATTERN);
-        if (match === null || match === void 0 ? void 0 : match.groups) {
-            this.targetBranch = match.groups['branch'];
-        }
-    }
-    toString() {
-        return `${RELEASE_PLEASE}/branches/${this.targetBranch}`;
-    }
-}
-/**
- * This is a parsable branch pattern used by release-please v12.
- * It has potential issues due to git treating `/` like directories.
- * This should be removed at some point in the future.
- *
- * @see https://github.com/googleapis/release-please/issues/1024
- */
-const V12_COMPONENT_PATTERN = `^${RELEASE_PLEASE}/branches/(?<branch>[^/]+)/components/(?<component>.+)$`;
-class V12ComponentBranchName extends BranchName {
-    static matches(branchName) {
-        return !!branchName.match(V12_COMPONENT_PATTERN);
-    }
-    constructor(branchName) {
-        super(branchName);
-        const match = branchName.match(V12_COMPONENT_PATTERN);
-        if (match === null || match === void 0 ? void 0 : match.groups) {
-            this.targetBranch = match.groups['branch'];
-            this.component = match.groups['component'];
-        }
-    }
-    toString() {
-        return `${RELEASE_PLEASE}/branches/${this.targetBranch}/components/${this.component}`;
-    }
-}
-const DEFAULT_PATTERN = `^${RELEASE_PLEASE}--branches--(?<branch>.+)$`;
-class DefaultBranchName extends BranchName {
-    static matches(branchName) {
-        return !!branchName.match(DEFAULT_PATTERN);
-    }
-    constructor(branchName) {
-        super(branchName);
-        const match = branchName.match(DEFAULT_PATTERN);
-        if (match === null || match === void 0 ? void 0 : match.groups) {
-            this.targetBranch = match.groups['branch'];
-        }
-    }
-    toString() {
-        return `${RELEASE_PLEASE}--branches--${this.targetBranch}`;
-    }
-}
-const COMPONENT_PATTERN = `^${RELEASE_PLEASE}--branches--(?<branch>.+)--components--(?<component>.+)$`;
-class ComponentBranchName extends BranchName {
-    static matches(branchName) {
-        return !!branchName.match(COMPONENT_PATTERN);
-    }
-    constructor(branchName) {
-        super(branchName);
-        const match = branchName.match(COMPONENT_PATTERN);
-        if (match === null || match === void 0 ? void 0 : match.groups) {
-            this.targetBranch = match.groups['branch'];
-            this.component = match.groups['component'];
-        }
-    }
-    toString() {
-        return `${RELEASE_PLEASE}--branches--${this.targetBranch}--components--${this.component}`;
-    }
-}
-const GROUP_PATTERN = `^${RELEASE_PLEASE}--branches--(?<branch>.+)--groups--(?<group>.+)$`;
-class GroupBranchName extends BranchName {
-    static matches(branchName) {
-        return !!branchName.match(GROUP_PATTERN);
-    }
-    constructor(branchName) {
-        super(branchName);
-        const match = branchName.match(GROUP_PATTERN);
-        if (match === null || match === void 0 ? void 0 : match.groups) {
-            this.targetBranch = match.groups['branch'];
-            this.component = match.groups['group'];
-        }
-    }
-    toString() {
-        return `${RELEASE_PLEASE}--branches--${this.targetBranch}--groups--${this.component}`;
-    }
-}
-function safeBranchName(branchName) {
-    // convert disallowed characters in branch names, replacing them with '-'.
-    // replace multiple consecutive '-' with a single '-' to avoid interfering with
-    // our regexes for parsing the branch names
-    return branchName.replace(/[^\w\d]/g, '-').replace(/-+/g, '-');
-}
-//# sourceMappingURL=branch-name.js.map
-
-/***/ }),
-
-/***/ 14702:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
-
-"use strict";
-
-// Copyright 2023 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
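A quick round-trip check of the branch-name patterns above (the branch names are made up; the group names come from the restored regexes):

const COMPONENT_PATTERN = /^release-please--branches--(?<branch>.+)--components--(?<component>.+)$/;
const m = 'release-please--branches--main--components--storage'.match(COMPONENT_PATTERN);
console.log(m.groups.branch, m.groups.component); // 'main' 'storage'

// The legacy releasetool pattern still parses old-style release branches:
const AUTORELEASE_PATTERN = /^release-?(?<component>[\w-.]*)?-v(?<version>[0-9].*)$/;
const legacy = 'release-storage-v1.2.3'.match(AUTORELEASE_PATTERN);
console.log(legacy.groups.component, legacy.groups.version); // 'storage' '1.2.3'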
-Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.CommitExclude = void 0; -const manifest_1 = __nccwpck_require__(31999); -const commit_utils_1 = __nccwpck_require__(16828); -class CommitExclude { - constructor(config) { - this.excludePaths = {}; - Object.entries(config).forEach(([path, releaseConfig]) => { - if (releaseConfig.excludePaths) { - this.excludePaths[path] = (0, commit_utils_1.normalizePaths)(releaseConfig.excludePaths); - } - }); - } - excludeCommits(commitsPerPath) { - const filteredCommitsPerPath = {}; - Object.entries(commitsPerPath).forEach(([path, commits]) => { - if (this.excludePaths[path]) { - commits = commits.filter(commit => this.shouldInclude(commit, this.excludePaths[path], path)); - } - filteredCommitsPerPath[path] = commits; - }); - return filteredCommitsPerPath; - } - shouldInclude(commit, excludePaths, packagePath) { - return (!commit.files || - !commit.files - .filter(file => this.isRelevant(file, packagePath)) - .every(file => excludePaths.some(path => this.isRelevant(file, path)))); - } - isRelevant(file, path) { - return path === manifest_1.ROOT_PROJECT_PATH || file.indexOf(`${path}/`) === 0; - } -} -exports.CommitExclude = CommitExclude; -//# sourceMappingURL=commit-exclude.js.map - -/***/ }), - -/***/ 6941: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2019 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.CommitSplit = void 0; -const manifest_1 = __nccwpck_require__(31999); -const commit_utils_1 = __nccwpck_require__(16828); -/** - * Helper class for splitting commits by component path. If `packagePaths` - * is configured, then only consider the provided paths. If `includeEmpty` - * is configured, then commits without any touched files apply to all - * configured component paths. - */ -class CommitSplit { - constructor(opts) { - opts = opts || {}; - this.includeEmpty = !!opts.includeEmpty; - if (opts.packagePaths) { - const paths = (0, commit_utils_1.normalizePaths)(opts.packagePaths); - this.packagePaths = paths - .filter(path => { - // The special "." path, representing the root of the module, should be - // ignored by commit-split as it is assigned all commits in manifest.ts - return path !== manifest_1.ROOT_PROJECT_PATH; - }) - .sort((a, b) => b.length - a.length); // sort by longest paths first - } - } - /** - * Split commits by component path. If the commit splitter is configured - * with a set of tracked package paths, then only consider paths for - * configured components. If `includeEmpty` is configured, then a commit - * that does not touch any files will be applied to all components' - * commits. - * @param {Commit[]} commits The commits to split - * @returns {Record} Commits indexed by component path - */ - split(commits) { - const splitCommits = {}; - commits.forEach(commit => { - if (commit.files === undefined) { - throw new Error(`Commit ${commit.sha} is missing files. 
Did you set "backfillFiles" to "true"?`); - } - const dedupe = new Set(); - for (let i = 0; i < commit.files.length; i++) { - const file = commit.files[i]; - // NOTE: GitHub API always returns paths using the `/` separator, - // regardless of what platform the client code is running on - const splitPath = file.split('/'); - // indicates that we have a top-level file and not a folder - // in this edge-case we should not attempt to update the path. - if (splitPath.length === 1) - continue; - let pkgName; - if (this.packagePaths) { - // only track paths under this.packagePaths - pkgName = this.packagePaths.find(p => file.indexOf(`${p}/`) === 0); - } - else { - // track paths by top level folder - pkgName = splitPath[0]; - } - if (!pkgName || dedupe.has(pkgName)) - continue; - else - dedupe.add(pkgName); - if (!splitCommits[pkgName]) - splitCommits[pkgName] = []; - splitCommits[pkgName].push(commit); - } - if (commit.files.length === 0 && this.includeEmpty) { - if (this.packagePaths) { - for (const pkgName of this.packagePaths) { - splitCommits[pkgName] = splitCommits[pkgName] || []; - splitCommits[pkgName].push(commit); - } - } - else { - for (const pkgName in splitCommits) { - splitCommits[pkgName].push(commit); - } - } - } - }); - return splitCommits; - } -} -exports.CommitSplit = CommitSplit; -//# sourceMappingURL=commit-split.js.map - -/***/ }), - -/***/ 16828: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -// Copyright 2023 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.normalizePaths = void 0; -const normalizePaths = (paths) => { - return paths.map(path => { - // normalize so that all paths have leading and trailing slashes for - // non-overlap validation. - // NOTE: GitHub API always returns paths using the `/` separator, - // regardless of what platform the client code is running on - let newPath = path.replace(/\/$/, ''); - newPath = newPath.replace(/^\//, ''); - newPath = newPath.replace(/$/, '/'); - newPath = newPath.replace(/^/, '/'); - // store them with leading and trailing slashes removed. - newPath = newPath.replace(/\/$/, ''); - newPath = newPath.replace(/^\//, ''); - return newPath; - }); -}; -exports.normalizePaths = normalizePaths; -//# sourceMappingURL=commit-utils.js.map - -/***/ }), - -/***/ 26498: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -// Copyright 2023 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.filterCommits = void 0; -const BREAKING_CHANGE_NOTE = 'BREAKING CHANGE'; -const DEFAULT_CHANGELOG_SECTIONS = [ - { type: 'feat', section: 'Features' }, - { type: 'fix', section: 'Bug Fixes' }, - { type: 'perf', section: 'Performance Improvements' }, - { type: 'revert', section: 'Reverts' }, - { type: 'chore', section: 'Miscellaneous Chores', hidden: true }, - { type: 'docs', section: 'Documentation', hidden: true }, - { type: 'style', section: 'Styles', hidden: true }, - { type: 'refactor', section: 'Code Refactoring', hidden: true }, - { type: 'test', section: 'Tests', hidden: true }, - { type: 'build', section: 'Build System', hidden: true }, - { type: 'ci', section: 'Continuous Integration', hidden: true }, -]; -/** - * Given a set of conventional commits and the configured - * changelog sections provided by the user, return the set - * of commits that should be displayed: - * - * @param commits - * @param changelogSections - * @returns ConventionalCommit[] - */ -function filterCommits(commits, changelogSections) { - changelogSections = changelogSections !== null && changelogSections !== void 0 ? changelogSections : DEFAULT_CHANGELOG_SECTIONS; - const hiddenSections = []; - const visibleSections = []; - for (const section of changelogSections) { - if (!section.hidden) - visibleSections.push(section.type); - else - hiddenSections.push(section.type); - } - return commits.filter(commit => { - const isBreaking = commit.notes.find(note => { - return note.title === BREAKING_CHANGE_NOTE; - }); - return (visibleSections.includes(commit.type) || - (isBreaking && hiddenSections.includes(commit.type))); - }); -} -exports.filterCommits = filterCommits; -//# sourceMappingURL=filter-commits.js.map - -/***/ }), - -/***/ 13170: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -// Copyright 2019 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.indentCommit = void 0; -function indentCommit(commit) { - const reduced = []; - let inList = false; - commit.message.split(/\r?\n/).forEach((line, i) => { - if (i !== 0) - line = ` ${line}`; - else - reduced.push(line); - if (/^\s*\*/.test(line)) { - inList = true; - reduced.push(line); - } - else if (/^ +[\w]/.test(line) && inList) { - reduced[reduced.length - 1] = `${reduced[reduced.length - 1]}\n${line}`; - } - else { - inList = false; - } - }); - return reduced.join('\n'); -} -exports.indentCommit = indentCommit; -//# sourceMappingURL=indent-commit.js.map - -/***/ }), - -/***/ 69227: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.jsonStringify = void 0; -const detectIndent = __nccwpck_require__(83084); -function jsonStringify(parsed, content, replacer) { - return `${content.slice(0, content.indexOf('{'))}${JSON.stringify(parsed, replacer, detectIndent(content.trim()).indent)}${content.slice(content.lastIndexOf('}') + 1)}`; -} -exports.jsonStringify = jsonStringify; -//# sourceMappingURL=json-stringify.js.map - -/***/ }), - -/***/ 68809: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.setLogger = exports.logger = exports.CheckpointLogger = void 0; -const chalk = __nccwpck_require__(78818); -const figures = __nccwpck_require__(57099); -const errorPrefix = chalk.red(figures.cross); -const warnPrefix = chalk.yellow(figures.warning); -const infoPrefix = chalk.green(figures.tick); -const debugPrefix = chalk.gray(figures.pointer); -const tracePrefix = chalk.dim.gray(figures.pointerSmall); -class CheckpointLogger { - constructor(includeDebug = false, includeTrace = false) { - this.error = (...args) => { - console.error(`${errorPrefix}`, ...args); - }; - this.warn = (...args) => { - console.warn(`${warnPrefix}`, ...args); - }; - this.info = (...args) => { - console.info(`${infoPrefix}`, ...args); - }; - this.debug = (...args) => { - if (this.includeDebug) - console.debug(`${debugPrefix}`, ...args); - }; - this.trace = (...args) => { - if (this.includeTrace) - console.debug(`${tracePrefix}`, ...args); - }; - this.includeDebug = includeDebug; - this.includeTrace = includeTrace; - } -} -exports.CheckpointLogger = CheckpointLogger; -/* eslint-enable @typescript-eslint/no-explicit-any */ -exports.logger = new CheckpointLogger(true); -function setLogger(userLogger) { - exports.logger = userLogger; -} -exports.setLogger = setLogger; -//# sourceMappingURL=logger.js.map - -/***/ }), - -/***/ 70774: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.PullRequestBody = void 0; -const logger_1 = __nccwpck_require__(68809); -const node_html_parser_1 = __nccwpck_require__(14363); -const version_1 = __nccwpck_require__(17348); -const DEFAULT_HEADER = ':robot: I have created a release *beep* *boop*'; -const DEFAULT_FOOTER = 'This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please).'; -const NOTES_DELIMITER = '---'; -class PullRequestBody { - constructor(releaseData, options) { - var _a; - this.header = (options === null || options === void 0 ? void 0 : options.header) || DEFAULT_HEADER; - this.footer = (options === null || options === void 0 ? void 0 : options.footer) || DEFAULT_FOOTER; - this.extra = options === null || options === void 0 ? void 0 : options.extra; - this.releaseData = releaseData; - this.useComponents = (_a = options === null || options === void 0 ? void 0 : options.useComponents) !== null && _a !== void 0 ? _a : this.releaseData.length > 1; - } - static parse(body, logger = logger_1.logger) { - const parts = splitBody(body); - if (!parts) { - logger.error('Pull request body did not match'); - return undefined; - } - let data = extractMultipleReleases(parts.content, logger); - let useComponents = true; - if (data.length === 0) { - data = extractSingleRelease(parts.content, logger); - useComponents = false; - if (data.length === 0) { - logger.warn('Failed to parse releases.'); - } - } - return new PullRequestBody(data, { - header: parts.header, - footer: parts.footer, - useComponents, - }); - } - notes() { - if (this.useComponents) { - return this.releaseData - .map(release => { - var _a; - return `
<details><summary>${release.component ? `${release.component}: ` : ''}${(_a = release.version) === null || _a === void 0 ? void 0 : _a.toString()}</summary>\n\n${release.notes}\n</details>
`;
- })
- .join('\n\n');
- }
- return this.releaseData.map(release => release.notes).join('\n\n');
- }
- toString() {
- const notes = this.notes();
- return `${this.header}
-${NOTES_DELIMITER}
-
-
-${notes}
-
-${NOTES_DELIMITER}${this.extra ? `\n\n${this.extra}\n` : ''}
-${this.footer}`;
- }
-}
-exports.PullRequestBody = PullRequestBody;
-function splitBody(body) {
- const lines = body.trim().replace(/\r\n/g, '\n').split('\n');
- const index = lines.indexOf(NOTES_DELIMITER);
- if (index === -1) {
- return undefined;
- }
- let lastIndex = lines.lastIndexOf(NOTES_DELIMITER);
- if (lastIndex === index) {
- lastIndex = lines.length - 1;
- }
- const header = lines.slice(0, index).join('\n').trim();
- const content = lines.slice(index + 1, lastIndex).join('\n');
- const footer = lines.slice(lastIndex + 1).join('\n');
- return {
- header,
- footer,
- content,
- };
-}
-const SUMMARY_PATTERN = /^(?<component>.*[^:]):? (?<version>\d+\.\d+\.\d+.*)$/;
-const COMPONENTLESS_SUMMARY_PATTERN = /^(?<version>\d+\.\d+\.\d+.*)$/;
-function extractMultipleReleases(notes, logger) {
- const data = [];
- const root = (0, node_html_parser_1.parse)(notes);
- for (const detail of root.getElementsByTagName('details')) {
- const summaryNode = detail.getElementsByTagName('summary')[0];
- const summary = summaryNode === null || summaryNode === void 0 ? void 0 : summaryNode.textContent;
- const match = summary.match(SUMMARY_PATTERN);
- if (match === null || match === void 0 ? void 0 : match.groups) {
- detail.removeChild(summaryNode);
- const notes = detail.textContent.trim();
- data.push({
- component: match.groups.component,
- version: version_1.Version.parse(match.groups.version),
- notes,
- });
- }
- else {
- const componentlessMatch = summary.match(COMPONENTLESS_SUMMARY_PATTERN);
- if (!(componentlessMatch === null || componentlessMatch === void 0 ? void 0 : componentlessMatch.groups)) {
- logger.warn(`Summary: ${summary} did not match the expected pattern`);
- continue;
- }
- detail.removeChild(summaryNode);
- const notes = detail.textContent.trim();
- data.push({
- version: version_1.Version.parse(componentlessMatch.groups.version),
- notes,
- });
- }
- }
- return data;
-}
-const COMPARE_REGEX = /^#{2,} \[?(?<version>\d+\.\d+\.\d+[^\]]*)\]?/;
-function extractSingleRelease(body, logger) {
- var _a;
- body = body.trim();
- const match = body.match(COMPARE_REGEX);
- const versionString = (_a = match === null || match === void 0 ? void 0 : match.groups) === null || _a === void 0 ? void 0 : _a.version;
- if (!versionString) {
- logger.warn('Failed to find version in release notes');
- return [];
- }
- return [
- {
- version: version_1.Version.parse(versionString),
- notes: body,
- },
- ];
-}
-//# sourceMappingURL=pull-request-body.js.map
-
-/***/ }),
-
-/***/ 93937:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
-
-"use strict";
-
-// Copyright 2022 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
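-// A minimal round-trip sketch of the PullRequestBody class above (the release
-// data is hypothetical):
-//
-//   const body = new PullRequestBody([
-//     { component: 'my-lib', version: version_1.Version.parse('1.2.3'), notes: '...' },
-//   ]);
-//   const text = body.toString();               // header, '---', notes, '---', footer
-//   const parsed = PullRequestBody.parse(text); // splitBody() splits it back into header/notes/footer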
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.FilePullRequestOverflowHandler = void 0;
-const pull_request_body_1 = __nccwpck_require__(70774);
-const logger_1 = __nccwpck_require__(68809);
-const url_1 = __nccwpck_require__(57310);
-const MAX_ISSUE_BODY_SIZE = 65536;
-const OVERFLOW_MESSAGE = 'This release is too large to preview in the pull request body. View the full release notes here:';
-const OVERFLOW_MESSAGE_REGEX = new RegExp(`${OVERFLOW_MESSAGE} (?<url>.*)`);
-const RELEASE_NOTES_FILENAME = 'release-notes.md';
-const FILE_PATH_REGEX = new RegExp(`blob/(?<branchName>.*)/${RELEASE_NOTES_FILENAME}`);
-/**
- * This implementation of PullRequestOverflowHandler stores the full release
- * notes on a new git branch. The branch name is derived from the head branch
- * name of the release pull request.
- */
-class FilePullRequestOverflowHandler {
- constructor(github, logger = logger_1.logger) {
- this.github = github;
- this.logger = logger;
- }
- /**
- * Optionally store the full release notes into `release-notes.md` file
- * on a new branch if they do not fit into the body of a pull request.
- *
- * The new release notes will have a link to the GitHub UI for that file
- * which should render the release notes nicely.
- * @param {ReleasePullRequest} pullRequest The candidate release pull request
- * @returns {string} The new pull request body which contains a link to
- * the full content.
- */
- async handleOverflow(pullRequest, maxSize = MAX_ISSUE_BODY_SIZE) {
- const notes = pullRequest.body.toString();
- if (notes.length > maxSize) {
- const notesBranchName = `${pullRequest.headRefName}--release-notes`;
- const url = await this.github.createFileOnNewBranch(RELEASE_NOTES_FILENAME, notes, notesBranchName, this.github.repository.defaultBranch);
- return `${OVERFLOW_MESSAGE} ${url}`;
- }
- return notes;
- }
- /**
- * Given a pull request, retrieve the full release notes from the stored
- * file if the body was too big to store in the pull request body.
- * @param {PullRequest} pullRequest The pull request from GitHub
- * @return {PullRequestBody} The parsed pull request body
- */
- async parseOverflow(pullRequest) {
- var _a, _b;
- const match = pullRequest.body.match(OVERFLOW_MESSAGE_REGEX);
- if ((_a = match === null || match === void 0 ? void 0 : match.groups) === null || _a === void 0 ? void 0 : _a.url) {
- this.logger.info(`Pull request body overflows, parsing full body from: ${match.groups.url}`);
- const url = new url_1.URL(match.groups.url);
- const pathMatch = url.pathname.match(FILE_PATH_REGEX);
- if ((_b = pathMatch === null || pathMatch === void 0 ?
void 0 : _b.branchName) {
- const fileContents = await this.github.getFileContentsOnBranch(RELEASE_NOTES_FILENAME, pathMatch.groups.branchName);
- return pull_request_body_1.PullRequestBody.parse(fileContents.parsedContent);
- }
- this.logger.warn(`Could not parse branch from ${match.groups.url}`);
- return pull_request_body_1.PullRequestBody.parse(pullRequest.body, this.logger);
- }
- return pull_request_body_1.PullRequestBody.parse(pullRequest.body, this.logger);
- }
-}
-exports.FilePullRequestOverflowHandler = FilePullRequestOverflowHandler;
-//# sourceMappingURL=pull-request-overflow-handler.js.map
-
-/***/ }),
-
-/***/ 1158:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
-
-"use strict";
-
-// Copyright 2021 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.PullRequestTitle = exports.generateMatchPattern = void 0;
-const logger_1 = __nccwpck_require__(68809);
-const version_1 = __nccwpck_require__(17348);
-// cannot import from '..' - transpiled code references to RELEASE_PLEASE
-// at the script level are undefined, they are only defined inside function
-// or instance methods/properties.
-const DEFAULT_PR_TITLE_PATTERN = 'chore${scope}: release${component} ${version}';
-const COMPONENT_NO_SPACE = false;
-function generateMatchPattern(pullRequestTitlePattern, componentNoSpace, logger = logger_1.logger) {
- if (pullRequestTitlePattern &&
- pullRequestTitlePattern.search(/\$\{scope\}/) === -1)
- logger.warn("pullRequestTitlePattern miss the part of '${scope}'");
- if (pullRequestTitlePattern &&
- pullRequestTitlePattern.search(/\$\{component\}/) === -1)
- logger.warn("pullRequestTitlePattern miss the part of '${component}'");
- if (pullRequestTitlePattern &&
- pullRequestTitlePattern.search(/\$\{version\}/) === -1)
- logger.warn("pullRequestTitlePattern miss the part of '${version}'");
- return new RegExp(`^${(pullRequestTitlePattern || DEFAULT_PR_TITLE_PATTERN)
- .replace('[', '\\[') // TODO: handle all regex escaping
- .replace(']', '\\]')
- .replace('(', '\\(')
- .replace(')', '\\)')
- .replace('${scope}', '(\\((?<branch>[\\w-./]+)\\))?')
- .replace('${component}', componentNoSpace === true
- ? '?(?<component>@?[\\w-./]*)?'
- : ' ?(?<component>@?[\\w-./]*)?')
- .replace('${version}', 'v?(?<version>[0-9].*)')
- .replace('${branch}', '(?<branch>[\\w-./]+)?')}$`);
-}
-exports.generateMatchPattern = generateMatchPattern;
-class PullRequestTitle {
- constructor(opts) {
- this.version = opts.version;
- this.component = opts.component;
- this.targetBranch = opts.targetBranch;
- this.pullRequestTitlePattern =
- opts.pullRequestTitlePattern || DEFAULT_PR_TITLE_PATTERN;
- this.componentNoSpace = opts.componentNoSpace || COMPONENT_NO_SPACE;
- this.matchPattern = generateMatchPattern(this.pullRequestTitlePattern, this.componentNoSpace, opts.logger);
- }
- static parse(title, pullRequestTitlePattern, componentNoSpace, logger = logger_1.logger) {
- const matchPattern = generateMatchPattern(pullRequestTitlePattern, componentNoSpace, logger);
- const match = title.match(matchPattern);
- if (match === null || match === void 0 ? void 0 : match.groups) {
- return new PullRequestTitle({
- version: match.groups['version']
- ? version_1.Version.parse(match.groups['version'])
- : undefined,
- component: match.groups['component'],
- targetBranch: match.groups['branch'],
- pullRequestTitlePattern,
- componentNoSpace,
- logger,
- });
- }
- return undefined;
- }
- static ofComponentVersion(component, version, pullRequestTitlePattern, componentNoSpace) {
- return new PullRequestTitle({
- version,
- component,
- pullRequestTitlePattern,
- componentNoSpace,
- });
- }
- static ofVersion(version, pullRequestTitlePattern, componentNoSpace) {
- return new PullRequestTitle({
- version,
- pullRequestTitlePattern,
- componentNoSpace,
- });
- }
- static ofTargetBranchVersion(targetBranch, version, pullRequestTitlePattern, componentNoSpace) {
- return new PullRequestTitle({
- version,
- targetBranch,
- pullRequestTitlePattern,
- componentNoSpace,
- });
- }
- static ofComponentTargetBranchVersion(component, targetBranch, version, pullRequestTitlePattern, componentNoSpace) {
- return new PullRequestTitle({
- version,
- component,
- targetBranch,
- pullRequestTitlePattern,
- componentNoSpace,
- });
- }
- static ofTargetBranch(targetBranch, pullRequestTitlePattern, componentNoSpace) {
- return new PullRequestTitle({
- targetBranch,
- pullRequestTitlePattern,
- componentNoSpace,
- });
- }
- getTargetBranch() {
- return this.targetBranch;
- }
- getComponent() {
- return this.component;
- }
- getVersion() {
- return this.version;
- }
- toString() {
- var _a;
- const scope = this.targetBranch ? `(${this.targetBranch})` : '';
- const component = this.componentNoSpace === true
- ? this.component
- ? `${this.component}`
- : ''
- : this.component
- ? ` ${this.component}`
- : '';
- const version = (_a = this.version) !== null && _a !== void 0 ? _a : '';
- if (this.componentNoSpace === true && !component) {
- console.log('`component` is empty. Removing component from title pattern..');
- this.pullRequestTitlePattern = this.pullRequestTitlePattern.replace('${component} ', '');
- }
- return this.pullRequestTitlePattern
- .replace('${scope}', scope)
- .replace('${component}', component)
- .replace('${version}', version.toString())
- .replace('${branch}', this.targetBranch || '')
- .trim();
- }
-}
-exports.PullRequestTitle = PullRequestTitle;
-//# sourceMappingURL=pull-request-title.js.map
-
-/***/ }),
-
-/***/ 2686:
-/***/ ((__unused_webpack_module, exports) => {
-
-"use strict";
-
-// Copyright 2021 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.signoffCommitMessage = void 0;
-// cannot import from '..' - transpiled code references to RELEASE_PLEASE
-// at the script level are undefined, they are only defined inside function
-// or instance methods/properties.
-function isValidSignoffUser(signoffUser) {
- // Parse the name and email address from a string in the following format
- // Display Name <email@address.com>
- const pattern = /^([^<]+)\s*<([^>]+)>$/i;
- // Check we have a match
- const isMatch = new RegExp(pattern).test(signoffUser);
- return isMatch;
-}
-function signoffCommitMessage(commitMessage, signoffUser) {
- if (!isValidSignoffUser(signoffUser)) {
- throw new Error(`The format of '${signoffUser}' is not a valid email address with display name`);
- }
- return commitMessage + `\n\nSigned-off-by: ${signoffUser}`;
-}
-exports.signoffCommitMessage = signoffCommitMessage;
-//# sourceMappingURL=signoff-commit-message.js.map
-
-/***/ }),
-
-/***/ 36503:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
-
-"use strict";
-
-// Copyright 2021 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.TagName = void 0;
-const version_1 = __nccwpck_require__(17348);
-const TAG_PATTERN = /^((?<component>.*)(?<separator>[^a-zA-Z0-9]))?(?<v>v)?(?<version>\d+\.\d+\.\d+.*)$/;
-const DEFAULT_SEPARATOR = '-';
-class TagName {
- constructor(version, component, separator = DEFAULT_SEPARATOR, includeV = true) {
- this.version = version;
- this.component = component;
- this.separator = separator;
- this.includeV = includeV;
- }
- static parse(tagName) {
- const match = tagName.match(TAG_PATTERN);
- if (match === null || match === void 0 ? void 0 : match.groups) {
- return new TagName(version_1.Version.parse(match.groups.version), match.groups.component, match.groups.separator, !!match.groups.v);
- }
- return;
- }
- toString() {
- if (this.component) {
- return `${this.component}${this.separator}${this.includeV ? 'v' : ''}${this.version.toString()}`;
- }
- return `${this.includeV ? 'v' : ''}${this.version.toString()}`;
- }
-}
-exports.TagName = TagName;
-//# sourceMappingURL=tag-name.js.map
-
-/***/ }),
-
-/***/ 30567:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
-
-"use strict";
-
-// Copyright 2021 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.replaceTomlValue = exports.parseWith = void 0; -const TOMLParser = __nccwpck_require__(68784); -const taggedValueMarker = Symbol('__TAGGED_VALUE'); -/** - * A custom variant of `TOMLParser` that replaces all values with a tagged - * variant that includes their start and end positions, allowing them to be - * replaced. - */ -class TaggedTOMLParser extends TOMLParser { - parseValue() { - // Remember the start position of the value. - // - // Off-by-one correctness: by this point, `this.pos` points one character - // *after* the first character of the value, which is in `this.char` - this.state.__TAGGED_START = this.pos - 1; - return super.parseValue(); - } - next(fn) { - const prevState = this.state; - super.next(fn); // `next` returns void - // Carry over the start position. If it wasn't set, (say, if we were parsing - // something other than a value), we're just assigning `undefined` here. - this.state.__TAGGED_START = prevState.__TAGGED_START; - } - return(value) { - const prevState = this.state; - super.return(value); // `return` returns void - if (prevState.__TAGGED_START && typeof this.state.returned !== 'object') { - // If the parser we just returned from remembered a start position, - // tag the returned value with "start" and "end". - // Note that we don't tag objects to avoid encountering multiple tagged - // values when replacing later on. - const taggedValue = { - [taggedValueMarker]: true, - start: prevState.__TAGGED_START, - end: this.pos, - value: this.state.returned, - }; - this.state.returned = taggedValue; - } - } -} -/** - * Parses input as TOML with the given parser - * @param input A string - * @param parserType The TOML parser to use (might be custom) - */ -function parseWith(input, parserType = TaggedTOMLParser) { - const parser = new parserType(); - parser.parse(input); - return parser.finish(); -} -exports.parseWith = parseWith; -function isTaggedValue(x) { - if (!x) { - return false; - } - if (typeof x !== 'object') { - return false; - } - const ts = x; - return ts[taggedValueMarker] === true; -} -/** - * Given TOML input and a path to a value, attempt to replace - * that value without modifying the formatting. - * @param input A string that's valid TOML - * @param path Path to a value to replace. When replacing 'deps.tokio.version', pass ['deps', 'tokio', 'version']. The value must already exist. - * @param newValue The value to replace the value at `path` with. Is passed through `JSON.stringify()` when replacing: strings will end up being double-quoted strings, properly escaped. Numbers will be numbers. - */ -function replaceTomlValue(input, path, newValue) { - // our pointer into the object "tree", initially points to the root. - let current = parseWith(input, TaggedTOMLParser); - // navigate down the object tree, following the path, expecting only objects. - // Note that tagged strings (generated by `TaggedTOMLParser`) are also objects. 
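- // Worked sketch: for input 'version = "1.0.0"' and path ['version'], the
- // loop below stops on the tagged scalar for "1.0.0"; its start/end offsets
- // are then used to splice in the new value without disturbing formatting.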
- for (let i = 0; i < path.length; i++) {
- const key = path[i];
- // // We may encounter tagged values when descending through the object tree
- // if (isTaggedValue(current)) {
- // if (!current.value || typeof current.value !== 'object') {
- // const msg = `partial path does not lead to table: ${path
- // .slice(0, i)
- // .join('.')}`;
- // throw new Error(msg);
- // }
- // current = current.value as Record<string, unknown>;
- // }
- const next = current[key];
- if (typeof next !== 'object') {
- const msg = `path not found in object: ${path.slice(0, i + 1).join('.')}`;
- throw new Error(msg);
- }
- current = next;
- }
- if (!isTaggedValue(current)) {
- const msg = `value at path ${path.join('.')} is not tagged`;
- throw new Error(msg);
- }
- const before = input.slice(0, current.start);
- const after = input.slice(current.end);
- const output = before + JSON.stringify(newValue) + after;
- try {
- parseWith(output, TOMLParser);
- }
- catch (e) {
- throw new Error(`After replacing value, result is not valid TOML: ${e}`);
- }
- return output;
-}
-exports.replaceTomlValue = replaceTomlValue;
-//# sourceMappingURL=toml-edit.js.map
-
-/***/ }),
-
-/***/ 17348:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
-
-"use strict";
-
-// Copyright 2021 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.Version = void 0;
-const semver = __nccwpck_require__(11383);
-const VERSION_REGEX = /(?<major>\d+)\.(?<minor>\d+)\.(?<patch>\d+)(-(?<preRelease>[^+]+))?(\+(?<build>.*))?/;
-/**
- * This data class is used to represent a SemVer version.
- */
-class Version {
- constructor(major, minor, patch, preRelease, build) {
- this.major = major;
- this.minor = minor;
- this.patch = patch;
- this.preRelease = preRelease;
- this.build = build;
- }
- /**
- * Parse a version string into a data class.
- *
- * @param {string} versionString the input version string
- * @returns {Version} the parsed version
- * @throws {Error} if the version string cannot be parsed
- */
- static parse(versionString) {
- const match = versionString.match(VERSION_REGEX);
- if (!(match === null || match === void 0 ? void 0 : match.groups)) {
- throw Error(`unable to parse version string: ${versionString}`);
- }
- const major = Number(match.groups.major);
- const minor = Number(match.groups.minor);
- const patch = Number(match.groups.patch);
- const preRelease = match.groups.preRelease;
- const build = match.groups.build;
- return new Version(major, minor, patch, preRelease, build);
- }
- /**
- * Comparator to other Versions to be used in sorting.
- *
- * @param {Version} other The other version to compare to
- * @returns {number} -1 if this version is earlier, 0 if the versions
- * are the same, or 1 otherwise.
- */
- compare(other) {
- return semver.compare(this.toString(), other.toString());
- }
- /**
- * Returns a normalized string version of this version.
- *
- * @returns {string}
- */
- toString() {
- const preReleasePart = this.preRelease ?
`-${this.preRelease}` : ''; - const buildPart = this.build ? `+${this.build}` : ''; - return `${this.major}.${this.minor}.${this.patch}${preReleasePart}${buildPart}`; - } - get isPreMajor() { - return this.major < 1; - } -} -exports.Version = Version; -//# sourceMappingURL=version.js.map - -/***/ }), - -/***/ 51346: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.AlwaysBumpMajor = void 0; -const default_1 = __nccwpck_require__(94073); -const versioning_strategy_1 = __nccwpck_require__(41941); -/** - * This VersioningStrategy always bumps the major version. - */ -class AlwaysBumpMajor extends default_1.DefaultVersioningStrategy { - determineReleaseType(_version, _commits) { - return new versioning_strategy_1.MajorVersionUpdate(); - } -} -exports.AlwaysBumpMajor = AlwaysBumpMajor; -//# sourceMappingURL=always-bump-major.js.map - -/***/ }), - -/***/ 9657: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.AlwaysBumpMinor = void 0; -const default_1 = __nccwpck_require__(94073); -const versioning_strategy_1 = __nccwpck_require__(41941); -/** - * This VersioningStrategy always bumps the minor version. - */ -class AlwaysBumpMinor extends default_1.DefaultVersioningStrategy { - determineReleaseType(_version, _commits) { - return new versioning_strategy_1.MinorVersionUpdate(); - } -} -exports.AlwaysBumpMinor = AlwaysBumpMinor; -//# sourceMappingURL=always-bump-minor.js.map - -/***/ }), - -/***/ 82926: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
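-// A minimal sketch of the Version class defined earlier in this bundle
-// (values hypothetical):
-//
-//   const v = Version.parse('1.2.3-beta.1+build.5');
-//   v.major;      // 1
-//   v.preRelease; // 'beta.1'
-//   v.build;      // 'build.5'
-//   v.toString(); // '1.2.3-beta.1+build.5'
-//   v.compare(Version.parse('1.2.3')); // -1: a pre-release sorts before its release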
-Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.AlwaysBumpPatch = void 0; -const default_1 = __nccwpck_require__(94073); -const versioning_strategy_1 = __nccwpck_require__(41941); -/** - * This VersioningStrategy always bumps the patch version. This - * strategy is useful for backport branches. - */ -class AlwaysBumpPatch extends default_1.DefaultVersioningStrategy { - determineReleaseType(_version, _commits) { - return new versioning_strategy_1.PatchVersionUpdate(); - } -} -exports.AlwaysBumpPatch = AlwaysBumpPatch; -//# sourceMappingURL=always-bump-patch.js.map - -/***/ }), - -/***/ 94073: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.DefaultVersioningStrategy = void 0; -const versioning_strategy_1 = __nccwpck_require__(41941); -const version_1 = __nccwpck_require__(17348); -const logger_1 = __nccwpck_require__(68809); -/** - * This is the default VersioningStrategy for release-please. Breaking - * changes should bump the major, features should bump the minor, and other - * significant changes should bump the patch version. - */ -class DefaultVersioningStrategy { - /** - * Create a new DefaultVersioningStrategy - * @param {DefaultVersioningStrategyOptions} options Configuration options - * @param {boolean} options.bumpMinorPreMajor If the current version is less than 1.0.0, - * then bump the minor version for breaking changes - * @param {boolean} options.bumpPatchForMinorPreMajor If the current version is less than - * 1.0.0, then bump the patch version for features - */ - constructor(options = {}) { - var _a; - this.bumpMinorPreMajor = options.bumpMinorPreMajor === true; - this.bumpPatchForMinorPreMajor = options.bumpPatchForMinorPreMajor === true; - this.logger = (_a = options.logger) !== null && _a !== void 0 ? _a : logger_1.logger; - } - /** - * Given the current version of an artifact and a list of commits, - * return a VersionUpdater that knows how to bump the version. - * - * This is useful for chaining together versioning strategies. - * - * @param {Version} version The current version - * @param {ConventionalCommit[]} commits The list of commits to consider - * @returns {VersionUpdater} Updater for bumping the next version. 
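- * For example, with the default options: one feat commit against 1.2.3
- * yields a MinorVersionUpdate (1.3.0), a breaking change yields a
- * MajorVersionUpdate (2.0.0), and fixes alone yield a PatchVersionUpdate (1.2.4).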
- */ - determineReleaseType(version, commits) { - // iterate through list of commits and find biggest commit type - let breaking = 0; - let features = 0; - for (const commit of commits) { - const releaseAs = commit.notes.find(note => note.title === 'RELEASE AS'); - if (releaseAs) { - // commits are handled newest to oldest, so take the first one (newest) found - this.logger.debug(`found Release-As: ${releaseAs.text}, forcing version`); - return new versioning_strategy_1.CustomVersionUpdate(version_1.Version.parse(releaseAs.text).toString()); - } - if (commit.breaking) { - breaking++; - } - else if (commit.type === 'feat' || commit.type === 'feature') { - features++; - } - } - if (breaking > 0) { - if (version.isPreMajor && this.bumpMinorPreMajor) { - return new versioning_strategy_1.MinorVersionUpdate(); - } - else { - return new versioning_strategy_1.MajorVersionUpdate(); - } - } - else if (features > 0) { - if (version.isPreMajor && this.bumpPatchForMinorPreMajor) { - return new versioning_strategy_1.PatchVersionUpdate(); - } - else { - return new versioning_strategy_1.MinorVersionUpdate(); - } - } - return new versioning_strategy_1.PatchVersionUpdate(); - } - /** - * Given the current version of an artifact and a list of commits, - * return the next version. - * - * @param {Version} version The current version - * @param {ConventionalCommit[]} commits The list of commits to consider - * @returns {Version} The next version - */ - bump(version, commits) { - return this.determineReleaseType(version, commits).bump(version); - } -} -exports.DefaultVersioningStrategy = DefaultVersioningStrategy; -//# sourceMappingURL=default.js.map - -/***/ }), - -/***/ 25029: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.DependencyManifest = void 0; -const version_1 = __nccwpck_require__(17348); -const semver = __nccwpck_require__(11383); -const default_1 = __nccwpck_require__(94073); -const versioning_strategy_1 = __nccwpck_require__(41941); -const DEPENDENCY_UPDATE_REGEX = /^deps: update dependency (.*) to (v[^\s]*)(\s\(#\d+\))?$/m; -/** - * This VersioningStrategy looks at `deps` type commits and tries to - * mirror the semantic version bump for that dependency update. For - * example, an update to v2, would be treated as a major version bump. - * - * It also respects the default commit types and will pick the - * greatest version bump. 
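- * For example, "deps: update dependency foo to v2" is treated as a major
- * bump, "to v1.3.0" as a minor bump, and "to v1.2.4" as a patch bump.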
- */ -class DependencyManifest extends default_1.DefaultVersioningStrategy { - determineReleaseType(version, commits) { - const regularBump = super.determineReleaseType(version, commits); - const dependencyUpdates = buildDependencyUpdates(commits); - let breaking = 0; - let features = 0; - for (const dep in dependencyUpdates) { - const version = dependencyUpdates[dep]; - if (version.patch === 0) { - if (version.minor === 0) { - breaking++; - } - else { - features++; - } - } - } - let dependencyBump; - if (breaking > 0) { - if (version.isPreMajor && this.bumpMinorPreMajor) { - dependencyBump = new versioning_strategy_1.MinorVersionUpdate(); - } - else { - dependencyBump = new versioning_strategy_1.MajorVersionUpdate(); - } - } - else if (features > 0) { - if (version.isPreMajor && this.bumpPatchForMinorPreMajor) { - dependencyBump = new versioning_strategy_1.PatchVersionUpdate(); - } - else { - dependencyBump = new versioning_strategy_1.MinorVersionUpdate(); - } - } - else { - dependencyBump = new versioning_strategy_1.PatchVersionUpdate(); - } - if (semver.lte(dependencyBump.bump(version).toString(), regularBump.bump(version).toString())) { - return regularBump; - } - else { - return dependencyBump; - } - } -} -exports.DependencyManifest = DependencyManifest; -function buildDependencyUpdates(commits) { - const versionsMap = {}; - for (const commit of commits) { - const match = commit.message.match(DEPENDENCY_UPDATE_REGEX); - if (!match) - continue; - const versionString = match[2]; - let version; - try { - version = version_1.Version.parse(versionString); - } - catch (_a) { - version = version_1.Version.parse(`${versionString}.0.0`); - } - // commits are sorted by latest first, so if there is a collision, - // then we've already recorded the latest version - if (versionsMap[match[1]]) - continue; - versionsMap[match[1]] = version; - } - return versionsMap; -} -//# sourceMappingURL=dependency-manifest.js.map - -/***/ }), - -/***/ 87719: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.JavaAddSnapshot = void 0; -const version_1 = __nccwpck_require__(17348); -const fakeCommit = { - message: 'fix: fake fix', - type: 'fix', - scope: null, - notes: [], - references: [], - bareMessage: 'fake fix', - breaking: false, - sha: 'abc123', - files: [], -}; -class AddSnapshotVersionUpdate { - constructor(strategy) { - this.strategy = strategy; - } - bump(version) { - const nextPatch = this.strategy.bump(version, [fakeCommit]); - return new version_1.Version(nextPatch.major, nextPatch.minor, nextPatch.patch, nextPatch.preRelease ? `${nextPatch.preRelease}-SNAPSHOT` : 'SNAPSHOT', nextPatch.build); - } -} -/** - * This VersioningStrategy is used by Java releases to bump - * to the next snapshot version. 
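- * For example, wrapping the default strategy, 1.2.3 becomes 1.2.4-SNAPSHOT:
- * the wrapped strategy's patch bump plus a SNAPSHOT pre-release tag.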
- */ -class JavaAddSnapshot { - constructor(strategy) { - this.strategy = strategy; - } - determineReleaseType(_version, _commits) { - return new AddSnapshotVersionUpdate(this.strategy); - } - bump(version, commits) { - return this.determineReleaseType(version, commits).bump(version); - } -} -exports.JavaAddSnapshot = JavaAddSnapshot; -//# sourceMappingURL=java-add-snapshot.js.map - -/***/ }), - -/***/ 66860: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.JavaSnapshot = void 0; -const version_1 = __nccwpck_require__(17348); -const fakeCommit = { - message: 'fix: fake fix', - type: 'fix', - scope: null, - notes: [], - references: [], - bareMessage: 'fake fix', - breaking: false, - sha: 'abc123', - files: [], -}; -class RemoveSnapshotVersionUpdate { - constructor(parent) { - this.parent = parent; - } - bump(version) { - if (this.parent) { - version = this.parent.bump(version); - } - return new version_1.Version(version.major, version.minor, version.patch, version.preRelease - ? version.preRelease.replace(/-?SNAPSHOT/, '') - : undefined, version.build); - } -} -/** - * This VersioningStrategy is used by Java releases to bump - * to the next non-snapshot version. - */ -class JavaSnapshot { - constructor(strategy) { - this.strategy = strategy; - } - determineReleaseType(version, commits) { - var _a; - const parentBump = this.strategy.determineReleaseType(version, commits); - if ((_a = version.preRelease) === null || _a === void 0 ? void 0 : _a.match(/-?SNAPSHOT/)) { - const patchBumpVersion = this.strategy - .determineReleaseType(version, [fakeCommit]) - .bump(version); - const parentBumpVersion = parentBump.bump(version); - if (patchBumpVersion.toString() === parentBumpVersion.toString()) { - return new RemoveSnapshotVersionUpdate(); - } - return new RemoveSnapshotVersionUpdate(parentBump); - } - return parentBump; - } - bump(version, commits) { - return this.determineReleaseType(version, commits).bump(version); - } -} -exports.JavaSnapshot = JavaSnapshot; -//# sourceMappingURL=java-snapshot.js.map - -/***/ }), - -/***/ 86185: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2023 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
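-// A minimal sketch of the JavaSnapshot wrapper above, assuming it wraps the
-// DefaultVersioningStrategy:
-//
-//   const strategy = new JavaSnapshot(new DefaultVersioningStrategy());
-//   // 1.2.4-SNAPSHOT with only fix commits -> 1.2.4 (snapshot dropped)
-//   // 1.2.4-SNAPSHOT with a feat commit    -> 1.3.0 (minor bump, snapshot dropped)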
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.PrereleaseVersioningStrategy = void 0;
-const default_1 = __nccwpck_require__(94073);
-const version_1 = __nccwpck_require__(17348);
-const versioning_strategy_1 = __nccwpck_require__(41941);
-/**
- * Regex to match the last set of numbers in a string
- * Example: 1.2.3-beta01-01 -> 01
- */
-const PRERELEASE_NUMBER = /(?<number>\d+)(?=\D*$)/;
-class AbstractPrereleaseVersionUpdate {
- constructor(prereleaseType) {
- this.prereleaseType = prereleaseType;
- }
- /**
- * Returns the new bumped prerelease version
- *
- * That is, if the current version is 1.2.3-beta01, the next prerelease version
- * will be 1.2.3-beta02. If no number is found, the prerelease version will be
- * 1.2.3-beta. If multiple numbers are found, the last set of numbers will be
- * incremented, e.g. 1.2.3-beta01-01 -> 1.2.3-beta01-02.
- *
- * @param {prerelease} string The current version
- * @returns {Version} The bumped version
- */
- bumpPrerelease(prerelease) {
- const match = prerelease.match(PRERELEASE_NUMBER);
- let nextPrerelease = `${prerelease}.1`;
- if (match === null || match === void 0 ? void 0 : match.groups) {
- const numberLength = match.groups.number.length;
- const nextPrereleaseNumber = Number(match.groups.number) + 1;
- const paddedNextPrereleaseNumber = `${nextPrereleaseNumber}`.padStart(numberLength, '0');
- nextPrerelease = prerelease.replace(PRERELEASE_NUMBER, paddedNextPrereleaseNumber);
- }
- return nextPrerelease;
- }
-}
-class PrereleasePatchVersionUpdate extends AbstractPrereleaseVersionUpdate {
- /**
- * Returns the new bumped version
- *
- * @param {Version} version The current version
- * @returns {Version} The bumped version
- */
- bump(version) {
- if (version.preRelease) {
- const nextPrerelease = this.bumpPrerelease(version.preRelease);
- return new version_1.Version(version.major, version.minor, version.patch, nextPrerelease, version.build);
- }
- return new version_1.Version(version.major, version.minor, version.patch + 1, this.prereleaseType, version.build);
- }
-}
-class PrereleaseMinorVersionUpdate extends AbstractPrereleaseVersionUpdate {
- /**
- * Returns the new bumped version
- *
- * @param {Version} version The current version
- * @returns {Version} The bumped version
- */
- bump(version) {
- if (version.preRelease) {
- if (version.patch === 0) {
- const nextPrerelease = this.bumpPrerelease(version.preRelease);
- return new version_1.Version(version.major, version.minor, version.patch, nextPrerelease, version.build);
- }
- return new versioning_strategy_1.MinorVersionUpdate().bump(version);
- }
- return new version_1.Version(version.major, version.minor + 1, 0, this.prereleaseType, version.build);
- }
-}
-class PrereleaseMajorVersionUpdate extends AbstractPrereleaseVersionUpdate {
- /**
- * Returns the new bumped version
- *
- * @param {Version} version The current version
- * @returns {Version} The bumped version
- */
- bump(version) {
- if (version.preRelease) {
- if (version.patch === 0 && version.minor === 0) {
- const nextPrerelease = this.bumpPrerelease(version.preRelease);
- return new version_1.Version(version.major, version.minor, version.patch, nextPrerelease, version.build);
- }
- return new versioning_strategy_1.MajorVersionUpdate().bump(version);
- }
- return new version_1.Version(version.major + 1, 0, 0, this.prereleaseType, version.build);
- }
-}
-/**
- * This versioning strategy will increment the pre-release number for patch
- * bumps if there is a pre-release number (preserving any leading
0s). - * Example: 1.2.3-beta01 -> 1.2.3-beta02. - */ -class PrereleaseVersioningStrategy extends default_1.DefaultVersioningStrategy { - constructor(options = {}) { - super(options); - this.prereleaseType = options.prereleaseType; - } - determineReleaseType(version, commits) { - // iterate through list of commits and find biggest commit type - let breaking = 0; - let features = 0; - for (const commit of commits) { - const releaseAs = commit.notes.find(note => note.title === 'RELEASE AS'); - if (releaseAs) { - // commits are handled newest to oldest, so take the first one (newest) found - this.logger.debug(`found Release-As: ${releaseAs.text}, forcing version`); - return new versioning_strategy_1.CustomVersionUpdate(version_1.Version.parse(releaseAs.text).toString()); - } - if (commit.breaking) { - breaking++; - } - else if (commit.type === 'feat' || commit.type === 'feature') { - features++; - } - } - if (breaking > 0) { - if (version.isPreMajor && this.bumpMinorPreMajor) { - return new PrereleaseMinorVersionUpdate(this.prereleaseType); - } - else { - return new PrereleaseMajorVersionUpdate(this.prereleaseType); - } - } - else if (features > 0) { - if (version.isPreMajor && this.bumpPatchForMinorPreMajor) { - return new PrereleasePatchVersionUpdate(this.prereleaseType); - } - else { - return new PrereleaseMinorVersionUpdate(this.prereleaseType); - } - } - return new PrereleasePatchVersionUpdate(this.prereleaseType); - } -} -exports.PrereleaseVersioningStrategy = PrereleaseVersioningStrategy; -//# sourceMappingURL=prerelease.js.map - -/***/ }), - -/***/ 56772: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.ServicePackVersioningStrategy = void 0; -const version_1 = __nccwpck_require__(17348); -const default_1 = __nccwpck_require__(94073); -const SERVICE_PACK_PATTERN = /sp\.(\d+)/; -/** - * This version updater knows how to bump from a non-service pack - * version to a service pack version and increment the service - * pack number in subsequent releases. - */ -class ServicePackVersionUpdate { - bump(version) { - var _a; - const match = (_a = version.preRelease) === null || _a === void 0 ? void 0 : _a.match(SERVICE_PACK_PATTERN); - if (match) { - const spNumber = Number(match[1]); - return new version_1.Version(version.major, version.minor, version.patch, `sp.${spNumber + 1}`, version.build); - } - return new version_1.Version(version.major, version.minor, version.patch, 'sp.1', version.build); - } -} -/** - * This VersioningStrategy is used for "service pack" versioning. In this - * strategy, we use the pre-release field with a pattern of `sp-\d+` where - * the number is an auto-incrementing integer starting with 1. 
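- * For example, 1.2.3 becomes 1.2.3-sp.1, and 1.2.3-sp.1 becomes 1.2.3-sp.2.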
- */ -class ServicePackVersioningStrategy extends default_1.DefaultVersioningStrategy { - determineReleaseType(_version, _commits) { - return new ServicePackVersionUpdate(); - } -} -exports.ServicePackVersioningStrategy = ServicePackVersioningStrategy; -//# sourceMappingURL=service-pack.js.map - -/***/ }), - -/***/ 41941: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.CustomVersionUpdate = exports.PatchVersionUpdate = exports.MinorVersionUpdate = exports.MajorVersionUpdate = void 0; -const version_1 = __nccwpck_require__(17348); -/** - * This VersionUpdater performs a SemVer major version bump. - */ -class MajorVersionUpdate { - /** - * Returns the new bumped version - * - * @param {Version} version The current version - * @returns {Version} The bumped version - */ - bump(version) { - return new version_1.Version(version.major + 1, 0, 0, version.preRelease, version.build); - } -} -exports.MajorVersionUpdate = MajorVersionUpdate; -/** - * This VersionUpdater performs a SemVer minor version bump. - */ -class MinorVersionUpdate { - /** - * Returns the new bumped version - * - * @param {Version} version The current version - * @returns {Version} The bumped version - */ - bump(version) { - return new version_1.Version(version.major, version.minor + 1, 0, version.preRelease, version.build); - } -} -exports.MinorVersionUpdate = MinorVersionUpdate; -/** - * This VersionUpdater performs a SemVer patch version bump. - */ -class PatchVersionUpdate { - /** - * Returns the new bumped version - * - * @param {Version} version The current version - * @returns {Version} The bumped version - */ - bump(version) { - return new version_1.Version(version.major, version.minor, version.patch + 1, version.preRelease, version.build); - } -} -exports.PatchVersionUpdate = PatchVersionUpdate; -/** - * This VersionUpdater sets the version to a specific version. - */ -class CustomVersionUpdate { - constructor(versionString) { - this.versionString = versionString; - } - /** - * Returns the new bumped version. This version is specified - * at initialization. - * - * @param {Version} version The current version - * @returns {Version} The bumped version - */ - bump(_version) { - return version_1.Version.parse(this.versionString); - } -} -exports.CustomVersionUpdate = CustomVersionUpdate; -//# sourceMappingURL=versioning-strategy.js.map /***/ }), @@ -64124,6 +72883,78 @@ RetryOperation.prototype.mainError = function() { }; +/***/ }), + +/***/ 21867: +/***/ ((module, exports, __nccwpck_require__) => { + +/*! safe-buffer. MIT License. 
Feross Aboukhadijeh */ +/* eslint-disable node/no-deprecated-api */ +var buffer = __nccwpck_require__(14300) +var Buffer = buffer.Buffer + +// alternative to using Object.keys for old browsers +function copyProps (src, dst) { + for (var key in src) { + dst[key] = src[key] + } +} +if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { + module.exports = buffer +} else { + // Copy properties from require('buffer') + copyProps(buffer, exports) + exports.Buffer = SafeBuffer +} + +function SafeBuffer (arg, encodingOrOffset, length) { + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.prototype = Object.create(Buffer.prototype) + +// Copy static methods from Buffer +copyProps(Buffer, SafeBuffer) + +SafeBuffer.from = function (arg, encodingOrOffset, length) { + if (typeof arg === 'number') { + throw new TypeError('Argument must not be a number') + } + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.alloc = function (size, fill, encoding) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + var buf = Buffer(size) + if (fill !== undefined) { + if (typeof encoding === 'string') { + buf.fill(fill, encoding) + } else { + buf.fill(fill) + } + } else { + buf.fill(0) + } + return buf +} + +SafeBuffer.allocUnsafe = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return Buffer(size) +} + +SafeBuffer.allocUnsafeSlow = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return buffer.SlowBuffer(size) +} + + /***/ }), /***/ 15118: @@ -66683,6 +75514,200 @@ const validRange = (range, options) => { module.exports = validRange +/***/ }), + +/***/ 13251: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var Buffer = (__nccwpck_require__(21867).Buffer) + +// prototype class for hash functions +function Hash (blockSize, finalSize) { + this._block = Buffer.alloc(blockSize) + this._finalSize = finalSize + this._blockSize = blockSize + this._len = 0 +} + +Hash.prototype.update = function (data, enc) { + if (typeof data === 'string') { + enc = enc || 'utf8' + data = Buffer.from(data, enc) + } + + var block = this._block + var blockSize = this._blockSize + var length = data.length + var accum = this._len + + for (var offset = 0; offset < length;) { + var assigned = accum % blockSize + var remainder = Math.min(length - offset, blockSize - assigned) + + for (var i = 0; i < remainder; i++) { + block[assigned + i] = data[offset + i] + } + + accum += remainder + offset += remainder + + if ((accum % blockSize) === 0) { + this._update(block) + } + } + + this._len += length + return this +} + +Hash.prototype.digest = function (enc) { + var rem = this._len % this._blockSize + + this._block[rem] = 0x80 + + // zero (rem + 1) trailing bits, where (rem + 1) is the smallest + // non-negative solution to the equation (length + 1 + (rem + 1)) === finalSize mod blockSize + this._block.fill(0, rem + 1) + + if (rem >= this._finalSize) { + this._update(this._block) + this._block.fill(0) + } + + var bits = this._len * 8 + + // uint32 + if (bits <= 0xffffffff) { + this._block.writeUInt32BE(bits, this._blockSize - 4) + + // uint64 + } else { + var lowBits = (bits & 0xffffffff) >>> 0 + var highBits = (bits - lowBits) / 0x100000000 + + this._block.writeUInt32BE(highBits, this._blockSize - 8) + this._block.writeUInt32BE(lowBits, this._blockSize - 4) + } + + this._update(this._block) + var hash = this._hash() + + 
return enc ? hash.toString(enc) : hash +} + +Hash.prototype._update = function () { + throw new Error('_update must be implemented by subclass') +} + +module.exports = Hash + + +/***/ }), + +/***/ 72398: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +/* + * A JavaScript implementation of the Secure Hash Algorithm, SHA-1, as defined + * in FIPS PUB 180-1 + * Version 2.1a Copyright Paul Johnston 2000 - 2002. + * Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet + * Distributed under the BSD License + * See http://pajhome.org.uk/crypt/md5 for details. + */ + +var inherits = __nccwpck_require__(44124) +var Hash = __nccwpck_require__(13251) +var Buffer = (__nccwpck_require__(21867).Buffer) + +var K = [ + 0x5a827999, 0x6ed9eba1, 0x8f1bbcdc | 0, 0xca62c1d6 | 0 +] + +var W = new Array(80) + +function Sha1 () { + this.init() + this._w = W + + Hash.call(this, 64, 56) +} + +inherits(Sha1, Hash) + +Sha1.prototype.init = function () { + this._a = 0x67452301 + this._b = 0xefcdab89 + this._c = 0x98badcfe + this._d = 0x10325476 + this._e = 0xc3d2e1f0 + + return this +} + +function rotl1 (num) { + return (num << 1) | (num >>> 31) +} + +function rotl5 (num) { + return (num << 5) | (num >>> 27) +} + +function rotl30 (num) { + return (num << 30) | (num >>> 2) +} + +function ft (s, b, c, d) { + if (s === 0) return (b & c) | ((~b) & d) + if (s === 2) return (b & c) | (b & d) | (c & d) + return b ^ c ^ d +} + +Sha1.prototype._update = function (M) { + var W = this._w + + var a = this._a | 0 + var b = this._b | 0 + var c = this._c | 0 + var d = this._d | 0 + var e = this._e | 0 + + for (var i = 0; i < 16; ++i) W[i] = M.readInt32BE(i * 4) + for (; i < 80; ++i) W[i] = rotl1(W[i - 3] ^ W[i - 8] ^ W[i - 14] ^ W[i - 16]) + + for (var j = 0; j < 80; ++j) { + var s = ~~(j / 20) + var t = (rotl5(a) + ft(s, b, c, d) + e + W[j] + K[s]) | 0 + + e = d + d = c + c = rotl30(b) + b = a + a = t + } + + this._a = (a + this._a) | 0 + this._b = (b + this._b) | 0 + this._c = (c + this._c) | 0 + this._d = (d + this._d) | 0 + this._e = (e + this._e) | 0 +} + +Sha1.prototype._hash = function () { + var H = Buffer.allocUnsafe(20) + + H.writeInt32BE(this._a | 0, 0) + H.writeInt32BE(this._b | 0, 4) + H.writeInt32BE(this._c | 0, 8) + H.writeInt32BE(this._d | 0, 12) + H.writeInt32BE(this._e | 0, 16) + + return H +} + +module.exports = Sha1 + + /***/ }), /***/ 26375: @@ -101135,6 +110160,14 @@ module.exports = require("fs"); /***/ }), +/***/ 60843: +/***/ ((module) => { + +"use strict"; +module.exports = require("fs/promises"); + +/***/ }), + /***/ 13685: /***/ ((module) => { @@ -101175,6 +110208,22 @@ module.exports = require("node:events"); /***/ }), +/***/ 70612: +/***/ ((module) => { + +"use strict"; +module.exports = require("node:os"); + +/***/ }), + +/***/ 49411: +/***/ ((module) => { + +"use strict"; +module.exports = require("node:path"); + +/***/ }), + /***/ 84492: /***/ ((module) => { @@ -109742,7 +118791,7 @@ exports.visitAsync = visitAsync; /***/ }), -/***/ 24697: +/***/ 2452: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; @@ -109750,14 +118799,1379 @@ exports.visitAsync = visitAsync; var vm = __nccwpck_require__(26144); -/* eslint-disable camelcase, unicorn/prefer-string-replace-all, - unicorn/prefer-at */ -const { - hasOwnProperty: hasOwnProp -} = Object.prototype; +/** + * @implements {IHooks} + */ +class Hooks { + /** + * @callback HookCallback + * @this {*|Jsep} this + * @param {Jsep} env + * @returns: void + */ + /** + * Adds the given callback to the 
list of callbacks for the given hook. + * + * The callback will be invoked when the hook it is registered for is run. + * + * One callback function can be registered to multiple hooks and the same hook multiple times. + * + * @param {string|object} name The name of the hook, or an object of callbacks keyed by name + * @param {HookCallback|boolean} callback The callback function which is given environment variables. + * @param {?boolean} [first=false] Will add the hook to the top of the list (defaults to the bottom) + * @public + */ + add(name, callback, first) { + if (typeof arguments[0] != 'string') { + // Multiple hook callbacks, keyed by name + for (let name in arguments[0]) { + this.add(name, arguments[0][name], arguments[1]); + } + } else { + (Array.isArray(name) ? name : [name]).forEach(function (name) { + this[name] = this[name] || []; + if (callback) { + this[name][first ? 'unshift' : 'push'](callback); + } + }, this); + } + } + + /** + * Runs a hook invoking all registered callbacks with the given environment variables. + * + * Callbacks will be invoked synchronously and in the order in which they were registered. + * + * @param {string} name The name of the hook. + * @param {Object} env The environment variables of the hook passed to all callbacks registered. + * @public + */ + run(name, env) { + this[name] = this[name] || []; + this[name].forEach(function (callback) { + callback.call(env && env.context ? env.context : env, env); + }); + } +} /** - * @typedef {null|boolean|number|string|PlainObject|GenericArray} JSONObject + * @implements {IPlugins} + */ +class Plugins { + constructor(jsep) { + this.jsep = jsep; + this.registered = {}; + } + + /** + * @callback PluginSetup + * @this {Jsep} jsep + * @returns: void + */ + /** + * Adds the given plugin(s) to the registry + * + * @param {object} plugins + * @param {string} plugins.name The name of the plugin + * @param {PluginSetup} plugins.init The init function + * @public + */ + register(...plugins) { + plugins.forEach(plugin => { + if (typeof plugin !== 'object' || !plugin.name || !plugin.init) { + throw new Error('Invalid JSEP plugin format'); + } + if (this.registered[plugin.name]) { + // already registered. Ignore. + return; + } + plugin.init(this.jsep); + this.registered[plugin.name] = plugin; + }); + } +} + +// JavaScript Expression Parser (JSEP) 1.4.0 + +class Jsep { + /** + * @returns {string} + */ + static get version() { + // To be filled in by the template + return '1.4.0'; + } + + /** + * @returns {string} + */ + static toString() { + return 'JavaScript Expression Parser (JSEP) v' + Jsep.version; + } + // ==================== CONFIG ================================ + /** + * @method addUnaryOp + * @param {string} op_name The name of the unary op to add + * @returns {Jsep} + */ + static addUnaryOp(op_name) { + Jsep.max_unop_len = Math.max(op_name.length, Jsep.max_unop_len); + Jsep.unary_ops[op_name] = 1; + return Jsep; + } + + /** + * @method jsep.addBinaryOp + * @param {string} op_name The name of the binary op to add + * @param {number} precedence The precedence of the binary op (can be a float). 
Higher number = higher precedence + * @param {boolean} [isRightAssociative=false] whether operator is right-associative + * @returns {Jsep} + */ + static addBinaryOp(op_name, precedence, isRightAssociative) { + Jsep.max_binop_len = Math.max(op_name.length, Jsep.max_binop_len); + Jsep.binary_ops[op_name] = precedence; + if (isRightAssociative) { + Jsep.right_associative.add(op_name); + } else { + Jsep.right_associative.delete(op_name); + } + return Jsep; + } + + /** + * @method addIdentifierChar + * @param {string} char The additional character to treat as a valid part of an identifier + * @returns {Jsep} + */ + static addIdentifierChar(char) { + Jsep.additional_identifier_chars.add(char); + return Jsep; + } + + /** + * @method addLiteral + * @param {string} literal_name The name of the literal to add + * @param {*} literal_value The value of the literal + * @returns {Jsep} + */ + static addLiteral(literal_name, literal_value) { + Jsep.literals[literal_name] = literal_value; + return Jsep; + } + + /** + * @method removeUnaryOp + * @param {string} op_name The name of the unary op to remove + * @returns {Jsep} + */ + static removeUnaryOp(op_name) { + delete Jsep.unary_ops[op_name]; + if (op_name.length === Jsep.max_unop_len) { + Jsep.max_unop_len = Jsep.getMaxKeyLen(Jsep.unary_ops); + } + return Jsep; + } + + /** + * @method removeAllUnaryOps + * @returns {Jsep} + */ + static removeAllUnaryOps() { + Jsep.unary_ops = {}; + Jsep.max_unop_len = 0; + return Jsep; + } + + /** + * @method removeIdentifierChar + * @param {string} char The additional character to stop treating as a valid part of an identifier + * @returns {Jsep} + */ + static removeIdentifierChar(char) { + Jsep.additional_identifier_chars.delete(char); + return Jsep; + } + + /** + * @method removeBinaryOp + * @param {string} op_name The name of the binary op to remove + * @returns {Jsep} + */ + static removeBinaryOp(op_name) { + delete Jsep.binary_ops[op_name]; + if (op_name.length === Jsep.max_binop_len) { + Jsep.max_binop_len = Jsep.getMaxKeyLen(Jsep.binary_ops); + } + Jsep.right_associative.delete(op_name); + return Jsep; + } + + /** + * @method removeAllBinaryOps + * @returns {Jsep} + */ + static removeAllBinaryOps() { + Jsep.binary_ops = {}; + Jsep.max_binop_len = 0; + return Jsep; + } + + /** + * @method removeLiteral + * @param {string} literal_name The name of the literal to remove + * @returns {Jsep} + */ + static removeLiteral(literal_name) { + delete Jsep.literals[literal_name]; + return Jsep; + } + + /** + * @method removeAllLiterals + * @returns {Jsep} + */ + static removeAllLiterals() { + Jsep.literals = {}; + return Jsep; + } + // ==================== END CONFIG ============================ + + /** + * @returns {string} + */ + get char() { + return this.expr.charAt(this.index); + } + + /** + * @returns {number} + */ + get code() { + return this.expr.charCodeAt(this.index); + } + /** + * @param {string} expr a string with the passed in express + * @returns Jsep + */ + constructor(expr) { + // `index` stores the character number we are currently at + // All of the gobbles below will modify `index` as we move along + this.expr = expr; + this.index = 0; + } + + /** + * static top-level parser + * @returns {jsep.Expression} + */ + static parse(expr) { + return new Jsep(expr).parse(); + } + + /** + * Get the longest key length of any object + * @param {object} obj + * @returns {number} + */ + static getMaxKeyLen(obj) { + return Math.max(0, ...Object.keys(obj).map(k => k.length)); + } + + /** + * `ch` is a character code in 
the next three functions + * @param {number} ch + * @returns {boolean} + */ + static isDecimalDigit(ch) { + return ch >= 48 && ch <= 57; // 0...9 + } + + /** + * Returns the precedence of a binary operator or `0` if it isn't a binary operator. Can be float. + * @param {string} op_val + * @returns {number} + */ + static binaryPrecedence(op_val) { + return Jsep.binary_ops[op_val] || 0; + } + + /** + * Looks for start of identifier + * @param {number} ch + * @returns {boolean} + */ + static isIdentifierStart(ch) { + return ch >= 65 && ch <= 90 || + // A...Z + ch >= 97 && ch <= 122 || + // a...z + ch >= 128 && !Jsep.binary_ops[String.fromCharCode(ch)] || + // any non-ASCII that is not an operator + Jsep.additional_identifier_chars.has(String.fromCharCode(ch)); // additional characters + } + + /** + * @param {number} ch + * @returns {boolean} + */ + static isIdentifierPart(ch) { + return Jsep.isIdentifierStart(ch) || Jsep.isDecimalDigit(ch); + } + + /** + * throw error at index of the expression + * @param {string} message + * @throws + */ + throwError(message) { + const error = new Error(message + ' at character ' + this.index); + error.index = this.index; + error.description = message; + throw error; + } + + /** + * Run a given hook + * @param {string} name + * @param {jsep.Expression|false} [node] + * @returns {?jsep.Expression} + */ + runHook(name, node) { + if (Jsep.hooks[name]) { + const env = { + context: this, + node + }; + Jsep.hooks.run(name, env); + return env.node; + } + return node; + } + + /** + * Runs a given hook until one returns a node + * @param {string} name + * @returns {?jsep.Expression} + */ + searchHook(name) { + if (Jsep.hooks[name]) { + const env = { + context: this + }; + Jsep.hooks[name].find(function (callback) { + callback.call(env.context, env); + return env.node; + }); + return env.node; + } + } + + /** + * Push `index` up to the next non-space character + */ + gobbleSpaces() { + let ch = this.code; + // Whitespace + while (ch === Jsep.SPACE_CODE || ch === Jsep.TAB_CODE || ch === Jsep.LF_CODE || ch === Jsep.CR_CODE) { + ch = this.expr.charCodeAt(++this.index); + } + this.runHook('gobble-spaces'); + } + + /** + * Top-level method to parse all expressions and returns compound or single node + * @returns {jsep.Expression} + */ + parse() { + this.runHook('before-all'); + const nodes = this.gobbleExpressions(); + + // If there's only one expression just try returning the expression + const node = nodes.length === 1 ? nodes[0] : { + type: Jsep.COMPOUND, + body: nodes + }; + return this.runHook('after-all', node); + } + + /** + * top-level parser (but can be reused within as well) + * @param {number} [untilICode] + * @returns {jsep.Expression[]} + */ + gobbleExpressions(untilICode) { + let nodes = [], + ch_i, + node; + while (this.index < this.expr.length) { + ch_i = this.code; + + // Expressions can be separated by semicolons, commas, or just inferred without any + // separators + if (ch_i === Jsep.SEMCOL_CODE || ch_i === Jsep.COMMA_CODE) { + this.index++; // ignore separators + } else { + // Try to gobble each expression individually + if (node = this.gobbleExpression()) { + nodes.push(node); + // If we weren't able to find a binary expression and are out of room, then + // the expression passed in probably has too much + } else if (this.index < this.expr.length) { + if (ch_i === untilICode) { + break; + } + this.throwError('Unexpected "' + this.char + '"'); + } + } + } + return nodes; + } + + /** + * The main parsing function. 
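+ * As a rough illustration (node shape per the type constants defined
+ * below), the static `Jsep.parse` entry point that drives this method
+ * yields trees such as:
+ * @example
+ * Jsep.parse('1 + 2');
+ * // => { type: 'BinaryExpression', operator: '+',
+ * //      left:  { type: 'Literal', value: 1, raw: '1' },
+ * //      right: { type: 'Literal', value: 2, raw: '2' } }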
+ * @returns {?jsep.Expression} + */ + gobbleExpression() { + const node = this.searchHook('gobble-expression') || this.gobbleBinaryExpression(); + this.gobbleSpaces(); + return this.runHook('after-expression', node); + } + + /** + * Search for the operation portion of the string (e.g. `+`, `===`) + * Start by taking the longest possible binary operations (3 characters: `===`, `!==`, `>>>`) + * and move down from 3 to 2 to 1 character until a matching binary operation is found + * then, return that binary operation + * @returns {string|boolean} + */ + gobbleBinaryOp() { + this.gobbleSpaces(); + let to_check = this.expr.substr(this.index, Jsep.max_binop_len); + let tc_len = to_check.length; + while (tc_len > 0) { + // Don't accept a binary op when it is an identifier. + // Binary ops that start with a identifier-valid character must be followed + // by a non identifier-part valid character + if (Jsep.binary_ops.hasOwnProperty(to_check) && (!Jsep.isIdentifierStart(this.code) || this.index + to_check.length < this.expr.length && !Jsep.isIdentifierPart(this.expr.charCodeAt(this.index + to_check.length)))) { + this.index += tc_len; + return to_check; + } + to_check = to_check.substr(0, --tc_len); + } + return false; + } + + /** + * This function is responsible for gobbling an individual expression, + * e.g. `1`, `1+2`, `a+(b*2)-Math.sqrt(2)` + * @returns {?jsep.BinaryExpression} + */ + gobbleBinaryExpression() { + let node, biop, prec, stack, biop_info, left, right, i, cur_biop; + + // First, try to get the leftmost thing + // Then, check to see if there's a binary operator operating on that leftmost thing + // Don't gobbleBinaryOp without a left-hand-side + left = this.gobbleToken(); + if (!left) { + return left; + } + biop = this.gobbleBinaryOp(); + + // If there wasn't a binary operator, just return the leftmost node + if (!biop) { + return left; + } + + // Otherwise, we need to start a stack to properly place the binary operations in their + // precedence structure + biop_info = { + value: biop, + prec: Jsep.binaryPrecedence(biop), + right_a: Jsep.right_associative.has(biop) + }; + right = this.gobbleToken(); + if (!right) { + this.throwError("Expected expression after " + biop); + } + stack = [left, biop_info, right]; + + // Properly deal with precedence using [recursive descent](http://www.engr.mun.ca/~theo/Misc/exp_parsing.htm) + while (biop = this.gobbleBinaryOp()) { + prec = Jsep.binaryPrecedence(biop); + if (prec === 0) { + this.index -= biop.length; + break; + } + biop_info = { + value: biop, + prec, + right_a: Jsep.right_associative.has(biop) + }; + cur_biop = biop; + + // Reduce: make a binary expression from the three topmost entries. + const comparePrev = prev => biop_info.right_a && prev.right_a ? prec > prev.prec : prec <= prev.prec; + while (stack.length > 2 && comparePrev(stack[stack.length - 2])) { + right = stack.pop(); + biop = stack.pop().value; + left = stack.pop(); + node = { + type: Jsep.BINARY_EXP, + operator: biop, + left, + right + }; + stack.push(node); + } + node = this.gobbleToken(); + if (!node) { + this.throwError("Expected expression after " + cur_biop); + } + stack.push(biop_info, node); + } + i = stack.length - 1; + node = stack[i]; + while (i > 1) { + node = { + type: Jsep.BINARY_EXP, + operator: stack[i - 1].value, + left: stack[i - 2], + right: node + }; + i -= 2; + } + return node; + } + + /** + * An individual part of a binary expression: + * e.g. 
`foo.bar(baz)`, `1`, `"abc"`, `(a % 2)` (because it's in parenthesis) + * @returns {boolean|jsep.Expression} + */ + gobbleToken() { + let ch, to_check, tc_len, node; + this.gobbleSpaces(); + node = this.searchHook('gobble-token'); + if (node) { + return this.runHook('after-token', node); + } + ch = this.code; + if (Jsep.isDecimalDigit(ch) || ch === Jsep.PERIOD_CODE) { + // Char code 46 is a dot `.` which can start off a numeric literal + return this.gobbleNumericLiteral(); + } + if (ch === Jsep.SQUOTE_CODE || ch === Jsep.DQUOTE_CODE) { + // Single or double quotes + node = this.gobbleStringLiteral(); + } else if (ch === Jsep.OBRACK_CODE) { + node = this.gobbleArray(); + } else { + to_check = this.expr.substr(this.index, Jsep.max_unop_len); + tc_len = to_check.length; + while (tc_len > 0) { + // Don't accept an unary op when it is an identifier. + // Unary ops that start with a identifier-valid character must be followed + // by a non identifier-part valid character + if (Jsep.unary_ops.hasOwnProperty(to_check) && (!Jsep.isIdentifierStart(this.code) || this.index + to_check.length < this.expr.length && !Jsep.isIdentifierPart(this.expr.charCodeAt(this.index + to_check.length)))) { + this.index += tc_len; + const argument = this.gobbleToken(); + if (!argument) { + this.throwError('missing unaryOp argument'); + } + return this.runHook('after-token', { + type: Jsep.UNARY_EXP, + operator: to_check, + argument, + prefix: true + }); + } + to_check = to_check.substr(0, --tc_len); + } + if (Jsep.isIdentifierStart(ch)) { + node = this.gobbleIdentifier(); + if (Jsep.literals.hasOwnProperty(node.name)) { + node = { + type: Jsep.LITERAL, + value: Jsep.literals[node.name], + raw: node.name + }; + } else if (node.name === Jsep.this_str) { + node = { + type: Jsep.THIS_EXP + }; + } + } else if (ch === Jsep.OPAREN_CODE) { + // open parenthesis + node = this.gobbleGroup(); + } + } + if (!node) { + return this.runHook('after-token', false); + } + node = this.gobbleTokenProperty(node); + return this.runHook('after-token', node); + } + + /** + * Gobble properties of of identifiers/strings/arrays/groups. + * e.g. `foo`, `bar.baz`, `foo['bar'].baz` + * It also gobbles function calls: + * e.g. 
`Math.acos(obj.angle)` + * @param {jsep.Expression} node + * @returns {jsep.Expression} + */ + gobbleTokenProperty(node) { + this.gobbleSpaces(); + let ch = this.code; + while (ch === Jsep.PERIOD_CODE || ch === Jsep.OBRACK_CODE || ch === Jsep.OPAREN_CODE || ch === Jsep.QUMARK_CODE) { + let optional; + if (ch === Jsep.QUMARK_CODE) { + if (this.expr.charCodeAt(this.index + 1) !== Jsep.PERIOD_CODE) { + break; + } + optional = true; + this.index += 2; + this.gobbleSpaces(); + ch = this.code; + } + this.index++; + if (ch === Jsep.OBRACK_CODE) { + node = { + type: Jsep.MEMBER_EXP, + computed: true, + object: node, + property: this.gobbleExpression() + }; + if (!node.property) { + this.throwError('Unexpected "' + this.char + '"'); + } + this.gobbleSpaces(); + ch = this.code; + if (ch !== Jsep.CBRACK_CODE) { + this.throwError('Unclosed ['); + } + this.index++; + } else if (ch === Jsep.OPAREN_CODE) { + // A function call is being made; gobble all the arguments + node = { + type: Jsep.CALL_EXP, + 'arguments': this.gobbleArguments(Jsep.CPAREN_CODE), + callee: node + }; + } else if (ch === Jsep.PERIOD_CODE || optional) { + if (optional) { + this.index--; + } + this.gobbleSpaces(); + node = { + type: Jsep.MEMBER_EXP, + computed: false, + object: node, + property: this.gobbleIdentifier() + }; + } + if (optional) { + node.optional = true; + } // else leave undefined for compatibility with esprima + + this.gobbleSpaces(); + ch = this.code; + } + return node; + } + + /** + * Parse simple numeric literals: `12`, `3.4`, `.5`. Do this by using a string to + * keep track of everything in the numeric literal and then calling `parseFloat` on that string + * @returns {jsep.Literal} + */ + gobbleNumericLiteral() { + let number = '', + ch, + chCode; + while (Jsep.isDecimalDigit(this.code)) { + number += this.expr.charAt(this.index++); + } + if (this.code === Jsep.PERIOD_CODE) { + // can start with a decimal marker + number += this.expr.charAt(this.index++); + while (Jsep.isDecimalDigit(this.code)) { + number += this.expr.charAt(this.index++); + } + } + ch = this.char; + if (ch === 'e' || ch === 'E') { + // exponent marker + number += this.expr.charAt(this.index++); + ch = this.char; + if (ch === '+' || ch === '-') { + // exponent sign + number += this.expr.charAt(this.index++); + } + while (Jsep.isDecimalDigit(this.code)) { + // exponent itself + number += this.expr.charAt(this.index++); + } + if (!Jsep.isDecimalDigit(this.expr.charCodeAt(this.index - 1))) { + this.throwError('Expected exponent (' + number + this.char + ')'); + } + } + chCode = this.code; + + // Check to make sure this isn't a variable name that start with a number (123abc) + if (Jsep.isIdentifierStart(chCode)) { + this.throwError('Variable names cannot start with a number (' + number + this.char + ')'); + } else if (chCode === Jsep.PERIOD_CODE || number.length === 1 && number.charCodeAt(0) === Jsep.PERIOD_CODE) { + this.throwError('Unexpected period'); + } + return { + type: Jsep.LITERAL, + value: parseFloat(number), + raw: number + }; + } + + /** + * Parses a string literal, staring with single or double quotes with basic support for escape codes + * e.g. 
`"hello world"`, `'this is\nJSEP'` + * @returns {jsep.Literal} + */ + gobbleStringLiteral() { + let str = ''; + const startIndex = this.index; + const quote = this.expr.charAt(this.index++); + let closed = false; + while (this.index < this.expr.length) { + let ch = this.expr.charAt(this.index++); + if (ch === quote) { + closed = true; + break; + } else if (ch === '\\') { + // Check for all of the common escape codes + ch = this.expr.charAt(this.index++); + switch (ch) { + case 'n': + str += '\n'; + break; + case 'r': + str += '\r'; + break; + case 't': + str += '\t'; + break; + case 'b': + str += '\b'; + break; + case 'f': + str += '\f'; + break; + case 'v': + str += '\x0B'; + break; + default: + str += ch; + } + } else { + str += ch; + } + } + if (!closed) { + this.throwError('Unclosed quote after "' + str + '"'); + } + return { + type: Jsep.LITERAL, + value: str, + raw: this.expr.substring(startIndex, this.index) + }; + } + + /** + * Gobbles only identifiers + * e.g.: `foo`, `_value`, `$x1` + * Also, this function checks if that identifier is a literal: + * (e.g. `true`, `false`, `null`) or `this` + * @returns {jsep.Identifier} + */ + gobbleIdentifier() { + let ch = this.code, + start = this.index; + if (Jsep.isIdentifierStart(ch)) { + this.index++; + } else { + this.throwError('Unexpected ' + this.char); + } + while (this.index < this.expr.length) { + ch = this.code; + if (Jsep.isIdentifierPart(ch)) { + this.index++; + } else { + break; + } + } + return { + type: Jsep.IDENTIFIER, + name: this.expr.slice(start, this.index) + }; + } + + /** + * Gobbles a list of arguments within the context of a function call + * or array literal. This function also assumes that the opening character + * `(` or `[` has already been gobbled, and gobbles expressions and commas + * until the terminator character `)` or `]` is encountered. + * e.g. 
`foo(bar, baz)`, `my_func()`, or `[bar, baz]` + * @param {number} termination + * @returns {jsep.Expression[]} + */ + gobbleArguments(termination) { + const args = []; + let closed = false; + let separator_count = 0; + while (this.index < this.expr.length) { + this.gobbleSpaces(); + let ch_i = this.code; + if (ch_i === termination) { + // done parsing + closed = true; + this.index++; + if (termination === Jsep.CPAREN_CODE && separator_count && separator_count >= args.length) { + this.throwError('Unexpected token ' + String.fromCharCode(termination)); + } + break; + } else if (ch_i === Jsep.COMMA_CODE) { + // between expressions + this.index++; + separator_count++; + if (separator_count !== args.length) { + // missing argument + if (termination === Jsep.CPAREN_CODE) { + this.throwError('Unexpected token ,'); + } else if (termination === Jsep.CBRACK_CODE) { + for (let arg = args.length; arg < separator_count; arg++) { + args.push(null); + } + } + } + } else if (args.length !== separator_count && separator_count !== 0) { + // NOTE: `&& separator_count !== 0` allows for either all commas, or all spaces as arguments + this.throwError('Expected comma'); + } else { + const node = this.gobbleExpression(); + if (!node || node.type === Jsep.COMPOUND) { + this.throwError('Expected comma'); + } + args.push(node); + } + } + if (!closed) { + this.throwError('Expected ' + String.fromCharCode(termination)); + } + return args; + } + + /** + * Responsible for parsing a group of things within parentheses `()` + * that have no identifier in front (so not a function call) + * This function assumes that it needs to gobble the opening parenthesis + * and then tries to gobble everything within that parenthesis, assuming + * that the next thing it should see is the close parenthesis. If not, + * then the expression probably doesn't have a `)` + * @returns {boolean|jsep.Expression} + */ + gobbleGroup() { + this.index++; + let nodes = this.gobbleExpressions(Jsep.CPAREN_CODE); + if (this.code === Jsep.CPAREN_CODE) { + this.index++; + if (nodes.length === 1) { + return nodes[0]; + } else if (!nodes.length) { + return false; + } else { + return { + type: Jsep.SEQUENCE_EXP, + expressions: nodes + }; + } + } else { + this.throwError('Unclosed ('); + } + } + + /** + * Responsible for parsing Array literals `[1, 2, 3]` + * This function assumes that it needs to gobble the opening bracket + * and then tries to gobble the expressions as arguments. + * @returns {jsep.ArrayExpression} + */ + gobbleArray() { + this.index++; + return { + type: Jsep.ARRAY_EXP, + elements: this.gobbleArguments(Jsep.CBRACK_CODE) + }; + } +} + +// Static fields: +const hooks = new Hooks(); +Object.assign(Jsep, { + hooks, + plugins: new Plugins(Jsep), + // Node Types + // ---------- + // This is the full set of types that any JSEP node can be. + // Store them here to save space when minified + COMPOUND: 'Compound', + SEQUENCE_EXP: 'SequenceExpression', + IDENTIFIER: 'Identifier', + MEMBER_EXP: 'MemberExpression', + LITERAL: 'Literal', + THIS_EXP: 'ThisExpression', + CALL_EXP: 'CallExpression', + UNARY_EXP: 'UnaryExpression', + BINARY_EXP: 'BinaryExpression', + ARRAY_EXP: 'ArrayExpression', + TAB_CODE: 9, + LF_CODE: 10, + CR_CODE: 13, + SPACE_CODE: 32, + PERIOD_CODE: 46, + // '.' + COMMA_CODE: 44, + // ',' + SQUOTE_CODE: 39, + // single quote + DQUOTE_CODE: 34, + // double quotes + OPAREN_CODE: 40, + // ( + CPAREN_CODE: 41, + // ) + OBRACK_CODE: 91, + // [ + CBRACK_CODE: 93, + // ] + QUMARK_CODE: 63, + // ? 
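+  // (each *_CODE value is simply `'x'.charCodeAt(0)`, e.g. '?'.charCodeAt(0) === 63)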
+ SEMCOL_CODE: 59, + // ; + COLON_CODE: 58, + // : + + // Operations + // ---------- + // Use a quickly-accessible map to store all of the unary operators + // Values are set to `1` (it really doesn't matter) + unary_ops: { + '-': 1, + '!': 1, + '~': 1, + '+': 1 + }, + // Also use a map for the binary operations but set their values to their + // binary precedence for quick reference (higher number = higher precedence) + // see [Order of operations](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Operator_Precedence) + binary_ops: { + '||': 1, + '??': 1, + '&&': 2, + '|': 3, + '^': 4, + '&': 5, + '==': 6, + '!=': 6, + '===': 6, + '!==': 6, + '<': 7, + '>': 7, + '<=': 7, + '>=': 7, + '<<': 8, + '>>': 8, + '>>>': 8, + '+': 9, + '-': 9, + '*': 10, + '/': 10, + '%': 10, + '**': 11 + }, + // sets specific binary_ops as right-associative + right_associative: new Set(['**']), + // Additional valid identifier chars, apart from a-z, A-Z and 0-9 (except on the starting char) + additional_identifier_chars: new Set(['$', '_']), + // Literals + // ---------- + // Store the values to return for the various literals we may encounter + literals: { + 'true': true, + 'false': false, + 'null': null + }, + // Except for `this`, which is special. This could be changed to something like `'self'` as well + this_str: 'this' +}); +Jsep.max_unop_len = Jsep.getMaxKeyLen(Jsep.unary_ops); +Jsep.max_binop_len = Jsep.getMaxKeyLen(Jsep.binary_ops); + +// Backward Compatibility: +const jsep = expr => new Jsep(expr).parse(); +const stdClassProps = Object.getOwnPropertyNames(class Test {}); +Object.getOwnPropertyNames(Jsep).filter(prop => !stdClassProps.includes(prop) && jsep[prop] === undefined).forEach(m => { + jsep[m] = Jsep[m]; +}); +jsep.Jsep = Jsep; // allows for const { Jsep } = require('jsep'); + +const CONDITIONAL_EXP = 'ConditionalExpression'; +var ternary = { + name: 'ternary', + init(jsep) { + // Ternary expression: test ? consequent : alternate + jsep.hooks.add('after-expression', function gobbleTernary(env) { + if (env.node && this.code === jsep.QUMARK_CODE) { + this.index++; + const test = env.node; + const consequent = this.gobbleExpression(); + if (!consequent) { + this.throwError('Expected expression'); + } + this.gobbleSpaces(); + if (this.code === jsep.COLON_CODE) { + this.index++; + const alternate = this.gobbleExpression(); + if (!alternate) { + this.throwError('Expected expression'); + } + env.node = { + type: CONDITIONAL_EXP, + test, + consequent, + alternate + }; + + // check for operators of higher priority than ternary (i.e. 
assignment) + // jsep sets || at 1, and assignment at 0.9, and conditional should be between them + if (test.operator && jsep.binary_ops[test.operator] <= 0.9) { + let newTest = test; + while (newTest.right.operator && jsep.binary_ops[newTest.right.operator] <= 0.9) { + newTest = newTest.right; + } + env.node.test = newTest.right; + newTest.right = env.node; + env.node = test; + } + } else { + this.throwError('Expected :'); + } + } + }); + } +}; + +// Add default plugins: + +jsep.plugins.register(ternary); + +const FSLASH_CODE = 47; // '/' +const BSLASH_CODE = 92; // '\\' + +var index = { + name: 'regex', + init(jsep) { + // Regex literal: /abc123/ig + jsep.hooks.add('gobble-token', function gobbleRegexLiteral(env) { + if (this.code === FSLASH_CODE) { + const patternIndex = ++this.index; + let inCharSet = false; + while (this.index < this.expr.length) { + if (this.code === FSLASH_CODE && !inCharSet) { + const pattern = this.expr.slice(patternIndex, this.index); + let flags = ''; + while (++this.index < this.expr.length) { + const code = this.code; + if (code >= 97 && code <= 122 // a...z + || code >= 65 && code <= 90 // A...Z + || code >= 48 && code <= 57) { + // 0-9 + flags += this.char; + } else { + break; + } + } + let value; + try { + value = new RegExp(pattern, flags); + } catch (e) { + this.throwError(e.message); + } + env.node = { + type: jsep.LITERAL, + value, + raw: this.expr.slice(patternIndex - 1, this.index) + }; + + // allow . [] and () after regex: /regex/.test(a) + env.node = this.gobbleTokenProperty(env.node); + return env.node; + } + if (this.code === jsep.OBRACK_CODE) { + inCharSet = true; + } else if (inCharSet && this.code === jsep.CBRACK_CODE) { + inCharSet = false; + } + this.index += this.code === BSLASH_CODE ? 2 : 1; + } + this.throwError('Unclosed Regex'); + } + }); + } +}; + +const PLUS_CODE = 43; // + +const MINUS_CODE = 45; // - + +const plugin = { + name: 'assignment', + assignmentOperators: new Set(['=', '*=', '**=', '/=', '%=', '+=', '-=', '<<=', '>>=', '>>>=', '&=', '^=', '|=', '||=', '&&=', '??=']), + updateOperators: [PLUS_CODE, MINUS_CODE], + assignmentPrecedence: 0.9, + init(jsep) { + const updateNodeTypes = [jsep.IDENTIFIER, jsep.MEMBER_EXP]; + plugin.assignmentOperators.forEach(op => jsep.addBinaryOp(op, plugin.assignmentPrecedence, true)); + jsep.hooks.add('gobble-token', function gobbleUpdatePrefix(env) { + const code = this.code; + if (plugin.updateOperators.some(c => c === code && c === this.expr.charCodeAt(this.index + 1))) { + this.index += 2; + env.node = { + type: 'UpdateExpression', + operator: code === PLUS_CODE ? '++' : '--', + argument: this.gobbleTokenProperty(this.gobbleIdentifier()), + prefix: true + }; + if (!env.node.argument || !updateNodeTypes.includes(env.node.argument.type)) { + this.throwError(`Unexpected ${env.node.operator}`); + } + } + }); + jsep.hooks.add('after-token', function gobbleUpdatePostfix(env) { + if (env.node) { + const code = this.code; + if (plugin.updateOperators.some(c => c === code && c === this.expr.charCodeAt(this.index + 1))) { + if (!updateNodeTypes.includes(env.node.type)) { + this.throwError(`Unexpected ${env.node.operator}`); + } + this.index += 2; + env.node = { + type: 'UpdateExpression', + operator: code === PLUS_CODE ? '++' : '--', + argument: env.node, + prefix: false + }; + } + } + }); + jsep.hooks.add('after-expression', function gobbleAssignment(env) { + if (env.node) { + // Note: Binaries can be chained in a single expression to respect + // operator precedence (i.e. 
a = b = 1 + 2 + 3) + // Update all binary assignment nodes in the tree + updateBinariesToAssignments(env.node); + } + }); + function updateBinariesToAssignments(node) { + if (plugin.assignmentOperators.has(node.operator)) { + node.type = 'AssignmentExpression'; + updateBinariesToAssignments(node.left); + updateBinariesToAssignments(node.right); + } else if (!node.operator) { + Object.values(node).forEach(val => { + if (val && typeof val === 'object') { + updateBinariesToAssignments(val); + } + }); + } + } + } +}; + +/* eslint-disable no-bitwise -- Convenient */ + +// register plugins +jsep.plugins.register(index, plugin); +jsep.addUnaryOp('typeof'); +jsep.addLiteral('null', null); +jsep.addLiteral('undefined', undefined); +const BLOCKED_PROTO_PROPERTIES = new Set(['constructor', '__proto__', '__defineGetter__', '__defineSetter__']); +const SafeEval = { + /** + * @param {jsep.Expression} ast + * @param {Record} subs + */ + evalAst(ast, subs) { + switch (ast.type) { + case 'BinaryExpression': + case 'LogicalExpression': + return SafeEval.evalBinaryExpression(ast, subs); + case 'Compound': + return SafeEval.evalCompound(ast, subs); + case 'ConditionalExpression': + return SafeEval.evalConditionalExpression(ast, subs); + case 'Identifier': + return SafeEval.evalIdentifier(ast, subs); + case 'Literal': + return SafeEval.evalLiteral(ast, subs); + case 'MemberExpression': + return SafeEval.evalMemberExpression(ast, subs); + case 'UnaryExpression': + return SafeEval.evalUnaryExpression(ast, subs); + case 'ArrayExpression': + return SafeEval.evalArrayExpression(ast, subs); + case 'CallExpression': + return SafeEval.evalCallExpression(ast, subs); + case 'AssignmentExpression': + return SafeEval.evalAssignmentExpression(ast, subs); + default: + throw SyntaxError('Unexpected expression', ast); + } + }, + evalBinaryExpression(ast, subs) { + const result = { + '||': (a, b) => a || b(), + '&&': (a, b) => a && b(), + '|': (a, b) => a | b(), + '^': (a, b) => a ^ b(), + '&': (a, b) => a & b(), + // eslint-disable-next-line eqeqeq -- API + '==': (a, b) => a == b(), + // eslint-disable-next-line eqeqeq -- API + '!=': (a, b) => a != b(), + '===': (a, b) => a === b(), + '!==': (a, b) => a !== b(), + '<': (a, b) => a < b(), + '>': (a, b) => a > b(), + '<=': (a, b) => a <= b(), + '>=': (a, b) => a >= b(), + '<<': (a, b) => a << b(), + '>>': (a, b) => a >> b(), + '>>>': (a, b) => a >>> b(), + '+': (a, b) => a + b(), + '-': (a, b) => a - b(), + '*': (a, b) => a * b(), + '/': (a, b) => a / b(), + '%': (a, b) => a % b() + }[ast.operator](SafeEval.evalAst(ast.left, subs), () => SafeEval.evalAst(ast.right, subs)); + return result; + }, + evalCompound(ast, subs) { + let last; + for (let i = 0; i < ast.body.length; i++) { + if (ast.body[i].type === 'Identifier' && ['var', 'let', 'const'].includes(ast.body[i].name) && ast.body[i + 1] && ast.body[i + 1].type === 'AssignmentExpression') { + // var x=2; is detected as + // [{Identifier var}, {AssignmentExpression x=2}] + // eslint-disable-next-line @stylistic/max-len -- Long + // eslint-disable-next-line sonarjs/updated-loop-counter -- Convenient + i += 1; + } + const expr = ast.body[i]; + last = SafeEval.evalAst(expr, subs); + } + return last; + }, + evalConditionalExpression(ast, subs) { + if (SafeEval.evalAst(ast.test, subs)) { + return SafeEval.evalAst(ast.consequent, subs); + } + return SafeEval.evalAst(ast.alternate, subs); + }, + evalIdentifier(ast, subs) { + if (Object.hasOwn(subs, ast.name)) { + return subs[ast.name]; + } + throw ReferenceError(`${ast.name} is not 
defined`); + }, + evalLiteral(ast) { + return ast.value; + }, + evalMemberExpression(ast, subs) { + const prop = ast.computed ? SafeEval.evalAst(ast.property) // `object[property]` + : ast.property.name; // `object.property` property is Identifier + const obj = SafeEval.evalAst(ast.object, subs); + if (obj === undefined || obj === null) { + throw TypeError(`Cannot read properties of ${obj} (reading '${prop}')`); + } + if (!Object.hasOwn(obj, prop) && BLOCKED_PROTO_PROPERTIES.has(prop)) { + throw TypeError(`Cannot read properties of ${obj} (reading '${prop}')`); + } + const result = obj[prop]; + if (typeof result === 'function') { + return result.bind(obj); // arrow functions aren't affected by bind. + } + return result; + }, + evalUnaryExpression(ast, subs) { + const result = { + '-': a => -SafeEval.evalAst(a, subs), + '!': a => !SafeEval.evalAst(a, subs), + '~': a => ~SafeEval.evalAst(a, subs), + // eslint-disable-next-line no-implicit-coercion -- API + '+': a => +SafeEval.evalAst(a, subs), + typeof: a => typeof SafeEval.evalAst(a, subs) + }[ast.operator](ast.argument); + return result; + }, + evalArrayExpression(ast, subs) { + return ast.elements.map(el => SafeEval.evalAst(el, subs)); + }, + evalCallExpression(ast, subs) { + const args = ast.arguments.map(arg => SafeEval.evalAst(arg, subs)); + const func = SafeEval.evalAst(ast.callee, subs); + // if (func === Function) { + // throw new Error('Function constructor is disabled'); + // } + return func(...args); + }, + evalAssignmentExpression(ast, subs) { + if (ast.left.type !== 'Identifier') { + throw SyntaxError('Invalid left-hand side in assignment'); + } + const id = ast.left.name; + const value = SafeEval.evalAst(ast.right, subs); + subs[id] = value; + return subs[id]; + } +}; + +/** + * A replacement for NodeJS' VM.Script which is also {@link https://developer.mozilla.org/en-US/docs/Web/HTTP/CSP | Content Security Policy} friendly. 
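+ *
+ * @example
+ * // A minimal sketch of the intended usage; the keys of the context
+ * // object become the variables visible to the expression:
+ * new SafeScript('a + b').runInNewContext({ a: 1, b: 2 }); // => 3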
+ */ +class SafeScript { + /** + * @param {string} expr Expression to evaluate + */ + constructor(expr) { + this.code = expr; + this.ast = jsep(this.code); + } + + /** + * @param {object} context Object whose items will be added + * to evaluation + * @returns {EvaluatedResult} Result of evaluated code + */ + runInNewContext(context) { + // `Object.create(null)` creates a prototypeless object + const keyMap = Object.assign(Object.create(null), context); + return SafeEval.evalAst(this.ast, keyMap); + } +} + +/* eslint-disable camelcase -- Convenient for escaping */ + + +/** + * @typedef {null|boolean|number|string|object|GenericArray} JSONObject */ /** @@ -109808,16 +120222,16 @@ class NewError extends Error { } /** -* @typedef {PlainObject} ReturnObject +* @typedef {object} ReturnObject * @property {string} path * @property {JSONObject} value -* @property {PlainObject|GenericArray} parent +* @property {object|GenericArray} parent * @property {string} parentProperty */ /** * @callback JSONPathCallback -* @param {string|PlainObject} preferredOutput +* @param {string|object} preferredOutput * @param {"value"|"property"} type * @param {ReturnObject} fullRetObj * @returns {void} @@ -109827,7 +120241,7 @@ class NewError extends Error { * @callback OtherTypeCallback * @param {JSONObject} val * @param {string} path -* @param {PlainObject|GenericArray} parent +* @param {object|GenericArray} parent * @param {string} parentPropName * @returns {boolean} */ @@ -109848,21 +120262,21 @@ class NewError extends Error { */ /** - * @typedef {typeof import('./jsonpath-browser').SafeScript} EvalClass + * @typedef {typeof SafeScript} EvalClass */ /** - * @typedef {PlainObject} JSONPathOptions + * @typedef {object} JSONPathOptions * @property {JSON} json * @property {string|string[]} path * @property {"value"|"path"|"pointer"|"parent"|"parentProperty"| * "all"} [resultType="value"] * @property {boolean} [flatten=false] * @property {boolean} [wrap=true] - * @property {PlainObject} [sandbox={}] + * @property {object} [sandbox={}] * @property {EvalCallback|EvalClass|'safe'|'native'| * boolean} [eval = 'safe'] - * @property {PlainObject|GenericArray|null} [parent=null] + * @property {object|GenericArray|null} [parent=null] * @property {string|null} [parentProperty=null] * @property {JSONPathCallback} [callback] * @property {OtherTypeCallback} [otherTypeCallback] Defaults to @@ -109886,7 +120300,7 @@ class NewError extends Error { * @class */ function JSONPath(opts, expr, obj, callback, otherTypeCallback) { - // eslint-disable-next-line no-restricted-syntax + // eslint-disable-next-line no-restricted-syntax -- Allow for pseudo-class if (!(this instanceof JSONPath)) { try { return new JSONPath(opts, expr, obj, callback, otherTypeCallback); @@ -109910,7 +120324,7 @@ function JSONPath(opts, expr, obj, callback, otherTypeCallback) { this.path = opts.path || expr; this.resultType = opts.resultType || 'value'; this.flatten = opts.flatten || false; - this.wrap = hasOwnProp.call(opts, 'wrap') ? opts.wrap : true; + this.wrap = Object.hasOwn(opts, 'wrap') ? opts.wrap : true; this.sandbox = opts.sandbox || {}; this.eval = opts.eval === undefined ? 'safe' : opts.eval; this.ignoreEvalErrors = typeof opts.ignoreEvalErrors === 'undefined' ? 
false : opts.ignoreEvalErrors; @@ -109956,21 +120370,21 @@ JSONPath.prototype.evaluate = function (expr, json, callback, otherTypeCallback) if (!expr.path && expr.path !== '') { throw new TypeError('You must supply a "path" property when providing an object ' + 'argument to JSONPath.evaluate().'); } - if (!hasOwnProp.call(expr, 'json')) { + if (!Object.hasOwn(expr, 'json')) { throw new TypeError('You must supply a "json" property when providing an object ' + 'argument to JSONPath.evaluate().'); } ({ json } = expr); - flatten = hasOwnProp.call(expr, 'flatten') ? expr.flatten : flatten; - this.currResultType = hasOwnProp.call(expr, 'resultType') ? expr.resultType : this.currResultType; - this.currSandbox = hasOwnProp.call(expr, 'sandbox') ? expr.sandbox : this.currSandbox; - wrap = hasOwnProp.call(expr, 'wrap') ? expr.wrap : wrap; - this.currEval = hasOwnProp.call(expr, 'eval') ? expr.eval : this.currEval; - callback = hasOwnProp.call(expr, 'callback') ? expr.callback : callback; - this.currOtherTypeCallback = hasOwnProp.call(expr, 'otherTypeCallback') ? expr.otherTypeCallback : this.currOtherTypeCallback; - currParent = hasOwnProp.call(expr, 'parent') ? expr.parent : currParent; - currParentProperty = hasOwnProp.call(expr, 'parentProperty') ? expr.parentProperty : currParentProperty; + flatten = Object.hasOwn(expr, 'flatten') ? expr.flatten : flatten; + this.currResultType = Object.hasOwn(expr, 'resultType') ? expr.resultType : this.currResultType; + this.currSandbox = Object.hasOwn(expr, 'sandbox') ? expr.sandbox : this.currSandbox; + wrap = Object.hasOwn(expr, 'wrap') ? expr.wrap : wrap; + this.currEval = Object.hasOwn(expr, 'eval') ? expr.eval : this.currEval; + callback = Object.hasOwn(expr, 'callback') ? expr.callback : callback; + this.currOtherTypeCallback = Object.hasOwn(expr, 'otherTypeCallback') ? expr.otherTypeCallback : this.currOtherTypeCallback; + currParent = Object.hasOwn(expr, 'parent') ? expr.parent : currParent; + currParentProperty = Object.hasOwn(expr, 'parentProperty') ? expr.parentProperty : currParentProperty; expr = expr.path; } currParent = currParent || null; @@ -110034,7 +120448,7 @@ JSONPath.prototype._handleCallback = function (fullRetObj, callback, type) { if (callback) { const preferredOutput = this._getPreferredOutput(fullRetObj); fullRetObj.path = typeof fullRetObj.path === 'string' ? 
fullRetObj.path : JSONPath.toPathString(fullRetObj.path); - // eslint-disable-next-line n/callback-return + // eslint-disable-next-line n/callback-return -- No need to return callback(preferredOutput, type, fullRetObj); } }; @@ -110044,7 +120458,7 @@ JSONPath.prototype._handleCallback = function (fullRetObj, callback, type) { * @param {string} expr * @param {JSONObject} val * @param {string} path - * @param {PlainObject|GenericArray} parent + * @param {object|GenericArray} parent * @param {string} parentPropName * @param {JSONPathCallback} callback * @param {boolean} hasArrExpr @@ -110089,7 +120503,7 @@ JSONPath.prototype._trace = function (expr, val, path, parent, parentPropName, c ret.push(elems); } } - if ((typeof loc !== 'string' || literalPriority) && val && hasOwnProp.call(val, loc)) { + if ((typeof loc !== 'string' || literalPriority) && val && Object.hasOwn(val, loc)) { // simple case--directly follow property addRet(this._trace(x, val[loc], push(path, loc), val, loc, callback, hasArrExpr)); // eslint-disable-next-line unicorn/prefer-switch -- Part of larger `if` @@ -110171,7 +120585,7 @@ JSONPath.prototype._trace = function (expr, val, path, parent, parentPropName, c // As this will resolve to a property name (but we don't know it // yet), property and parent information is relative to the // parent of the property to which this expression will resolve - addRet(this._trace(unshift(this._eval(loc, val, path[path.length - 1], path.slice(0, -1), parent, parentPropName), x), val, path, parent, parentPropName, callback, hasArrExpr)); + addRet(this._trace(unshift(this._eval(loc, val, path.at(-1), path.slice(0, -1), parent, parentPropName), x), val, path, parent, parentPropName, callback, hasArrExpr)); } else if (loc[0] === '@') { // value type: @boolean(), etc. 
let addType = false; @@ -110238,7 +120652,7 @@ JSONPath.prototype._trace = function (expr, val, path, parent, parentPropName, c return retObj; } // `-escaped property - } else if (loc[0] === '`' && val && hasOwnProp.call(val, loc.slice(1))) { + } else if (loc[0] === '`' && val && Object.hasOwn(val, loc.slice(1))) { const locProp = loc.slice(1); addRet(this._trace(x, val[locProp], push(path, locProp), val, locProp, callback, hasArrExpr, true)); } else if (loc.includes(',')) { @@ -110248,7 +120662,7 @@ JSONPath.prototype._trace = function (expr, val, path, parent, parentPropName, c addRet(this._trace(unshift(part, x), val, path, parent, parentPropName, callback, true)); } // simple case--directly follow property - } else if (!literalPriority && val && hasOwnProp.call(val, loc)) { + } else if (!literalPriority && val && Object.hasOwn(val, loc)) { addRet(this._trace(x, val[loc], push(path, loc), val, loc, callback, hasArrExpr, true)); } @@ -110264,6 +120678,8 @@ JSONPath.prototype._trace = function (expr, val, path, parent, parentPropName, c ret[t] = tmp[0]; const tl = tmp.length; for (let tt = 1; tt < tl; tt++) { + // eslint-disable-next-line @stylistic/max-len -- Long + // eslint-disable-next-line sonarjs/updated-loop-counter -- Convenient t++; ret.splice(t, 0, tmp[tt]); } @@ -110324,15 +120740,15 @@ JSONPath.prototype._eval = function (code, _v, _vname, path, parent, parentPropN } const scriptCacheKey = this.currEval + 'Script:' + code; if (!JSONPath.cache[scriptCacheKey]) { - let script = code.replace(/@parentProperty/gu, '_$_parentProperty').replace(/@parent/gu, '_$_parent').replace(/@property/gu, '_$_property').replace(/@root/gu, '_$_root').replace(/@([.\s)[])/gu, '_$_v$1'); + let script = code.replaceAll('@parentProperty', '_$_parentProperty').replaceAll('@parent', '_$_parent').replaceAll('@property', '_$_property').replaceAll('@root', '_$_root').replaceAll(/@([.\s)[])/gu, '_$_v$1'); if (containsPath) { - script = script.replace(/@path/gu, '_$_path'); + script = script.replaceAll('@path', '_$_path'); } if (this.currEval === 'safe' || this.currEval === true || this.currEval === undefined) { JSONPath.cache[scriptCacheKey] = new this.safeVm.Script(script); } else if (this.currEval === 'native') { JSONPath.cache[scriptCacheKey] = new this.vm.Script(script); - } else if (typeof this.currEval === 'function' && this.currEval.prototype && hasOwnProp.call(this.currEval.prototype, 'runInNewContext')) { + } else if (typeof this.currEval === 'function' && this.currEval.prototype && Object.hasOwn(this.currEval.prototype, 'runInNewContext')) { const CurrEval = this.currEval; JSONPath.cache[scriptCacheKey] = new CurrEval(script); } else if (typeof this.currEval === 'function') { @@ -110384,7 +120800,7 @@ JSONPath.toPointer = function (pointer) { let p = ''; for (let i = 1; i < n; i++) { if (!/^(~|\^|@.*?\(\))$/u.test(x[i])) { - p += '/' + x[i].toString().replace(/~/gu, '~0').replace(/\//gu, '~1'); + p += '/' + x[i].toString().replaceAll('~', '~0').replaceAll('/', '~1'); } } return p; @@ -110404,32 +120820,32 @@ JSONPath.toPathArray = function (expr) { const subx = []; const normalized = expr // Properties - .replace(/@(?:null|boolean|number|string|integer|undefined|nonFinite|scalar|array|object|function|other)\(\)/gu, ';$&;') + .replaceAll(/@(?:null|boolean|number|string|integer|undefined|nonFinite|scalar|array|object|function|other)\(\)/gu, ';$&;') // Parenthetical evaluations (filtering and otherwise), directly // within brackets or single quotes - .replace(/[['](\??\(.*?\))[\]'](?!.\])/gu, function 
($0, $1) { + .replaceAll(/[['](\??\(.*?\))[\]'](?!.\])/gu, function ($0, $1) { return '[#' + (subx.push($1) - 1) + ']'; }) // Escape periods and tildes within properties - .replace(/\[['"]([^'\]]*)['"]\]/gu, function ($0, prop) { - return "['" + prop.replace(/\./gu, '%@%').replace(/~/gu, '%%@@%%') + "']"; + .replaceAll(/\[['"]([^'\]]*)['"]\]/gu, function ($0, prop) { + return "['" + prop.replaceAll('.', '%@%').replaceAll('~', '%%@@%%') + "']"; }) // Properties operator - .replace(/~/gu, ';~;') + .replaceAll('~', ';~;') // Split by property boundaries - .replace(/['"]?\.['"]?(?![^[]*\])|\[['"]?/gu, ';') + .replaceAll(/['"]?\.['"]?(?![^[]*\])|\[['"]?/gu, ';') // Reinsert periods within properties - .replace(/%@%/gu, '.') + .replaceAll('%@%', '.') // Reinsert tildes within properties - .replace(/%%@@%%/gu, '~') + .replaceAll('%%@@%%', '~') // Parent - .replace(/(?:;)?(\^+)(?:;)?/gu, function ($0, ups) { + .replaceAll(/(?:;)?(\^+)(?:;)?/gu, function ($0, ups) { return ';' + ups.split('').join(';') + ';'; }) // Descendents - .replace(/;;;|;;/gu, ';..;') + .replaceAll(/;;;|;;/gu, ';..;') // Remove trailing - .replace(/;$|'?\]|'$/gu, ''); + .replaceAll(/;$|'?\]|'$/gu, ''); const exprList = normalized.split(';').map(function (exp) { const match = exp.match(/#(\d+)/u); return !match || !match[1] ? exp : subx[match[1]]; @@ -110437,15 +120853,15660 @@ JSONPath.toPathArray = function (expr) { cache[expr] = exprList; return cache[expr].concat(); }; +JSONPath.prototype.safeVm = { + Script: SafeScript +}; JSONPath.prototype.vm = vm; -JSONPath.prototype.safeVm = vm; -const SafeScript = vm.Script; exports.JSONPath = JSONPath; -exports.SafeScript = SafeScript; +/***/ }), + +/***/ 26672: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +/** + * @typedef {Object} GitProgressEvent + * @property {string} phase + * @property {number} loaded + * @property {number} total + */ + +/** + * @callback ProgressCallback + * @param {GitProgressEvent} progress + * @returns {void | Promise} + */ + +/** + * @typedef {Object} GitHttpRequest + * @property {string} url - The URL to request + * @property {string} [method='GET'] - The HTTP method to use + * @property {Object} [headers={}] - Headers to include in the HTTP request + * @property {Object} [agent] - An HTTP or HTTPS agent that manages connections for the HTTP client (Node.js only) + * @property {AsyncIterableIterator} [body] - An async iterator of Uint8Arrays that make up the body of POST requests + * @property {ProgressCallback} [onProgress] - Reserved for future use (emitting `GitProgressEvent`s) + * @property {object} [signal] - Reserved for future use (canceling a request) + */ + +/** + * @typedef {Object} GitHttpResponse + * @property {string} url - The final URL that was fetched after any redirects + * @property {string} [method] - The HTTP method that was used + * @property {Object} [headers] - HTTP response headers + * @property {AsyncIterableIterator} [body] - An async iterator of Uint8Arrays that make up the body of the response + * @property {number} statusCode - The HTTP status code + * @property {string} statusMessage - The HTTP status message + */ + +/** + * @callback HttpFetch + * @param {GitHttpRequest} request + * @returns {Promise} + */ + +/** + * @typedef {Object} HttpClient + * @property {HttpFetch} request + */ + +// Convert a value to an Async Iterator +// This will be easier with async generator functions. 
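+// A sketch of the async-generator equivalent (not used here, presumably to
+// avoid requiring async generator support):
+//   async function* fromValue(value) { yield value; }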
+function fromValue(value) { + let queue = [value]; + return { + next() { + return Promise.resolve({ done: queue.length === 0, value: queue.pop() }) + }, + return() { + queue = []; + return {} + }, + [Symbol.asyncIterator]() { + return this + }, + } +} + +function getIterator(iterable) { + if (iterable[Symbol.asyncIterator]) { + return iterable[Symbol.asyncIterator]() + } + if (iterable[Symbol.iterator]) { + return iterable[Symbol.iterator]() + } + if (iterable.next) { + return iterable + } + return fromValue(iterable) +} + +// Currently 'for await' upsets my linters. +async function forAwait(iterable, cb) { + const iter = getIterator(iterable); + while (true) { + const { value, done } = await iter.next(); + if (value) await cb(value); + if (done) break + } + if (iter.return) iter.return(); +} + +async function collect(iterable) { + let size = 0; + const buffers = []; + // This will be easier once `for await ... of` loops are available. + await forAwait(iterable, value => { + buffers.push(value); + size += value.byteLength; + }); + const result = new Uint8Array(size); + let nextIndex = 0; + for (const buffer of buffers) { + result.set(buffer, nextIndex); + nextIndex += buffer.byteLength; + } + return result +} + +// Convert a web ReadableStream (not Node stream!) to an Async Iterator +// adapted from https://jakearchibald.com/2017/async-iterators-and-generators/ +function fromStream(stream) { + // Use native async iteration if it's available. + if (stream[Symbol.asyncIterator]) return stream + const reader = stream.getReader(); + return { + next() { + return reader.read() + }, + return() { + reader.releaseLock(); + return {} + }, + [Symbol.asyncIterator]() { + return this + }, + } +} + +/* eslint-env browser */ + +/** + * HttpClient + * + * @param {GitHttpRequest} request + * @returns {Promise} + */ +async function request({ + onProgress, + url, + method = 'GET', + headers = {}, + body, +}) { + // streaming uploads aren't possible yet in the browser + if (body) { + body = await collect(body); + } + const res = await fetch(url, { method, headers, body }); + const iter = + res.body && res.body.getReader + ? fromStream(res.body) + : [new Uint8Array(await res.arrayBuffer())]; + // convert Header object to ordinary JSON + headers = {}; + for (const [key, value] of res.headers.entries()) { + headers[key] = value; + } + return { + url: res.url, + method: res.method, + statusCode: res.status, + statusMessage: res.statusText, + body: iter, + headers: headers, + } +} + +var index = { request }; + +exports["default"] = index; +exports.request = request; + + +/***/ }), + +/***/ 85114: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } + +var AsyncLock = _interopDefault(__nccwpck_require__(41542)); +var Hash = _interopDefault(__nccwpck_require__(72398)); +var crc32 = _interopDefault(__nccwpck_require__(83201)); +var pako = _interopDefault(__nccwpck_require__(31726)); +var pify = _interopDefault(__nccwpck_require__(64810)); +var ignore = _interopDefault(__nccwpck_require__(91230)); +var cleanGitRef = _interopDefault(__nccwpck_require__(43268)); +var diff3Merge = _interopDefault(__nccwpck_require__(25211)); + +/** + * @typedef {Object} GitProgressEvent + * @property {string} phase + * @property {number} loaded + * @property {number} total + */ + +/** + * @callback ProgressCallback + * @param {GitProgressEvent} progress + * @returns {void | Promise} + */ + +/** + * @typedef {Object} GitHttpRequest + * @property {string} url - The URL to request + * @property {string} [method='GET'] - The HTTP method to use + * @property {Object} [headers={}] - Headers to include in the HTTP request + * @property {Object} [agent] - An HTTP or HTTPS agent that manages connections for the HTTP client (Node.js only) + * @property {AsyncIterableIterator} [body] - An async iterator of Uint8Arrays that make up the body of POST requests + * @property {ProgressCallback} [onProgress] - Reserved for future use (emitting `GitProgressEvent`s) + * @property {object} [signal] - Reserved for future use (canceling a request) + */ + +/** + * @typedef {Object} GitHttpResponse + * @property {string} url - The final URL that was fetched after any redirects + * @property {string} [method] - The HTTP method that was used + * @property {Object} [headers] - HTTP response headers + * @property {AsyncIterableIterator} [body] - An async iterator of Uint8Arrays that make up the body of the response + * @property {number} statusCode - The HTTP status code + * @property {string} statusMessage - The HTTP status message + */ + +/** + * @callback HttpFetch + * @param {GitHttpRequest} request + * @returns {Promise} + */ + +/** + * @typedef {Object} HttpClient + * @property {HttpFetch} request + */ + +/** + * A git commit object. + * + * @typedef {Object} CommitObject + * @property {string} message Commit message + * @property {string} tree SHA-1 object id of corresponding file tree + * @property {string[]} parent an array of zero or more SHA-1 object ids + * @property {Object} author + * @property {string} author.name The author's name + * @property {string} author.email The author's email + * @property {number} author.timestamp UTC Unix timestamp in seconds + * @property {number} author.timezoneOffset Timezone difference from UTC in minutes + * @property {Object} committer + * @property {string} committer.name The committer's name + * @property {string} committer.email The committer's email + * @property {number} committer.timestamp UTC Unix timestamp in seconds + * @property {number} committer.timezoneOffset Timezone difference from UTC in minutes + * @property {string} [gpgsig] PGP signature (if present) + */ + +/** + * An entry from a git tree object. Files are called 'blobs' and directories are called 'trees'. + * + * @typedef {Object} TreeEntry + * @property {string} mode the 6 digit hexadecimal mode + * @property {string} path the name of the file or directory + * @property {string} oid the SHA-1 object id of the blob or tree + * @property {'commit'|'blob'|'tree'} type the type of object + */ + +/** + * A git tree object. Trees represent a directory snapshot. + * + * @typedef {TreeEntry[]} TreeObject + */ + +/** + * A git annotated tag object. 
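+ * An illustrative value of this shape (fields per the @property tags below):
+ *
+ *   { object: '<sha-1 of the tagged object>', type: 'commit', tag: 'v1.0.0',
+ *     tagger: { name: 'A. Dev', email: 'a@dev.io', timestamp: 0, timezoneOffset: 0 },
+ *     message: 'Release v1.0.0' }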
+ * + * @typedef {Object} TagObject + * @property {string} object SHA-1 object id of object being tagged + * @property {'blob' | 'tree' | 'commit' | 'tag'} type the type of the object being tagged + * @property {string} tag the tag name + * @property {Object} tagger + * @property {string} tagger.name the tagger's name + * @property {string} tagger.email the tagger's email + * @property {number} tagger.timestamp UTC Unix timestamp in seconds + * @property {number} tagger.timezoneOffset timezone difference from UTC in minutes + * @property {string} message tag message + * @property {string} [gpgsig] PGP signature (if present) + */ + +/** + * @typedef {Object} ReadCommitResult + * @property {string} oid - SHA-1 object id of this commit + * @property {CommitObject} commit - the parsed commit object + * @property {string} payload - PGP signing payload + */ + +/** + * @typedef {Object} ServerRef - This object has the following schema: + * @property {string} ref - The name of the ref + * @property {string} oid - The SHA-1 object id the ref points to + * @property {string} [target] - The target ref pointed to by a symbolic ref + * @property {string} [peeled] - If the oid is the SHA-1 object id of an annotated tag, this is the SHA-1 object id that the annotated tag points to + */ + +/** + * @typedef Walker + * @property {Symbol} Symbol('GitWalkerSymbol') + */ + +/** + * Normalized subset of filesystem `stat` data: + * + * @typedef {Object} Stat + * @property {number} ctimeSeconds + * @property {number} ctimeNanoseconds + * @property {number} mtimeSeconds + * @property {number} mtimeNanoseconds + * @property {number} dev + * @property {number} ino + * @property {number} mode + * @property {number} uid + * @property {number} gid + * @property {number} size + */ + +/** + * The `WalkerEntry` is an interface that abstracts computing many common tree / blob stats. 
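+ * Inside a `WalkerMap` function (documented further below) one might write,
+ * as an illustrative sketch:
+ *
+ *   async (filepath, [entry]) =>
+ *     entry && { filepath, type: await entry.type(), oid: await entry.oid() }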
+ *
+ * @typedef {Object} WalkerEntry
+ * @property {function(): Promise<'tree'|'blob'|'special'|'commit'>} type
+ * @property {function(): Promise<number>} mode
+ * @property {function(): Promise<string>} oid
+ * @property {function(): Promise<Uint8Array | void>} content
+ * @property {function(): Promise<Stat>} stat
+ */
+
+/**
+ * @typedef {Object} CallbackFsClient
+ * @property {function} readFile - https://nodejs.org/api/fs.html#fs_fs_readfile_path_options_callback
+ * @property {function} writeFile - https://nodejs.org/api/fs.html#fs_fs_writefile_file_data_options_callback
+ * @property {function} unlink - https://nodejs.org/api/fs.html#fs_fs_unlink_path_callback
+ * @property {function} readdir - https://nodejs.org/api/fs.html#fs_fs_readdir_path_options_callback
+ * @property {function} mkdir - https://nodejs.org/api/fs.html#fs_fs_mkdir_path_mode_callback
+ * @property {function} rmdir - https://nodejs.org/api/fs.html#fs_fs_rmdir_path_callback
+ * @property {function} stat - https://nodejs.org/api/fs.html#fs_fs_stat_path_options_callback
+ * @property {function} lstat - https://nodejs.org/api/fs.html#fs_fs_lstat_path_options_callback
+ * @property {function} [readlink] - https://nodejs.org/api/fs.html#fs_fs_readlink_path_options_callback
+ * @property {function} [symlink] - https://nodejs.org/api/fs.html#fs_fs_symlink_target_path_type_callback
+ * @property {function} [chmod] - https://nodejs.org/api/fs.html#fs_fs_chmod_path_mode_callback
+ */
+
+/**
+ * @typedef {Object} PromiseFsClient
+ * @property {Object} promises
+ * @property {function} promises.readFile - https://nodejs.org/api/fs.html#fs_fspromises_readfile_path_options
+ * @property {function} promises.writeFile - https://nodejs.org/api/fs.html#fs_fspromises_writefile_file_data_options
+ * @property {function} promises.unlink - https://nodejs.org/api/fs.html#fs_fspromises_unlink_path
+ * @property {function} promises.readdir - https://nodejs.org/api/fs.html#fs_fspromises_readdir_path_options
+ * @property {function} promises.mkdir - https://nodejs.org/api/fs.html#fs_fspromises_mkdir_path_options
+ * @property {function} promises.rmdir - https://nodejs.org/api/fs.html#fs_fspromises_rmdir_path
+ * @property {function} promises.stat - https://nodejs.org/api/fs.html#fs_fspromises_stat_path_options
+ * @property {function} promises.lstat - https://nodejs.org/api/fs.html#fs_fspromises_lstat_path_options
+ * @property {function} [promises.readlink] - https://nodejs.org/api/fs.html#fs_fspromises_readlink_path_options
+ * @property {function} [promises.symlink] - https://nodejs.org/api/fs.html#fs_fspromises_symlink_target_path_type
+ * @property {function} [promises.chmod] - https://nodejs.org/api/fs.html#fs_fspromises_chmod_path_mode
+ */
+
+/**
+ * @typedef {CallbackFsClient | PromiseFsClient} FsClient
+ */
+
+/**
+ * @callback MessageCallback
+ * @param {string} message
+ * @returns {void | Promise<void>}
+ */
+
+/**
+ * @typedef {Object} GitAuth
+ * @property {string} [username]
+ * @property {string} [password]
+ * @property {Object} [headers]
+ * @property {boolean} [cancel] Tells git to throw a `UserCanceledError` (instead of an `HttpError`).
+ */
+
+/**
+ * @callback AuthCallback
+ * @param {string} url
+ * @param {GitAuth} auth Might have some values if the URL itself originally contained a username or password.
+ * @returns {GitAuth | void | Promise<GitAuth | void>}
+ */
+
+/**
+ * @callback AuthFailureCallback
+ * @param {string} url
+ * @param {GitAuth} auth The credentials that failed
+ * @returns {GitAuth | void | Promise<GitAuth | void>}
+ */
+
+/**
+ * @callback AuthSuccessCallback
+ * @param {string} url
+ * @param {GitAuth} auth
+ * @returns {void | Promise<void>}
+ */
+
+/**
+ * @typedef {Object} SignParams
+ * @property {string} payload - a plaintext message
+ * @property {string} secretKey - an 'ASCII armor' encoded PGP key (technically can actually contain _multiple_ keys)
+ */
+
+/**
+ * @callback SignCallback
+ * @param {SignParams} args
+ * @return {{signature: string} | Promise<{signature: string}>} - an 'ASCII armor' encoded "detached" signature
+ */
+
+/**
+ * @typedef {Object} MergeDriverParams
+ * @property {Array<string>} branches
+ * @property {Array<string>} contents
+ * @property {string} path
+ */
+
+/**
+ * @callback MergeDriverCallback
+ * @param {MergeDriverParams} args
+ * @return {{cleanMerge: boolean, mergedText: string} | Promise<{cleanMerge: boolean, mergedText: string}>}
+ */
+
+/**
+ * @callback WalkerMap
+ * @param {string} filename
+ * @param {WalkerEntry[]} entries
+ * @returns {Promise<any>}
+ */
+
+/**
+ * @callback WalkerReduce
+ * @param {any} parent
+ * @param {any[]} children
+ * @returns {Promise<any>}
+ */
+
+/**
+ * @callback WalkerIterateCallback
+ * @param {WalkerEntry[]} entries
+ * @returns {Promise<any[]>}
+ */
+
+/**
+ * @callback WalkerIterate
+ * @param {WalkerIterateCallback} walk
+ * @param {IterableIterator<WalkerEntry[]>} children
+ * @returns {Promise<any[]>}
+ */
+
+/**
+ * @typedef {Object} RefUpdateStatus
+ * @property {boolean} ok
+ * @property {string} error
+ */
+
+/**
+ * @typedef {Object} PushResult
+ * @property {boolean} ok
+ * @property {?string} error
+ * @property {Object<string, RefUpdateStatus>} refs
+ * @property {Object} [headers]
+ */
+
+/**
+ * @typedef {0|1} HeadStatus
+ */
+
+/**
+ * @typedef {0|1|2} WorkdirStatus
+ */
+
+/**
+ * @typedef {0|1|2|3} StageStatus
+ */
+
+/**
+ * @typedef {[string, HeadStatus, WorkdirStatus, StageStatus]} StatusRow
+ */
+
+/**
+ * @typedef {Object} ClientRef
+ * @property {string} ref The name of the ref
+ * @property {string} oid The SHA-1 object id the ref points to
+ */
+
+/**
+ * @typedef {Object} PrePushParams
+ * @property {string} remote The expanded name of target remote
+ * @property {string} url The URL address of target remote
+ * @property {ClientRef} localRef The ref which the client wants to push to the remote
+ * @property {ClientRef} remoteRef The ref which is known by the remote
+ */
+
+/**
+ * @callback PrePushCallback
+ * @param {PrePushParams} args
+ * @returns {boolean | Promise<boolean>} Returns false if push must be cancelled
+ */
+
+/**
+ * @typedef {Object} PostCheckoutParams
+ * @property {string} previousHead The SHA-1 object id of HEAD before checkout
+ * @property {string} newHead The SHA-1 object id of HEAD after checkout
+ * @property {'branch' | 'file'} type flag determining whether a branch or a set of files was checked out
+ */
+
+/**
+ * @callback PostCheckoutCallback
+ * @param {PostCheckoutParams} args
+ * @returns {void | Promise<void>}
+ */
+
+class BaseError extends Error {
+  constructor(message) {
+    super(message);
+    // Setting this here allows TS to infer that all git errors have a `caller` property and
+    // that its type is string.
+    this.caller = '';
+  }
+
+  toJSON() {
+    // Error objects aren't normally serializable. So we do something about that.
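+    // Illustrative round-trip (a sketch, not exercised by the bundle): plain
+    // JSON survives worker/postMessage boundaries where Error instances don't,
+    // e.g.
+    //   const wire = JSON.stringify(err.toJSON())
+    //   const revived = err.fromJSON(JSON.parse(wire)) // BaseError with code/data/stack restored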
+    return {
+      code: this.code,
+      data: this.data,
+      caller: this.caller,
+      message: this.message,
+      stack: this.stack,
+    }
+  }
+
+  fromJSON(json) {
+    const e = new BaseError(json.message);
+    e.code = json.code;
+    e.data = json.data;
+    e.caller = json.caller;
+    e.stack = json.stack;
+    return e
+  }
+
+  get isIsomorphicGitError() {
+    return true
+  }
+}
+
+class UnmergedPathsError extends BaseError {
+  /**
+   * @param {Array<string>} filepaths
+   */
+  constructor(filepaths) {
+    super(
+      `Modifying the index is not possible because you have unmerged files: ${filepaths.join(', ')}. Fix them up in the work tree, and then use 'git add/rm' as appropriate to mark resolution and make a commit.`
+    );
+    this.code = this.name = UnmergedPathsError.code;
+    this.data = { filepaths };
+  }
+}
+/** @type {'UnmergedPathsError'} */
+UnmergedPathsError.code = 'UnmergedPathsError';
+
+class InternalError extends BaseError {
+  /**
+   * @param {string} message
+   */
+  constructor(message) {
+    super(
+      `An internal error caused this command to fail. Please file a bug report at https://github.com/isomorphic-git/isomorphic-git/issues with this error message: ${message}`
+    );
+    this.code = this.name = InternalError.code;
+    this.data = { message };
+  }
+}
+/** @type {'InternalError'} */
+InternalError.code = 'InternalError';
+
+class UnsafeFilepathError extends BaseError {
+  /**
+   * @param {string} filepath
+   */
+  constructor(filepath) {
+    super(`The filepath "${filepath}" contains unsafe character sequences`);
+    this.code = this.name = UnsafeFilepathError.code;
+    this.data = { filepath };
+  }
+}
+/** @type {'UnsafeFilepathError'} */
+UnsafeFilepathError.code = 'UnsafeFilepathError';
+
+// Modeled after https://github.com/tjfontaine/node-buffercursor
+// but with the goal of being much lighter weight.
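+// Illustrative sketch of the cursor contract (not executed here):
+//   const cur = new BufferCursor(Buffer.alloc(8))
+//   cur.writeUInt32BE(42)  // writes bytes 0-3, advances the offset to 4
+//   cur.seek(0)
+//   cur.readUInt32BE()     // => 42, offset is back at 4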
+class BufferCursor { + constructor(buffer) { + this.buffer = buffer; + this._start = 0; + } + + eof() { + return this._start >= this.buffer.length + } + + tell() { + return this._start + } + + seek(n) { + this._start = n; + } + + slice(n) { + const r = this.buffer.slice(this._start, this._start + n); + this._start += n; + return r + } + + toString(enc, length) { + const r = this.buffer.toString(enc, this._start, this._start + length); + this._start += length; + return r + } + + write(value, length, enc) { + const r = this.buffer.write(value, this._start, length, enc); + this._start += length; + return r + } + + copy(source, start, end) { + const r = source.copy(this.buffer, this._start, start, end); + this._start += r; + return r + } + + readUInt8() { + const r = this.buffer.readUInt8(this._start); + this._start += 1; + return r + } + + writeUInt8(value) { + const r = this.buffer.writeUInt8(value, this._start); + this._start += 1; + return r + } + + readUInt16BE() { + const r = this.buffer.readUInt16BE(this._start); + this._start += 2; + return r + } + + writeUInt16BE(value) { + const r = this.buffer.writeUInt16BE(value, this._start); + this._start += 2; + return r + } + + readUInt32BE() { + const r = this.buffer.readUInt32BE(this._start); + this._start += 4; + return r + } + + writeUInt32BE(value) { + const r = this.buffer.writeUInt32BE(value, this._start); + this._start += 4; + return r + } +} + +function compareStrings(a, b) { + // https://stackoverflow.com/a/40355107/2168416 + return -(a < b) || +(a > b) +} + +function comparePath(a, b) { + // https://stackoverflow.com/a/40355107/2168416 + return compareStrings(a.path, b.path) +} + +/** + * From https://github.com/git/git/blob/master/Documentation/technical/index-format.txt + * + * 32-bit mode, split into (high to low bits) + * + * 4-bit object type + * valid values in binary are 1000 (regular file), 1010 (symbolic link) + * and 1110 (gitlink) + * + * 3-bit unused + * + * 9-bit unix permission. Only 0755 and 0644 are valid for regular files. + * Symbolic links and gitlinks have value 0 in this field. + */ +function normalizeMode(mode) { + // Note: BrowserFS will use -1 for "unknown" + // I need to make it non-negative for these bitshifts to work. + let type = mode > 0 ? mode >> 12 : 0; + // If it isn't valid, assume it as a "regular file" + // 0100 = directory + // 1000 = regular file + // 1010 = symlink + // 1110 = gitlink + if ( + type !== 0b0100 && + type !== 0b1000 && + type !== 0b1010 && + type !== 0b1110 + ) { + type = 0b1000; + } + let permissions = mode & 0o777; + // Is the file executable? then 755. Else 644. 
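+  // Worked example: 0o100755 stays 0o100755, while 0o100600 (rw-------)
+  // has no execute bits and normalizes to 0o100644.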
+ if (permissions & 0b001001001) { + permissions = 0o755; + } else { + permissions = 0o644; + } + // If it's not a regular file, scrub all permissions + if (type !== 0b1000) permissions = 0; + return (type << 12) + permissions +} + +const MAX_UINT32 = 2 ** 32; + +function SecondsNanoseconds( + givenSeconds, + givenNanoseconds, + milliseconds, + date +) { + if (givenSeconds !== undefined && givenNanoseconds !== undefined) { + return [givenSeconds, givenNanoseconds] + } + if (milliseconds === undefined) { + milliseconds = date.valueOf(); + } + const seconds = Math.floor(milliseconds / 1000); + const nanoseconds = (milliseconds - seconds * 1000) * 1000000; + return [seconds, nanoseconds] +} + +function normalizeStats(e) { + const [ctimeSeconds, ctimeNanoseconds] = SecondsNanoseconds( + e.ctimeSeconds, + e.ctimeNanoseconds, + e.ctimeMs, + e.ctime + ); + const [mtimeSeconds, mtimeNanoseconds] = SecondsNanoseconds( + e.mtimeSeconds, + e.mtimeNanoseconds, + e.mtimeMs, + e.mtime + ); + + return { + ctimeSeconds: ctimeSeconds % MAX_UINT32, + ctimeNanoseconds: ctimeNanoseconds % MAX_UINT32, + mtimeSeconds: mtimeSeconds % MAX_UINT32, + mtimeNanoseconds: mtimeNanoseconds % MAX_UINT32, + dev: e.dev % MAX_UINT32, + ino: e.ino % MAX_UINT32, + mode: normalizeMode(e.mode % MAX_UINT32), + uid: e.uid % MAX_UINT32, + gid: e.gid % MAX_UINT32, + // size of -1 happens over a BrowserFS HTTP Backend that doesn't serve Content-Length headers + // (like the Karma webserver) because BrowserFS HTTP Backend uses HTTP HEAD requests to do fs.stat + size: e.size > -1 ? e.size % MAX_UINT32 : 0, + } +} + +function toHex(buffer) { + let hex = ''; + for (const byte of new Uint8Array(buffer)) { + if (byte < 16) hex += '0'; + hex += byte.toString(16); + } + return hex +} + +/* eslint-env node, browser */ + +let supportsSubtleSHA1 = null; + +async function shasum(buffer) { + if (supportsSubtleSHA1 === null) { + supportsSubtleSHA1 = await testSubtleSHA1(); + } + return supportsSubtleSHA1 ? subtleSHA1(buffer) : shasumSync(buffer) +} + +// This is modeled after @dominictarr's "shasum" module, +// but without the 'json-stable-stringify' dependency and +// extra type-casting features. +function shasumSync(buffer) { + return new Hash().update(buffer).digest('hex') +} + +async function subtleSHA1(buffer) { + const hash = await crypto.subtle.digest('SHA-1', buffer); + return toHex(hash) +} + +async function testSubtleSHA1() { + // I'm using a rather crude method of progressive enhancement, because + // some browsers that have crypto.subtle.digest don't actually implement SHA-1. + try { + const hash = await subtleSHA1(new Uint8Array([])); + if (hash === 'da39a3ee5e6b4b0d3255bfef95601890afd80709') return true + } catch (_) { + // no bother + } + return false +} + +// Extract 1-bit assume-valid, 1-bit extended flag, 2-bit merge state flag, 12-bit path length flag +function parseCacheEntryFlags(bits) { + return { + assumeValid: Boolean(bits & 0b1000000000000000), + extended: Boolean(bits & 0b0100000000000000), + stage: (bits & 0b0011000000000000) >> 12, + nameLength: bits & 0b0000111111111111, + } +} + +function renderCacheEntryFlags(entry) { + const flags = entry.flags; + // 1-bit extended flag (must be zero in version 2) + flags.extended = false; + // 12-bit name length if the length is less than 0xFFF; otherwise 0xFFF + // is stored in this field. + flags.nameLength = Math.min(Buffer.from(entry.path).length, 0xfff); + return ( + (flags.assumeValid ? 0b1000000000000000 : 0) + + (flags.extended ? 
0b0100000000000000 : 0) +
+    ((flags.stage & 0b11) << 12) +
+    (flags.nameLength & 0b111111111111)
+  )
+}
+
+class GitIndex {
+  /*::
+   _entries: Map<string, CacheEntry>
+   _dirty: boolean // Used to determine if index needs to be saved to filesystem
+   */
+  constructor(entries, unmergedPaths) {
+    this._dirty = false;
+    this._unmergedPaths = unmergedPaths || new Set();
+    this._entries = entries || new Map();
+  }
+
+  _addEntry(entry) {
+    if (entry.flags.stage === 0) {
+      entry.stages = [entry];
+      this._entries.set(entry.path, entry);
+      this._unmergedPaths.delete(entry.path);
+    } else {
+      let existingEntry = this._entries.get(entry.path);
+      if (!existingEntry) {
+        this._entries.set(entry.path, entry);
+        existingEntry = entry;
+      }
+      existingEntry.stages[entry.flags.stage] = entry;
+      this._unmergedPaths.add(entry.path);
+    }
+  }
+
+  static async from(buffer) {
+    if (Buffer.isBuffer(buffer)) {
+      return GitIndex.fromBuffer(buffer)
+    } else if (buffer === null) {
+      return new GitIndex(null)
+    } else {
+      throw new InternalError('invalid type passed to GitIndex.from')
+    }
+  }
+
+  static async fromBuffer(buffer) {
+    if (buffer.length === 0) {
+      throw new InternalError('Index file is empty (.git/index)')
+    }
+
+    const index = new GitIndex();
+    const reader = new BufferCursor(buffer);
+    const magic = reader.toString('utf8', 4);
+    if (magic !== 'DIRC') {
+      throw new InternalError(`Invalid dircache magic file number: ${magic}`)
+    }
+
+    // Verify shasum after we ensured that the file has a magic number
+    const shaComputed = await shasum(buffer.slice(0, -20));
+    const shaClaimed = buffer.slice(-20).toString('hex');
+    if (shaClaimed !== shaComputed) {
+      throw new InternalError(
+        `Invalid checksum in GitIndex buffer: expected ${shaClaimed} but saw ${shaComputed}`
+      )
+    }
+
+    const version = reader.readUInt32BE();
+    if (version !== 2) {
+      throw new InternalError(`Unsupported dircache version: ${version}`)
+    }
+    const numEntries = reader.readUInt32BE();
+    let i = 0;
+    while (!reader.eof() && i < numEntries) {
+      const entry = {};
+      entry.ctimeSeconds = reader.readUInt32BE();
+      entry.ctimeNanoseconds = reader.readUInt32BE();
+      entry.mtimeSeconds = reader.readUInt32BE();
+      entry.mtimeNanoseconds = reader.readUInt32BE();
+      entry.dev = reader.readUInt32BE();
+      entry.ino = reader.readUInt32BE();
+      entry.mode = reader.readUInt32BE();
+      entry.uid = reader.readUInt32BE();
+      entry.gid = reader.readUInt32BE();
+      entry.size = reader.readUInt32BE();
+      entry.oid = reader.slice(20).toString('hex');
+      const flags = reader.readUInt16BE();
+      entry.flags = parseCacheEntryFlags(flags);
+      // TODO: handle if (version === 3 && entry.flags.extended)
+      const pathlength = buffer.indexOf(0, reader.tell() + 1) - reader.tell();
+      if (pathlength < 1) {
+        throw new InternalError(`Got a path length of: ${pathlength}`)
+      }
+      // TODO: handle pathnames larger than 12 bits
+      entry.path = reader.toString('utf8', pathlength);
+
+      // Prevent malicious paths like "..\foo"
+      if (entry.path.includes('..\\') || entry.path.includes('../')) {
+        throw new UnsafeFilepathError(entry.path)
+      }
+
+      // The next bit is awkward. We expect 1 to 8 null characters
+      // such that the total size of the entry is a multiple of 8 bytes.
+      // (Hence subtract 12 bytes for the header.)
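+      // Worked example: a 5-byte path makes the entry 62 + 5 = 67 bytes,
+      // so 8 - (67 % 8) = 5 NUL bytes pad it out to 72, a multiple of 8.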
+ let padding = 8 - ((reader.tell() - 12) % 8); + if (padding === 0) padding = 8; + while (padding--) { + const tmp = reader.readUInt8(); + if (tmp !== 0) { + throw new InternalError( + `Expected 1-8 null characters but got '${tmp}' after ${entry.path}` + ) + } else if (reader.eof()) { + throw new InternalError('Unexpected end of file') + } + } + // end of awkward part + entry.stages = []; + + index._addEntry(entry); + + i++; + } + return index + } + + get unmergedPaths() { + return [...this._unmergedPaths] + } + + get entries() { + return [...this._entries.values()].sort(comparePath) + } + + get entriesMap() { + return this._entries + } + + get entriesFlat() { + return [...this.entries].flatMap(entry => { + return entry.stages.length > 1 ? entry.stages.filter(x => x) : entry + }) + } + + *[Symbol.iterator]() { + for (const entry of this.entries) { + yield entry; + } + } + + insert({ filepath, stats, oid, stage = 0 }) { + if (!stats) { + stats = { + ctimeSeconds: 0, + ctimeNanoseconds: 0, + mtimeSeconds: 0, + mtimeNanoseconds: 0, + dev: 0, + ino: 0, + mode: 0, + uid: 0, + gid: 0, + size: 0, + }; + } + stats = normalizeStats(stats); + const bfilepath = Buffer.from(filepath); + const entry = { + ctimeSeconds: stats.ctimeSeconds, + ctimeNanoseconds: stats.ctimeNanoseconds, + mtimeSeconds: stats.mtimeSeconds, + mtimeNanoseconds: stats.mtimeNanoseconds, + dev: stats.dev, + ino: stats.ino, + // We provide a fallback value for `mode` here because not all fs + // implementations assign it, but we use it in GitTree. + // '100644' is for a "regular non-executable file" + mode: stats.mode || 0o100644, + uid: stats.uid, + gid: stats.gid, + size: stats.size, + path: filepath, + oid: oid, + flags: { + assumeValid: false, + extended: false, + stage, + nameLength: bfilepath.length < 0xfff ? 
bfilepath.length : 0xfff, + }, + stages: [], + }; + + this._addEntry(entry); + + this._dirty = true; + } + + delete({ filepath }) { + if (this._entries.has(filepath)) { + this._entries.delete(filepath); + } else { + for (const key of this._entries.keys()) { + if (key.startsWith(filepath + '/')) { + this._entries.delete(key); + } + } + } + + if (this._unmergedPaths.has(filepath)) { + this._unmergedPaths.delete(filepath); + } + this._dirty = true; + } + + clear() { + this._entries.clear(); + this._dirty = true; + } + + has({ filepath }) { + return this._entries.has(filepath) + } + + render() { + return this.entries + .map(entry => `${entry.mode.toString(8)} ${entry.oid} ${entry.path}`) + .join('\n') + } + + static async _entryToBuffer(entry) { + const bpath = Buffer.from(entry.path); + // the fixed length + the filename + at least one null char => align by 8 + const length = Math.ceil((62 + bpath.length + 1) / 8) * 8; + const written = Buffer.alloc(length); + const writer = new BufferCursor(written); + const stat = normalizeStats(entry); + writer.writeUInt32BE(stat.ctimeSeconds); + writer.writeUInt32BE(stat.ctimeNanoseconds); + writer.writeUInt32BE(stat.mtimeSeconds); + writer.writeUInt32BE(stat.mtimeNanoseconds); + writer.writeUInt32BE(stat.dev); + writer.writeUInt32BE(stat.ino); + writer.writeUInt32BE(stat.mode); + writer.writeUInt32BE(stat.uid); + writer.writeUInt32BE(stat.gid); + writer.writeUInt32BE(stat.size); + writer.write(entry.oid, 20, 'hex'); + writer.writeUInt16BE(renderCacheEntryFlags(entry)); + writer.write(entry.path, bpath.length, 'utf8'); + return written + } + + async toObject() { + const header = Buffer.alloc(12); + const writer = new BufferCursor(header); + writer.write('DIRC', 4, 'utf8'); + writer.writeUInt32BE(2); + writer.writeUInt32BE(this.entriesFlat.length); + + let entryBuffers = []; + for (const entry of this.entries) { + entryBuffers.push(GitIndex._entryToBuffer(entry)); + if (entry.stages.length > 1) { + for (const stage of entry.stages) { + if (stage && stage !== entry) { + entryBuffers.push(GitIndex._entryToBuffer(stage)); + } + } + } + } + entryBuffers = await Promise.all(entryBuffers); + + const body = Buffer.concat(entryBuffers); + const main = Buffer.concat([header, body]); + const sum = await shasum(main); + return Buffer.concat([main, Buffer.from(sum, 'hex')]) + } +} + +function compareStats(entry, stats, filemode = true, trustino = true) { + // Comparison based on the description in Paragraph 4 of + // https://www.kernel.org/pub/software/scm/git/docs/technical/racy-git.txt + const e = normalizeStats(entry); + const s = normalizeStats(stats); + const staleness = + (filemode && e.mode !== s.mode) || + e.mtimeSeconds !== s.mtimeSeconds || + e.ctimeSeconds !== s.ctimeSeconds || + e.uid !== s.uid || + e.gid !== s.gid || + (trustino && e.ino !== s.ino) || + e.size !== s.size; + return staleness +} + +// import LockManager from 'travix-lock-manager' + +// import Lock from '../utils.js' + +// const lm = new LockManager() +let lock = null; + +const IndexCache = Symbol('IndexCache'); + +function createCache() { + return { + map: new Map(), + stats: new Map(), + } +} + +async function updateCachedIndexFile(fs, filepath, cache) { + const stat = await fs.lstat(filepath); + const rawIndexFile = await fs.read(filepath); + const index = await GitIndex.from(rawIndexFile); + // cache the GitIndex object so we don't need to re-read it every time. 
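+  // (Subsequent acquire() calls reuse this parsed GitIndex until isIndexStale()
+  // notices the on-disk stats changed underneath us.)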
+ cache.map.set(filepath, index); + // Save the stat data for the index so we know whether the cached file is stale (modified by an outside process). + cache.stats.set(filepath, stat); +} + +// Determine whether our copy of the index file is stale +async function isIndexStale(fs, filepath, cache) { + const savedStats = cache.stats.get(filepath); + if (savedStats === undefined) return true + const currStats = await fs.lstat(filepath); + if (savedStats === null) return false + if (currStats === null) return false + return compareStats(savedStats, currStats) +} + +class GitIndexManager { + /** + * + * @param {object} opts + * @param {import('../models/FileSystem.js').FileSystem} opts.fs + * @param {string} opts.gitdir + * @param {object} opts.cache + * @param {bool} opts.allowUnmerged + * @param {function(GitIndex): any} closure + */ + static async acquire({ fs, gitdir, cache, allowUnmerged = true }, closure) { + if (!cache[IndexCache]) cache[IndexCache] = createCache(); + + const filepath = `${gitdir}/index`; + if (lock === null) lock = new AsyncLock({ maxPending: Infinity }); + let result; + let unmergedPaths = []; + await lock.acquire(filepath, async () => { + // Acquire a file lock while we're reading the index + // to make sure other processes aren't writing to it + // simultaneously, which could result in a corrupted index. + // const fileLock = await Lock(filepath) + if (await isIndexStale(fs, filepath, cache[IndexCache])) { + await updateCachedIndexFile(fs, filepath, cache[IndexCache]); + } + const index = cache[IndexCache].map.get(filepath); + unmergedPaths = index.unmergedPaths; + + if (unmergedPaths.length && !allowUnmerged) + throw new UnmergedPathsError(unmergedPaths) + + result = await closure(index); + if (index._dirty) { + // Acquire a file lock while we're writing the index file + // let fileLock = await Lock(filepath) + const buffer = await index.toObject(); + await fs.write(filepath, buffer); + // Update cached stat value + cache[IndexCache].stats.set(filepath, await fs.lstat(filepath)); + index._dirty = false; + } + }); + + return result + } +} + +function basename(path) { + const last = Math.max(path.lastIndexOf('/'), path.lastIndexOf('\\')); + if (last > -1) { + path = path.slice(last + 1); + } + return path +} + +function dirname(path) { + const last = Math.max(path.lastIndexOf('/'), path.lastIndexOf('\\')); + if (last === -1) return '.' + if (last === 0) return '/' + return path.slice(0, last) +} + +/*:: +type Node = { + type: string, + fullpath: string, + basename: string, + metadata: Object, // mode, oid + parent?: Node, + children: Array +} +*/ + +function flatFileListToDirectoryStructure(files) { + const inodes = new Map(); + const mkdir = function(name) { + if (!inodes.has(name)) { + const dir = { + type: 'tree', + fullpath: name, + basename: basename(name), + metadata: {}, + children: [], + }; + inodes.set(name, dir); + // This recursively generates any missing parent folders. + // We do it after we've added the inode to the set so that + // we don't recurse infinitely trying to create the root '.' dirname. + dir.parent = mkdir(dirname(name)); + if (dir.parent && dir.parent !== dir) dir.parent.children.push(dir); + } + return inodes.get(name) + }; + + const mkfile = function(name, metadata) { + if (!inodes.has(name)) { + const file = { + type: 'blob', + fullpath: name, + basename: basename(name), + metadata: metadata, + // This recursively generates any missing parent folders. 
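+      // e.g. for 'src/utils/fs.js' this creates tree inodes for 'src/utils'
+      // and 'src' before bottoming out at the pre-made '.' root.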
+ parent: mkdir(dirname(name)), + children: [], + }; + if (file.parent) file.parent.children.push(file); + inodes.set(name, file); + } + return inodes.get(name) + }; + + mkdir('.'); + for (const file of files) { + mkfile(file.path, file); + } + return inodes +} + +/** + * + * @param {number} mode + */ +function mode2type(mode) { + // prettier-ignore + switch (mode) { + case 0o040000: return 'tree' + case 0o100644: return 'blob' + case 0o100755: return 'blob' + case 0o120000: return 'blob' + case 0o160000: return 'commit' + } + throw new InternalError(`Unexpected GitTree entry mode: ${mode.toString(8)}`) +} + +class GitWalkerIndex { + constructor({ fs, gitdir, cache }) { + this.treePromise = GitIndexManager.acquire( + { fs, gitdir, cache }, + async function(index) { + return flatFileListToDirectoryStructure(index.entries) + } + ); + const walker = this; + this.ConstructEntry = class StageEntry { + constructor(fullpath) { + this._fullpath = fullpath; + this._type = false; + this._mode = false; + this._stat = false; + this._oid = false; + } + + async type() { + return walker.type(this) + } + + async mode() { + return walker.mode(this) + } + + async stat() { + return walker.stat(this) + } + + async content() { + return walker.content(this) + } + + async oid() { + return walker.oid(this) + } + }; + } + + async readdir(entry) { + const filepath = entry._fullpath; + const tree = await this.treePromise; + const inode = tree.get(filepath); + if (!inode) return null + if (inode.type === 'blob') return null + if (inode.type !== 'tree') { + throw new Error(`ENOTDIR: not a directory, scandir '${filepath}'`) + } + const names = inode.children.map(inode => inode.fullpath); + names.sort(compareStrings); + return names + } + + async type(entry) { + if (entry._type === false) { + await entry.stat(); + } + return entry._type + } + + async mode(entry) { + if (entry._mode === false) { + await entry.stat(); + } + return entry._mode + } + + async stat(entry) { + if (entry._stat === false) { + const tree = await this.treePromise; + const inode = tree.get(entry._fullpath); + if (!inode) { + throw new Error( + `ENOENT: no such file or directory, lstat '${entry._fullpath}'` + ) + } + const stats = inode.type === 'tree' ? {} : normalizeStats(inode.metadata); + entry._type = inode.type === 'tree' ? 'tree' : mode2type(stats.mode); + entry._mode = stats.mode; + if (inode.type === 'tree') { + entry._stat = undefined; + } else { + entry._stat = stats; + } + } + return entry._stat + } + + async content(_entry) { + // Cannot get content for an index entry + } + + async oid(entry) { + if (entry._oid === false) { + const tree = await this.treePromise; + const inode = tree.get(entry._fullpath); + entry._oid = inode.metadata.oid; + } + return entry._oid + } +} + +// This is part of an elaborate system to facilitate code-splitting / tree-shaking. +// commands/walk.js can depend on only this, and the actual Walker classes exported +// can be opaque - only having a single property (this symbol) that is not enumerable, +// and thus the constructor can be passed as an argument to walk while being "unusable" +// outside of it. 
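+// Illustrative call shape (assumed; the walk command lives elsewhere in this
+// bundle): callers pass `trees: [STAGE()]`, and walk() reaches the hidden
+// constructor via tree[GitWalkSymbol]({ fs, gitdir, cache }).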
+const GitWalkSymbol = Symbol('GitWalkSymbol');
+
+// @ts-check
+
+/**
+ * @returns {Walker}
+ */
+function STAGE() {
+  const o = Object.create(null);
+  Object.defineProperty(o, GitWalkSymbol, {
+    value: function({ fs, gitdir, cache }) {
+      return new GitWalkerIndex({ fs, gitdir, cache })
+    },
+  });
+  Object.freeze(o);
+  return o
+}
+
+// @ts-check
+
+class NotFoundError extends BaseError {
+  /**
+   * @param {string} what
+   */
+  constructor(what) {
+    super(`Could not find ${what}.`);
+    this.code = this.name = NotFoundError.code;
+    this.data = { what };
+  }
+}
+/** @type {'NotFoundError'} */
+NotFoundError.code = 'NotFoundError';
+
+class ObjectTypeError extends BaseError {
+  /**
+   * @param {string} oid
+   * @param {'blob'|'commit'|'tag'|'tree'} actual
+   * @param {'blob'|'commit'|'tag'|'tree'} expected
+   * @param {string} [filepath]
+   */
+  constructor(oid, actual, expected, filepath) {
+    super(
+      `Object ${oid} ${
+        filepath ? `at ${filepath} ` : ''
+      }was anticipated to be a ${expected} but it is a ${actual}.`
+    );
+    this.code = this.name = ObjectTypeError.code;
+    this.data = { oid, actual, expected, filepath };
+  }
+}
+/** @type {'ObjectTypeError'} */
+ObjectTypeError.code = 'ObjectTypeError';
+
+class InvalidOidError extends BaseError {
+  /**
+   * @param {string} value
+   */
+  constructor(value) {
+    super(`Expected a 40-char hex object id but saw "${value}".`);
+    this.code = this.name = InvalidOidError.code;
+    this.data = { value };
+  }
+}
+/** @type {'InvalidOidError'} */
+InvalidOidError.code = 'InvalidOidError';
+
+class NoRefspecError extends BaseError {
+  /**
+   * @param {string} remote
+   */
+  constructor(remote) {
+    super(`Could not find a fetch refspec for remote "${remote}". Make sure the config file has an entry like the following:
+[remote "${remote}"]
+\tfetch = +refs/heads/*:refs/remotes/origin/*
+`);
+    this.code = this.name = NoRefspecError.code;
+    this.data = { remote };
+  }
+}
+/** @type {'NoRefspecError'} */
+NoRefspecError.code = 'NoRefspecError';
+
+class GitPackedRefs {
+  constructor(text) {
+    this.refs = new Map();
+    this.parsedConfig = [];
+    if (text) {
+      let key = null;
+      this.parsedConfig = text
+        .trim()
+        .split('\n')
+        .map(line => {
+          if (/^\s*#/.test(line)) {
+            return { line, comment: true }
+          }
+          const i = line.indexOf(' ');
+          if (line.startsWith('^')) {
+            // This is an oid for the commit associated with the annotated tag immediately preceding this line.
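+            // e.g. an illustrative packed-refs excerpt:
+            //   <tag oid> refs/tags/v1.0.0
+            //   ^<peeled commit oid>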
+ // Trim off the '^' + const value = line.slice(1); + // The tagname^{} syntax is based on the output of `git show-ref --tags -d` + this.refs.set(key + '^{}', value); + return { line, ref: key, peeled: value } + } else { + // This is an oid followed by the ref name + const value = line.slice(0, i); + key = line.slice(i + 1); + this.refs.set(key, value); + return { line, ref: key, oid: value } + } + }); + } + return this + } + + static from(text) { + return new GitPackedRefs(text) + } + + delete(ref) { + this.parsedConfig = this.parsedConfig.filter(entry => entry.ref !== ref); + this.refs.delete(ref); + } + + toString() { + return this.parsedConfig.map(({ line }) => line).join('\n') + '\n' + } +} + +class GitRefSpec { + constructor({ remotePath, localPath, force, matchPrefix }) { + Object.assign(this, { + remotePath, + localPath, + force, + matchPrefix, + }); + } + + static from(refspec) { + const [ + forceMatch, + remotePath, + remoteGlobMatch, + localPath, + localGlobMatch, + ] = refspec.match(/^(\+?)(.*?)(\*?):(.*?)(\*?)$/).slice(1); + const force = forceMatch === '+'; + const remoteIsGlob = remoteGlobMatch === '*'; + const localIsGlob = localGlobMatch === '*'; + // validate + // TODO: Make this check more nuanced, and depend on whether this is a fetch refspec or a push refspec + if (remoteIsGlob !== localIsGlob) { + throw new InternalError('Invalid refspec') + } + return new GitRefSpec({ + remotePath, + localPath, + force, + matchPrefix: remoteIsGlob, + }) + // TODO: We need to run resolveRef on both paths to expand them to their full name. + } + + translate(remoteBranch) { + if (this.matchPrefix) { + if (remoteBranch.startsWith(this.remotePath)) { + return this.localPath + remoteBranch.replace(this.remotePath, '') + } + } else { + if (remoteBranch === this.remotePath) return this.localPath + } + return null + } + + reverseTranslate(localBranch) { + if (this.matchPrefix) { + if (localBranch.startsWith(this.localPath)) { + return this.remotePath + localBranch.replace(this.localPath, '') + } + } else { + if (localBranch === this.localPath) return this.remotePath + } + return null + } +} + +class GitRefSpecSet { + constructor(rules = []) { + this.rules = rules; + } + + static from(refspecs) { + const rules = []; + for (const refspec of refspecs) { + rules.push(GitRefSpec.from(refspec)); // might throw + } + return new GitRefSpecSet(rules) + } + + add(refspec) { + const rule = GitRefSpec.from(refspec); // might throw + this.rules.push(rule); + } + + translate(remoteRefs) { + const result = []; + for (const rule of this.rules) { + for (const remoteRef of remoteRefs) { + const localRef = rule.translate(remoteRef); + if (localRef) { + result.push([remoteRef, localRef]); + } + } + } + return result + } + + translateOne(remoteRef) { + let result = null; + for (const rule of this.rules) { + const localRef = rule.translate(remoteRef); + if (localRef) { + result = localRef; + } + } + return result + } + + localNamespaces() { + return this.rules + .filter(rule => rule.matchPrefix) + .map(rule => rule.localPath.replace(/\/$/, '')) + } +} + +function compareRefNames(a, b) { + // https://stackoverflow.com/a/40355107/2168416 + const _a = a.replace(/\^\{\}$/, ''); + const _b = b.replace(/\^\{\}$/, ''); + const tmp = -(_a < _b) || +(_a > _b); + if (tmp === 0) { + return a.endsWith('^{}') ? 
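+      // (identical up to the suffix: sort the peeled '^{}' entry after its base ref)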
1 : -1 + } + return tmp +} + +const memo = new Map(); +function normalizePath(path) { + let normalizedPath = memo.get(path); + if (!normalizedPath) { + normalizedPath = normalizePathInternal(path); + memo.set(path, normalizedPath); + } + return normalizedPath +} + +function normalizePathInternal(path) { + path = path + .split('/./') + .join('/') // Replace '/./' with '/' + .replace(/\/{2,}/g, '/'); // Replace consecutive '/' + + if (path === '/.') return '/' // if path === '/.' return '/' + if (path === './') return '.' // if path === './' return '.' + + if (path.startsWith('./')) path = path.slice(2); // Remove leading './' + if (path.endsWith('/.')) path = path.slice(0, -2); // Remove trailing '/.' + if (path.length > 1 && path.endsWith('/')) path = path.slice(0, -1); // Remove trailing '/' + + if (path === '') return '.' // if path === '' return '.' + + return path +} + +// For some reason path.posix.join is undefined in webpack + +function join(...parts) { + return normalizePath(parts.map(normalizePath).join('/')) +} + +// This is straight from parse_unit_factor in config.c of canonical git +const num = val => { + val = val.toLowerCase(); + let n = parseInt(val); + if (val.endsWith('k')) n *= 1024; + if (val.endsWith('m')) n *= 1024 * 1024; + if (val.endsWith('g')) n *= 1024 * 1024 * 1024; + return n +}; + +// This is straight from git_parse_maybe_bool_text in config.c of canonical git +const bool = val => { + val = val.trim().toLowerCase(); + if (val === 'true' || val === 'yes' || val === 'on') return true + if (val === 'false' || val === 'no' || val === 'off') return false + throw Error( + `Expected 'true', 'false', 'yes', 'no', 'on', or 'off', but got ${val}` + ) +}; + +const schema = { + core: { + filemode: bool, + bare: bool, + logallrefupdates: bool, + symlinks: bool, + ignorecase: bool, + bigFileThreshold: num, + }, +}; + +// https://git-scm.com/docs/git-config#_syntax + +// section starts with [ and ends with ] +// section is alphanumeric (ASCII) with - and . 
+// section is case insensitive +// subsection is optional +// subsection is specified after section and one or more spaces +// subsection is specified between double quotes +const SECTION_LINE_REGEX = /^\[([A-Za-z0-9-.]+)(?: "(.*)")?\]$/; +const SECTION_REGEX = /^[A-Za-z0-9-.]+$/; + +// variable lines contain a name, and equal sign and then a value +// variable lines can also only contain a name (the implicit value is a boolean true) +// variable name is alphanumeric (ASCII) with - +// variable name starts with an alphabetic character +// variable name is case insensitive +const VARIABLE_LINE_REGEX = /^([A-Za-z][A-Za-z-]*)(?: *= *(.*))?$/; +const VARIABLE_NAME_REGEX = /^[A-Za-z][A-Za-z-]*$/; + +// Comments start with either # or ; and extend to the end of line +const VARIABLE_VALUE_COMMENT_REGEX = /^(.*?)( *[#;].*)$/; + +const extractSectionLine = line => { + const matches = SECTION_LINE_REGEX.exec(line); + if (matches != null) { + const [section, subsection] = matches.slice(1); + return [section, subsection] + } + return null +}; + +const extractVariableLine = line => { + const matches = VARIABLE_LINE_REGEX.exec(line); + if (matches != null) { + const [name, rawValue = 'true'] = matches.slice(1); + const valueWithoutComments = removeComments(rawValue); + const valueWithoutQuotes = removeQuotes(valueWithoutComments); + return [name, valueWithoutQuotes] + } + return null +}; + +const removeComments = rawValue => { + const commentMatches = VARIABLE_VALUE_COMMENT_REGEX.exec(rawValue); + if (commentMatches == null) { + return rawValue + } + const [valueWithoutComment, comment] = commentMatches.slice(1); + // if odd number of quotes before and after comment => comment is escaped + if ( + hasOddNumberOfQuotes(valueWithoutComment) && + hasOddNumberOfQuotes(comment) + ) { + return `${valueWithoutComment}${comment}` + } + return valueWithoutComment +}; + +const hasOddNumberOfQuotes = text => { + const numberOfQuotes = (text.match(/(?:^|[^\\])"/g) || []).length; + return numberOfQuotes % 2 !== 0 +}; + +const removeQuotes = text => { + return text.split('').reduce((newText, c, idx, text) => { + const isQuote = c === '"' && text[idx - 1] !== '\\'; + const isEscapeForQuote = c === '\\' && text[idx + 1] === '"'; + if (isQuote || isEscapeForQuote) { + return newText + } + return newText + c + }, '') +}; + +const lower = text => { + return text != null ? text.toLowerCase() : null +}; + +const getPath = (section, subsection, name) => { + return [lower(section), subsection, lower(name)] + .filter(a => a != null) + .join('.') +}; + +const normalizePath$1 = path => { + const pathSegments = path.split('.'); + const section = pathSegments.shift(); + const name = pathSegments.pop(); + const subsection = pathSegments.length ? pathSegments.join('.') : undefined; + + return { + section, + subsection, + name, + path: getPath(section, subsection, name), + sectionPath: getPath(section, subsection, null), + } +}; + +const findLastIndex = (array, callback) => { + return array.reduce((lastIndex, item, index) => { + return callback(item) ? index : lastIndex + }, -1) +}; + +// Note: there are a LOT of edge cases that aren't covered (e.g. keys in sections that also +// have subsections, [include] directives, etc. +class GitConfig { + constructor(text) { + let section = null; + let subsection = null; + this.parsedConfig = text + ? 
text.split('\n').map(line => { + let name = null; + let value = null; + + const trimmedLine = line.trim(); + const extractedSection = extractSectionLine(trimmedLine); + const isSection = extractedSection != null; + if (isSection) { + ;[section, subsection] = extractedSection; + } else { + const extractedVariable = extractVariableLine(trimmedLine); + const isVariable = extractedVariable != null; + if (isVariable) { + ;[name, value] = extractedVariable; + } + } + + const path = getPath(section, subsection, name); + return { line, isSection, section, subsection, name, value, path } + }) + : []; + } + + static from(text) { + return new GitConfig(text) + } + + async get(path, getall = false) { + const normalizedPath = normalizePath$1(path).path; + const allValues = this.parsedConfig + .filter(config => config.path === normalizedPath) + .map(({ section, name, value }) => { + const fn = schema[section] && schema[section][name]; + return fn ? fn(value) : value + }); + return getall ? allValues : allValues.pop() + } + + async getall(path) { + return this.get(path, true) + } + + async getSubsections(section) { + return this.parsedConfig + .filter(config => config.section === section && config.isSection) + .map(config => config.subsection) + } + + async deleteSection(section, subsection) { + this.parsedConfig = this.parsedConfig.filter( + config => + !(config.section === section && config.subsection === subsection) + ); + } + + async append(path, value) { + return this.set(path, value, true) + } + + async set(path, value, append = false) { + const { + section, + subsection, + name, + path: normalizedPath, + sectionPath, + } = normalizePath$1(path); + const configIndex = findLastIndex( + this.parsedConfig, + config => config.path === normalizedPath + ); + if (value == null) { + if (configIndex !== -1) { + this.parsedConfig.splice(configIndex, 1); + } + } else { + if (configIndex !== -1) { + const config = this.parsedConfig[configIndex]; + // Name should be overwritten in case the casing changed + const modifiedConfig = Object.assign({}, config, { + name, + value, + modified: true, + }); + if (append) { + this.parsedConfig.splice(configIndex + 1, 0, modifiedConfig); + } else { + this.parsedConfig[configIndex] = modifiedConfig; + } + } else { + const sectionIndex = this.parsedConfig.findIndex( + config => config.path === sectionPath + ); + const newConfig = { + section, + subsection, + name, + value, + modified: true, + path: normalizedPath, + }; + if (SECTION_REGEX.test(section) && VARIABLE_NAME_REGEX.test(name)) { + if (sectionIndex >= 0) { + // Reuse existing section + this.parsedConfig.splice(sectionIndex + 1, 0, newConfig); + } else { + // Add a new section + const newSection = { + section, + subsection, + modified: true, + path: sectionPath, + }; + this.parsedConfig.push(newSection, newConfig); + } + } + } + } + } + + toString() { + return this.parsedConfig + .map(({ line, section, subsection, name, value, modified = false }) => { + if (!modified) { + return line + } + if (name != null && value != null) { + if (typeof value === 'string' && /[#;]/.test(value)) { + // A `#` or `;` symbol denotes a comment, so we have to wrap it in double quotes + return `\t${name} = "${value}"` + } + return `\t${name} = ${value}` + } + if (subsection != null) { + return `[${section} "${subsection}"]` + } + return `[${section}]` + }) + .join('\n') + } +} + +class GitConfigManager { + static async get({ fs, gitdir }) { + // We can improve efficiency later if needed. 
+ // TODO: read from full list of git config files + const text = await fs.read(`${gitdir}/config`, { encoding: 'utf8' }); + return GitConfig.from(text) + } + + static async save({ fs, gitdir, config }) { + // We can improve efficiency later if needed. + // TODO: handle saving to the correct global/user/repo location + await fs.write(`${gitdir}/config`, config.toString(), { + encoding: 'utf8', + }); + } +} + +// This is a convenience wrapper for reading and writing files in the 'refs' directory. + +// @see https://git-scm.com/docs/git-rev-parse.html#_specifying_revisions +const refpaths = ref => [ + `${ref}`, + `refs/${ref}`, + `refs/tags/${ref}`, + `refs/heads/${ref}`, + `refs/remotes/${ref}`, + `refs/remotes/${ref}/HEAD`, +]; + +// @see https://git-scm.com/docs/gitrepository-layout +const GIT_FILES = ['config', 'description', 'index', 'shallow', 'commondir']; + +let lock$1; + +async function acquireLock(ref, callback) { + if (lock$1 === undefined) lock$1 = new AsyncLock(); + return lock$1.acquire(ref, callback) +} + +class GitRefManager { + static async updateRemoteRefs({ + fs, + gitdir, + remote, + refs, + symrefs, + tags, + refspecs = undefined, + prune = false, + pruneTags = false, + }) { + // Validate input + for (const value of refs.values()) { + if (!value.match(/[0-9a-f]{40}/)) { + throw new InvalidOidError(value) + } + } + const config = await GitConfigManager.get({ fs, gitdir }); + if (!refspecs) { + refspecs = await config.getall(`remote.${remote}.fetch`); + if (refspecs.length === 0) { + throw new NoRefspecError(remote) + } + // There's some interesting behavior with HEAD that doesn't follow the refspec. + refspecs.unshift(`+HEAD:refs/remotes/${remote}/HEAD`); + } + const refspec = GitRefSpecSet.from(refspecs); + const actualRefsToWrite = new Map(); + // Delete all current tags if the pruneTags argument is true. + if (pruneTags) { + const tags = await GitRefManager.listRefs({ + fs, + gitdir, + filepath: 'refs/tags', + }); + await GitRefManager.deleteRefs({ + fs, + gitdir, + refs: tags.map(tag => `refs/tags/${tag}`), + }); + } + // Add all tags if the fetch tags argument is true. + if (tags) { + for (const serverRef of refs.keys()) { + if (serverRef.startsWith('refs/tags') && !serverRef.endsWith('^{}')) { + // Git's behavior is to only fetch tags that do not conflict with tags already present. + if (!(await GitRefManager.exists({ fs, gitdir, ref: serverRef }))) { + // Always use the object id of the tag itself, and not the peeled object id. 
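+            // (the peeled id lives under '<ref>^{}' and was filtered out by the
+            // endsWith('^{}') guard above)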
+            const oid = refs.get(serverRef);
+            actualRefsToWrite.set(serverRef, oid);
+          }
+        }
+      }
+    }
+    // Combine refs and symrefs giving symrefs priority
+    const refTranslations = refspec.translate([...refs.keys()]);
+    for (const [serverRef, translatedRef] of refTranslations) {
+      const value = refs.get(serverRef);
+      actualRefsToWrite.set(translatedRef, value);
+    }
+    const symrefTranslations = refspec.translate([...symrefs.keys()]);
+    for (const [serverRef, translatedRef] of symrefTranslations) {
+      const value = symrefs.get(serverRef);
+      const symtarget = refspec.translateOne(value);
+      if (symtarget) {
+        actualRefsToWrite.set(translatedRef, `ref: ${symtarget}`);
+      }
+    }
+    // If `prune` argument is true, clear out the existing local refspec roots
+    const pruned = [];
+    if (prune) {
+      for (const filepath of refspec.localNamespaces()) {
+        const refs = (
+          await GitRefManager.listRefs({
+            fs,
+            gitdir,
+            filepath,
+          })
+        ).map(file => `${filepath}/${file}`);
+        for (const ref of refs) {
+          if (!actualRefsToWrite.has(ref)) {
+            pruned.push(ref);
+          }
+        }
+      }
+      if (pruned.length > 0) {
+        await GitRefManager.deleteRefs({ fs, gitdir, refs: pruned });
+      }
+    }
+    // Update files
+    // TODO: For large repos with a history of thousands of pull requests
+    // (i.e. gitlab-ce) it would be vastly more efficient to write them
+    // to .git/packed-refs.
+    // The trick is to make sure we a) don't write a packed ref that is
+    // already shadowed by a loose ref and b) don't lose any refs already
+    // in packed-refs. Doing this efficiently may be difficult. A
+    // solution that might work is
+    // a) load the current packed-refs file
+    // b) add actualRefsToWrite, overriding the existing values if present
+    // c) enumerate all the loose refs currently in .git/refs/remotes/${remote}
+    // d) overwrite their value with the new value.
+    // Examples of refs we need to avoid writing in loose format for efficiency's sake
+    // are .git/refs/remotes/origin/refs/remotes/remote_mirror_3059
+    // and .git/refs/remotes/origin/refs/merge-requests
+    for (const [key, value] of actualRefsToWrite) {
+      await acquireLock(key, async () =>
+        fs.write(join(gitdir, key), `${value.trim()}\n`, 'utf8')
+      );
+    }
+    return { pruned }
+  }
+
+  // TODO: make this less crude?
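+  // Illustrative usage (oid is an assumed 40-char SHA-1):
+  //   await GitRefManager.writeRef({ fs, gitdir, ref: 'refs/heads/main', value: oid })
+  // writes '<oid>\n' to .git/refs/heads/main under a per-ref lock.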
+ static async writeRef({ fs, gitdir, ref, value }) { + // Validate input + if (!value.match(/[0-9a-f]{40}/)) { + throw new InvalidOidError(value) + } + await acquireLock(ref, async () => + fs.write(join(gitdir, ref), `${value.trim()}\n`, 'utf8') + ); + } + + static async writeSymbolicRef({ fs, gitdir, ref, value }) { + await acquireLock(ref, async () => + fs.write(join(gitdir, ref), 'ref: ' + `${value.trim()}\n`, 'utf8') + ); + } + + static async deleteRef({ fs, gitdir, ref }) { + return GitRefManager.deleteRefs({ fs, gitdir, refs: [ref] }) + } + + static async deleteRefs({ fs, gitdir, refs }) { + // Delete regular ref + await Promise.all(refs.map(ref => fs.rm(join(gitdir, ref)))); + // Delete any packed ref + let text = await acquireLock('packed-refs', async () => + fs.read(`${gitdir}/packed-refs`, { encoding: 'utf8' }) + ); + const packed = GitPackedRefs.from(text); + const beforeSize = packed.refs.size; + for (const ref of refs) { + if (packed.refs.has(ref)) { + packed.delete(ref); + } + } + if (packed.refs.size < beforeSize) { + text = packed.toString(); + await acquireLock('packed-refs', async () => + fs.write(`${gitdir}/packed-refs`, text, { encoding: 'utf8' }) + ); + } + } + + /** + * @param {object} args + * @param {import('../models/FileSystem.js').FileSystem} args.fs + * @param {string} args.gitdir + * @param {string} args.ref + * @param {number} [args.depth] + * @returns {Promise} + */ + static async resolve({ fs, gitdir, ref, depth = undefined }) { + if (depth !== undefined) { + depth--; + if (depth === -1) { + return ref + } + } + + // Is it a ref pointer? + if (ref.startsWith('ref: ')) { + ref = ref.slice('ref: '.length); + return GitRefManager.resolve({ fs, gitdir, ref, depth }) + } + // Is it a complete and valid SHA? + if (ref.length === 40 && /[0-9a-f]{40}/.test(ref)) { + return ref + } + // We need to alternate between the file system and the packed-refs + const packedMap = await GitRefManager.packedRefs({ fs, gitdir }); + // Look in all the proper paths, in this order + const allpaths = refpaths(ref).filter(p => !GIT_FILES.includes(p)); // exclude git system files (#709) + + for (const ref of allpaths) { + const sha = await acquireLock( + ref, + async () => + (await fs.read(`${gitdir}/${ref}`, { encoding: 'utf8' })) || + packedMap.get(ref) + ); + if (sha) { + return GitRefManager.resolve({ fs, gitdir, ref: sha.trim(), depth }) + } + } + // Do we give up? + throw new NotFoundError(ref) + } + + static async exists({ fs, gitdir, ref }) { + try { + await GitRefManager.expand({ fs, gitdir, ref }); + return true + } catch (err) { + return false + } + } + + static async expand({ fs, gitdir, ref }) { + // Is it a complete and valid SHA? + if (ref.length === 40 && /[0-9a-f]{40}/.test(ref)) { + return ref + } + // We need to alternate between the file system and the packed-refs + const packedMap = await GitRefManager.packedRefs({ fs, gitdir }); + // Look in all the proper paths, in this order + const allpaths = refpaths(ref); + for (const ref of allpaths) { + const refExists = await acquireLock(ref, async () => + fs.exists(`${gitdir}/${ref}`) + ); + if (refExists) return ref + if (packedMap.has(ref)) return ref + } + // Do we give up? + throw new NotFoundError(ref) + } + + static async expandAgainstMap({ ref, map }) { + // Look in all the proper paths, in this order + const allpaths = refpaths(ref); + for (const ref of allpaths) { + if (await map.has(ref)) return ref + } + // Do we give up? 
+ throw new NotFoundError(ref) + } + + static resolveAgainstMap({ ref, fullref = ref, depth = undefined, map }) { + if (depth !== undefined) { + depth--; + if (depth === -1) { + return { fullref, oid: ref } + } + } + // Is it a ref pointer? + if (ref.startsWith('ref: ')) { + ref = ref.slice('ref: '.length); + return GitRefManager.resolveAgainstMap({ ref, fullref, depth, map }) + } + // Is it a complete and valid SHA? + if (ref.length === 40 && /[0-9a-f]{40}/.test(ref)) { + return { fullref, oid: ref } + } + // Look in all the proper paths, in this order + const allpaths = refpaths(ref); + for (const ref of allpaths) { + const sha = map.get(ref); + if (sha) { + return GitRefManager.resolveAgainstMap({ + ref: sha.trim(), + fullref: ref, + depth, + map, + }) + } + } + // Do we give up? + throw new NotFoundError(ref) + } + + static async packedRefs({ fs, gitdir }) { + const text = await acquireLock('packed-refs', async () => + fs.read(`${gitdir}/packed-refs`, { encoding: 'utf8' }) + ); + const packed = GitPackedRefs.from(text); + return packed.refs + } + + // List all the refs that match the `filepath` prefix + static async listRefs({ fs, gitdir, filepath }) { + const packedMap = GitRefManager.packedRefs({ fs, gitdir }); + let files = null; + try { + files = await fs.readdirDeep(`${gitdir}/${filepath}`); + files = files.map(x => x.replace(`${gitdir}/${filepath}/`, '')); + } catch (err) { + files = []; + } + + for (let key of (await packedMap).keys()) { + // filter by prefix + if (key.startsWith(filepath)) { + // remove prefix + key = key.replace(filepath + '/', ''); + // Don't include duplicates; the loose files have precedence anyway + if (!files.includes(key)) { + files.push(key); + } + } + } + // since we just appended things onto an array, we need to sort them now + files.sort(compareRefNames); + return files + } + + static async listBranches({ fs, gitdir, remote }) { + if (remote) { + return GitRefManager.listRefs({ + fs, + gitdir, + filepath: `refs/remotes/${remote}`, + }) + } else { + return GitRefManager.listRefs({ fs, gitdir, filepath: `refs/heads` }) + } + } + + static async listTags({ fs, gitdir }) { + const tags = await GitRefManager.listRefs({ + fs, + gitdir, + filepath: `refs/tags`, + }); + return tags.filter(x => !x.endsWith('^{}')) + } +} + +function compareTreeEntryPath(a, b) { + // Git sorts tree entries as if there is a trailing slash on directory names. + return compareStrings(appendSlashIfDir(a), appendSlashIfDir(b)) +} + +function appendSlashIfDir(entry) { + return entry.mode === '040000' ? 
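+    // e.g. tree 'foo' compares as 'foo/', so it sorts after blob 'foo.txt'
+    // ('.' is 0x2E, '/' is 0x2F), matching canonical git's tree ordering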
entry.path + '/' : entry.path +} + +/** + * + * @typedef {Object} TreeEntry + * @property {string} mode - the 6 digit hexadecimal mode + * @property {string} path - the name of the file or directory + * @property {string} oid - the SHA-1 object id of the blob or tree + * @property {'commit'|'blob'|'tree'} type - the type of object + */ + +function mode2type$1(mode) { + // prettier-ignore + switch (mode) { + case '040000': return 'tree' + case '100644': return 'blob' + case '100755': return 'blob' + case '120000': return 'blob' + case '160000': return 'commit' + } + throw new InternalError(`Unexpected GitTree entry mode: ${mode}`) +} + +function parseBuffer(buffer) { + const _entries = []; + let cursor = 0; + while (cursor < buffer.length) { + const space = buffer.indexOf(32, cursor); + if (space === -1) { + throw new InternalError( + `GitTree: Error parsing buffer at byte location ${cursor}: Could not find the next space character.` + ) + } + const nullchar = buffer.indexOf(0, cursor); + if (nullchar === -1) { + throw new InternalError( + `GitTree: Error parsing buffer at byte location ${cursor}: Could not find the next null character.` + ) + } + let mode = buffer.slice(cursor, space).toString('utf8'); + if (mode === '40000') mode = '040000'; // makes it line up neater in printed output + const type = mode2type$1(mode); + const path = buffer.slice(space + 1, nullchar).toString('utf8'); + + // Prevent malicious git repos from writing to "..\foo" on clone etc + if (path.includes('\\') || path.includes('/')) { + throw new UnsafeFilepathError(path) + } + + const oid = buffer.slice(nullchar + 1, nullchar + 21).toString('hex'); + cursor = nullchar + 21; + _entries.push({ mode, path, oid, type }); + } + return _entries +} + +function limitModeToAllowed(mode) { + if (typeof mode === 'number') { + mode = mode.toString(8); + } + // tree + if (mode.match(/^0?4.*/)) return '040000' // Directory + if (mode.match(/^1006.*/)) return '100644' // Regular non-executable file + if (mode.match(/^1007.*/)) return '100755' // Regular executable file + if (mode.match(/^120.*/)) return '120000' // Symbolic link + if (mode.match(/^160.*/)) return '160000' // Commit (git submodule reference) + throw new InternalError(`Could not understand file mode: ${mode}`) +} + +function nudgeIntoShape(entry) { + if (!entry.oid && entry.sha) { + entry.oid = entry.sha; // Github + } + entry.mode = limitModeToAllowed(entry.mode); // index + if (!entry.type) { + entry.type = mode2type$1(entry.mode); // index + } + return entry +} + +class GitTree { + constructor(entries) { + if (Buffer.isBuffer(entries)) { + this._entries = parseBuffer(entries); + } else if (Array.isArray(entries)) { + this._entries = entries.map(nudgeIntoShape); + } else { + throw new InternalError('invalid type passed to GitTree constructor') + } + // Tree entries are not sorted alphabetically in the usual sense (see `compareTreeEntryPath`) + // but it is important later on that these be sorted in the same order as they would be returned from readdir. 
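+    // (toObject() re-sorts a copy with compareTreeEntryPath just before
+    // serializing, so the on-disk byte order still matches canonical git)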
+ this._entries.sort(comparePath);
+ }
+
+ static from(tree) {
+ return new GitTree(tree)
+ }
+
+ render() {
+ return this._entries
+ .map(entry => `${entry.mode} ${entry.type} ${entry.oid} ${entry.path}`)
+ .join('\n')
+ }
+
+ toObject() {
+ // Adjust the sort order to match git's
+ const entries = [...this._entries];
+ entries.sort(compareTreeEntryPath);
+ return Buffer.concat(
+ entries.map(entry => {
+ const mode = Buffer.from(entry.mode.replace(/^0/, ''));
+ const space = Buffer.from(' ');
+ const path = Buffer.from(entry.path, 'utf8');
+ const nullchar = Buffer.from([0]);
+ const oid = Buffer.from(entry.oid, 'hex');
+ return Buffer.concat([mode, space, path, nullchar, oid])
+ })
+ )
+ }
+
+ /**
+ * @returns {TreeEntry[]}
+ */
+ entries() {
+ return this._entries
+ }
+
+ *[Symbol.iterator]() {
+ for (const entry of this._entries) {
+ yield entry;
+ }
+ }
+}
+
+class GitObject {
+ static wrap({ type, object }) {
+ return Buffer.concat([
+ Buffer.from(`${type} ${object.byteLength.toString()}\x00`),
+ Buffer.from(object),
+ ])
+ }
+
+ static unwrap(buffer) {
+ const s = buffer.indexOf(32); // first space
+ const i = buffer.indexOf(0); // first null byte
+ const type = buffer.slice(0, s).toString('utf8'); // get type of object
+ const length = buffer.slice(s + 1, i).toString('utf8'); // get length of object
+ const actualLength = buffer.length - (i + 1);
+ // verify length
+ if (parseInt(length) !== actualLength) {
+ throw new InternalError(
+ `Length mismatch: expected ${length} bytes but got ${actualLength} instead.`
+ )
+ }
+ return {
+ type,
+ object: Buffer.from(buffer.slice(i + 1)),
+ }
+ }
+}
+
+async function readObjectLoose({ fs, gitdir, oid }) {
+ const source = `objects/${oid.slice(0, 2)}/${oid.slice(2)}`;
+ const file = await fs.read(`${gitdir}/${source}`);
+ if (!file) {
+ return null
+ }
+ return { object: file, format: 'deflated', source }
+}
+
+/**
+ * @param {Buffer} delta
+ * @param {Buffer} source
+ * @returns {Buffer}
+ */
+function applyDelta(delta, source) {
+ const reader = new BufferCursor(delta);
+ const sourceSize = readVarIntLE(reader);
+
+ if (sourceSize !== source.byteLength) {
+ throw new InternalError(
+ `applyDelta expected source buffer to be ${sourceSize} bytes but the provided buffer was ${source.length} bytes`
+ )
+ }
+ const targetSize = readVarIntLE(reader);
+ let target;
+
+ const firstOp = readOp(reader, source);
+ // Speed optimization - return the raw buffer if it's just a single simple copy
+ if (firstOp.byteLength === targetSize) {
+ target = firstOp;
+ } else {
+ // Otherwise, allocate a fresh buffer and copy the slices into it
+ target = Buffer.alloc(targetSize);
+ const writer = new BufferCursor(target);
+ writer.copy(firstOp);
+
+ while (!reader.eof()) {
+ writer.copy(readOp(reader, source));
+ }
+
+ const tell = writer.tell();
+ if (targetSize !== tell) {
+ throw new InternalError(
+ `applyDelta expected target buffer to be ${targetSize} bytes but the resulting buffer was ${tell} bytes`
+ )
+ }
+ }
+ return target
+}
+
+function readVarIntLE(reader) {
+ let result = 0;
+ let shift = 0;
+ let byte = null;
+ do {
+ byte = reader.readUInt8();
+ result |= (byte & 0b01111111) << shift;
+ shift += 7;
+ } while (byte & 0b10000000)
+ return result
+}
+
+function readCompactLE(reader, flags, size) {
+ let result = 0;
+ let shift = 0;
+ while (size--) {
+ if (flags & 0b00000001) {
+ result |= reader.readUInt8() << shift;
+ }
+ flags >>= 1;
+ shift += 8;
+ }
+ return result
+}
+
+function readOp(reader, source) {
+ /** @type {number} */
+ const byte = reader.readUInt8();
+
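+ // The high bit distinguishes the two delta opcodes (per the pack format):
+ // 1xxxxxxx is a copy-from-source op whose low seven bits flag which
+ // offset bytes (bits 0-3) and size bytes (bits 4-6) follow, while
+ // 0nnnnnnn is an insert op whose low seven bits give a literal byte count.
+ // e.g. 0b10010001 copies, reading 1 offset byte (bit 0) and 1 size byte (bit 4).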
const COPY = 0b10000000; + const OFFS = 0b00001111; + const SIZE = 0b01110000; + if (byte & COPY) { + // copy consists of 4 byte offset, 3 byte size (in LE order) + const offset = readCompactLE(reader, byte & OFFS, 4); + let size = readCompactLE(reader, (byte & SIZE) >> 4, 3); + // Yup. They really did this optimization. + if (size === 0) size = 0x10000; + return source.slice(offset, offset + size) + } else { + // insert + return reader.slice(byte) + } +} + +// Convert a value to an Async Iterator +// This will be easier with async generator functions. +function fromValue(value) { + let queue = [value]; + return { + next() { + return Promise.resolve({ done: queue.length === 0, value: queue.pop() }) + }, + return() { + queue = []; + return {} + }, + [Symbol.asyncIterator]() { + return this + }, + } +} + +function getIterator(iterable) { + if (iterable[Symbol.asyncIterator]) { + return iterable[Symbol.asyncIterator]() + } + if (iterable[Symbol.iterator]) { + return iterable[Symbol.iterator]() + } + if (iterable.next) { + return iterable + } + return fromValue(iterable) +} + +// inspired by 'gartal' but lighter-weight and more battle-tested. +class StreamReader { + constructor(stream) { + // TODO: fix usage in bundlers before Buffer dependency is removed #1855 + if (typeof Buffer === 'undefined') { + throw new Error('Missing Buffer dependency') + } + this.stream = getIterator(stream); + this.buffer = null; + this.cursor = 0; + this.undoCursor = 0; + this.started = false; + this._ended = false; + this._discardedBytes = 0; + } + + eof() { + return this._ended && this.cursor === this.buffer.length + } + + tell() { + return this._discardedBytes + this.cursor + } + + async byte() { + if (this.eof()) return + if (!this.started) await this._init(); + if (this.cursor === this.buffer.length) { + await this._loadnext(); + if (this._ended) return + } + this._moveCursor(1); + return this.buffer[this.undoCursor] + } + + async chunk() { + if (this.eof()) return + if (!this.started) await this._init(); + if (this.cursor === this.buffer.length) { + await this._loadnext(); + if (this._ended) return + } + this._moveCursor(this.buffer.length); + return this.buffer.slice(this.undoCursor, this.cursor) + } + + async read(n) { + if (this.eof()) return + if (!this.started) await this._init(); + if (this.cursor + n > this.buffer.length) { + this._trim(); + await this._accumulate(n); + } + this._moveCursor(n); + return this.buffer.slice(this.undoCursor, this.cursor) + } + + async skip(n) { + if (this.eof()) return + if (!this.started) await this._init(); + if (this.cursor + n > this.buffer.length) { + this._trim(); + await this._accumulate(n); + } + this._moveCursor(n); + } + + async undo() { + this.cursor = this.undoCursor; + } + + async _next() { + this.started = true; + let { done, value } = await this.stream.next(); + if (done) { + this._ended = true; + if (!value) return Buffer.alloc(0) + } + if (value) { + value = Buffer.from(value); + } + return value + } + + _trim() { + // Throw away parts of the buffer we don't need anymore + // assert(this.cursor <= this.buffer.length) + this.buffer = this.buffer.slice(this.undoCursor); + this.cursor -= this.undoCursor; + this._discardedBytes += this.undoCursor; + this.undoCursor = 0; + } + + _moveCursor(n) { + this.undoCursor = this.cursor; + this.cursor += n; + if (this.cursor > this.buffer.length) { + this.cursor = this.buffer.length; + } + } + + async _accumulate(n) { + if (this._ended) return + // Expand the buffer until we have N bytes of data + // or we've reached the 
end of the stream
+ const buffers = [this.buffer];
+ while (this.cursor + n > lengthBuffers(buffers)) {
+ const nextbuffer = await this._next();
+ if (this._ended) break
+ buffers.push(nextbuffer);
+ }
+ this.buffer = Buffer.concat(buffers);
+ }
+
+ async _loadnext() {
+ this._discardedBytes += this.buffer.length;
+ this.undoCursor = 0;
+ this.cursor = 0;
+ this.buffer = await this._next();
+ }
+
+ async _init() {
+ this.buffer = await this._next();
+ }
+}
+
+// This helper function helps us postpone concatenating buffers, which
+// would create intermediate buffer objects.
+function lengthBuffers(buffers) {
+ return buffers.reduce((acc, buffer) => acc + buffer.length, 0)
+}
+
+// My version of git-list-pack - roughly 15x faster than the original
+
+async function listpack(stream, onData) {
+ const reader = new StreamReader(stream);
+ let PACK = await reader.read(4);
+ PACK = PACK.toString('utf8');
+ if (PACK !== 'PACK') {
+ throw new InternalError(`Invalid PACK header '${PACK}'`)
+ }
+
+ let version = await reader.read(4);
+ version = version.readUInt32BE(0);
+ if (version !== 2) {
+ throw new InternalError(`Invalid packfile version: ${version}`)
+ }
+
+ let numObjects = await reader.read(4);
+ numObjects = numObjects.readUInt32BE(0);
+ // If (for some godforsaken reason) this is an empty packfile, abort now.
+ if (numObjects < 1) return
+
+ while (!reader.eof() && numObjects--) {
+ const offset = reader.tell();
+ const { type, length, ofs, reference } = await parseHeader(reader);
+ const inflator = new pako.Inflate();
+ while (!inflator.result) {
+ const chunk = await reader.chunk();
+ if (!chunk) break
+ inflator.push(chunk, false);
+ if (inflator.err) {
+ throw new InternalError(`Pako error: ${inflator.msg}`)
+ }
+ if (inflator.result) {
+ if (inflator.result.length !== length) {
+ throw new InternalError(
+ `Inflated object size is different from that stated in packfile.`
+ )
+ }
+
+ // Backtrack parser to where deflated data ends
+ await reader.undo();
+ await reader.read(chunk.length - inflator.strm.avail_in);
+ const end = reader.tell();
+ await onData({
+ data: inflator.result,
+ type,
+ num: numObjects,
+ offset,
+ end,
+ reference,
+ ofs,
+ });
+ }
+ }
+ }
+}
+
+async function parseHeader(reader) {
+ // Object type is encoded in bits 654
+ let byte = await reader.byte();
+ const type = (byte >> 4) & 0b111;
+ // The length encoding gets complicated.
+ // The last four bits of the length are encoded in bits 3210
+ let length = byte & 0b1111;
+ // Whether the next byte is part of the variable-length encoded number
+ // is encoded in bit 7
+ if (byte & 0b10000000) {
+ let shift = 4;
+ do {
+ byte = await reader.byte();
+ length |= (byte & 0b01111111) << shift;
+ shift += 7;
+ } while (byte & 0b10000000)
+ }
+ // Handle deltified objects
+ let ofs;
+ let reference;
+ if (type === 6) {
+ let shift = 0;
+ ofs = 0;
+ const bytes = [];
+ do {
+ byte = await reader.byte();
+ ofs |= (byte & 0b01111111) << shift;
+ shift += 7;
+ bytes.push(byte);
+ } while (byte & 0b10000000)
+ reference = Buffer.from(bytes);
+ }
+ if (type === 7) {
+ const buf = await reader.read(20);
+ reference = buf;
+ }
+ return { type, length, ofs, reference }
+}
+
+/* eslint-env node, browser */
+
+// null means "not yet detected"; the feature test below runs on first use
+let supportsDecompressionStream = null;
+
+async function inflate(buffer) {
+ if (supportsDecompressionStream === null) {
+ supportsDecompressionStream = testDecompressionStream();
+ }
+ return supportsDecompressionStream
+ ?
browserInflate(buffer) + : pako.inflate(buffer) +} + +async function browserInflate(buffer) { + const ds = new DecompressionStream('deflate'); + const d = new Blob([buffer]).stream().pipeThrough(ds); + return new Uint8Array(await new Response(d).arrayBuffer()) +} + +function testDecompressionStream() { + try { + const ds = new DecompressionStream('deflate'); + if (ds) return true + } catch (_) { + // no bother + } + return false +} + +function decodeVarInt(reader) { + const bytes = []; + let byte = 0; + let multibyte = 0; + do { + byte = reader.readUInt8(); + // We keep bits 6543210 + const lastSeven = byte & 0b01111111; + bytes.push(lastSeven); + // Whether the next byte is part of the variable-length encoded number + // is encoded in bit 7 + multibyte = byte & 0b10000000; + } while (multibyte) + // Now that all the bytes are in big-endian order, + // alternate shifting the bits left by 7 and OR-ing the next byte. + // And... do a weird increment-by-one thing that I don't quite understand. + return bytes.reduce((a, b) => ((a + 1) << 7) | b, -1) +} + +// I'm pretty much copying this one from the git C source code, +// because it makes no sense. +function otherVarIntDecode(reader, startWith) { + let result = startWith; + let shift = 4; + let byte = null; + do { + byte = reader.readUInt8(); + result |= (byte & 0b01111111) << shift; + shift += 7; + } while (byte & 0b10000000) + return result +} + +class GitPackIndex { + constructor(stuff) { + Object.assign(this, stuff); + this.offsetCache = {}; + } + + static async fromIdx({ idx, getExternalRefDelta }) { + const reader = new BufferCursor(idx); + const magic = reader.slice(4).toString('hex'); + // Check for IDX v2 magic number + if (magic !== 'ff744f63') { + return // undefined + } + const version = reader.readUInt32BE(); + if (version !== 2) { + throw new InternalError( + `Unable to read version ${version} packfile IDX. (Only version 2 supported)` + ) + } + if (idx.byteLength > 2048 * 1024 * 1024) { + throw new InternalError( + `To keep implementation simple, I haven't implemented the layer 5 feature needed to support packfiles > 2GB in size.` + ) + } + // Skip over fanout table + reader.seek(reader.tell() + 4 * 255); + // Get hashes + const size = reader.readUInt32BE(); + const hashes = []; + for (let i = 0; i < size; i++) { + const hash = reader.slice(20).toString('hex'); + hashes[i] = hash; + } + reader.seek(reader.tell() + 4 * size); + // Skip over CRCs + // Get offsets + const offsets = new Map(); + for (let i = 0; i < size; i++) { + offsets.set(hashes[i], reader.readUInt32BE()); + } + const packfileSha = reader.slice(20).toString('hex'); + return new GitPackIndex({ + hashes, + crcs: {}, + offsets, + packfileSha, + getExternalRefDelta, + }) + } + + static async fromPack({ pack, getExternalRefDelta, onProgress }) { + const listpackTypes = { + 1: 'commit', + 2: 'tree', + 3: 'blob', + 4: 'tag', + 6: 'ofs-delta', + 7: 'ref-delta', + }; + const offsetToObject = {}; + + // Older packfiles do NOT use the shasum of the pack itself, + // so it is recommended to just use whatever bytes are in the trailer. 
+ // Source: https://github.com/git/git/commit/1190a1acf800acdcfd7569f87ac1560e2d077414 + const packfileSha = pack.slice(-20).toString('hex'); + + const hashes = []; + const crcs = {}; + const offsets = new Map(); + let totalObjectCount = null; + let lastPercent = null; + + await listpack([pack], async ({ data, type, reference, offset, num }) => { + if (totalObjectCount === null) totalObjectCount = num; + const percent = Math.floor( + ((totalObjectCount - num) * 100) / totalObjectCount + ); + if (percent !== lastPercent) { + if (onProgress) { + await onProgress({ + phase: 'Receiving objects', + loaded: totalObjectCount - num, + total: totalObjectCount, + }); + } + } + lastPercent = percent; + // Change type from a number to a meaningful string + type = listpackTypes[type]; + + if (['commit', 'tree', 'blob', 'tag'].includes(type)) { + offsetToObject[offset] = { + type, + offset, + }; + } else if (type === 'ofs-delta') { + offsetToObject[offset] = { + type, + offset, + }; + } else if (type === 'ref-delta') { + offsetToObject[offset] = { + type, + offset, + }; + } + }); + + // We need to know the lengths of the slices to compute the CRCs. + const offsetArray = Object.keys(offsetToObject).map(Number); + for (const [i, start] of offsetArray.entries()) { + const end = + i + 1 === offsetArray.length ? pack.byteLength - 20 : offsetArray[i + 1]; + const o = offsetToObject[start]; + const crc = crc32.buf(pack.slice(start, end)) >>> 0; + o.end = end; + o.crc = crc; + } + + // We don't have the hashes yet. But we can generate them using the .readSlice function! + const p = new GitPackIndex({ + pack: Promise.resolve(pack), + packfileSha, + crcs, + hashes, + offsets, + getExternalRefDelta, + }); + + // Resolve deltas and compute the oids + lastPercent = null; + let count = 0; + const objectsByDepth = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]; + for (let offset in offsetToObject) { + offset = Number(offset); + const percent = Math.floor((count * 100) / totalObjectCount); + if (percent !== lastPercent) { + if (onProgress) { + await onProgress({ + phase: 'Resolving deltas', + loaded: count, + total: totalObjectCount, + }); + } + } + count++; + lastPercent = percent; + + const o = offsetToObject[offset]; + if (o.oid) continue + try { + p.readDepth = 0; + p.externalReadDepth = 0; + const { type, object } = await p.readSlice({ start: offset }); + objectsByDepth[p.readDepth] += 1; + const oid = await shasum(GitObject.wrap({ type, object })); + o.oid = oid; + hashes.push(oid); + offsets.set(oid, offset); + crcs[oid] = o.crc; + } catch (err) { + continue + } + } + + hashes.sort(); + return p + } + + async toBuffer() { + const buffers = []; + const write = (str, encoding) => { + buffers.push(Buffer.from(str, encoding)); + }; + // Write out IDX v2 magic number + write('ff744f63', 'hex'); + // Write out version number 2 + write('00000002', 'hex'); + // Write fanout table + const fanoutBuffer = new BufferCursor(Buffer.alloc(256 * 4)); + for (let i = 0; i < 256; i++) { + let count = 0; + for (const hash of this.hashes) { + if (parseInt(hash.slice(0, 2), 16) <= i) count++; + } + fanoutBuffer.writeUInt32BE(count); + } + buffers.push(fanoutBuffer.buffer); + // Write out hashes + for (const hash of this.hashes) { + write(hash, 'hex'); + } + // Write out crcs + const crcsBuffer = new BufferCursor(Buffer.alloc(this.hashes.length * 4)); + for (const hash of this.hashes) { + crcsBuffer.writeUInt32BE(this.crcs[hash]); + } + buffers.push(crcsBuffer.buffer); + // Write out offsets + const offsetsBuffer = new 
BufferCursor(Buffer.alloc(this.hashes.length * 4)); + for (const hash of this.hashes) { + offsetsBuffer.writeUInt32BE(this.offsets.get(hash)); + } + buffers.push(offsetsBuffer.buffer); + // Write out packfile checksum + write(this.packfileSha, 'hex'); + // Write out shasum + const totalBuffer = Buffer.concat(buffers); + const sha = await shasum(totalBuffer); + const shaBuffer = Buffer.alloc(20); + shaBuffer.write(sha, 'hex'); + return Buffer.concat([totalBuffer, shaBuffer]) + } + + async load({ pack }) { + this.pack = pack; + } + + async unload() { + this.pack = null; + } + + async read({ oid }) { + if (!this.offsets.get(oid)) { + if (this.getExternalRefDelta) { + this.externalReadDepth++; + return this.getExternalRefDelta(oid) + } else { + throw new InternalError(`Could not read object ${oid} from packfile`) + } + } + const start = this.offsets.get(oid); + return this.readSlice({ start }) + } + + async readSlice({ start }) { + if (this.offsetCache[start]) { + return Object.assign({}, this.offsetCache[start]) + } + this.readDepth++; + const types = { + 0b0010000: 'commit', + 0b0100000: 'tree', + 0b0110000: 'blob', + 0b1000000: 'tag', + 0b1100000: 'ofs_delta', + 0b1110000: 'ref_delta', + }; + if (!this.pack) { + throw new InternalError( + 'Tried to read from a GitPackIndex with no packfile loaded into memory' + ) + } + const raw = (await this.pack).slice(start); + const reader = new BufferCursor(raw); + const byte = reader.readUInt8(); + // Object type is encoded in bits 654 + const btype = byte & 0b1110000; + let type = types[btype]; + if (type === undefined) { + throw new InternalError('Unrecognized type: 0b' + btype.toString(2)) + } + // The length encoding get complicated. + // Last four bits of length is encoded in bits 3210 + const lastFour = byte & 0b1111; + let length = lastFour; + // Whether the next byte is part of the variable-length encoded number + // is encoded in bit 7 + const multibyte = byte & 0b10000000; + if (multibyte) { + length = otherVarIntDecode(reader, lastFour); + } + let base = null; + let object = null; + // Handle deltified objects + if (type === 'ofs_delta') { + const offset = decodeVarInt(reader); + const baseOffset = start - offset + ;({ object: base, type } = await this.readSlice({ start: baseOffset })); + } + if (type === 'ref_delta') { + const oid = reader.slice(20).toString('hex') + ;({ object: base, type } = await this.read({ oid })); + } + // Handle undeltified objects + const buffer = raw.slice(reader.tell()); + object = Buffer.from(await inflate(buffer)); + // Assert that the object length is as expected. + if (object.byteLength !== length) { + throw new InternalError( + `Packfile told us object would have length ${length} but it had length ${object.byteLength}` + ) + } + if (base) { + object = Buffer.from(applyDelta(object, base)); + } + // Cache the result based on depth. 
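+ // (Presumably a heuristic: objects reached through long delta chains are
+ // expensive to reconstruct and likely to be reused as bases, while shallow
+ // objects are cheap enough to re-read from the pack on demand.)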
+ if (this.readDepth > 3) { + // hand tuned for speed / memory usage tradeoff + this.offsetCache[start] = { type, object }; + } + return { type, format: 'content', object } + } +} + +const PackfileCache = Symbol('PackfileCache'); + +async function loadPackIndex({ + fs, + filename, + getExternalRefDelta, + emitter, + emitterPrefix, +}) { + const idx = await fs.read(filename); + return GitPackIndex.fromIdx({ idx, getExternalRefDelta }) +} + +function readPackIndex({ + fs, + cache, + filename, + getExternalRefDelta, + emitter, + emitterPrefix, +}) { + // Try to get the packfile index from the in-memory cache + if (!cache[PackfileCache]) cache[PackfileCache] = new Map(); + let p = cache[PackfileCache].get(filename); + if (!p) { + p = loadPackIndex({ + fs, + filename, + getExternalRefDelta, + emitter, + emitterPrefix, + }); + cache[PackfileCache].set(filename, p); + } + return p +} + +async function readObjectPacked({ + fs, + cache, + gitdir, + oid, + format = 'content', + getExternalRefDelta, +}) { + // Check to see if it's in a packfile. + // Iterate through all the .idx files + let list = await fs.readdir(join(gitdir, 'objects/pack')); + list = list.filter(x => x.endsWith('.idx')); + for (const filename of list) { + const indexFile = `${gitdir}/objects/pack/${filename}`; + const p = await readPackIndex({ + fs, + cache, + filename: indexFile, + getExternalRefDelta, + }); + if (p.error) throw new InternalError(p.error) + // If the packfile DOES have the oid we're looking for... + if (p.offsets.has(oid)) { + // Get the resolved git object from the packfile + if (!p.pack) { + const packFile = indexFile.replace(/idx$/, 'pack'); + p.pack = fs.read(packFile); + } + const result = await p.read({ oid, getExternalRefDelta }); + result.format = 'content'; + result.source = `objects/pack/${filename.replace(/idx$/, 'pack')}`; + return result + } + } + // Failed to find it + return null +} + +/** + * @param {object} args + * @param {import('../models/FileSystem.js').FileSystem} args.fs + * @param {any} args.cache + * @param {string} args.gitdir + * @param {string} args.oid + * @param {string} [args.format] + */ +async function _readObject({ + fs, + cache, + gitdir, + oid, + format = 'content', +}) { + // Curry the current read method so that the packfile un-deltification + // process can acquire external ref-deltas. + const getExternalRefDelta = oid => _readObject({ fs, cache, gitdir, oid }); + + let result; + // Empty tree - hard-coded so we can use it as a shorthand. + // Note: I think the canonical git implementation must do this too because + // `git cat-file -t 4b825dc642cb6eb9a060e54bf8d69288fbee4904` prints "tree" even in empty repos. + if (oid === '4b825dc642cb6eb9a060e54bf8d69288fbee4904') { + result = { format: 'wrapped', object: Buffer.from(`tree 0\x00`) }; + } + // Look for it in the loose object directory. + if (!result) { + result = await readObjectLoose({ fs, gitdir, oid }); + } + // Check to see if it's in a packfile. + if (!result) { + result = await readObjectPacked({ + fs, + cache, + gitdir, + oid, + getExternalRefDelta, + }); + + if (!result) { + throw new NotFoundError(oid) + } + + // Directly return packed result, as specified: packed objects always return the 'content' format. + return result + } + + // Loose objects are always deflated, return early + if (format === 'deflated') { + return result + } + + // All loose objects are deflated but the hard-coded empty tree is `wrapped` so we have to check if we need to inflate the object. 
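+ // The formats form a pipeline: 'deflated' (raw zlib bytes on disk)
+ // -> inflate -> 'wrapped' (the "<type> <byteLength>\x00<payload>" form
+ // that gets SHA-1 hashed) -> GitObject.unwrap -> 'content' (the payload).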
+ if (result.format === 'deflated') { + result.object = Buffer.from(await inflate(result.object)); + result.format = 'wrapped'; + } + + if (format === 'wrapped') { + return result + } + + const sha = await shasum(result.object); + if (sha !== oid) { + throw new InternalError( + `SHA check failed! Expected ${oid}, computed ${sha}` + ) + } + const { object, type } = GitObject.unwrap(result.object); + result.type = type; + result.object = object; + result.format = 'content'; + + if (format === 'content') { + return result + } + + throw new InternalError(`invalid requested format "${format}"`) +} + +class AlreadyExistsError extends BaseError { + /** + * @param {'note'|'remote'|'tag'|'branch'} noun + * @param {string} where + * @param {boolean} canForce + */ + constructor(noun, where, canForce = true) { + super( + `Failed to create ${noun} at ${where} because it already exists.${ + canForce + ? ` (Hint: use 'force: true' parameter to overwrite existing ${noun}.)` + : '' + }` + ); + this.code = this.name = AlreadyExistsError.code; + this.data = { noun, where, canForce }; + } +} +/** @type {'AlreadyExistsError'} */ +AlreadyExistsError.code = 'AlreadyExistsError'; + +class AmbiguousError extends BaseError { + /** + * @param {'oids'|'refs'} nouns + * @param {string} short + * @param {string[]} matches + */ + constructor(nouns, short, matches) { + super( + `Found multiple ${nouns} matching "${short}" (${matches.join( + ', ' + )}). Use a longer abbreviation length to disambiguate them.` + ); + this.code = this.name = AmbiguousError.code; + this.data = { nouns, short, matches }; + } +} +/** @type {'AmbiguousError'} */ +AmbiguousError.code = 'AmbiguousError'; + +class CheckoutConflictError extends BaseError { + /** + * @param {string[]} filepaths + */ + constructor(filepaths) { + super( + `Your local changes to the following files would be overwritten by checkout: ${filepaths.join( + ', ' + )}` + ); + this.code = this.name = CheckoutConflictError.code; + this.data = { filepaths }; + } +} +/** @type {'CheckoutConflictError'} */ +CheckoutConflictError.code = 'CheckoutConflictError'; + +class CommitNotFetchedError extends BaseError { + /** + * @param {string} ref + * @param {string} oid + */ + constructor(ref, oid) { + super( + `Failed to checkout "${ref}" because commit ${oid} is not available locally. 
Do a git fetch to make the branch available locally.` + ); + this.code = this.name = CommitNotFetchedError.code; + this.data = { ref, oid }; + } +} +/** @type {'CommitNotFetchedError'} */ +CommitNotFetchedError.code = 'CommitNotFetchedError'; + +class EmptyServerResponseError extends BaseError { + constructor() { + super(`Empty response from git server.`); + this.code = this.name = EmptyServerResponseError.code; + this.data = {}; + } +} +/** @type {'EmptyServerResponseError'} */ +EmptyServerResponseError.code = 'EmptyServerResponseError'; + +class FastForwardError extends BaseError { + constructor() { + super(`A simple fast-forward merge was not possible.`); + this.code = this.name = FastForwardError.code; + this.data = {}; + } +} +/** @type {'FastForwardError'} */ +FastForwardError.code = 'FastForwardError'; + +class GitPushError extends BaseError { + /** + * @param {string} prettyDetails + * @param {PushResult} result + */ + constructor(prettyDetails, result) { + super(`One or more branches were not updated: ${prettyDetails}`); + this.code = this.name = GitPushError.code; + this.data = { prettyDetails, result }; + } +} +/** @type {'GitPushError'} */ +GitPushError.code = 'GitPushError'; + +class HttpError extends BaseError { + /** + * @param {number} statusCode + * @param {string} statusMessage + * @param {string} response + */ + constructor(statusCode, statusMessage, response) { + super(`HTTP Error: ${statusCode} ${statusMessage}`); + this.code = this.name = HttpError.code; + this.data = { statusCode, statusMessage, response }; + } +} +/** @type {'HttpError'} */ +HttpError.code = 'HttpError'; + +class InvalidFilepathError extends BaseError { + /** + * @param {'leading-slash'|'trailing-slash'|'directory'} [reason] + */ + constructor(reason) { + let message = 'invalid filepath'; + if (reason === 'leading-slash' || reason === 'trailing-slash') { + message = `"filepath" parameter should not include leading or trailing directory separators because these can cause problems on some platforms.`; + } else if (reason === 'directory') { + message = `"filepath" should not be a directory.`; + } + super(message); + this.code = this.name = InvalidFilepathError.code; + this.data = { reason }; + } +} +/** @type {'InvalidFilepathError'} */ +InvalidFilepathError.code = 'InvalidFilepathError'; + +class InvalidRefNameError extends BaseError { + /** + * @param {string} ref + * @param {string} suggestion + * @param {boolean} canForce + */ + constructor(ref, suggestion) { + super( + `"${ref}" would be an invalid git reference. 
(Hint: a valid alternative would be "${suggestion}".)` + ); + this.code = this.name = InvalidRefNameError.code; + this.data = { ref, suggestion }; + } +} +/** @type {'InvalidRefNameError'} */ +InvalidRefNameError.code = 'InvalidRefNameError'; + +class MaxDepthError extends BaseError { + /** + * @param {number} depth + */ + constructor(depth) { + super(`Maximum search depth of ${depth} exceeded.`); + this.code = this.name = MaxDepthError.code; + this.data = { depth }; + } +} +/** @type {'MaxDepthError'} */ +MaxDepthError.code = 'MaxDepthError'; + +class MergeNotSupportedError extends BaseError { + constructor() { + super(`Merges with conflicts are not supported yet.`); + this.code = this.name = MergeNotSupportedError.code; + this.data = {}; + } +} +/** @type {'MergeNotSupportedError'} */ +MergeNotSupportedError.code = 'MergeNotSupportedError'; + +class MergeConflictError extends BaseError { + /** + * @param {Array} filepaths + * @param {Array} bothModified + * @param {Array} deleteByUs + * @param {Array} deleteByTheirs + */ + constructor(filepaths, bothModified, deleteByUs, deleteByTheirs) { + super( + `Automatic merge failed with one or more merge conflicts in the following files: ${filepaths.toString()}. Fix conflicts then commit the result.` + ); + this.code = this.name = MergeConflictError.code; + this.data = { filepaths, bothModified, deleteByUs, deleteByTheirs }; + } +} +/** @type {'MergeConflictError'} */ +MergeConflictError.code = 'MergeConflictError'; + +class MissingNameError extends BaseError { + /** + * @param {'author'|'committer'|'tagger'} role + */ + constructor(role) { + super( + `No name was provided for ${role} in the argument or in the .git/config file.` + ); + this.code = this.name = MissingNameError.code; + this.data = { role }; + } +} +/** @type {'MissingNameError'} */ +MissingNameError.code = 'MissingNameError'; + +class MissingParameterError extends BaseError { + /** + * @param {string} parameter + */ + constructor(parameter) { + super( + `The function requires a "${parameter}" parameter but none was provided.` + ); + this.code = this.name = MissingParameterError.code; + this.data = { parameter }; + } +} +/** @type {'MissingParameterError'} */ +MissingParameterError.code = 'MissingParameterError'; + +class MultipleGitError extends BaseError { + /** + * @param {Error[]} errors + * @param {string} message + */ + constructor(errors) { + super( + `There are multiple errors that were thrown by the method. Please refer to the "errors" property to see more` + ); + this.code = this.name = MultipleGitError.code; + this.data = { errors }; + this.errors = errors; + } +} +/** @type {'MultipleGitError'} */ +MultipleGitError.code = 'MultipleGitError'; + +class ParseError extends BaseError { + /** + * @param {string} expected + * @param {string} actual + */ + constructor(expected, actual) { + super(`Expected "${expected}" but received "${actual}".`); + this.code = this.name = ParseError.code; + this.data = { expected, actual }; + } +} +/** @type {'ParseError'} */ +ParseError.code = 'ParseError'; + +class PushRejectedError extends BaseError { + /** + * @param {'not-fast-forward'|'tag-exists'} reason + */ + constructor(reason) { + let message = ''; + if (reason === 'not-fast-forward') { + message = ' because it was not a simple fast-forward'; + } else if (reason === 'tag-exists') { + message = ' because tag already exists'; + } + super(`Push rejected${message}. 
Use "force: true" to override.`); + this.code = this.name = PushRejectedError.code; + this.data = { reason }; + } +} +/** @type {'PushRejectedError'} */ +PushRejectedError.code = 'PushRejectedError'; + +class RemoteCapabilityError extends BaseError { + /** + * @param {'shallow'|'deepen-since'|'deepen-not'|'deepen-relative'} capability + * @param {'depth'|'since'|'exclude'|'relative'} parameter + */ + constructor(capability, parameter) { + super( + `Remote does not support the "${capability}" so the "${parameter}" parameter cannot be used.` + ); + this.code = this.name = RemoteCapabilityError.code; + this.data = { capability, parameter }; + } +} +/** @type {'RemoteCapabilityError'} */ +RemoteCapabilityError.code = 'RemoteCapabilityError'; + +class SmartHttpError extends BaseError { + /** + * @param {string} preview + * @param {string} response + */ + constructor(preview, response) { + super( + `Remote did not reply using the "smart" HTTP protocol. Expected "001e# service=git-upload-pack" but received: ${preview}` + ); + this.code = this.name = SmartHttpError.code; + this.data = { preview, response }; + } +} +/** @type {'SmartHttpError'} */ +SmartHttpError.code = 'SmartHttpError'; + +class UnknownTransportError extends BaseError { + /** + * @param {string} url + * @param {string} transport + * @param {string} [suggestion] + */ + constructor(url, transport, suggestion) { + super( + `Git remote "${url}" uses an unrecognized transport protocol: "${transport}"` + ); + this.code = this.name = UnknownTransportError.code; + this.data = { url, transport, suggestion }; + } +} +/** @type {'UnknownTransportError'} */ +UnknownTransportError.code = 'UnknownTransportError'; + +class UrlParseError extends BaseError { + /** + * @param {string} url + */ + constructor(url) { + super(`Cannot parse remote URL: "${url}"`); + this.code = this.name = UrlParseError.code; + this.data = { url }; + } +} +/** @type {'UrlParseError'} */ +UrlParseError.code = 'UrlParseError'; + +class UserCanceledError extends BaseError { + constructor() { + super(`The operation was canceled.`); + this.code = this.name = UserCanceledError.code; + this.data = {}; + } +} +/** @type {'UserCanceledError'} */ +UserCanceledError.code = 'UserCanceledError'; + +class IndexResetError extends BaseError { + /** + * @param {Array} filepaths + */ + constructor(filepath) { + super( + `Could not merge index: Entry for '${filepath}' is not up to date. Either reset the index entry to HEAD, or stage your unstaged changes.` + ); + this.code = this.name = IndexResetError.code; + this.data = { filepath }; + } +} +/** @type {'IndexResetError'} */ +IndexResetError.code = 'IndexResetError'; + +class NoCommitError extends BaseError { + /** + * @param {string} ref + */ + constructor(ref) { + super( + `"${ref}" does not point to any commit. You're maybe working on a repository with no commits yet. 
` + ); + this.code = this.name = NoCommitError.code; + this.data = { ref }; + } +} +/** @type {'NoCommitError'} */ +NoCommitError.code = 'NoCommitError'; + + + +var Errors = /*#__PURE__*/Object.freeze({ + __proto__: null, + AlreadyExistsError: AlreadyExistsError, + AmbiguousError: AmbiguousError, + CheckoutConflictError: CheckoutConflictError, + CommitNotFetchedError: CommitNotFetchedError, + EmptyServerResponseError: EmptyServerResponseError, + FastForwardError: FastForwardError, + GitPushError: GitPushError, + HttpError: HttpError, + InternalError: InternalError, + InvalidFilepathError: InvalidFilepathError, + InvalidOidError: InvalidOidError, + InvalidRefNameError: InvalidRefNameError, + MaxDepthError: MaxDepthError, + MergeNotSupportedError: MergeNotSupportedError, + MergeConflictError: MergeConflictError, + MissingNameError: MissingNameError, + MissingParameterError: MissingParameterError, + MultipleGitError: MultipleGitError, + NoRefspecError: NoRefspecError, + NotFoundError: NotFoundError, + ObjectTypeError: ObjectTypeError, + ParseError: ParseError, + PushRejectedError: PushRejectedError, + RemoteCapabilityError: RemoteCapabilityError, + SmartHttpError: SmartHttpError, + UnknownTransportError: UnknownTransportError, + UnsafeFilepathError: UnsafeFilepathError, + UrlParseError: UrlParseError, + UserCanceledError: UserCanceledError, + UnmergedPathsError: UnmergedPathsError, + IndexResetError: IndexResetError, + NoCommitError: NoCommitError +}); + +function formatAuthor({ name, email, timestamp, timezoneOffset }) { + timezoneOffset = formatTimezoneOffset(timezoneOffset); + return `${name} <${email}> ${timestamp} ${timezoneOffset}` +} + +// The amount of effort that went into crafting these cases to handle +// -0 (just so we don't lose that information when parsing and reconstructing) +// but can also default to +0 was extraordinary. + +function formatTimezoneOffset(minutes) { + const sign = simpleSign(negateExceptForZero(minutes)); + minutes = Math.abs(minutes); + const hours = Math.floor(minutes / 60); + minutes -= hours * 60; + let strHours = String(hours); + let strMinutes = String(minutes); + if (strHours.length < 2) strHours = '0' + strHours; + if (strMinutes.length < 2) strMinutes = '0' + strMinutes; + return (sign === -1 ? '-' : '+') + strHours + strMinutes +} + +function simpleSign(n) { + return Math.sign(n) || (Object.is(n, -0) ? -1 : 1) +} + +function negateExceptForZero(n) { + return n === 0 ? n : -n +} + +function normalizeNewlines(str) { + // remove all + str = str.replace(/\r/g, ''); + // no extra newlines up front + str = str.replace(/^\n+/, ''); + // and a single newline at the end + str = str.replace(/\n+$/, '') + '\n'; + return str +} + +function parseAuthor(author) { + const [, name, email, timestamp, offset] = author.match( + /^(.*) <(.*)> (.*) (.*)$/ + ); + return { + name: name, + email: email, + timestamp: Number(timestamp), + timezoneOffset: parseTimezoneOffset(offset), + } +} + +// The amount of effort that went into crafting these cases to handle +// -0 (just so we don't lose that information when parsing and reconstructing) +// but can also default to +0 was extraordinary. + +function parseTimezoneOffset(offset) { + let [, sign, hours, minutes] = offset.match(/(\+|-)(\d\d)(\d\d)/); + minutes = (sign === '+' ? 1 : -1) * (Number(hours) * 60 + Number(minutes)); + return negateExceptForZero$1(minutes) +} + +function negateExceptForZero$1(n) { + return n === 0 ? 
n : -n +} + +class GitAnnotatedTag { + constructor(tag) { + if (typeof tag === 'string') { + this._tag = tag; + } else if (Buffer.isBuffer(tag)) { + this._tag = tag.toString('utf8'); + } else if (typeof tag === 'object') { + this._tag = GitAnnotatedTag.render(tag); + } else { + throw new InternalError( + 'invalid type passed to GitAnnotatedTag constructor' + ) + } + } + + static from(tag) { + return new GitAnnotatedTag(tag) + } + + static render(obj) { + return `object ${obj.object} +type ${obj.type} +tag ${obj.tag} +tagger ${formatAuthor(obj.tagger)} + +${obj.message} +${obj.gpgsig ? obj.gpgsig : ''}` + } + + justHeaders() { + return this._tag.slice(0, this._tag.indexOf('\n\n')) + } + + message() { + const tag = this.withoutSignature(); + return tag.slice(tag.indexOf('\n\n') + 2) + } + + parse() { + return Object.assign(this.headers(), { + message: this.message(), + gpgsig: this.gpgsig(), + }) + } + + render() { + return this._tag + } + + headers() { + const headers = this.justHeaders().split('\n'); + const hs = []; + for (const h of headers) { + if (h[0] === ' ') { + // combine with previous header (without space indent) + hs[hs.length - 1] += '\n' + h.slice(1); + } else { + hs.push(h); + } + } + const obj = {}; + for (const h of hs) { + const key = h.slice(0, h.indexOf(' ')); + const value = h.slice(h.indexOf(' ') + 1); + if (Array.isArray(obj[key])) { + obj[key].push(value); + } else { + obj[key] = value; + } + } + if (obj.tagger) { + obj.tagger = parseAuthor(obj.tagger); + } + if (obj.committer) { + obj.committer = parseAuthor(obj.committer); + } + return obj + } + + withoutSignature() { + const tag = normalizeNewlines(this._tag); + if (tag.indexOf('\n-----BEGIN PGP SIGNATURE-----') === -1) return tag + return tag.slice(0, tag.lastIndexOf('\n-----BEGIN PGP SIGNATURE-----')) + } + + gpgsig() { + if (this._tag.indexOf('\n-----BEGIN PGP SIGNATURE-----') === -1) return + const signature = this._tag.slice( + this._tag.indexOf('-----BEGIN PGP SIGNATURE-----'), + this._tag.indexOf('-----END PGP SIGNATURE-----') + + '-----END PGP SIGNATURE-----'.length + ); + return normalizeNewlines(signature) + } + + payload() { + return this.withoutSignature() + '\n' + } + + toObject() { + return Buffer.from(this._tag, 'utf8') + } + + static async sign(tag, sign, secretKey) { + const payload = tag.payload(); + let { signature } = await sign({ payload, secretKey }); + // renormalize the line endings to the one true line-ending + signature = normalizeNewlines(signature); + const signedTag = payload + signature; + // return a new tag object + return GitAnnotatedTag.from(signedTag) + } +} + +function indent(str) { + return ( + str + .trim() + .split('\n') + .map(x => ' ' + x) + .join('\n') + '\n' + ) +} + +function outdent(str) { + return str + .split('\n') + .map(x => x.replace(/^ /, '')) + .join('\n') +} + +class GitCommit { + constructor(commit) { + if (typeof commit === 'string') { + this._commit = commit; + } else if (Buffer.isBuffer(commit)) { + this._commit = commit.toString('utf8'); + } else if (typeof commit === 'object') { + this._commit = GitCommit.render(commit); + } else { + throw new InternalError('invalid type passed to GitCommit constructor') + } + } + + static fromPayloadSignature({ payload, signature }) { + const headers = GitCommit.justHeaders(payload); + const message = GitCommit.justMessage(payload); + const commit = normalizeNewlines( + headers + '\ngpgsig' + indent(signature) + '\n' + message + ); + return new GitCommit(commit) + } + + static from(commit) { + return new GitCommit(commit) 
+ } + + toObject() { + return Buffer.from(this._commit, 'utf8') + } + + // Todo: allow setting the headers and message + headers() { + return this.parseHeaders() + } + + // Todo: allow setting the headers and message + message() { + return GitCommit.justMessage(this._commit) + } + + parse() { + return Object.assign({ message: this.message() }, this.headers()) + } + + static justMessage(commit) { + return normalizeNewlines(commit.slice(commit.indexOf('\n\n') + 2)) + } + + static justHeaders(commit) { + return commit.slice(0, commit.indexOf('\n\n')) + } + + parseHeaders() { + const headers = GitCommit.justHeaders(this._commit).split('\n'); + const hs = []; + for (const h of headers) { + if (h[0] === ' ') { + // combine with previous header (without space indent) + hs[hs.length - 1] += '\n' + h.slice(1); + } else { + hs.push(h); + } + } + const obj = { + parent: [], + }; + for (const h of hs) { + const key = h.slice(0, h.indexOf(' ')); + const value = h.slice(h.indexOf(' ') + 1); + if (Array.isArray(obj[key])) { + obj[key].push(value); + } else { + obj[key] = value; + } + } + if (obj.author) { + obj.author = parseAuthor(obj.author); + } + if (obj.committer) { + obj.committer = parseAuthor(obj.committer); + } + return obj + } + + static renderHeaders(obj) { + let headers = ''; + if (obj.tree) { + headers += `tree ${obj.tree}\n`; + } else { + headers += `tree 4b825dc642cb6eb9a060e54bf8d69288fbee4904\n`; // the null tree + } + if (obj.parent) { + if (obj.parent.length === undefined) { + throw new InternalError(`commit 'parent' property should be an array`) + } + for (const p of obj.parent) { + headers += `parent ${p}\n`; + } + } + const author = obj.author; + headers += `author ${formatAuthor(author)}\n`; + const committer = obj.committer || obj.author; + headers += `committer ${formatAuthor(committer)}\n`; + if (obj.gpgsig) { + headers += 'gpgsig' + indent(obj.gpgsig); + } + return headers + } + + static render(obj) { + return GitCommit.renderHeaders(obj) + '\n' + normalizeNewlines(obj.message) + } + + render() { + return this._commit + } + + withoutSignature() { + const commit = normalizeNewlines(this._commit); + if (commit.indexOf('\ngpgsig') === -1) return commit + const headers = commit.slice(0, commit.indexOf('\ngpgsig')); + const message = commit.slice( + commit.indexOf('-----END PGP SIGNATURE-----\n') + + '-----END PGP SIGNATURE-----\n'.length + ); + return normalizeNewlines(headers + '\n' + message) + } + + isolateSignature() { + const signature = this._commit.slice( + this._commit.indexOf('-----BEGIN PGP SIGNATURE-----'), + this._commit.indexOf('-----END PGP SIGNATURE-----') + + '-----END PGP SIGNATURE-----'.length + ); + return outdent(signature) + } + + static async sign(commit, sign, secretKey) { + const payload = commit.withoutSignature(); + const message = GitCommit.justMessage(commit._commit); + let { signature } = await sign({ payload, secretKey }); + // renormalize the line endings to the one true line-ending + signature = normalizeNewlines(signature); + const headers = GitCommit.justHeaders(commit._commit); + const signedCommit = + headers + '\n' + 'gpgsig' + indent(signature) + '\n' + message; + // return a new commit object + return GitCommit.from(signedCommit) + } +} + +async function resolveTree({ fs, cache, gitdir, oid }) { + // Empty tree - bypass `readObject` + if (oid === '4b825dc642cb6eb9a060e54bf8d69288fbee4904') { + return { tree: GitTree.from([]), oid } + } + const { type, object } = await _readObject({ fs, cache, gitdir, oid }); + // Resolve annotated tag objects 
to whatever + if (type === 'tag') { + oid = GitAnnotatedTag.from(object).parse().object; + return resolveTree({ fs, cache, gitdir, oid }) + } + // Resolve commits to trees + if (type === 'commit') { + oid = GitCommit.from(object).parse().tree; + return resolveTree({ fs, cache, gitdir, oid }) + } + if (type !== 'tree') { + throw new ObjectTypeError(oid, type, 'tree') + } + return { tree: GitTree.from(object), oid } +} + +class GitWalkerRepo { + constructor({ fs, gitdir, ref, cache }) { + this.fs = fs; + this.cache = cache; + this.gitdir = gitdir; + this.mapPromise = (async () => { + const map = new Map(); + let oid; + try { + oid = await GitRefManager.resolve({ fs, gitdir, ref }); + } catch (e) { + if (e instanceof NotFoundError) { + // Handle fresh branches with no commits + oid = '4b825dc642cb6eb9a060e54bf8d69288fbee4904'; + } + } + const tree = await resolveTree({ fs, cache: this.cache, gitdir, oid }); + tree.type = 'tree'; + tree.mode = '40000'; + map.set('.', tree); + return map + })(); + const walker = this; + this.ConstructEntry = class TreeEntry { + constructor(fullpath) { + this._fullpath = fullpath; + this._type = false; + this._mode = false; + this._stat = false; + this._content = false; + this._oid = false; + } + + async type() { + return walker.type(this) + } + + async mode() { + return walker.mode(this) + } + + async stat() { + return walker.stat(this) + } + + async content() { + return walker.content(this) + } + + async oid() { + return walker.oid(this) + } + }; + } + + async readdir(entry) { + const filepath = entry._fullpath; + const { fs, cache, gitdir } = this; + const map = await this.mapPromise; + const obj = map.get(filepath); + if (!obj) throw new Error(`No obj for ${filepath}`) + const oid = obj.oid; + if (!oid) throw new Error(`No oid for obj ${JSON.stringify(obj)}`) + if (obj.type !== 'tree') { + // TODO: support submodules (type === 'commit') + return null + } + const { type, object } = await _readObject({ fs, cache, gitdir, oid }); + if (type !== obj.type) { + throw new ObjectTypeError(oid, type, obj.type) + } + const tree = GitTree.from(object); + // cache all entries + for (const entry of tree) { + map.set(join(filepath, entry.path), entry); + } + return tree.entries().map(entry => join(filepath, entry.path)) + } + + async type(entry) { + if (entry._type === false) { + const map = await this.mapPromise; + const { type } = map.get(entry._fullpath); + entry._type = type; + } + return entry._type + } + + async mode(entry) { + if (entry._mode === false) { + const map = await this.mapPromise; + const { mode } = map.get(entry._fullpath); + entry._mode = normalizeMode(parseInt(mode, 8)); + } + return entry._mode + } + + async stat(_entry) {} + + async content(entry) { + if (entry._content === false) { + const map = await this.mapPromise; + const { fs, cache, gitdir } = this; + const obj = map.get(entry._fullpath); + const oid = obj.oid; + const { type, object } = await _readObject({ fs, cache, gitdir, oid }); + if (type !== 'blob') { + entry._content = undefined; + } else { + entry._content = new Uint8Array(object); + } + } + return entry._content + } + + async oid(entry) { + if (entry._oid === false) { + const map = await this.mapPromise; + const obj = map.get(entry._fullpath); + entry._oid = obj.oid; + } + return entry._oid + } +} + +// @ts-check + +/** + * @param {object} args + * @param {string} [args.ref='HEAD'] + * @returns {Walker} + */ +function TREE({ ref = 'HEAD' } = {}) { + const o = Object.create(null); + Object.defineProperty(o, GitWalkSymbol, { + value: 
function({ fs, gitdir, cache }) { + return new GitWalkerRepo({ fs, gitdir, ref, cache }) + }, + }); + Object.freeze(o); + return o +} + +// @ts-check + +class GitWalkerFs { + constructor({ fs, dir, gitdir, cache }) { + this.fs = fs; + this.cache = cache; + this.dir = dir; + this.gitdir = gitdir; + const walker = this; + this.ConstructEntry = class WorkdirEntry { + constructor(fullpath) { + this._fullpath = fullpath; + this._type = false; + this._mode = false; + this._stat = false; + this._content = false; + this._oid = false; + } + + async type() { + return walker.type(this) + } + + async mode() { + return walker.mode(this) + } + + async stat() { + return walker.stat(this) + } + + async content() { + return walker.content(this) + } + + async oid() { + return walker.oid(this) + } + }; + } + + async readdir(entry) { + const filepath = entry._fullpath; + const { fs, dir } = this; + const names = await fs.readdir(join(dir, filepath)); + if (names === null) return null + return names.map(name => join(filepath, name)) + } + + async type(entry) { + if (entry._type === false) { + await entry.stat(); + } + return entry._type + } + + async mode(entry) { + if (entry._mode === false) { + await entry.stat(); + } + return entry._mode + } + + async stat(entry) { + if (entry._stat === false) { + const { fs, dir } = this; + let stat = await fs.lstat(`${dir}/${entry._fullpath}`); + if (!stat) { + throw new Error( + `ENOENT: no such file or directory, lstat '${entry._fullpath}'` + ) + } + let type = stat.isDirectory() ? 'tree' : 'blob'; + if (type === 'blob' && !stat.isFile() && !stat.isSymbolicLink()) { + type = 'special'; + } + entry._type = type; + stat = normalizeStats(stat); + entry._mode = stat.mode; + // workaround for a BrowserFS edge case + if (stat.size === -1 && entry._actualSize) { + stat.size = entry._actualSize; + } + entry._stat = stat; + } + return entry._stat + } + + async content(entry) { + if (entry._content === false) { + const { fs, dir, gitdir } = this; + if ((await entry.type()) === 'tree') { + entry._content = undefined; + } else { + const config = await GitConfigManager.get({ fs, gitdir }); + const autocrlf = await config.get('core.autocrlf'); + const content = await fs.read(`${dir}/${entry._fullpath}`, { autocrlf }); + // workaround for a BrowserFS edge case + entry._actualSize = content.length; + if (entry._stat && entry._stat.size === -1) { + entry._stat.size = entry._actualSize; + } + entry._content = new Uint8Array(content); + } + } + return entry._content + } + + async oid(entry) { + if (entry._oid === false) { + const { fs, gitdir, cache } = this; + let oid; + // See if we can use the SHA1 hash in the index. + await GitIndexManager.acquire({ fs, gitdir, cache }, async function( + index + ) { + const stage = index.entriesMap.get(entry._fullpath); + const stats = await entry.stat(); + const config = await GitConfigManager.get({ fs, gitdir }); + const filemode = await config.get('core.filemode'); + const trustino = + typeof process !== 'undefined' + ? 
!(process.platform === 'win32') + : true; + if (!stage || compareStats(stats, stage, filemode, trustino)) { + const content = await entry.content(); + if (content === undefined) { + oid = undefined; + } else { + oid = await shasum( + GitObject.wrap({ type: 'blob', object: await entry.content() }) + ); + // Update the stats in the index so we will get a "cache hit" next time + // 1) if we can (because the oid and mode are the same) + // 2) and only if we need to (because other stats differ) + if ( + stage && + oid === stage.oid && + (!filemode || stats.mode === stage.mode) && + compareStats(stats, stage, filemode, trustino) + ) { + index.insert({ + filepath: entry._fullpath, + stats, + oid: oid, + }); + } + } + } else { + // Use the index SHA1 rather than compute it + oid = stage.oid; + } + }); + entry._oid = oid; + } + return entry._oid + } +} + +// @ts-check + +/** + * @returns {Walker} + */ +function WORKDIR() { + const o = Object.create(null); + Object.defineProperty(o, GitWalkSymbol, { + value: function({ fs, dir, gitdir, cache }) { + return new GitWalkerFs({ fs, dir, gitdir, cache }) + }, + }); + Object.freeze(o); + return o +} + +// @ts-check + +// https://dev.to/namirsab/comment/2050 +function arrayRange(start, end) { + const length = end - start; + return Array.from({ length }, (_, i) => start + i) +} + +// TODO: Should I just polyfill Array.flat? +const flat = + typeof Array.prototype.flat === 'undefined' + ? entries => entries.reduce((acc, x) => acc.concat(x), []) + : entries => entries.flat(); + +// This is convenient for computing unions/joins of sorted lists. +class RunningMinimum { + constructor() { + // Using a getter for 'value' would just bloat the code. + // You know better than to set it directly right? + this.value = null; + } + + consider(value) { + if (value === null || value === undefined) return + if (this.value === null) { + this.value = value; + } else if (value < this.value) { + this.value = value; + } + } + + reset() { + this.value = null; + } +} + +// Take an array of length N of +// iterators of length Q_n +// of strings +// and return an iterator of length max(Q_n) for all n +// of arrays of length N +// of string|null who all have the same string value +function* unionOfIterators(sets) { + /* NOTE: We can assume all arrays are sorted. + * Indexes are sorted because they are defined that way: + * + * > Index entries are sorted in ascending order on the name field, + * > interpreted as a string of unsigned bytes (i.e. memcmp() order, no + * > localization, no special casing of directory separator '/'). Entries + * > with the same name are sorted by their stage field. + * + * Trees should be sorted because they are created directly from indexes. + * They definitely should be sorted, or else they wouldn't have a unique SHA1. + * So that would be very naughty on the part of the tree-creator. + * + * Lastly, the working dir entries are sorted because I choose to sort them + * in my FileSystem.readdir() implementation. 
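+ *
+ * For example, the union of the sorted iterators ['a', 'c'] and ['b', 'c']
+ * yields ['a', null], then [null, 'b'], then ['c', 'c'] - one row per
+ * distinct string, with equal values from different sets sharing a row.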
+ */ + + // Init + const min = new RunningMinimum(); + let minimum; + const heads = []; + const numsets = sets.length; + for (let i = 0; i < numsets; i++) { + // Abuse the fact that iterators continue to return 'undefined' for value + // once they are done + heads[i] = sets[i].next().value; + if (heads[i] !== undefined) { + min.consider(heads[i]); + } + } + if (min.value === null) return + // Iterate + while (true) { + const result = []; + minimum = min.value; + min.reset(); + for (let i = 0; i < numsets; i++) { + if (heads[i] !== undefined && heads[i] === minimum) { + result[i] = heads[i]; + heads[i] = sets[i].next().value; + } else { + // A little hacky, but eh + result[i] = null; + } + if (heads[i] !== undefined) { + min.consider(heads[i]); + } + } + yield result; + if (min.value === null) return + } +} + +// @ts-check + +/** + * @param {object} args + * @param {import('../models/FileSystem.js').FileSystem} args.fs + * @param {object} args.cache + * @param {string} [args.dir] + * @param {string} [args.gitdir=join(dir,'.git')] + * @param {Walker[]} args.trees + * @param {WalkerMap} [args.map] + * @param {WalkerReduce} [args.reduce] + * @param {WalkerIterate} [args.iterate] + * + * @returns {Promise} The finished tree-walking result + * + * @see {WalkerMap} + * + */ +async function _walk({ + fs, + cache, + dir, + gitdir, + trees, + // @ts-ignore + map = async (_, entry) => entry, + // The default reducer is a flatmap that filters out undefineds. + reduce = async (parent, children) => { + const flatten = flat(children); + if (parent !== undefined) flatten.unshift(parent); + return flatten + }, + // The default iterate function walks all children concurrently + iterate = (walk, children) => Promise.all([...children].map(walk)), +}) { + const walkers = trees.map(proxy => + proxy[GitWalkSymbol]({ fs, dir, gitdir, cache }) + ); + + const root = new Array(walkers.length).fill('.'); + const range = arrayRange(0, walkers.length); + const unionWalkerFromReaddir = async entries => { + range.map(i => { + entries[i] = entries[i] && new walkers[i].ConstructEntry(entries[i]); + }); + const subdirs = await Promise.all( + range.map(i => (entries[i] ? walkers[i].readdir(entries[i]) : [])) + ); + // Now process child directories + const iterators = subdirs + .map(array => (array === null ? [] : array)) + .map(array => array[Symbol.iterator]()); + return { + entries, + children: unionOfIterators(iterators), + } + }; + + const walk = async root => { + const { entries, children } = await unionWalkerFromReaddir(root); + const fullpath = entries.find(entry => entry && entry._fullpath)._fullpath; + const parent = await map(fullpath, entries); + if (parent !== null) { + let walkedChildren = await iterate(walk, children); + walkedChildren = walkedChildren.filter(x => x !== undefined); + return reduce(parent, walkedChildren) + } + }; + return walk(root) +} + +/** + * Removes the directory at the specified filepath recursively. Used internally to replicate the behavior of + * fs.promises.rm({ recursive: true, force: true }) from Node.js 14 and above when not available. If the provided + * filepath resolves to a file, it will be removed. + * + * @param {import('../models/FileSystem.js').FileSystem} fs + * @param {string} filepath - The file or directory to remove. 
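+ *
+ * Note that removal happens bottom-up: every child entry is deleted (and
+ * awaited) before the containing directory itself is removed with rmdir.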
+ */ +async function rmRecursive(fs, filepath) { + const entries = await fs.readdir(filepath); + if (entries == null) { + await fs.rm(filepath); + } else if (entries.length) { + await Promise.all( + entries.map(entry => { + const subpath = join(filepath, entry); + return fs.lstat(subpath).then(stat => { + if (!stat) return + return stat.isDirectory() ? rmRecursive(fs, subpath) : fs.rm(subpath) + }) + }) + ).then(() => fs.rmdir(filepath)); + } else { + await fs.rmdir(filepath); + } +} + +function isPromiseLike(obj) { + return isObject(obj) && isFunction(obj.then) && isFunction(obj.catch) +} + +function isObject(obj) { + return obj && typeof obj === 'object' +} + +function isFunction(obj) { + return typeof obj === 'function' +} + +function isPromiseFs(fs) { + const test = targetFs => { + try { + // If readFile returns a promise then we can probably assume the other + // commands do as well + return targetFs.readFile().catch(e => e) + } catch (e) { + return e + } + }; + return isPromiseLike(test(fs)) +} + +// List of commands all filesystems are expected to provide. `rm` is not +// included since it may not exist and must be handled as a special case +const commands = [ + 'readFile', + 'writeFile', + 'mkdir', + 'rmdir', + 'unlink', + 'stat', + 'lstat', + 'readdir', + 'readlink', + 'symlink', +]; + +function bindFs(target, fs) { + if (isPromiseFs(fs)) { + for (const command of commands) { + target[`_${command}`] = fs[command].bind(fs); + } + } else { + for (const command of commands) { + target[`_${command}`] = pify(fs[command].bind(fs)); + } + } + + // Handle the special case of `rm` + if (isPromiseFs(fs)) { + if (fs.rm) target._rm = fs.rm.bind(fs); + else if (fs.rmdir.length > 1) target._rm = fs.rmdir.bind(fs); + else target._rm = rmRecursive.bind(null, target); + } else { + if (fs.rm) target._rm = pify(fs.rm.bind(fs)); + else if (fs.rmdir.length > 2) target._rm = pify(fs.rmdir.bind(fs)); + else target._rm = rmRecursive.bind(null, target); + } +} + +/** + * This is just a collection of helper functions really. At least that's how it started. + */ +class FileSystem { + constructor(fs) { + if (typeof fs._original_unwrapped_fs !== 'undefined') return fs + + const promises = Object.getOwnPropertyDescriptor(fs, 'promises'); + if (promises && promises.enumerable) { + bindFs(this, fs.promises); + } else { + bindFs(this, fs); + } + this._original_unwrapped_fs = fs; + } + + /** + * Return true if a file exists, false if it doesn't exist. + * Rethrows errors that aren't related to file existence. + */ + async exists(filepath, options = {}) { + try { + await this._stat(filepath); + return true + } catch (err) { + if (err.code === 'ENOENT' || err.code === 'ENOTDIR') { + return false + } else { + console.log('Unhandled error in "FileSystem.exists()" function', err); + throw err + } + } + } + + /** + * Return the contents of a file if it exists, otherwise returns null. 
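+   * When `options.autocrlf === 'true'`, CRLF line endings in valid UTF-8
+   * files are normalized to LF before the buffer is returned; files that
+   * are not valid UTF-8 are passed through unchanged.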
+   *
+   * @param {string} filepath
+   * @param {object} [options]
+   *
+   * @returns {Promise}
+   */
+  async read(filepath, options = {}) {
+    try {
+      let buffer = await this._readFile(filepath, options);
+      if (options.autocrlf === 'true') {
+        try {
+          buffer = new TextDecoder('utf8', { fatal: true }).decode(buffer);
+          buffer = buffer.replace(/\r\n/g, '\n');
+          buffer = new TextEncoder().encode(buffer);
+        } catch (error) {
+          // non utf8 file
+        }
+      }
+      // Convert plain ArrayBuffers to Buffers
+      if (typeof buffer !== 'string') {
+        buffer = Buffer.from(buffer);
+      }
+      return buffer
+    } catch (err) {
+      return null
+    }
+  }
+
+  /**
+   * Write a file (creating missing directories if need be) without throwing errors.
+   *
+   * @param {string} filepath
+   * @param {Buffer|Uint8Array|string} contents
+   * @param {object|string} [options]
+   */
+  async write(filepath, contents, options = {}) {
+    try {
+      await this._writeFile(filepath, contents, options);
+      return
+    } catch (err) {
+      // Hmm. Let's try mkdirp and try again.
+      await this.mkdir(dirname(filepath));
+      await this._writeFile(filepath, contents, options);
+    }
+  }
+
+  /**
+   * Make a directory (or series of nested directories) without throwing an error if it already exists.
+   */
+  async mkdir(filepath, _selfCall = false) {
+    try {
+      await this._mkdir(filepath);
+      return
+    } catch (err) {
+      // If err is null then operation succeeded!
+      if (err === null) return
+      // If the directory already exists, that's OK!
+      if (err.code === 'EEXIST') return
+      // Avoid infinite loops of failure
+      if (_selfCall) throw err
+      // If we got a "no such file or directory" error, back up and try again.
+      if (err.code === 'ENOENT') {
+        const parent = dirname(filepath);
+        // Check to see if we've gone too far
+        if (parent === '.' || parent === '/' || parent === filepath) throw err
+        // Infinite recursion, what could go wrong?
+        await this.mkdir(parent);
+        await this.mkdir(filepath, true);
+      }
+    }
+  }
+
+  /**
+   * Delete a file without throwing an error if it is already deleted.
+   */
+  async rm(filepath) {
+    try {
+      await this._unlink(filepath);
+    } catch (err) {
+      if (err.code !== 'ENOENT') throw err
+    }
+  }
+
+  /**
+   * Delete a directory without throwing an error if it is already deleted.
+   */
+  async rmdir(filepath, opts) {
+    try {
+      if (opts && opts.recursive) {
+        await this._rm(filepath, opts);
+      } else {
+        await this._rmdir(filepath);
+      }
+    } catch (err) {
+      if (err.code !== 'ENOENT') throw err
+    }
+  }
+
+  /**
+   * Read a directory without throwing an error if the directory doesn't exist
+   */
+  async readdir(filepath) {
+    try {
+      const names = await this._readdir(filepath);
+      // Ordering is not guaranteed, and system-specific (Windows vs Unix)
+      // so we must sort them ourselves.
+      names.sort(compareStrings);
+      return names
+    } catch (err) {
+      if (err.code === 'ENOTDIR') return null
+      return []
+    }
+  }
+
+  /**
+   * Return a flat list of all the files nested inside a directory
+   *
+   * Based on an elegant concurrent recursive solution from SO
+   * https://stackoverflow.com/a/45130990/2168416
+   */
+  async readdirDeep(dir) {
+    const subdirs = await this._readdir(dir);
+    const files = await Promise.all(
+      subdirs.map(async subdir => {
+        const res = dir + '/' + subdir;
+        return (await this._stat(res)).isDirectory()
+          ? this.readdirDeep(res)
+          : res
+      })
+    );
+    return files.reduce((a, f) => a.concat(f), [])
+  }
+
+  /**
+   * Return the Stats of a file/symlink if it exists, otherwise returns null.
+   * Rethrows errors that aren't related to file existence.
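+   *
+   * @example
+   * // A sketch with a hypothetical path; `stats` is null when the file is absent:
+   * const stats = await fs.lstat('/tutorial/README.md')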
+   */
+  async lstat(filename) {
+    try {
+      const stats = await this._lstat(filename);
+      return stats
+    } catch (err) {
+      if (err.code === 'ENOENT') {
+        return null
+      }
+      throw err
+    }
+  }
+
+  /**
+   * Reads the contents of a symlink if it exists, otherwise returns null.
+   * Rethrows errors that aren't related to file existence.
+   */
+  async readlink(filename, opts = { encoding: 'buffer' }) {
+    // Note: FileSystem.readlink returns a buffer by default
+    // so we can dump it into GitObject.write just like any other file.
+    try {
+      const link = await this._readlink(filename, opts);
+      return Buffer.isBuffer(link) ? link : Buffer.from(link)
+    } catch (err) {
+      if (err.code === 'ENOENT') {
+        return null
+      }
+      throw err
+    }
+  }
+
+  /**
+   * Write the contents of buffer to a symlink.
+   */
+  async writelink(filename, buffer) {
+    return this._symlink(buffer.toString('utf8'), filename)
+  }
+}
+
+function assertParameter(name, value) {
+  if (value === undefined) {
+    throw new MissingParameterError(name)
+  }
+}
+
+// @ts-check
+/**
+ *
+ * @param {WalkerEntry} entry
+ * @param {WalkerEntry} base
+ *
+ */
+async function modified(entry, base) {
+  if (!entry && !base) return false
+  if (entry && !base) return true
+  if (!entry && base) return true
+  if ((await entry.type()) === 'tree' && (await base.type()) === 'tree') {
+    return false
+  }
+  if (
+    (await entry.type()) === (await base.type()) &&
+    (await entry.mode()) === (await base.mode()) &&
+    (await entry.oid()) === (await base.oid())
+  ) {
+    return false
+  }
+  return true
+}
+
+// @ts-check
+
+/**
+ * Abort a merge in progress.
+ *
+ * Based on the behavior of git reset --merge, i.e. "Resets the index and updates the files in the working tree that are different between <commit> and HEAD, but keeps those which are different between the index and working tree (i.e. which have changes which have not been added). If a file that is different between <commit> and the index has unstaged changes, reset is aborted."
+ *
+ * Essentially, abortMerge will reset any files affected by merge conflicts to their last known good version at HEAD.
+ * Any unstaged changes are saved and any staged changes are reset as well.
+ *
+ * NOTE: The behavior of this command differs slightly from canonical git in that an error will be thrown if a file exists in the index and nowhere else.
+ * Canonical git will reset the file and continue aborting the merge in this case.
+ *
+ * **WARNING:** Running git merge with non-trivial uncommitted changes is discouraged: while possible, it may leave you in a state that is hard to back out of in the case of a conflict.
+ * If there were uncommitted changes when the merge started (and especially if those changes were further modified after the merge was started), `git.abortMerge` will in some cases be unable to reconstruct the original (pre-merge) changes.
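+ *
+ * @example
+ * // A minimal sketch (assumes a merge with conflicts is in progress in '/tutorial'):
+ * await git.abortMerge({ fs, dir: '/tutorial' })
+ * console.log('merge aborted')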
+ * + * @param {object} args + * @param {FsClient} args.fs - a file system implementation + * @param {string} args.dir - The [working tree](dir-vs-gitdir.md) directory path + * @param {string} [args.gitdir=join(dir, '.git')] - [required] The [git directory](dir-vs-gitdir.md) path + * @param {string} [args.commit='HEAD'] - commit to reset the index and worktree to, defaults to HEAD + * @param {object} [args.cache] - a [cache](cache.md) object + * + * @returns {Promise} Resolves successfully once the git index has been updated + * + */ +async function abortMerge({ + fs: _fs, + dir, + gitdir = join(dir, '.git'), + commit = 'HEAD', + cache = {}, +}) { + try { + assertParameter('fs', _fs); + assertParameter('dir', dir); + assertParameter('gitdir', gitdir); + + const fs = new FileSystem(_fs); + const trees = [TREE({ ref: commit }), WORKDIR(), STAGE()]; + let unmergedPaths = []; + + await GitIndexManager.acquire({ fs, gitdir, cache }, async function(index) { + unmergedPaths = index.unmergedPaths; + }); + + const results = await _walk({ + fs, + cache, + dir, + gitdir, + trees, + map: async function(path, [head, workdir, index]) { + const staged = !(await modified(workdir, index)); + const unmerged = unmergedPaths.includes(path); + const unmodified = !(await modified(index, head)); + + if (staged || unmerged) { + return head + ? { + path, + mode: await head.mode(), + oid: await head.oid(), + type: await head.type(), + content: await head.content(), + } + : undefined + } + + if (unmodified) return false + else throw new IndexResetError(path) + }, + }); + + await GitIndexManager.acquire({ fs, gitdir, cache }, async function(index) { + // Reset paths in index and worktree, this can't be done in _walk because the + // STAGE walker acquires its own index lock. + + for (const entry of results) { + if (entry === false) continue + + // entry is not false, so from here we can assume index = workdir + if (!entry) { + await fs.rmdir(`${dir}/${entry.path}`, { recursive: true }); + index.delete({ filepath: entry.path }); + continue + } + + if (entry.type === 'blob') { + const content = new TextDecoder().decode(entry.content); + await fs.write(`${dir}/${entry.path}`, content, { mode: entry.mode }); + index.insert({ + filepath: entry.path, + oid: entry.oid, + stage: 0, + }); + } + } + }); + } catch (err) { + err.caller = 'git.abortMerge'; + throw err + } +} + +// I'm putting this in a Manager because I reckon it could benefit +// from a LOT of caching. +class GitIgnoreManager { + static async isIgnored({ fs, dir, gitdir = join(dir, '.git'), filepath }) { + // ALWAYS ignore ".git" folders. + if (basename(filepath) === '.git') return true + // '.' is not a valid gitignore entry, so '.' 
is never ignored
+    if (filepath === '.') return false
+    // Check and load exclusion rules from project exclude file (.git/info/exclude)
+    let excludes = '';
+    const excludesFile = join(gitdir, 'info', 'exclude');
+    if (await fs.exists(excludesFile)) {
+      excludes = await fs.read(excludesFile, 'utf8');
+    }
+    // Find all the .gitignore files that could affect this file
+    const pairs = [
+      {
+        gitignore: join(dir, '.gitignore'),
+        filepath,
+      },
+    ];
+    const pieces = filepath.split('/').filter(Boolean);
+    for (let i = 1; i < pieces.length; i++) {
+      const folder = pieces.slice(0, i).join('/');
+      const file = pieces.slice(i).join('/');
+      pairs.push({
+        gitignore: join(dir, folder, '.gitignore'),
+        filepath: file,
+      });
+    }
+    let ignoredStatus = false;
+    for (const p of pairs) {
+      let file;
+      try {
+        file = await fs.read(p.gitignore, 'utf8');
+      } catch (err) {
+        if (err.code === 'ENOENT') continue
+      }
+      const ign = ignore().add(excludes);
+      ign.add(file);
+      // If the parent directory is excluded, we are done.
+      // "It is not possible to re-include a file if a parent directory of that file is excluded. Git doesn’t list excluded directories for performance reasons, so any patterns on contained files have no effect, no matter where they are defined."
+      // source: https://git-scm.com/docs/gitignore
+      const parentdir = dirname(p.filepath);
+      if (parentdir !== '.' && ign.ignores(parentdir)) return true
+      // If the file is currently ignored, test for UNignoring.
+      if (ignoredStatus) {
+        ignoredStatus = !ign.test(p.filepath).unignored;
+      } else {
+        ignoredStatus = ign.test(p.filepath).ignored;
+      }
+    }
+    return ignoredStatus
+  }
+}
+
+async function writeObjectLoose({ fs, gitdir, object, format, oid }) {
+  if (format !== 'deflated') {
+    throw new InternalError(
+      'GitObjectStoreLoose expects objects to write to be in deflated format'
+    )
+  }
+  const source = `objects/${oid.slice(0, 2)}/${oid.slice(2)}`;
+  const filepath = `${gitdir}/${source}`;
+  // Don't overwrite existing git objects - this helps avoid EPERM errors.
+  // Although I don't know how we'd fix corrupted objects then. Perhaps delete them
+  // on read?
+  if (!(await fs.exists(filepath))) await fs.write(filepath, object);
+}
+
+/* eslint-env node, browser */
+
+let supportsCompressionStream = null;
+
+async function deflate(buffer) {
+  if (supportsCompressionStream === null) {
+    supportsCompressionStream = testCompressionStream();
+  }
+  return supportsCompressionStream
+    ? browserDeflate(buffer)
+    : pako.deflate(buffer)
+}
+
+async function browserDeflate(buffer) {
+  const cs = new CompressionStream('deflate');
+  const c = new Blob([buffer]).stream().pipeThrough(cs);
+  return new Uint8Array(await new Response(c).arrayBuffer())
+}
+
+function testCompressionStream() {
+  try {
+    const cs = new CompressionStream('deflate');
+    cs.writable.close();
+    // Test if `Blob.stream` is present. React Native does not have the `stream` method
+    const stream = new Blob([]).stream();
+    stream.cancel();
+    return true
+  } catch (_) {
+    return false
+  }
+}
+
+async function _writeObject({
+  fs,
+  gitdir,
+  type,
+  object,
+  format = 'content',
+  oid = undefined,
+  dryRun = false,
+}) {
+  if (format !== 'deflated') {
+    if (format !== 'wrapped') {
+      object = GitObject.wrap({ type, object });
+    }
+    oid = await shasum(object);
+    object = Buffer.from(await deflate(object));
+  }
+  if (!dryRun) {
+    await writeObjectLoose({ fs, gitdir, object, format: 'deflated', oid });
+  }
+  return oid
+}
+
+function posixifyPathBuffer(buffer) {
+  let idx;
+  while (~(idx = buffer.indexOf(92))) buffer[idx] = 47;
+  return buffer
+}
+
+// @ts-check
+
+/**
+ * Add a file to the git index (aka staging area)
+ *
+ * @param {object} args
+ * @param {FsClient} args.fs - a file system implementation
+ * @param {string} args.dir - The [working tree](dir-vs-gitdir.md) directory path
+ * @param {string} [args.gitdir=join(dir, '.git')] - [required] The [git directory](dir-vs-gitdir.md) path
+ * @param {string|string[]} args.filepath - The path to the file to add to the index
+ * @param {object} [args.cache] - a [cache](cache.md) object
+ * @param {boolean} [args.force=false] - add to index even if it matches gitignore. Think `git add --force`
+ * @param {boolean} [args.parallel=true] - process each input file in parallel. Parallel processing will result in more memory consumption but less processing time
+ *
+ * @returns {Promise} Resolves successfully once the git index has been updated
+ *
+ * @example
+ * await fs.promises.writeFile('/tutorial/README.md', `# TEST`)
+ * await git.add({ fs, dir: '/tutorial', filepath: 'README.md' })
+ * console.log('done')
+ *
+ */
+async function add({
+  fs: _fs,
+  dir,
+  gitdir = join(dir, '.git'),
+  filepath,
+  cache = {},
+  force = false,
+  parallel = true,
+}) {
+  try {
+    assertParameter('fs', _fs);
+    assertParameter('dir', dir);
+    assertParameter('gitdir', gitdir);
+    assertParameter('filepath', filepath);
+
+    const fs = new FileSystem(_fs);
+    await GitIndexManager.acquire({ fs, gitdir, cache }, async index => {
+      return addToIndex({
+        dir,
+        gitdir,
+        fs,
+        filepath,
+        index,
+        force,
+        parallel,
+      })
+    });
+  } catch (err) {
+    err.caller = 'git.add';
+    throw err
+  }
+}
+
+async function addToIndex({
+  dir,
+  gitdir,
+  fs,
+  filepath,
+  index,
+  force,
+  parallel,
+}) {
+  // TODO: Should ignore UNLESS it's already in the index.
+  filepath = Array.isArray(filepath) ? filepath : [filepath];
+  const promises = filepath.map(async currentFilepath => {
+    if (!force) {
+      const ignored = await GitIgnoreManager.isIgnored({
+        fs,
+        dir,
+        gitdir,
+        filepath: currentFilepath,
+      });
+      if (ignored) return
+    }
+    const stats = await fs.lstat(join(dir, currentFilepath));
+    if (!stats) throw new NotFoundError(currentFilepath)
+
+    if (stats.isDirectory()) {
+      const children = await fs.readdir(join(dir, currentFilepath));
+      if (parallel) {
+        const promises = children.map(child =>
+          addToIndex({
+            dir,
+            gitdir,
+            fs,
+            filepath: [join(currentFilepath, child)],
+            index,
+            force,
+            parallel,
+          })
+        );
+        await Promise.all(promises);
+      } else {
+        for (const child of children) {
+          await addToIndex({
+            dir,
+            gitdir,
+            fs,
+            filepath: [join(currentFilepath, child)],
+            index,
+            force,
+            parallel,
+          });
+        }
+      }
+    } else {
+      const config = await GitConfigManager.get({ fs, gitdir });
+      const autocrlf = await config.get('core.autocrlf');
+      const object = stats.isSymbolicLink()
+        ?
await fs.readlink(join(dir, currentFilepath)).then(posixifyPathBuffer) + : await fs.read(join(dir, currentFilepath), { autocrlf }); + if (object === null) throw new NotFoundError(currentFilepath) + const oid = await _writeObject({ fs, gitdir, type: 'blob', object }); + index.insert({ filepath: currentFilepath, stats, oid }); + } + }); + + const settledPromises = await Promise.allSettled(promises); + const rejectedPromises = settledPromises + .filter(settle => settle.status === 'rejected') + .map(settle => settle.reason); + if (rejectedPromises.length > 1) { + throw new MultipleGitError(rejectedPromises) + } + if (rejectedPromises.length === 1) { + throw rejectedPromises[0] + } + + const fulfilledPromises = settledPromises + .filter(settle => settle.status === 'fulfilled' && settle.value) + .map(settle => settle.value); + + return fulfilledPromises +} + +// @ts-check + +/** + * @param {Object} args + * @param {import('../models/FileSystem.js').FileSystem} args.fs + * @param {string} args.gitdir + * @param {string} args.path + * + * @returns {Promise} Resolves with the config value + * + * @example + * // Read config value + * let value = await git.getConfig({ + * dir: '$input((/))', + * path: '$input((user.name))' + * }) + * console.log(value) + * + */ +async function _getConfig({ fs, gitdir, path }) { + const config = await GitConfigManager.get({ fs, gitdir }); + return config.get(path) +} + +// Like Object.assign but ignore properties with undefined values +// ref: https://stackoverflow.com/q/39513815 +function assignDefined(target, ...sources) { + for (const source of sources) { + if (source) { + for (const key of Object.keys(source)) { + const val = source[key]; + if (val !== undefined) { + target[key] = val; + } + } + } + } + return target +} + +/** + * Return author object by using properties following this priority: + * (1) provided author object + * -> (2) author of provided commit object + * -> (3) Config and current date/time + * + * @param {Object} args + * @param {FsClient} args.fs - a file system implementation + * @param {string} [args.gitdir] - The [git directory](dir-vs-gitdir.md) path + * @param {Object} [args.author] - The author object. + * @param {CommitObject} [args.commit] - A commit object. + * + * @returns {Promise} + */ +async function normalizeAuthorObject({ fs, gitdir, author, commit }) { + const timestamp = Math.floor(Date.now() / 1000); + + const defaultAuthor = { + name: await _getConfig({ fs, gitdir, path: 'user.name' }), + email: (await _getConfig({ fs, gitdir, path: 'user.email' })) || '', // author.email is allowed to be empty string + timestamp, + timezoneOffset: new Date(timestamp * 1000).getTimezoneOffset(), + }; + + // Populate author object by using properties with this priority: + // (1) provided author object + // -> (2) author of provided commit object + // -> (3) default author + const normalizedAuthor = assignDefined( + {}, + defaultAuthor, + commit ? commit.author : undefined, + author + ); + + if (normalizedAuthor.name === undefined) { + return undefined + } + + return normalizedAuthor +} + +/** + * Return committer object by using properties with this priority: + * (1) provided committer object + * -> (2) provided author object + * -> (3) committer of provided commit object + * -> (4) Config and current date/time + * + * @param {Object} args + * @param {FsClient} args.fs - a file system implementation + * @param {string} [args.gitdir] - The [git directory](dir-vs-gitdir.md) path + * @param {Object} [args.author] - The author object. 
+ * @param {Object} [args.committer] - The committer object. + * @param {CommitObject} [args.commit] - A commit object. + * + * @returns {Promise} + */ +async function normalizeCommitterObject({ + fs, + gitdir, + author, + committer, + commit, +}) { + const timestamp = Math.floor(Date.now() / 1000); + + const defaultCommitter = { + name: await _getConfig({ fs, gitdir, path: 'user.name' }), + email: (await _getConfig({ fs, gitdir, path: 'user.email' })) || '', // committer.email is allowed to be empty string + timestamp, + timezoneOffset: new Date(timestamp * 1000).getTimezoneOffset(), + }; + + const normalizedCommitter = assignDefined( + {}, + defaultCommitter, + commit ? commit.committer : undefined, + author, + committer + ); + + if (normalizedCommitter.name === undefined) { + return undefined + } + return normalizedCommitter +} + +async function resolveCommit({ fs, cache, gitdir, oid }) { + const { type, object } = await _readObject({ fs, cache, gitdir, oid }); + // Resolve annotated tag objects to whatever + if (type === 'tag') { + oid = GitAnnotatedTag.from(object).parse().object; + return resolveCommit({ fs, cache, gitdir, oid }) + } + if (type !== 'commit') { + throw new ObjectTypeError(oid, type, 'commit') + } + return { commit: GitCommit.from(object), oid } +} + +// @ts-check + +/** + * @param {object} args + * @param {import('../models/FileSystem.js').FileSystem} args.fs + * @param {any} args.cache + * @param {string} args.gitdir + * @param {string} args.oid + * + * @returns {Promise} Resolves successfully with a git commit object + * @see ReadCommitResult + * @see CommitObject + * + */ +async function _readCommit({ fs, cache, gitdir, oid }) { + const { commit, oid: commitOid } = await resolveCommit({ + fs, + cache, + gitdir, + oid, + }); + const result = { + oid: commitOid, + commit: commit.parse(), + payload: commit.withoutSignature(), + }; + // @ts-ignore + return result +} + +// @ts-check + +/** + * + * @param {Object} args + * @param {import('../models/FileSystem.js').FileSystem} args.fs + * @param {object} args.cache + * @param {SignCallback} [args.onSign] + * @param {string} args.gitdir + * @param {string} [args.message] + * @param {Object} [args.author] + * @param {string} [args.author.name] + * @param {string} [args.author.email] + * @param {number} [args.author.timestamp] + * @param {number} [args.author.timezoneOffset] + * @param {Object} [args.committer] + * @param {string} [args.committer.name] + * @param {string} [args.committer.email] + * @param {number} [args.committer.timestamp] + * @param {number} [args.committer.timezoneOffset] + * @param {string} [args.signingKey] + * @param {boolean} [args.amend = false] + * @param {boolean} [args.dryRun = false] + * @param {boolean} [args.noUpdateBranch = false] + * @param {string} [args.ref] + * @param {string[]} [args.parent] + * @param {string} [args.tree] + * + * @returns {Promise} Resolves successfully with the SHA-1 object id of the newly created commit. 
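+ *
+ * @example
+ * // An illustrative sketch only (assumes a FileSystem `fs`, a `cache` object,
+ * // and a repository at '/tutorial'; the message and author are made up):
+ * const oid = await _commit({
+ *   fs,
+ *   cache,
+ *   gitdir: '/tutorial/.git',
+ *   message: 'initial commit',
+ *   author: { name: 'Mr. Test', email: 'mrtest@example.com' },
+ * })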
+ */ +async function _commit({ + fs, + cache, + onSign, + gitdir, + message, + author: _author, + committer: _committer, + signingKey, + amend = false, + dryRun = false, + noUpdateBranch = false, + ref, + parent, + tree, +}) { + // Determine ref and the commit pointed to by ref, and if it is the initial commit + let initialCommit = false; + if (!ref) { + ref = await GitRefManager.resolve({ + fs, + gitdir, + ref: 'HEAD', + depth: 2, + }); + } + + let refOid, refCommit; + try { + refOid = await GitRefManager.resolve({ + fs, + gitdir, + ref, + }); + refCommit = await _readCommit({ fs, gitdir, oid: refOid, cache: {} }); + } catch { + // We assume that there's no commit and this is the initial commit + initialCommit = true; + } + + if (amend && initialCommit) { + throw new NoCommitError(ref) + } + + // Determine author and committer information + const author = !amend + ? await normalizeAuthorObject({ fs, gitdir, author: _author }) + : await normalizeAuthorObject({ + fs, + gitdir, + author: _author, + commit: refCommit.commit, + }); + if (!author) throw new MissingNameError('author') + + const committer = !amend + ? await normalizeCommitterObject({ + fs, + gitdir, + author, + committer: _committer, + }) + : await normalizeCommitterObject({ + fs, + gitdir, + author, + committer: _committer, + commit: refCommit.commit, + }); + if (!committer) throw new MissingNameError('committer') + + return GitIndexManager.acquire( + { fs, gitdir, cache, allowUnmerged: false }, + async function(index) { + const inodes = flatFileListToDirectoryStructure(index.entries); + const inode = inodes.get('.'); + if (!tree) { + tree = await constructTree({ fs, gitdir, inode, dryRun }); + } + + // Determine parents of this commit + if (!parent) { + if (!amend) { + parent = refOid ? [refOid] : []; + } else { + parent = refCommit.commit.parent; + } + } else { + // ensure that the parents are oids, not refs + parent = await Promise.all( + parent.map(p => { + return GitRefManager.resolve({ fs, gitdir, ref: p }) + }) + ); + } + + // Determine message of this commit + if (!message) { + if (!amend) { + throw new MissingParameterError('message') + } else { + message = refCommit.commit.message; + } + } + + // Create and write new Commit object + let comm = GitCommit.from({ + tree, + parent, + author, + committer, + message, + }); + if (signingKey) { + comm = await GitCommit.sign(comm, onSign, signingKey); + } + const oid = await _writeObject({ + fs, + gitdir, + type: 'commit', + object: comm.toObject(), + dryRun, + }); + if (!noUpdateBranch && !dryRun) { + // Update branch pointer + await GitRefManager.writeRef({ + fs, + gitdir, + ref, + value: oid, + }); + } + return oid + } + ) +} + +async function constructTree({ fs, gitdir, inode, dryRun }) { + // use depth first traversal + const children = inode.children; + for (const inode of children) { + if (inode.type === 'tree') { + inode.metadata.mode = '040000'; + inode.metadata.oid = await constructTree({ fs, gitdir, inode, dryRun }); + } + } + const entries = children.map(inode => ({ + mode: inode.metadata.mode, + path: inode.basename, + oid: inode.metadata.oid, + type: inode.type, + })); + const tree = GitTree.from(entries); + const oid = await _writeObject({ + fs, + gitdir, + type: 'tree', + object: tree.toObject(), + dryRun, + }); + return oid +} + +// @ts-check + +async function resolveFilepath({ fs, cache, gitdir, oid, filepath }) { + // Ensure there are no leading or trailing directory separators. 
+  // I was going to do this automatically, but then found that the Git Terminal for Windows
+  // auto-expands --filepath=/src/utils to --filepath=C:/Users/Will/AppData/Local/Programs/Git/src/utils
+  // so I figured it would be wise to promote the behavior in the application layer, not just the library layer.
+  if (filepath.startsWith('/')) {
+    throw new InvalidFilepathError('leading-slash')
+  } else if (filepath.endsWith('/')) {
+    throw new InvalidFilepathError('trailing-slash')
+  }
+  const _oid = oid;
+  const result = await resolveTree({ fs, cache, gitdir, oid });
+  const tree = result.tree;
+  if (filepath === '') {
+    oid = result.oid;
+  } else {
+    const pathArray = filepath.split('/');
+    oid = await _resolveFilepath({
+      fs,
+      cache,
+      gitdir,
+      tree,
+      pathArray,
+      oid: _oid,
+      filepath,
+    });
+  }
+  return oid
+}
+
+async function _resolveFilepath({
+  fs,
+  cache,
+  gitdir,
+  tree,
+  pathArray,
+  oid,
+  filepath,
+}) {
+  const name = pathArray.shift();
+  for (const entry of tree) {
+    if (entry.path === name) {
+      if (pathArray.length === 0) {
+        return entry.oid
+      } else {
+        const { type, object } = await _readObject({
+          fs,
+          cache,
+          gitdir,
+          oid: entry.oid,
+        });
+        if (type !== 'tree') {
+          throw new ObjectTypeError(oid, type, 'tree', filepath)
+        }
+        tree = GitTree.from(object);
+        return _resolveFilepath({
+          fs,
+          cache,
+          gitdir,
+          tree,
+          pathArray,
+          oid,
+          filepath,
+        })
+      }
+    }
+  }
+  throw new NotFoundError(`file or directory at "${oid}:${filepath}"`)
+}
+
+// @ts-check
+
+/**
+ *
+ * @typedef {Object} ReadTreeResult - The object returned has the following schema:
+ * @property {string} oid - SHA-1 object id of this tree
+ * @property {TreeObject} tree - the parsed tree object
+ */
+
+/**
+ * @param {object} args
+ * @param {import('../models/FileSystem.js').FileSystem} args.fs
+ * @param {any} args.cache
+ * @param {string} args.gitdir
+ * @param {string} args.oid
+ * @param {string} [args.filepath]
+ *
+ * @returns {Promise}
+ */
+async function _readTree({
+  fs,
+  cache,
+  gitdir,
+  oid,
+  filepath = undefined,
+}) {
+  if (filepath !== undefined) {
+    oid = await resolveFilepath({ fs, cache, gitdir, oid, filepath });
+  }
+  const { tree, oid: treeOid } = await resolveTree({ fs, cache, gitdir, oid });
+  const result = {
+    oid: treeOid,
+    tree: tree.entries(),
+  };
+  return result
+}
+
+// @ts-check
+
+/**
+ * @param {object} args
+ * @param {import('../models/FileSystem.js').FileSystem} args.fs
+ * @param {string} args.gitdir
+ * @param {TreeObject} args.tree
+ *
+ * @returns {Promise}
+ */
+async function _writeTree({ fs, gitdir, tree }) {
+  // Convert object to buffer
+  const object = GitTree.from(tree).toObject();
+  const oid = await _writeObject({
+    fs,
+    gitdir,
+    type: 'tree',
+    object,
+    format: 'content',
+  });
+  return oid
+}
+
+// @ts-check
+
+/**
+ * @param {object} args
+ * @param {import('../models/FileSystem.js').FileSystem} args.fs
+ * @param {object} args.cache
+ * @param {SignCallback} [args.onSign]
+ * @param {string} args.gitdir
+ * @param {string} args.ref
+ * @param {string} args.oid
+ * @param {string|Uint8Array} args.note
+ * @param {boolean} [args.force]
+ * @param {Object} args.author
+ * @param {string} args.author.name
+ * @param {string} args.author.email
+ * @param {number} args.author.timestamp
+ * @param {number} args.author.timezoneOffset
+ * @param {Object} args.committer
+ * @param {string} args.committer.name
+ * @param {string} args.committer.email
+ * @param {number} args.committer.timestamp
+ * @param {number} args.committer.timezoneOffset
+ * @param {string} [args.signingKey] + * + * @returns {Promise} + */ + +async function _addNote({ + fs, + cache, + onSign, + gitdir, + ref, + oid, + note, + force, + author, + committer, + signingKey, +}) { + // Get the current note commit + let parent; + try { + parent = await GitRefManager.resolve({ gitdir, fs, ref }); + } catch (err) { + if (!(err instanceof NotFoundError)) { + throw err + } + } + + // I'm using the "empty tree" magic number here for brevity + const result = await _readTree({ + fs, + cache, + gitdir, + oid: parent || '4b825dc642cb6eb9a060e54bf8d69288fbee4904', + }); + let tree = result.tree; + + // Handle the case where a note already exists + if (force) { + tree = tree.filter(entry => entry.path !== oid); + } else { + for (const entry of tree) { + if (entry.path === oid) { + throw new AlreadyExistsError('note', oid) + } + } + } + + // Create the note blob + if (typeof note === 'string') { + note = Buffer.from(note, 'utf8'); + } + const noteOid = await _writeObject({ + fs, + gitdir, + type: 'blob', + object: note, + format: 'content', + }); + + // Create the new note tree + tree.push({ mode: '100644', path: oid, oid: noteOid, type: 'blob' }); + const treeOid = await _writeTree({ + fs, + gitdir, + tree, + }); + + // Create the new note commit + const commitOid = await _commit({ + fs, + cache, + onSign, + gitdir, + ref, + tree: treeOid, + parent: parent && [parent], + message: `Note added by 'isomorphic-git addNote'\n`, + author, + committer, + signingKey, + }); + + return commitOid +} + +// @ts-check + +/** + * Add or update an object note + * + * @param {object} args + * @param {FsClient} args.fs - a file system implementation + * @param {SignCallback} [args.onSign] - a PGP signing implementation + * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path + * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path + * @param {string} [args.ref] - The notes ref to look under + * @param {string} args.oid - The SHA-1 object id of the object to add the note to. + * @param {string|Uint8Array} args.note - The note to add + * @param {boolean} [args.force] - Over-write note if it already exists. + * @param {Object} [args.author] - The details about the author. + * @param {string} [args.author.name] - Default is `user.name` config. + * @param {string} [args.author.email] - Default is `user.email` config. + * @param {number} [args.author.timestamp=Math.floor(Date.now()/1000)] - Set the author timestamp field. This is the integer number of seconds since the Unix epoch (1970-01-01 00:00:00). + * @param {number} [args.author.timezoneOffset] - Set the author timezone offset field. This is the difference, in minutes, from the current timezone to UTC. Default is `(new Date()).getTimezoneOffset()`. + * @param {Object} [args.committer = author] - The details about the note committer, in the same format as the author parameter. If not specified, the author details are used. + * @param {string} [args.committer.name] - Default is `user.name` config. + * @param {string} [args.committer.email] - Default is `user.email` config. + * @param {number} [args.committer.timestamp=Math.floor(Date.now()/1000)] - Set the committer timestamp field. This is the integer number of seconds since the Unix epoch (1970-01-01 00:00:00). + * @param {number} [args.committer.timezoneOffset] - Set the committer timezone offset field. This is the difference, in minutes, from the current timezone to UTC. 
Default is `(new Date()).getTimezoneOffset()`. + * @param {string} [args.signingKey] - Sign the note commit using this private PGP key. + * @param {object} [args.cache] - a [cache](cache.md) object + * + * @returns {Promise} Resolves successfully with the SHA-1 object id of the commit object for the added note. + */ + +async function addNote({ + fs: _fs, + onSign, + dir, + gitdir = join(dir, '.git'), + ref = 'refs/notes/commits', + oid, + note, + force, + author: _author, + committer: _committer, + signingKey, + cache = {}, +}) { + try { + assertParameter('fs', _fs); + assertParameter('gitdir', gitdir); + assertParameter('oid', oid); + assertParameter('note', note); + if (signingKey) { + assertParameter('onSign', onSign); + } + const fs = new FileSystem(_fs); + + const author = await normalizeAuthorObject({ fs, gitdir, author: _author }); + if (!author) throw new MissingNameError('author') + + const committer = await normalizeCommitterObject({ + fs, + gitdir, + author, + committer: _committer, + }); + if (!committer) throw new MissingNameError('committer') + + return await _addNote({ + fs: new FileSystem(fs), + cache, + onSign, + gitdir, + ref, + oid, + note, + force, + author, + committer, + signingKey, + }) + } catch (err) { + err.caller = 'git.addNote'; + throw err + } +} + +// @ts-check + +/** + * @param {object} args + * @param {import('../models/FileSystem.js').FileSystem} args.fs + * @param {string} args.gitdir + * @param {string} args.remote + * @param {string} args.url + * @param {boolean} args.force + * + * @returns {Promise} + * + */ +async function _addRemote({ fs, gitdir, remote, url, force }) { + if (remote !== cleanGitRef.clean(remote)) { + throw new InvalidRefNameError(remote, cleanGitRef.clean(remote)) + } + const config = await GitConfigManager.get({ fs, gitdir }); + if (!force) { + // Check that setting it wouldn't overwrite. + const remoteNames = await config.getSubsections('remote'); + if (remoteNames.includes(remote)) { + // Throw an error if it would overwrite an existing remote, + // but not if it's simply setting the same value again. + if (url !== (await config.get(`remote.${remote}.url`))) { + throw new AlreadyExistsError('remote', remote) + } + } + } + await config.set(`remote.${remote}.url`, url); + await config.set( + `remote.${remote}.fetch`, + `+refs/heads/*:refs/remotes/${remote}/*` + ); + await GitConfigManager.save({ fs, gitdir, config }); +} + +// @ts-check + +/** + * Add or update a remote + * + * @param {object} args + * @param {FsClient} args.fs - a file system implementation + * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path + * @param {string} [args.gitdir] - [required] The [git directory](dir-vs-gitdir.md) path + * @param {string} args.remote - The name of the remote + * @param {string} args.url - The URL of the remote + * @param {boolean} [args.force = false] - Instead of throwing an error if a remote named `remote` already exists, overwrite the existing remote. 
+ * + * @returns {Promise} Resolves successfully when filesystem operations are complete + * + * @example + * await git.addRemote({ + * fs, + * dir: '/tutorial', + * remote: 'upstream', + * url: 'https://github.com/isomorphic-git/isomorphic-git' + * }) + * console.log('done') + * + */ +async function addRemote({ + fs, + dir, + gitdir = join(dir, '.git'), + remote, + url, + force = false, +}) { + try { + assertParameter('fs', fs); + assertParameter('gitdir', gitdir); + assertParameter('remote', remote); + assertParameter('url', url); + return await _addRemote({ + fs: new FileSystem(fs), + gitdir, + remote, + url, + force, + }) + } catch (err) { + err.caller = 'git.addRemote'; + throw err + } +} + +// @ts-check + +/** + * Create an annotated tag. + * + * @param {object} args + * @param {import('../models/FileSystem.js').FileSystem} args.fs + * @param {any} args.cache + * @param {SignCallback} [args.onSign] + * @param {string} args.gitdir + * @param {string} args.ref + * @param {string} [args.message = ref] + * @param {string} [args.object = 'HEAD'] + * @param {object} [args.tagger] + * @param {string} args.tagger.name + * @param {string} args.tagger.email + * @param {number} args.tagger.timestamp + * @param {number} args.tagger.timezoneOffset + * @param {string} [args.gpgsig] + * @param {string} [args.signingKey] + * @param {boolean} [args.force = false] + * + * @returns {Promise} Resolves successfully when filesystem operations are complete + * + * @example + * await git.annotatedTag({ + * dir: '$input((/))', + * ref: '$input((test-tag))', + * message: '$input((This commit is awesome))', + * tagger: { + * name: '$input((Mr. Test))', + * email: '$input((mrtest@example.com))' + * } + * }) + * console.log('done') + * + */ +async function _annotatedTag({ + fs, + cache, + onSign, + gitdir, + ref, + tagger, + message = ref, + gpgsig, + object, + signingKey, + force = false, +}) { + ref = ref.startsWith('refs/tags/') ? ref : `refs/tags/${ref}`; + + if (!force && (await GitRefManager.exists({ fs, gitdir, ref }))) { + throw new AlreadyExistsError('tag', ref) + } + + // Resolve passed value + const oid = await GitRefManager.resolve({ + fs, + gitdir, + ref: object || 'HEAD', + }); + + const { type } = await _readObject({ fs, cache, gitdir, oid }); + let tagObject = GitAnnotatedTag.from({ + object: oid, + type, + tag: ref.replace('refs/tags/', ''), + tagger, + message, + gpgsig, + }); + if (signingKey) { + tagObject = await GitAnnotatedTag.sign(tagObject, onSign, signingKey); + } + const value = await _writeObject({ + fs, + gitdir, + type: 'tag', + object: tagObject.toObject(), + }); + + await GitRefManager.writeRef({ fs, gitdir, ref, value }); +} + +// @ts-check + +/** + * Create an annotated tag. + * + * @param {object} args + * @param {FsClient} args.fs - a file system implementation + * @param {SignCallback} [args.onSign] - a PGP signing implementation + * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path + * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path + * @param {string} args.ref - What to name the tag + * @param {string} [args.message = ref] - The tag message to use. + * @param {string} [args.object = 'HEAD'] - The SHA-1 object id the tag points to. (Will resolve to a SHA-1 object id if value is a ref.) By default, the commit object which is referred by the current `HEAD` is used. + * @param {object} [args.tagger] - The details about the tagger. 
+ * @param {string} [args.tagger.name] - Default is `user.name` config. + * @param {string} [args.tagger.email] - Default is `user.email` config. + * @param {number} [args.tagger.timestamp=Math.floor(Date.now()/1000)] - Set the tagger timestamp field. This is the integer number of seconds since the Unix epoch (1970-01-01 00:00:00). + * @param {number} [args.tagger.timezoneOffset] - Set the tagger timezone offset field. This is the difference, in minutes, from the current timezone to UTC. Default is `(new Date()).getTimezoneOffset()`. + * @param {string} [args.gpgsig] - The gpgsig attached to the tag object. (Mutually exclusive with the `signingKey` option.) + * @param {string} [args.signingKey] - Sign the tag object using this private PGP key. (Mutually exclusive with the `gpgsig` option.) + * @param {boolean} [args.force = false] - Instead of throwing an error if a tag named `ref` already exists, overwrite the existing tag. Note that this option does not modify the original tag object itself. + * @param {object} [args.cache] - a [cache](cache.md) object + * + * @returns {Promise} Resolves successfully when filesystem operations are complete + * + * @example + * await git.annotatedTag({ + * fs, + * dir: '/tutorial', + * ref: 'test-tag', + * message: 'This commit is awesome', + * tagger: { + * name: 'Mr. Test', + * email: 'mrtest@example.com' + * } + * }) + * console.log('done') + * + */ +async function annotatedTag({ + fs: _fs, + onSign, + dir, + gitdir = join(dir, '.git'), + ref, + tagger: _tagger, + message = ref, + gpgsig, + object, + signingKey, + force = false, + cache = {}, +}) { + try { + assertParameter('fs', _fs); + assertParameter('gitdir', gitdir); + assertParameter('ref', ref); + if (signingKey) { + assertParameter('onSign', onSign); + } + const fs = new FileSystem(_fs); + + // Fill in missing arguments with default values + const tagger = await normalizeAuthorObject({ fs, gitdir, author: _tagger }); + if (!tagger) throw new MissingNameError('tagger') + + return await _annotatedTag({ + fs, + cache, + onSign, + gitdir, + ref, + tagger, + message, + gpgsig, + object, + signingKey, + force, + }) + } catch (err) { + err.caller = 'git.annotatedTag'; + throw err + } +} + +// @ts-check + +/** + * Create a branch + * + * @param {object} args + * @param {import('../models/FileSystem.js').FileSystem} args.fs + * @param {string} args.gitdir + * @param {string} args.ref + * @param {string} [args.object = 'HEAD'] + * @param {boolean} [args.checkout = false] + * @param {boolean} [args.force = false] + * + * @returns {Promise} Resolves successfully when filesystem operations are complete + * + * @example + * await git.branch({ dir: '$input((/))', ref: '$input((develop))' }) + * console.log('done') + * + */ +async function _branch({ + fs, + gitdir, + ref, + object, + checkout = false, + force = false, +}) { + if (ref !== cleanGitRef.clean(ref)) { + throw new InvalidRefNameError(ref, cleanGitRef.clean(ref)) + } + + const fullref = `refs/heads/${ref}`; + + if (!force) { + const exist = await GitRefManager.exists({ fs, gitdir, ref: fullref }); + if (exist) { + throw new AlreadyExistsError('branch', ref, false) + } + } + + // Get current HEAD tree oid + let oid; + try { + oid = await GitRefManager.resolve({ fs, gitdir, ref: object || 'HEAD' }); + } catch (e) { + // Probably an empty repo + } + + // Create a new ref that points at the current commit + if (oid) { + await GitRefManager.writeRef({ fs, gitdir, ref: fullref, value: oid }); + } + + if (checkout) { + // Update HEAD + await 
GitRefManager.writeSymbolicRef({
+      fs,
+      gitdir,
+      ref: 'HEAD',
+      value: fullref,
+    });
+  }
+}
+
+// @ts-check
+
+/**
+ * Create a branch
+ *
+ * @param {object} args
+ * @param {FsClient} args.fs - a file system implementation
+ * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
+ * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
+ * @param {string} args.ref - What to name the branch
+ * @param {string} [args.object = 'HEAD'] - What oid to use as the start point. Accepts a symbolic ref.
+ * @param {boolean} [args.checkout = false] - Update `HEAD` to point at the newly created branch
+ * @param {boolean} [args.force = false] - Instead of throwing an error if a branch named `ref` already exists, overwrite the existing branch.
+ *
+ * @returns {Promise} Resolves successfully when filesystem operations are complete
+ *
+ * @example
+ * await git.branch({ fs, dir: '/tutorial', ref: 'develop' })
+ * console.log('done')
+ *
+ */
+async function branch({
+  fs,
+  dir,
+  gitdir = join(dir, '.git'),
+  ref,
+  object,
+  checkout = false,
+  force = false,
+}) {
+  try {
+    assertParameter('fs', fs);
+    assertParameter('gitdir', gitdir);
+    assertParameter('ref', ref);
+    return await _branch({
+      fs: new FileSystem(fs),
+      gitdir,
+      ref,
+      object,
+      checkout,
+      force,
+    })
+  } catch (err) {
+    err.caller = 'git.branch';
+    throw err
+  }
+}
+
+const worthWalking = (filepath, root) => {
+  if (filepath === '.' || root == null || root.length === 0 || root === '.') {
+    return true
+  }
+  if (root.length >= filepath.length) {
+    return root.startsWith(filepath)
+  } else {
+    return filepath.startsWith(root)
+  }
+};
+
+// @ts-check
+
+/**
+ * @param {object} args
+ * @param {import('../models/FileSystem.js').FileSystem} args.fs
+ * @param {any} args.cache
+ * @param {ProgressCallback} [args.onProgress]
+ * @param {PostCheckoutCallback} [args.onPostCheckout]
+ * @param {string} args.dir
+ * @param {string} args.gitdir
+ * @param {string} args.ref
+ * @param {string[]} [args.filepaths]
+ * @param {string} args.remote
+ * @param {boolean} args.noCheckout
+ * @param {boolean} [args.noUpdateHead]
+ * @param {boolean} [args.dryRun]
+ * @param {boolean} [args.force]
+ * @param {boolean} [args.track]
+ *
+ * @returns {Promise} Resolves successfully when filesystem operations are complete
+ *
+ */
+async function _checkout({
+  fs,
+  cache,
+  onProgress,
+  onPostCheckout,
+  dir,
+  gitdir,
+  remote,
+  ref,
+  filepaths,
+  noCheckout,
+  noUpdateHead,
+  dryRun,
+  force,
+  track = true,
+}) {
+  // oldOid is defined only if onPostCheckout hook is attached
+  let oldOid;
+  if (onPostCheckout) {
+    try {
+      oldOid = await GitRefManager.resolve({ fs, gitdir, ref: 'HEAD' });
+    } catch (err) {
+      oldOid = '0000000000000000000000000000000000000000';
+    }
+  }
+
+  // Get tree oid
+  let oid;
+  try {
+    oid = await GitRefManager.resolve({ fs, gitdir, ref });
+    // TODO: Figure out what to do if both 'ref' and 'remote' are specified, ref already exists,
+    // and is configured to track a different remote.
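+    // (If the resolve above throws, the catch below falls back to resolving
+    // `${remote}/${ref}` and creating a local branch that tracks it.)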
+ } catch (err) { + if (ref === 'HEAD') throw err + // If `ref` doesn't exist, create a new remote tracking branch + // Figure out the commit to checkout + const remoteRef = `${remote}/${ref}`; + oid = await GitRefManager.resolve({ + fs, + gitdir, + ref: remoteRef, + }); + if (track) { + // Set up remote tracking branch + const config = await GitConfigManager.get({ fs, gitdir }); + await config.set(`branch.${ref}.remote`, remote); + await config.set(`branch.${ref}.merge`, `refs/heads/${ref}`); + await GitConfigManager.save({ fs, gitdir, config }); + } + // Create a new branch that points at that same commit + await GitRefManager.writeRef({ + fs, + gitdir, + ref: `refs/heads/${ref}`, + value: oid, + }); + } + + // Update working dir + if (!noCheckout) { + let ops; + // First pass - just analyze files (not directories) and figure out what needs to be done + try { + ops = await analyze({ + fs, + cache, + onProgress, + dir, + gitdir, + ref, + force, + filepaths, + }); + } catch (err) { + // Throw a more helpful error message for this common mistake. + if (err instanceof NotFoundError && err.data.what === oid) { + throw new CommitNotFetchedError(ref, oid) + } else { + throw err + } + } + + // Report conflicts + const conflicts = ops + .filter(([method]) => method === 'conflict') + .map(([method, fullpath]) => fullpath); + if (conflicts.length > 0) { + throw new CheckoutConflictError(conflicts) + } + + // Collect errors + const errors = ops + .filter(([method]) => method === 'error') + .map(([method, fullpath]) => fullpath); + if (errors.length > 0) { + throw new InternalError(errors.join(', ')) + } + + if (dryRun) { + // Since the format of 'ops' is in flux, I really would rather folk besides myself not start relying on it + // return ops + + if (onPostCheckout) { + await onPostCheckout({ + previousHead: oldOid, + newHead: oid, + type: filepaths != null && filepaths.length > 0 ? 
'file' : 'branch',
+        });
+      }
+      return
+    }
+
+    // Second pass - execute planned changes
+    // The cheapest semi-parallel solution without computing a full dependency graph will be
+    // to just do ops in 4 dumb phases: delete files, delete dirs, create dirs, write files
+
+    let count = 0;
+    const total = ops.length;
+    await GitIndexManager.acquire({ fs, gitdir, cache }, async function(index) {
+      await Promise.all(
+        ops
+          .filter(
+            ([method]) => method === 'delete' || method === 'delete-index'
+          )
+          .map(async function([method, fullpath]) {
+            const filepath = `${dir}/${fullpath}`;
+            if (method === 'delete') {
+              await fs.rm(filepath);
+            }
+            index.delete({ filepath: fullpath });
+            if (onProgress) {
+              await onProgress({
+                phase: 'Updating workdir',
+                loaded: ++count,
+                total,
+              });
+            }
+          })
+      );
+    });
+
+    // Note: this cannot be done naively in parallel
+    await GitIndexManager.acquire({ fs, gitdir, cache }, async function(index) {
+      for (const [method, fullpath] of ops) {
+        if (method === 'rmdir' || method === 'rmdir-index') {
+          const filepath = `${dir}/${fullpath}`;
+          try {
+            if (method === 'rmdir-index') {
+              index.delete({ filepath: fullpath });
+            }
+            await fs.rmdir(filepath);
+            if (onProgress) {
+              await onProgress({
+                phase: 'Updating workdir',
+                loaded: ++count,
+                total,
+              });
+            }
+          } catch (e) {
+            if (e.code === 'ENOTEMPTY') {
+              console.log(
+                `Did not delete ${fullpath} because directory is not empty`
+              );
+            } else {
+              throw e
+            }
+          }
+        }
+      }
+    });
+
+    await Promise.all(
+      ops
+        .filter(([method]) => method === 'mkdir' || method === 'mkdir-index')
+        .map(async function([_, fullpath]) {
+          const filepath = `${dir}/${fullpath}`;
+          await fs.mkdir(filepath);
+          if (onProgress) {
+            await onProgress({
+              phase: 'Updating workdir',
+              loaded: ++count,
+              total,
+            });
+          }
+        })
+    );
+
+    await GitIndexManager.acquire({ fs, gitdir, cache }, async function(index) {
+      await Promise.all(
+        ops
+          .filter(
+            ([method]) =>
+              method === 'create' ||
+              method === 'create-index' ||
+              method === 'update' ||
+              method === 'mkdir-index'
+          )
+          .map(async function([method, fullpath, oid, mode, chmod]) {
+            const filepath = `${dir}/${fullpath}`;
+            try {
+              if (method !== 'create-index' && method !== 'mkdir-index') {
+                const { object } = await _readObject({ fs, cache, gitdir, oid });
+                if (chmod) {
+                  // Note: the mode option of fs.write only works when creating files,
+                  // not updating them. Since the `fs` plugin doesn't expose `chmod` this
+                  // is our only option.
+                  await fs.rm(filepath);
+                }
+                if (mode === 0o100644) {
+                  // regular file
+                  await fs.write(filepath, object);
+                } else if (mode === 0o100755) {
+                  // executable file
+                  await fs.write(filepath, object, { mode: 0o777 });
+                } else if (mode === 0o120000) {
+                  // symlink
+                  await fs.writelink(filepath, object);
+                } else {
+                  throw new InternalError(
+                    `Invalid mode 0o${mode.toString(8)} detected in blob ${oid}`
+                  )
+                }
+              }
+
+              const stats = await fs.lstat(filepath);
+              // We can't trust the executable bit returned by lstat on Windows,
+              // so we need to preserve this value from the TREE.
+              // TODO: Figure out how git handles this internally.
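+              // Until then, overwrite the untrustworthy lstat mode with the
+              // canonical mode recorded for the entry in the tree.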
+ if (mode === 0o100755) { + stats.mode = 0o755; + } + // Submodules are present in the git index but use a unique mode different from trees + if (method === 'mkdir-index') { + stats.mode = 0o160000; + } + index.insert({ + filepath: fullpath, + stats, + oid, + }); + if (onProgress) { + await onProgress({ + phase: 'Updating workdir', + loaded: ++count, + total, + }); + } + } catch (e) { + console.log(e); + } + }) + ); + }); + + if (onPostCheckout) { + await onPostCheckout({ + previousHead: oldOid, + newHead: oid, + type: filepaths != null && filepaths.length > 0 ? 'file' : 'branch', + }); + } + } + + // Update HEAD + if (!noUpdateHead) { + const fullRef = await GitRefManager.expand({ fs, gitdir, ref }); + if (fullRef.startsWith('refs/heads')) { + await GitRefManager.writeSymbolicRef({ + fs, + gitdir, + ref: 'HEAD', + value: fullRef, + }); + } else { + // detached head + await GitRefManager.writeRef({ fs, gitdir, ref: 'HEAD', value: oid }); + } + } +} + +async function analyze({ + fs, + cache, + onProgress, + dir, + gitdir, + ref, + force, + filepaths, +}) { + let count = 0; + return _walk({ + fs, + cache, + dir, + gitdir, + trees: [TREE({ ref }), WORKDIR(), STAGE()], + map: async function(fullpath, [commit, workdir, stage]) { + if (fullpath === '.') return + // match against base paths + if (filepaths && !filepaths.some(base => worthWalking(fullpath, base))) { + return null + } + // Emit progress event + if (onProgress) { + await onProgress({ phase: 'Analyzing workdir', loaded: ++count }); + } + + // This is a kind of silly pattern but it worked so well for me in the past + // and it makes intuitively demonstrating exhaustiveness so *easy*. + // This checks for the presence and/or absence of each of the 3 entries, + // converts that to a 3-bit binary representation, and then handles + // every possible combination (2^3 or 8 cases) with a lookup table. + const key = [!!stage, !!commit, !!workdir].map(Number).join(''); + switch (key) { + // Impossible case. + case '000': + return + // Ignore workdir files that are not tracked and not part of the new commit. + case '001': + // OK, make an exception for explicitly named files. + if (force && filepaths && filepaths.includes(fullpath)) { + return ['delete', fullpath] + } + return + // New entries + case '010': { + switch (await commit.type()) { + case 'tree': { + return ['mkdir', fullpath] + } + case 'blob': { + return [ + 'create', + fullpath, + await commit.oid(), + await commit.mode(), + ] + } + case 'commit': { + return [ + 'mkdir-index', + fullpath, + await commit.oid(), + await commit.mode(), + ] + } + default: { + return [ + 'error', + `new entry Unhandled type ${await commit.type()}`, + ] + } + } + } + // New entries but there is already something in the workdir there. + case '011': { + switch (`${await commit.type()}-${await workdir.type()}`) { + case 'tree-tree': { + return // noop + } + case 'tree-blob': + case 'blob-tree': { + return ['conflict', fullpath] + } + case 'blob-blob': { + // Is the incoming file different? + if ((await commit.oid()) !== (await workdir.oid())) { + if (force) { + return [ + 'update', + fullpath, + await commit.oid(), + await commit.mode(), + (await commit.mode()) !== (await workdir.mode()), + ] + } else { + return ['conflict', fullpath] + } + } else { + // Is the incoming file a different mode? 
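+                // (The content matches at this point, so only the mode, e.g.
+                // the executable bit, can differ.)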
+                if ((await commit.mode()) !== (await workdir.mode())) {
+                  if (force) {
+                    return [
+                      'update',
+                      fullpath,
+                      await commit.oid(),
+                      await commit.mode(),
+                      true,
+                    ]
+                  } else {
+                    return ['conflict', fullpath]
+                  }
+                } else {
+                  return [
+                    'create-index',
+                    fullpath,
+                    await commit.oid(),
+                    await commit.mode(),
+                  ]
+                }
+              }
+            }
+          }
+          case 'commit-tree': {
+            // TODO: submodule
+            // We'll ignore submodule directories for now.
+            // Users prefer we not throw an error for lack of submodule support.
+            // gitlinks
+            return
+          }
+          case 'commit-blob': {
+            // TODO: submodule
+            // But... we'll complain if there is a *file* where we would
+            // put a submodule if we had submodule support.
+            return ['conflict', fullpath]
+          }
+          default: {
+            return ['error', `new entry Unhandled type ${await commit.type()}`]
+          }
+        }
+      }
+      // Something in stage but not in the commit OR the workdir.
+      // Note: I verified this behavior against canonical git.
+      case '100': {
+        return ['delete-index', fullpath]
+      }
+      // Deleted entries
+      // TODO: How to handle if stage type and workdir type mismatch?
+      case '101': {
+        switch (await stage.type()) {
+          case 'tree': {
+            return ['rmdir', fullpath]
+          }
+          case 'blob': {
+            // Git checks that the workdir.oid === stage.oid before deleting file
+            if ((await stage.oid()) !== (await workdir.oid())) {
+              if (force) {
+                return ['delete', fullpath]
+              } else {
+                return ['conflict', fullpath]
+              }
+            } else {
+              return ['delete', fullpath]
+            }
+          }
+          case 'commit': {
+            return ['rmdir-index', fullpath]
+          }
+          default: {
+            return [
+              'error',
+              `delete entry Unhandled type ${await stage.type()}`,
+            ]
+          }
+        }
+      }
+      /* eslint-disable no-fallthrough */
+      // File missing from workdir
+      case '110':
+      // Possibly modified entries
+      case '111': {
+        /* eslint-enable no-fallthrough */
+        switch (`${await stage.type()}-${await commit.type()}`) {
+          case 'tree-tree': {
+            return
+          }
+          case 'blob-blob': {
+            // If the file hasn't changed, there is no need to do anything.
+            // Existing file modifications in the workdir can be left as is.
+            if (
+              (await stage.oid()) === (await commit.oid()) &&
+              (await stage.mode()) === (await commit.mode()) &&
+              !force
+            ) {
+              return
+            }
+
+            // Check for local changes that would be lost
+            if (workdir) {
+              // Note: canonical git only compares with the stage. But we're smart enough
+              // to compare to the stage AND the incoming commit.
+              if (
+                (await workdir.oid()) !== (await stage.oid()) &&
+                (await workdir.oid()) !== (await commit.oid())
+              ) {
+                if (force) {
+                  return [
+                    'update',
+                    fullpath,
+                    await commit.oid(),
+                    await commit.mode(),
+                    (await commit.mode()) !== (await workdir.mode()),
+                  ]
+                } else {
+                  return ['conflict', fullpath]
+                }
+              }
+            } else if (force) {
+              return [
+                'update',
+                fullpath,
+                await commit.oid(),
+                await commit.mode(),
+                (await commit.mode()) !== (await stage.mode()),
+              ]
+            }
+            // Has file mode changed?
+            if ((await commit.mode()) !== (await stage.mode())) {
+              return [
+                'update',
+                fullpath,
+                await commit.oid(),
+                await commit.mode(),
+                true,
+              ]
+            }
+            // TODO: HANDLE SYMLINKS
+            // Has the file content changed?
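+            // (i.e. the mode matches, but the incoming commit's blob oid
+            // differs from the staged oid)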
+ if ((await commit.oid()) !== (await stage.oid())) { + return [ + 'update', + fullpath, + await commit.oid(), + await commit.mode(), + false, + ] + } else { + return + } + } + case 'tree-blob': { + return ['update-dir-to-blob', fullpath, await commit.oid()] + } + case 'blob-tree': { + return ['update-blob-to-tree', fullpath] + } + case 'commit-commit': { + return [ + 'mkdir-index', + fullpath, + await commit.oid(), + await commit.mode(), + ] + } + default: { + return [ + 'error', + `update entry Unhandled type ${await stage.type()}-${await commit.type()}`, + ] + } + } + } + } + }, + // Modify the default flat mapping + reduce: async function(parent, children) { + children = flat(children); + if (!parent) { + return children + } else if (parent && parent[0] === 'rmdir') { + children.push(parent); + return children + } else { + children.unshift(parent); + return children + } + }, + }) +} + +// @ts-check + +/** + * Checkout a branch + * + * If the branch already exists it will check out that branch. Otherwise, it will create a new remote tracking branch set to track the remote branch of that name. + * + * @param {object} args + * @param {FsClient} args.fs - a file system implementation + * @param {ProgressCallback} [args.onProgress] - optional progress event callback + * @param {PostCheckoutCallback} [args.onPostCheckout] - optional post-checkout hook callback + * @param {string} args.dir - The [working tree](dir-vs-gitdir.md) directory path + * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path + * @param {string} [args.ref = 'HEAD'] - Source to checkout files from + * @param {string[]} [args.filepaths] - Limit the checkout to the given files and directories + * @param {string} [args.remote = 'origin'] - Which remote repository to use + * @param {boolean} [args.noCheckout = false] - If true, will update HEAD but won't update the working directory + * @param {boolean} [args.noUpdateHead] - If true, will update the working directory but won't update HEAD. Defaults to `false` when `ref` is provided, and `true` if `ref` is not provided. + * @param {boolean} [args.dryRun = false] - If true, simulates a checkout so you can test whether it would succeed. + * @param {boolean} [args.force = false] - If true, conflicts will be ignored and files will be overwritten regardless of local changes. + * @param {boolean} [args.track = true] - If false, will not set the remote branch tracking information. Defaults to true. 
+ * @param {object} [args.cache] - a [cache](cache.md) object + * + * @returns {Promise} Resolves successfully when filesystem operations are complete + * + * @example + * // switch to the main branch + * await git.checkout({ + * fs, + * dir: '/tutorial', + * ref: 'main' + * }) + * console.log('done') + * + * @example + * // restore the 'docs' and 'src/docs' folders to the way they were, overwriting any changes + * await git.checkout({ + * fs, + * dir: '/tutorial', + * force: true, + * filepaths: ['docs', 'src/docs'] + * }) + * console.log('done') + * + * @example + * // restore the 'docs' and 'src/docs' folders to the way they are in the 'develop' branch, overwriting any changes + * await git.checkout({ + * fs, + * dir: '/tutorial', + * ref: 'develop', + * noUpdateHead: true, + * force: true, + * filepaths: ['docs', 'src/docs'] + * }) + * console.log('done') + */ +async function checkout({ + fs, + onProgress, + onPostCheckout, + dir, + gitdir = join(dir, '.git'), + remote = 'origin', + ref: _ref, + filepaths, + noCheckout = false, + noUpdateHead = _ref === undefined, + dryRun = false, + force = false, + track = true, + cache = {}, +}) { + try { + assertParameter('fs', fs); + assertParameter('dir', dir); + assertParameter('gitdir', gitdir); + + const ref = _ref || 'HEAD'; + return await _checkout({ + fs: new FileSystem(fs), + cache, + onProgress, + onPostCheckout, + dir, + gitdir, + remote, + ref, + filepaths, + noCheckout, + noUpdateHead, + dryRun, + force, + track, + }) + } catch (err) { + err.caller = 'git.checkout'; + throw err + } +} + +// @see https://git-scm.com/docs/git-rev-parse.html#_specifying_revisions +const abbreviateRx = new RegExp('^refs/(heads/|tags/|remotes/)?(.*)'); + +function abbreviateRef(ref) { + const match = abbreviateRx.exec(ref); + if (match) { + if (match[1] === 'remotes/' && ref.endsWith('/HEAD')) { + return match[2].slice(0, -5) + } else { + return match[2] + } + } + return ref +} + +// @ts-check + +/** + * @param {Object} args + * @param {import('../models/FileSystem.js').FileSystem} args.fs + * @param {string} args.gitdir + * @param {boolean} [args.fullname = false] - Return the full path (e.g. "refs/heads/main") instead of the abbreviated form. + * @param {boolean} [args.test = false] - If the current branch doesn't actually exist (such as right after git init) then return `undefined`. + * + * @returns {Promise} The name of the current branch or undefined if the HEAD is detached. + * + */ +async function _currentBranch({ + fs, + gitdir, + fullname = false, + test = false, +}) { + const ref = await GitRefManager.resolve({ + fs, + gitdir, + ref: 'HEAD', + depth: 2, + }); + if (test) { + try { + await GitRefManager.resolve({ fs, gitdir, ref }); + } catch (_) { + return + } + } + // Return `undefined` for detached HEAD + if (!ref.startsWith('refs/')) return + return fullname ? ref : abbreviateRef(ref) +} + +function translateSSHtoHTTP(url) { + // handle "shorter scp-like syntax" + url = url.replace(/^git@([^:]+):/, 'https://$1/'); + // handle proper SSH URLs + url = url.replace(/^ssh:\/\//, 'https://'); + return url +} + +function calculateBasicAuthHeader({ username = '', password = '' }) { + return `Basic ${Buffer.from(`${username}:${password}`).toString('base64')}` +} + +// Currently 'for await' upsets my linters. 
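+// A rough usage sketch (illustrative only; `someIterable` and the callback
+// are hypothetical, not part of this file):
+//
+//   await forAwait(someIterable, chunk => {
+//     console.log(chunk.byteLength);
+//   });
+//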
+async function forAwait(iterable, cb) { + const iter = getIterator(iterable); + while (true) { + const { value, done } = await iter.next(); + if (value) await cb(value); + if (done) break + } + if (iter.return) iter.return(); +} + +async function collect(iterable) { + let size = 0; + const buffers = []; + // This will be easier once `for await ... of` loops are available. + await forAwait(iterable, value => { + buffers.push(value); + size += value.byteLength; + }); + const result = new Uint8Array(size); + let nextIndex = 0; + for (const buffer of buffers) { + result.set(buffer, nextIndex); + nextIndex += buffer.byteLength; + } + return result +} + +function extractAuthFromUrl(url) { + // For whatever reason, the `fetch` API does not convert credentials embedded in the URL + // into Basic Authentication headers automatically. Instead it throws an error! + // So we must manually parse the URL, rip out the user:password portion if it is present + // and compute the Authorization header. + // Note: I tried using new URL(url) but that throws a security exception in Edge. :rolleyes: + let userpass = url.match(/^https?:\/\/([^/]+)@/); + // No credentials, return the url unmodified and an empty auth object + if (userpass == null) return { url, auth: {} } + userpass = userpass[1]; + const [username, password] = userpass.split(':'); + // Remove credentials from URL + url = url.replace(`${userpass}@`, ''); + // Has credentials, return the fetch-safe URL and the parsed credentials + return { url, auth: { username, password } } +} + +function padHex(b, n) { + const s = n.toString(16); + return '0'.repeat(b - s.length) + s +} + +/** +pkt-line Format +--------------- + +Much (but not all) of the payload is described around pkt-lines. + +A pkt-line is a variable length binary string. The first four bytes +of the line, the pkt-len, indicates the total length of the line, +in hexadecimal. The pkt-len includes the 4 bytes used to contain +the length's hexadecimal representation. + +A pkt-line MAY contain binary data, so implementers MUST ensure +pkt-line parsing/formatting routines are 8-bit clean. + +A non-binary line SHOULD BE terminated by an LF, which if present +MUST be included in the total length. Receivers MUST treat pkt-lines +with non-binary data the same whether or not they contain the trailing +LF (stripping the LF if present, and not complaining when it is +missing). + +The maximum length of a pkt-line's data component is 65516 bytes. +Implementations MUST NOT send pkt-line whose length exceeds 65520 +(65516 bytes of payload + 4 bytes of length data). + +Implementations SHOULD NOT send an empty pkt-line ("0004"). + +A pkt-line with a length field of 0 ("0000"), called a flush-pkt, +is a special case and MUST be handled differently than an empty +pkt-line ("0004"). + +---- + pkt-line = data-pkt / flush-pkt + + data-pkt = pkt-len pkt-payload + pkt-len = 4*(HEXDIG) + pkt-payload = (pkt-len - 4)*(OCTET) + + flush-pkt = "0000" +---- + +Examples (as C-style strings): + +---- + pkt-line actual value + --------------------------------- + "0006a\n" "a\n" + "0005a" "a" + "000bfoobar\n" "foobar\n" + "0004" "" +---- +*/ + +// I'm really using this more as a namespace. 
+// There's not a lot of "state" in a pkt-line + +class GitPktLine { + static flush() { + return Buffer.from('0000', 'utf8') + } + + static delim() { + return Buffer.from('0001', 'utf8') + } + + static encode(line) { + if (typeof line === 'string') { + line = Buffer.from(line); + } + const length = line.length + 4; + const hexlength = padHex(4, length); + return Buffer.concat([Buffer.from(hexlength, 'utf8'), line]) + } + + static streamReader(stream) { + const reader = new StreamReader(stream); + return async function read() { + try { + let length = await reader.read(4); + if (length == null) return true + length = parseInt(length.toString('utf8'), 16); + if (length === 0) return null + if (length === 1) return null // delim packets + const buffer = await reader.read(length - 4); + if (buffer == null) return true + return buffer + } catch (err) { + stream.error = err; + return true + } + } + } +} + +// @ts-check + +/** + * @param {function} read + */ +async function parseCapabilitiesV2(read) { + /** @type {Object} */ + const capabilities2 = {}; + + let line; + while (true) { + line = await read(); + if (line === true) break + if (line === null) continue + line = line.toString('utf8').replace(/\n$/, ''); + const i = line.indexOf('='); + if (i > -1) { + const key = line.slice(0, i); + const value = line.slice(i + 1); + capabilities2[key] = value; + } else { + capabilities2[line] = true; + } + } + return { protocolVersion: 2, capabilities2 } +} + +async function parseRefsAdResponse(stream, { service }) { + const capabilities = new Set(); + const refs = new Map(); + const symrefs = new Map(); + + // There is probably a better way to do this, but for now + // let's just throw the result parser inline here. + const read = GitPktLine.streamReader(stream); + let lineOne = await read(); + // skip past any flushes + while (lineOne === null) lineOne = await read(); + + if (lineOne === true) throw new EmptyServerResponseError() + + // Handle protocol v2 responses (Bitbucket Server doesn't include a `# service=` line) + if (lineOne.includes('version 2')) { + return parseCapabilitiesV2(read) + } + + // Clients MUST ignore an LF at the end of the line. + if (lineOne.toString('utf8').replace(/\n$/, '') !== `# service=${service}`) { + throw new ParseError(`# service=${service}\\n`, lineOne.toString('utf8')) + } + let lineTwo = await read(); + // skip past any flushes + while (lineTwo === null) lineTwo = await read(); + // In the edge case of a brand new repo, zero refs (and zero capabilities) + // are returned. + if (lineTwo === true) return { capabilities, refs, symrefs } + lineTwo = lineTwo.toString('utf8'); + + // Handle protocol v2 responses + if (lineTwo.includes('version 2')) { + return parseCapabilitiesV2(read) + } + + const [firstRef, capabilitiesLine] = splitAndAssert(lineTwo, '\x00', '\\x00'); + capabilitiesLine.split(' ').map(x => capabilities.add(x)); + // see no-refs in https://git-scm.com/docs/pack-protocol#_reference_discovery (since git 2.41.0) + if (firstRef !== '0000000000000000000000000000000000000000 capabilities^{}') { + const [ref, name] = splitAndAssert(firstRef, ' ', ' '); + refs.set(name, ref); + while (true) { + const line = await read(); + if (line === true) break + if (line !== null) { + const [ref, name] = splitAndAssert(line.toString('utf8'), ' ', ' '); + refs.set(name, ref); + } + } + } + // Symrefs are thrown into the "capabilities" unfortunately. 
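+  // For example, a capability string like 'symref=HEAD:refs/heads/main'
+  // becomes the symrefs entry HEAD -> refs/heads/main.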
+ for (const cap of capabilities) { + if (cap.startsWith('symref=')) { + const m = cap.match(/symref=([^:]+):(.*)/); + if (m.length === 3) { + symrefs.set(m[1], m[2]); + } + } + } + return { protocolVersion: 1, capabilities, refs, symrefs } +} + +function splitAndAssert(line, sep, expected) { + const split = line.trim().split(sep); + if (split.length !== 2) { + throw new ParseError( + `Two strings separated by '${expected}'`, + line.toString('utf8') + ) + } + return split +} + +// Try to accommodate known CORS proxy implementations: +// - https://jcubic.pl/proxy.php? <-- uses query string +// - https://cors.isomorphic-git.org <-- uses path +const corsProxify = (corsProxy, url) => + corsProxy.endsWith('?') + ? `${corsProxy}${url}` + : `${corsProxy}/${url.replace(/^https?:\/\//, '')}`; + +const updateHeaders = (headers, auth) => { + // Update the basic auth header + if (auth.username || auth.password) { + headers.Authorization = calculateBasicAuthHeader(auth); + } + // but any manually provided headers take precedence + if (auth.headers) { + Object.assign(headers, auth.headers); + } +}; + +/** + * @param {GitHttpResponse} res + * + * @returns {{ preview: string, response: string, data: Buffer }} + */ +const stringifyBody = async res => { + try { + // Some services provide a meaningful error message in the body of 403s like "token lacks the scopes necessary to perform this action" + const data = Buffer.from(await collect(res.body)); + const response = data.toString('utf8'); + const preview = + response.length < 256 ? response : response.slice(0, 256) + '...'; + return { preview, response, data } + } catch (e) { + return {} + } +}; + +class GitRemoteHTTP { + static async capabilities() { + return ['discover', 'connect'] + } + + /** + * @param {Object} args + * @param {HttpClient} args.http + * @param {ProgressCallback} [args.onProgress] + * @param {AuthCallback} [args.onAuth] + * @param {AuthFailureCallback} [args.onAuthFailure] + * @param {AuthSuccessCallback} [args.onAuthSuccess] + * @param {string} [args.corsProxy] + * @param {string} args.service + * @param {string} args.url + * @param {Object} args.headers + * @param {1 | 2} args.protocolVersion - Git Protocol Version + */ + static async discover({ + http, + onProgress, + onAuth, + onAuthSuccess, + onAuthFailure, + corsProxy, + service, + url: _origUrl, + headers, + protocolVersion, + }) { + let { url, auth } = extractAuthFromUrl(_origUrl); + const proxifiedURL = corsProxy ? corsProxify(corsProxy, url) : url; + if (auth.username || auth.password) { + headers.Authorization = calculateBasicAuthHeader(auth); + } + if (protocolVersion === 2) { + headers['Git-Protocol'] = 'version=2'; + } + + let res; + let tryAgain; + let providedAuthBefore = false; + do { + res = await http.request({ + onProgress, + method: 'GET', + url: `${proxifiedURL}/info/refs?service=${service}`, + headers, + }); + + // the default loop behavior + tryAgain = false; + + // 401 is the "correct" response for access denied. 203 is Non-Authoritative Information and comes from Azure DevOps, which + // apparently doesn't realize this is a git request and is returning the HTML for the "Azure DevOps Services | Sign In" page. + if (res.statusCode === 401 || res.statusCode === 203) { + // On subsequent 401s, call `onAuthFailure` instead of `onAuth`. + // This is so that naive `onAuth` callbacks that return a fixed value don't create an infinite loop of retrying. + const getAuth = providedAuthBefore ? 
onAuthFailure : onAuth;
+        if (getAuth) {
+          // Acquire credentials and try again
+          // TODO: read `useHttpPath` value from git config and pass along?
+          auth = await getAuth(url, {
+            ...auth,
+            headers: { ...headers },
+          });
+          if (auth && auth.cancel) {
+            throw new UserCanceledError()
+          } else if (auth) {
+            updateHeaders(headers, auth);
+            providedAuthBefore = true;
+            tryAgain = true;
+          }
+        }
+      } else if (
+        res.statusCode === 200 &&
+        providedAuthBefore &&
+        onAuthSuccess
+      ) {
+        await onAuthSuccess(url, auth);
+      }
+    } while (tryAgain)
+
+    if (res.statusCode !== 200) {
+      const { response } = await stringifyBody(res);
+      throw new HttpError(res.statusCode, res.statusMessage, response)
+    }
+    // Git "smart" HTTP servers should respond with the correct Content-Type header.
+    if (
+      res.headers['content-type'] === `application/x-${service}-advertisement`
+    ) {
+      const remoteHTTP = await parseRefsAdResponse(res.body, { service });
+      remoteHTTP.auth = auth;
+      return remoteHTTP
+    } else {
+      // If they don't send the correct content-type header, that's a good indicator it is either a "dumb" HTTP
+      // server, or the user specified an incorrect remote URL and the response is actually an HTML page.
+      // In this case, we save the response as plain text so we can generate a better error message if needed.
+      const { preview, response, data } = await stringifyBody(res);
+      // For backwards compatibility, try to parse it anyway.
+      // TODO: maybe just throw instead of trying?
+      try {
+        const remoteHTTP = await parseRefsAdResponse([data], { service });
+        remoteHTTP.auth = auth;
+        return remoteHTTP
+      } catch (e) {
+        throw new SmartHttpError(preview, response)
+      }
+    }
+  }
+
+  /**
+   * @param {Object} args
+   * @param {HttpClient} args.http
+   * @param {ProgressCallback} [args.onProgress]
+   * @param {string} [args.corsProxy]
+   * @param {string} args.service
+   * @param {string} args.url
+   * @param {Object} [args.headers]
+   * @param {any} args.body
+   * @param {any} args.auth
+   */
+  static async connect({
+    http,
+    onProgress,
+    corsProxy,
+    service,
+    url,
+    auth,
+    body,
+    headers,
+  }) {
+    // We already have the "correct" auth value at this point, but
+    // we need to strip out the username/password from the URL yet again.
+    const urlAuth = extractAuthFromUrl(url);
+    if (urlAuth) url = urlAuth.url;
+
+    if (corsProxy) url = corsProxify(corsProxy, url);
+
+    headers['content-type'] = `application/x-${service}-request`;
+    headers.accept = `application/x-${service}-result`;
+    updateHeaders(headers, auth);
+
+    const res = await http.request({
+      onProgress,
+      method: 'POST',
+      url: `${url}/${service}`,
+      body,
+      headers,
+    });
+    if (res.statusCode !== 200) {
+      const { response } = await stringifyBody(res);
+      throw new HttpError(res.statusCode, res.statusMessage, response)
+    }
+    return res
+  }
+}
+
+function parseRemoteUrl({ url }) {
+  // the stupid "shorter scp-like syntax"
+  if (url.startsWith('git@')) {
+    return {
+      transport: 'ssh',
+      address: url,
+    }
+  }
+  const matches = url.match(/(\w+)(:\/\/|::)(.*)/);
+  if (matches === null) return
+  /*
+   * When git encounters a URL of the form <transport>://<address>
, where <transport> is
+   * a protocol that it cannot handle natively, it automatically invokes git remote-<transport>
+   * with the full URL as the second argument.
+   *
+   * @see https://git-scm.com/docs/git-remote-helpers
+   */
+  if (matches[2] === '://') {
+    return {
+      transport: matches[1],
+      address: matches[0],
+    }
+  }
+  /*
+   * A URL of the form <transport>::<address>
explicitly instructs git to invoke
+   * git remote-<transport> with <address>
as the second argument. + * + * @see https://git-scm.com/docs/git-remote-helpers + */ + if (matches[2] === '::') { + return { + transport: matches[1], + address: matches[3], + } + } +} + +class GitRemoteManager { + static getRemoteHelperFor({ url }) { + // TODO: clean up the remoteHelper API and move into PluginCore + const remoteHelpers = new Map(); + remoteHelpers.set('http', GitRemoteHTTP); + remoteHelpers.set('https', GitRemoteHTTP); + + const parts = parseRemoteUrl({ url }); + if (!parts) { + throw new UrlParseError(url) + } + if (remoteHelpers.has(parts.transport)) { + return remoteHelpers.get(parts.transport) + } + throw new UnknownTransportError( + url, + parts.transport, + parts.transport === 'ssh' ? translateSSHtoHTTP(url) : undefined + ) + } +} + +let lock$2 = null; + +class GitShallowManager { + static async read({ fs, gitdir }) { + if (lock$2 === null) lock$2 = new AsyncLock(); + const filepath = join(gitdir, 'shallow'); + const oids = new Set(); + await lock$2.acquire(filepath, async function() { + const text = await fs.read(filepath, { encoding: 'utf8' }); + if (text === null) return oids // no file + if (text.trim() === '') return oids // empty file + text + .trim() + .split('\n') + .map(oid => oids.add(oid)); + }); + return oids + } + + static async write({ fs, gitdir, oids }) { + if (lock$2 === null) lock$2 = new AsyncLock(); + const filepath = join(gitdir, 'shallow'); + if (oids.size > 0) { + const text = [...oids].join('\n') + '\n'; + await lock$2.acquire(filepath, async function() { + await fs.write(filepath, text, { + encoding: 'utf8', + }); + }); + } else { + // No shallows + await lock$2.acquire(filepath, async function() { + await fs.rm(filepath); + }); + } + } +} + +async function hasObjectLoose({ fs, gitdir, oid }) { + const source = `objects/${oid.slice(0, 2)}/${oid.slice(2)}`; + return fs.exists(`${gitdir}/${source}`) +} + +async function hasObjectPacked({ + fs, + cache, + gitdir, + oid, + getExternalRefDelta, +}) { + // Check to see if it's in a packfile. + // Iterate through all the .idx files + let list = await fs.readdir(join(gitdir, 'objects/pack')); + list = list.filter(x => x.endsWith('.idx')); + for (const filename of list) { + const indexFile = `${gitdir}/objects/pack/${filename}`; + const p = await readPackIndex({ + fs, + cache, + filename: indexFile, + getExternalRefDelta, + }); + if (p.error) throw new InternalError(p.error) + // If the packfile DOES have the oid we're looking for... + if (p.offsets.has(oid)) { + return true + } + } + // Failed to find it + return false +} + +async function hasObject({ + fs, + cache, + gitdir, + oid, + format = 'content', +}) { + // Curry the current read method so that the packfile un-deltification + // process can acquire external ref-deltas. + const getExternalRefDelta = oid => _readObject({ fs, cache, gitdir, oid }); + + // Look for it in the loose object directory. + let result = await hasObjectLoose({ fs, gitdir, oid }); + // Check to see if it's in a packfile. + if (!result) { + result = await hasObjectPacked({ + fs, + cache, + gitdir, + oid, + getExternalRefDelta, + }); + } + // Finally + return result +} + +// TODO: make a function that just returns obCount. 
then emptyPackfile = () => sizePack(pack) === 0 +function emptyPackfile(pack) { + const pheader = '5041434b'; + const version = '00000002'; + const obCount = '00000000'; + const header = pheader + version + obCount; + return pack.slice(0, 12).toString('hex') === header +} + +function filterCapabilities(server, client) { + const serverNames = server.map(cap => cap.split('=', 1)[0]); + return client.filter(cap => { + const name = cap.split('=', 1)[0]; + return serverNames.includes(name) + }) +} + +const pkg = { + name: 'isomorphic-git', + version: '1.27.1', + agent: 'git/isomorphic-git@1.27.1', +}; + +class FIFO { + constructor() { + this._queue = []; + } + + write(chunk) { + if (this._ended) { + throw Error('You cannot write to a FIFO that has already been ended!') + } + if (this._waiting) { + const resolve = this._waiting; + this._waiting = null; + resolve({ value: chunk }); + } else { + this._queue.push(chunk); + } + } + + end() { + this._ended = true; + if (this._waiting) { + const resolve = this._waiting; + this._waiting = null; + resolve({ done: true }); + } + } + + destroy(err) { + this.error = err; + this.end(); + } + + async next() { + if (this._queue.length > 0) { + return { value: this._queue.shift() } + } + if (this._ended) { + return { done: true } + } + if (this._waiting) { + throw Error( + 'You cannot call read until the previous call to read has returned!' + ) + } + return new Promise(resolve => { + this._waiting = resolve; + }) + } +} + +// Note: progress messages are designed to be written directly to the terminal, +// so they are often sent with just a carriage return to overwrite the last line of output. +// But there are also messages delimited with newlines. +// I also include CRLF just in case. +function findSplit(str) { + const r = str.indexOf('\r'); + const n = str.indexOf('\n'); + if (r === -1 && n === -1) return -1 + if (r === -1) return n + 1 // \n + if (n === -1) return r + 1 // \r + if (n === r + 1) return n + 1 // \r\n + return Math.min(r, n) + 1 // \r or \n +} + +function splitLines(input) { + const output = new FIFO(); + let tmp = '' + ;(async () => { + await forAwait(input, chunk => { + chunk = chunk.toString('utf8'); + tmp += chunk; + while (true) { + const i = findSplit(tmp); + if (i === -1) break + output.write(tmp.slice(0, i)); + tmp = tmp.slice(i); + } + }); + if (tmp.length > 0) { + output.write(tmp); + } + output.end(); + })(); + return output +} + +/* +If 'side-band' or 'side-band-64k' capabilities have been specified by +the client, the server will send the packfile data multiplexed. + +Each packet starting with the packet-line length of the amount of data +that follows, followed by a single byte specifying the sideband the +following data is coming in on. + +In 'side-band' mode, it will send up to 999 data bytes plus 1 control +code, for a total of up to 1000 bytes in a pkt-line. In 'side-band-64k' +mode it will send up to 65519 data bytes plus 1 control code, for a +total of up to 65520 bytes in a pkt-line. + +The sideband byte will be a '1', '2' or a '3'. Sideband '1' will contain +packfile data, sideband '2' will be used for progress information that the +client will generally print to stderr and sideband '3' is used for error +information. + +If no 'side-band' capability was specified, the server will stream the +entire packfile without multiplexing. 
+*/ + +class GitSideBand { + static demux(input) { + const read = GitPktLine.streamReader(input); + // And now for the ridiculous side-band or side-band-64k protocol + const packetlines = new FIFO(); + const packfile = new FIFO(); + const progress = new FIFO(); + // TODO: Use a proper through stream? + const nextBit = async function() { + const line = await read(); + // Skip over flush packets + if (line === null) return nextBit() + // A made up convention to signal there's no more to read. + if (line === true) { + packetlines.end(); + progress.end(); + input.error ? packfile.destroy(input.error) : packfile.end(); + return + } + // Examine first byte to determine which output "stream" to use + switch (line[0]) { + case 1: { + // pack data + packfile.write(line.slice(1)); + break + } + case 2: { + // progress message + progress.write(line.slice(1)); + break + } + case 3: { + // fatal error message just before stream aborts + const error = line.slice(1); + progress.write(error); + packetlines.end(); + progress.end(); + packfile.destroy(new Error(error.toString('utf8'))); + return + } + default: { + // Not part of the side-band-64k protocol + packetlines.write(line); + } + } + // Careful not to blow up the stack. + // I think Promises in a tail-call position should be OK. + nextBit(); + }; + nextBit(); + return { + packetlines, + packfile, + progress, + } + } + // static mux ({ + // protocol, // 'side-band' or 'side-band-64k' + // packetlines, + // packfile, + // progress, + // error + // }) { + // const MAX_PACKET_LENGTH = protocol === 'side-band-64k' ? 999 : 65519 + // let output = new PassThrough() + // packetlines.on('data', data => { + // if (data === null) { + // output.write(GitPktLine.flush()) + // } else { + // output.write(GitPktLine.encode(data)) + // } + // }) + // let packfileWasEmpty = true + // let packfileEnded = false + // let progressEnded = false + // let errorEnded = false + // let goodbye = Buffer.concat([ + // GitPktLine.encode(Buffer.from('010A', 'hex')), + // GitPktLine.flush() + // ]) + // packfile + // .on('data', data => { + // packfileWasEmpty = false + // const buffers = splitBuffer(data, MAX_PACKET_LENGTH) + // for (const buffer of buffers) { + // output.write( + // GitPktLine.encode(Buffer.concat([Buffer.from('01', 'hex'), buffer])) + // ) + // } + // }) + // .on('end', () => { + // packfileEnded = true + // if (!packfileWasEmpty) output.write(goodbye) + // if (progressEnded && errorEnded) output.end() + // }) + // progress + // .on('data', data => { + // const buffers = splitBuffer(data, MAX_PACKET_LENGTH) + // for (const buffer of buffers) { + // output.write( + // GitPktLine.encode(Buffer.concat([Buffer.from('02', 'hex'), buffer])) + // ) + // } + // }) + // .on('end', () => { + // progressEnded = true + // if (packfileEnded && errorEnded) output.end() + // }) + // error + // .on('data', data => { + // const buffers = splitBuffer(data, MAX_PACKET_LENGTH) + // for (const buffer of buffers) { + // output.write( + // GitPktLine.encode(Buffer.concat([Buffer.from('03', 'hex'), buffer])) + // ) + // } + // }) + // .on('end', () => { + // errorEnded = true + // if (progressEnded && packfileEnded) output.end() + // }) + // return output + // } +} + +async function parseUploadPackResponse(stream) { + const { packetlines, packfile, progress } = GitSideBand.demux(stream); + const shallows = []; + const unshallows = []; + const acks = []; + let nak = false; + let done = false; + return new Promise((resolve, reject) => { + // Parse the response + forAwait(packetlines, 
data => { + const line = data.toString('utf8').trim(); + if (line.startsWith('shallow')) { + const oid = line.slice(-41).trim(); + if (oid.length !== 40) { + reject(new InvalidOidError(oid)); + } + shallows.push(oid); + } else if (line.startsWith('unshallow')) { + const oid = line.slice(-41).trim(); + if (oid.length !== 40) { + reject(new InvalidOidError(oid)); + } + unshallows.push(oid); + } else if (line.startsWith('ACK')) { + const [, oid, status] = line.split(' '); + acks.push({ oid, status }); + if (!status) done = true; + } else if (line.startsWith('NAK')) { + nak = true; + done = true; + } else { + done = true; + nak = true; + } + if (done) { + stream.error + ? reject(stream.error) + : resolve({ shallows, unshallows, acks, nak, packfile, progress }); + } + }).finally(() => { + if (!done) { + stream.error + ? reject(stream.error) + : resolve({ shallows, unshallows, acks, nak, packfile, progress }); + } + }); + }) +} + +function writeUploadPackRequest({ + capabilities = [], + wants = [], + haves = [], + shallows = [], + depth = null, + since = null, + exclude = [], +}) { + const packstream = []; + wants = [...new Set(wants)]; // remove duplicates + let firstLineCapabilities = ` ${capabilities.join(' ')}`; + for (const oid of wants) { + packstream.push(GitPktLine.encode(`want ${oid}${firstLineCapabilities}\n`)); + firstLineCapabilities = ''; + } + for (const oid of shallows) { + packstream.push(GitPktLine.encode(`shallow ${oid}\n`)); + } + if (depth !== null) { + packstream.push(GitPktLine.encode(`deepen ${depth}\n`)); + } + if (since !== null) { + packstream.push( + GitPktLine.encode(`deepen-since ${Math.floor(since.valueOf() / 1000)}\n`) + ); + } + for (const oid of exclude) { + packstream.push(GitPktLine.encode(`deepen-not ${oid}\n`)); + } + packstream.push(GitPktLine.flush()); + for (const oid of haves) { + packstream.push(GitPktLine.encode(`have ${oid}\n`)); + } + packstream.push(GitPktLine.encode(`done\n`)); + return packstream +} + +// @ts-check + +/** + * + * @typedef {object} FetchResult - The object returned has the following schema: + * @property {string | null} defaultBranch - The branch that is cloned if no branch is specified + * @property {string | null} fetchHead - The SHA-1 object id of the fetched head commit + * @property {string | null} fetchHeadDescription - a textual description of the branch that was fetched + * @property {Object} [headers] - The HTTP response headers returned by the git server + * @property {string[]} [pruned] - A list of branches that were pruned, if you provided the `prune` parameter + * + */ + +/** + * @param {object} args + * @param {import('../models/FileSystem.js').FileSystem} args.fs + * @param {any} args.cache + * @param {HttpClient} args.http + * @param {ProgressCallback} [args.onProgress] + * @param {MessageCallback} [args.onMessage] + * @param {AuthCallback} [args.onAuth] + * @param {AuthFailureCallback} [args.onAuthFailure] + * @param {AuthSuccessCallback} [args.onAuthSuccess] + * @param {string} args.gitdir + * @param {string|void} [args.url] + * @param {string} [args.corsProxy] + * @param {string} [args.ref] + * @param {string} [args.remoteRef] + * @param {string} [args.remote] + * @param {boolean} [args.singleBranch = false] + * @param {boolean} [args.tags = false] + * @param {number} [args.depth] + * @param {Date} [args.since] + * @param {string[]} [args.exclude = []] + * @param {boolean} [args.relative = false] + * @param {Object} [args.headers] + * @param {boolean} [args.prune] + * @param {boolean} [args.pruneTags] + * + * 
@returns {Promise} + * @see FetchResult + */ +async function _fetch({ + fs, + cache, + http, + onProgress, + onMessage, + onAuth, + onAuthSuccess, + onAuthFailure, + gitdir, + ref: _ref, + remoteRef: _remoteRef, + remote: _remote, + url: _url, + corsProxy, + depth = null, + since = null, + exclude = [], + relative = false, + tags = false, + singleBranch = false, + headers = {}, + prune = false, + pruneTags = false, +}) { + const ref = _ref || (await _currentBranch({ fs, gitdir, test: true })); + const config = await GitConfigManager.get({ fs, gitdir }); + // Figure out what remote to use. + const remote = + _remote || (ref && (await config.get(`branch.${ref}.remote`))) || 'origin'; + // Lookup the URL for the given remote. + const url = _url || (await config.get(`remote.${remote}.url`)); + if (typeof url === 'undefined') { + throw new MissingParameterError('remote OR url') + } + // Figure out what remote ref to use. + const remoteRef = + _remoteRef || + (ref && (await config.get(`branch.${ref}.merge`))) || + _ref || + 'HEAD'; + + if (corsProxy === undefined) { + corsProxy = await config.get('http.corsProxy'); + } + + const GitRemoteHTTP = GitRemoteManager.getRemoteHelperFor({ url }); + const remoteHTTP = await GitRemoteHTTP.discover({ + http, + onAuth, + onAuthSuccess, + onAuthFailure, + corsProxy, + service: 'git-upload-pack', + url, + headers, + protocolVersion: 1, + }); + const auth = remoteHTTP.auth; // hack to get new credentials from CredentialManager API + const remoteRefs = remoteHTTP.refs; + // For the special case of an empty repository with no refs, return null. + if (remoteRefs.size === 0) { + return { + defaultBranch: null, + fetchHead: null, + fetchHeadDescription: null, + } + } + // Check that the remote supports the requested features + if (depth !== null && !remoteHTTP.capabilities.has('shallow')) { + throw new RemoteCapabilityError('shallow', 'depth') + } + if (since !== null && !remoteHTTP.capabilities.has('deepen-since')) { + throw new RemoteCapabilityError('deepen-since', 'since') + } + if (exclude.length > 0 && !remoteHTTP.capabilities.has('deepen-not')) { + throw new RemoteCapabilityError('deepen-not', 'exclude') + } + if (relative === true && !remoteHTTP.capabilities.has('deepen-relative')) { + throw new RemoteCapabilityError('deepen-relative', 'relative') + } + // Figure out the SHA for the requested ref + const { oid, fullref } = GitRefManager.resolveAgainstMap({ + ref: remoteRef, + map: remoteRefs, + }); + // Filter out refs we want to ignore: only keep ref we're cloning, HEAD, branches, and tags (if we're keeping them) + for (const remoteRef of remoteRefs.keys()) { + if ( + remoteRef === fullref || + remoteRef === 'HEAD' || + remoteRef.startsWith('refs/heads/') || + (tags && remoteRef.startsWith('refs/tags/')) + ) { + continue + } + remoteRefs.delete(remoteRef); + } + // Assemble the application/x-git-upload-pack-request + const capabilities = filterCapabilities( + [...remoteHTTP.capabilities], + [ + 'multi_ack_detailed', + 'no-done', + 'side-band-64k', + // Note: I removed 'thin-pack' option since our code doesn't "fatten" packfiles, + // which is necessary for compatibility with git. It was the cause of mysterious + // 'fatal: pack has [x] unresolved deltas' errors that plagued us for some time. + // isomorphic-git is perfectly happy with thin packfiles in .git/objects/pack but + // canonical git it turns out is NOT. 
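+      // (A thin pack may contain deltas whose base objects are absent from the
+      // pack; canonical git would need `git index-pack --fix-thin` to resolve them.)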
+ 'ofs-delta', + `agent=${pkg.agent}`, + ] + ); + if (relative) capabilities.push('deepen-relative'); + // Start figuring out which oids from the remote we want to request + const wants = singleBranch ? [oid] : remoteRefs.values(); + // Come up with a reasonable list of oids to tell the remote we already have + // (preferably oids that are close ancestors of the branch heads we're fetching) + const haveRefs = singleBranch + ? [ref] + : await GitRefManager.listRefs({ + fs, + gitdir, + filepath: `refs`, + }); + let haves = []; + for (let ref of haveRefs) { + try { + ref = await GitRefManager.expand({ fs, gitdir, ref }); + const oid = await GitRefManager.resolve({ fs, gitdir, ref }); + if (await hasObject({ fs, cache, gitdir, oid })) { + haves.push(oid); + } + } catch (err) {} + } + haves = [...new Set(haves)]; + const oids = await GitShallowManager.read({ fs, gitdir }); + const shallows = remoteHTTP.capabilities.has('shallow') ? [...oids] : []; + const packstream = writeUploadPackRequest({ + capabilities, + wants, + haves, + shallows, + depth, + since, + exclude, + }); + // CodeCommit will hang up if we don't send a Content-Length header + // so we can't stream the body. + const packbuffer = Buffer.from(await collect(packstream)); + const raw = await GitRemoteHTTP.connect({ + http, + onProgress, + corsProxy, + service: 'git-upload-pack', + url, + auth, + body: [packbuffer], + headers, + }); + const response = await parseUploadPackResponse(raw.body); + if (raw.headers) { + response.headers = raw.headers; + } + // Apply all the 'shallow' and 'unshallow' commands + for (const oid of response.shallows) { + if (!oids.has(oid)) { + // this is in a try/catch mostly because my old test fixtures are missing objects + try { + // server says it's shallow, but do we have the parents? + const { object } = await _readObject({ fs, cache, gitdir, oid }); + const commit = new GitCommit(object); + const hasParents = await Promise.all( + commit + .headers() + .parent.map(oid => hasObject({ fs, cache, gitdir, oid })) + ); + const haveAllParents = + hasParents.length === 0 || hasParents.every(has => has); + if (!haveAllParents) { + oids.add(oid); + } + } catch (err) { + oids.add(oid); + } + } + } + for (const oid of response.unshallows) { + oids.delete(oid); + } + await GitShallowManager.write({ fs, gitdir, oids }); + // Update local remote refs + if (singleBranch) { + const refs = new Map([[fullref, oid]]); + // But wait, maybe it was a symref, like 'HEAD'! + // We need to save all the refs in the symref chain (sigh). + const symrefs = new Map(); + let bail = 10; + let key = fullref; + while (bail--) { + const value = remoteHTTP.symrefs.get(key); + if (value === undefined) break + symrefs.set(key, value); + key = value; + } + // final value must not be a symref but a real ref + const realRef = remoteRefs.get(key); + // There may be no ref at all if we've fetched a specific commit hash + if (realRef) { + refs.set(key, realRef); + } + const { pruned } = await GitRefManager.updateRemoteRefs({ + fs, + gitdir, + remote, + refs, + symrefs, + tags, + prune, + }); + if (prune) { + response.pruned = pruned; + } + } else { + const { pruned } = await GitRefManager.updateRemoteRefs({ + fs, + gitdir, + remote, + refs: remoteRefs, + symrefs: remoteHTTP.symrefs, + tags, + prune, + pruneTags, + }); + if (prune) { + response.pruned = pruned; + } + } + // We need this value later for the `clone` command. 
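+  // (This is a full ref name such as 'refs/heads/main', not a commit SHA.)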
+ response.HEAD = remoteHTTP.symrefs.get('HEAD'); + // AWS CodeCommit doesn't list HEAD as a symref, but we can reverse engineer it + // Find the SHA of the branch called HEAD + if (response.HEAD === undefined) { + const { oid } = GitRefManager.resolveAgainstMap({ + ref: 'HEAD', + map: remoteRefs, + }); + // Use the name of the first branch that's not called HEAD that has + // the same SHA as the branch called HEAD. + for (const [key, value] of remoteRefs.entries()) { + if (key !== 'HEAD' && value === oid) { + response.HEAD = key; + break + } + } + } + const noun = fullref.startsWith('refs/tags') ? 'tag' : 'branch'; + response.FETCH_HEAD = { + oid, + description: `${noun} '${abbreviateRef(fullref)}' of ${url}`, + }; + + if (onProgress || onMessage) { + const lines = splitLines(response.progress); + forAwait(lines, async line => { + if (onMessage) await onMessage(line); + if (onProgress) { + const matches = line.match(/([^:]*).*\((\d+?)\/(\d+?)\)/); + if (matches) { + await onProgress({ + phase: matches[1].trim(), + loaded: parseInt(matches[2], 10), + total: parseInt(matches[3], 10), + }); + } + } + }); + } + const packfile = Buffer.from(await collect(response.packfile)); + if (raw.body.error) throw raw.body.error + const packfileSha = packfile.slice(-20).toString('hex'); + const res = { + defaultBranch: response.HEAD, + fetchHead: response.FETCH_HEAD.oid, + fetchHeadDescription: response.FETCH_HEAD.description, + }; + if (response.headers) { + res.headers = response.headers; + } + if (prune) { + res.pruned = response.pruned; + } + // This is a quick fix for the empty .git/objects/pack/pack-.pack file error, + // which due to the way `git-list-pack` works causes the program to hang when it tries to read it. + // TODO: Longer term, we should actually: + // a) NOT concatenate the entire packfile into memory (line 78), + // b) compute the SHA of the stream except for the last 20 bytes, using the same library used in push.js, and + // c) compare the computed SHA with the last 20 bytes of the stream before saving to disk, and throwing a "packfile got corrupted during download" error if the SHA doesn't match. + if (packfileSha !== '' && !emptyPackfile(packfile)) { + res.packfile = `objects/pack/pack-${packfileSha}.pack`; + const fullpath = join(gitdir, res.packfile); + await fs.write(fullpath, packfile); + const getExternalRefDelta = oid => _readObject({ fs, cache, gitdir, oid }); + const idx = await GitPackIndex.fromPack({ + pack: packfile, + getExternalRefDelta, + onProgress, + }); + await fs.write(fullpath.replace(/\.pack$/, '.idx'), await idx.toBuffer()); + } + return res +} + +// @ts-check + +/** + * Initialize a new repository + * + * @param {object} args + * @param {import('../models/FileSystem.js').FileSystem} args.fs + * @param {string} [args.dir] + * @param {string} [args.gitdir] + * @param {boolean} [args.bare = false] + * @param {string} [args.defaultBranch = 'master'] + * @returns {Promise} + */ +async function _init({ + fs, + bare = false, + dir, + gitdir = bare ? 
dir : join(dir, '.git'), + defaultBranch = 'master', +}) { + // Don't overwrite an existing config + if (await fs.exists(gitdir + '/config')) return + + let folders = [ + 'hooks', + 'info', + 'objects/info', + 'objects/pack', + 'refs/heads', + 'refs/tags', + ]; + folders = folders.map(dir => gitdir + '/' + dir); + for (const folder of folders) { + await fs.mkdir(folder); + } + + await fs.write( + gitdir + '/config', + '[core]\n' + + '\trepositoryformatversion = 0\n' + + '\tfilemode = false\n' + + `\tbare = ${bare}\n` + + (bare ? '' : '\tlogallrefupdates = true\n') + + '\tsymlinks = false\n' + + '\tignorecase = true\n' + ); + await fs.write(gitdir + '/HEAD', `ref: refs/heads/${defaultBranch}\n`); +} + +// @ts-check + +/** + * @param {object} args + * @param {import('../models/FileSystem.js').FileSystem} args.fs + * @param {object} args.cache + * @param {HttpClient} args.http + * @param {ProgressCallback} [args.onProgress] + * @param {MessageCallback} [args.onMessage] + * @param {AuthCallback} [args.onAuth] + * @param {AuthFailureCallback} [args.onAuthFailure] + * @param {AuthSuccessCallback} [args.onAuthSuccess] + * @param {PostCheckoutCallback} [args.onPostCheckout] + * @param {string} [args.dir] + * @param {string} args.gitdir + * @param {string} args.url + * @param {string} args.corsProxy + * @param {string} args.ref + * @param {boolean} args.singleBranch + * @param {boolean} args.noCheckout + * @param {boolean} args.noTags + * @param {string} args.remote + * @param {number} args.depth + * @param {Date} args.since + * @param {string[]} args.exclude + * @param {boolean} args.relative + * @param {Object} args.headers + * + * @returns {Promise} Resolves successfully when clone completes + * + */ +async function _clone({ + fs, + cache, + http, + onProgress, + onMessage, + onAuth, + onAuthSuccess, + onAuthFailure, + onPostCheckout, + dir, + gitdir, + url, + corsProxy, + ref, + remote, + depth, + since, + exclude, + relative, + singleBranch, + noCheckout, + noTags, + headers, +}) { + try { + await _init({ fs, gitdir }); + await _addRemote({ fs, gitdir, remote, url, force: false }); + if (corsProxy) { + const config = await GitConfigManager.get({ fs, gitdir }); + await config.set(`http.corsProxy`, corsProxy); + await GitConfigManager.save({ fs, gitdir, config }); + } + const { defaultBranch, fetchHead } = await _fetch({ + fs, + cache, + http, + onProgress, + onMessage, + onAuth, + onAuthSuccess, + onAuthFailure, + gitdir, + ref, + remote, + corsProxy, + depth, + since, + exclude, + relative, + singleBranch, + headers, + tags: !noTags, + }); + if (fetchHead === null) return + ref = ref || defaultBranch; + ref = ref.replace('refs/heads/', ''); + // Checkout that branch + await _checkout({ + fs, + cache, + onProgress, + onPostCheckout, + dir, + gitdir, + ref, + remote, + noCheckout, + }); + } catch (err) { + // Remove partial local repository, see #1283 + // Ignore any error as we are already failing. + // The catch is necessary so the original error is not masked. 
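+    // (For example: if the fetch step throws an HttpError, the partially
+    // initialized gitdir is removed and that same HttpError reaches the caller.)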
+ await fs + .rmdir(gitdir, { recursive: true, maxRetries: 10 }) + .catch(() => undefined); + throw err + } +} + +// @ts-check + +/** + * Clone a repository + * + * @param {object} args + * @param {FsClient} args.fs - a file system implementation + * @param {HttpClient} args.http - an HTTP client + * @param {ProgressCallback} [args.onProgress] - optional progress event callback + * @param {MessageCallback} [args.onMessage] - optional message event callback + * @param {AuthCallback} [args.onAuth] - optional auth fill callback + * @param {AuthFailureCallback} [args.onAuthFailure] - optional auth rejected callback + * @param {AuthSuccessCallback} [args.onAuthSuccess] - optional auth approved callback + * @param {PostCheckoutCallback} [args.onPostCheckout] - optional post-checkout hook callback + * @param {string} args.dir - The [working tree](dir-vs-gitdir.md) directory path + * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path + * @param {string} args.url - The URL of the remote repository + * @param {string} [args.corsProxy] - Optional [CORS proxy](https://www.npmjs.com/%40isomorphic-git/cors-proxy). Value is stored in the git config file for that repo. + * @param {string} [args.ref] - Which branch to checkout. By default this is the designated "main branch" of the repository. + * @param {boolean} [args.singleBranch = false] - Instead of the default behavior of fetching all the branches, only fetch a single branch. + * @param {boolean} [args.noCheckout = false] - If true, clone will only fetch the repo, not check out a branch. Skipping checkout can save a lot of time normally spent writing files to disk. + * @param {boolean} [args.noTags = false] - By default clone will fetch all tags. `noTags` disables that behavior. + * @param {string} [args.remote = 'origin'] - What to name the remote that is created. + * @param {number} [args.depth] - Integer. Determines how much of the git repository's history to retrieve + * @param {Date} [args.since] - Only fetch commits created after the given date. Mutually exclusive with `depth`. + * @param {string[]} [args.exclude = []] - A list of branches or tags. Instructs the remote server not to send us any commits reachable from these refs. + * @param {boolean} [args.relative = false] - Changes the meaning of `depth` to be measured from the current shallow depth rather than from the branch tip. 
+ * @param {Object} [args.headers = {}] - Additional headers to include in HTTP requests, similar to git's `extraHeader` config + * @param {object} [args.cache] - a [cache](cache.md) object + * + * @returns {Promise} Resolves successfully when clone completes + * + * @example + * await git.clone({ + * fs, + * http, + * dir: '/tutorial', + * corsProxy: 'https://cors.isomorphic-git.org', + * url: 'https://github.com/isomorphic-git/isomorphic-git', + * singleBranch: true, + * depth: 1 + * }) + * console.log('done') + * + */ +async function clone({ + fs, + http, + onProgress, + onMessage, + onAuth, + onAuthSuccess, + onAuthFailure, + onPostCheckout, + dir, + gitdir = join(dir, '.git'), + url, + corsProxy = undefined, + ref = undefined, + remote = 'origin', + depth = undefined, + since = undefined, + exclude = [], + relative = false, + singleBranch = false, + noCheckout = false, + noTags = false, + headers = {}, + cache = {}, +}) { + try { + assertParameter('fs', fs); + assertParameter('http', http); + assertParameter('gitdir', gitdir); + if (!noCheckout) { + assertParameter('dir', dir); + } + assertParameter('url', url); + + return await _clone({ + fs: new FileSystem(fs), + cache, + http, + onProgress, + onMessage, + onAuth, + onAuthSuccess, + onAuthFailure, + onPostCheckout, + dir, + gitdir, + url, + corsProxy, + ref, + remote, + depth, + since, + exclude, + relative, + singleBranch, + noCheckout, + noTags, + headers, + }) + } catch (err) { + err.caller = 'git.clone'; + throw err + } +} + +// @ts-check +/** + * Create a new commit + * + * @param {Object} args + * @param {FsClient} args.fs - a file system implementation + * @param {SignCallback} [args.onSign] - a PGP signing implementation + * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path + * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path + * @param {string} [args.message] - The commit message to use. Required, unless `amend === true` + * @param {Object} [args.author] - The details about the author. + * @param {string} [args.author.name] - Default is `user.name` config. + * @param {string} [args.author.email] - Default is `user.email` config. + * @param {number} [args.author.timestamp=Math.floor(Date.now()/1000)] - Set the author timestamp field. This is the integer number of seconds since the Unix epoch (1970-01-01 00:00:00). + * @param {number} [args.author.timezoneOffset] - Set the author timezone offset field. This is the difference, in minutes, from the current timezone to UTC. Default is `(new Date()).getTimezoneOffset()`. + * @param {Object} [args.committer = author] - The details about the commit committer, in the same format as the author parameter. If not specified, the author details are used. + * @param {string} [args.committer.name] - Default is `user.name` config. + * @param {string} [args.committer.email] - Default is `user.email` config. + * @param {number} [args.committer.timestamp=Math.floor(Date.now()/1000)] - Set the committer timestamp field. This is the integer number of seconds since the Unix epoch (1970-01-01 00:00:00). + * @param {number} [args.committer.timezoneOffset] - Set the committer timezone offset field. This is the difference, in minutes, from the current timezone to UTC. Default is `(new Date()).getTimezoneOffset()`. + * @param {string} [args.signingKey] - Sign the tag object using this private PGP key. + * @param {boolean} [args.amend = false] - If true, replaces the last commit pointed to by `ref` with a new commit. 
+ * @param {boolean} [args.dryRun = false] - If true, simulates making a commit so you can test whether it would succeed. Implies `noUpdateBranch`. + * @param {boolean} [args.noUpdateBranch = false] - If true, does not update the branch pointer after creating the commit. + * @param {string} [args.ref] - The fully expanded name of the branch to commit to. Default is the current branch pointed to by HEAD. (TODO: fix it so it can expand branch names without throwing if the branch doesn't exist yet.) + * @param {string[]} [args.parent] - The SHA-1 object ids of the commits to use as parents. If not specified, the commit pointed to by `ref` is used. + * @param {string} [args.tree] - The SHA-1 object id of the tree to use. If not specified, a new tree object is created from the current git index. + * @param {object} [args.cache] - a [cache](cache.md) object + * + * @returns {Promise} Resolves successfully with the SHA-1 object id of the newly created commit. + * + * @example + * let sha = await git.commit({ + * fs, + * dir: '/tutorial', + * author: { + * name: 'Mr. Test', + * email: 'mrtest@example.com', + * }, + * message: 'Added the a.txt file' + * }) + * console.log(sha) + * + */ +async function commit({ + fs: _fs, + onSign, + dir, + gitdir = join(dir, '.git'), + message, + author, + committer, + signingKey, + amend = false, + dryRun = false, + noUpdateBranch = false, + ref, + parent, + tree, + cache = {}, +}) { + try { + assertParameter('fs', _fs); + if (!amend) { + assertParameter('message', message); + } + if (signingKey) { + assertParameter('onSign', onSign); + } + const fs = new FileSystem(_fs); + + return await _commit({ + fs, + cache, + onSign, + gitdir, + message, + author, + committer, + signingKey, + amend, + dryRun, + noUpdateBranch, + ref, + parent, + tree, + }) + } catch (err) { + err.caller = 'git.commit'; + throw err + } +} + +// @ts-check + +/** + * Get the name of the branch currently pointed to by .git/HEAD + * + * @param {Object} args + * @param {FsClient} args.fs - a file system implementation + * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path + * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path + * @param {boolean} [args.fullname = false] - Return the full path (e.g. "refs/heads/main") instead of the abbreviated form. + * @param {boolean} [args.test = false] - If the current branch doesn't actually exist (such as right after git init) then return `undefined`. + * + * @returns {Promise} The name of the current branch or undefined if the HEAD is detached. + * + * @example + * // Get the current branch name + * let branch = await git.currentBranch({ + * fs, + * dir: '/tutorial', + * fullname: false + * }) + * console.log(branch) + * + */ +async function currentBranch({ + fs, + dir, + gitdir = join(dir, '.git'), + fullname = false, + test = false, +}) { + try { + assertParameter('fs', fs); + assertParameter('gitdir', gitdir); + return await _currentBranch({ + fs: new FileSystem(fs), + gitdir, + fullname, + test, + }) + } catch (err) { + err.caller = 'git.currentBranch'; + throw err + } +} + +// @ts-check + +/** + * @param {Object} args + * @param {import('../models/FileSystem.js').FileSystem} args.fs + * @param {string} args.gitdir + * @param {string} args.ref + * + * @returns {Promise} + */ +async function _deleteBranch({ fs, gitdir, ref }) { + ref = ref.startsWith('refs/heads/') ? 
ref : `refs/heads/${ref}`; + const exist = await GitRefManager.exists({ fs, gitdir, ref }); + if (!exist) { + throw new NotFoundError(ref) + } + + const fullRef = await GitRefManager.expand({ fs, gitdir, ref }); + const currentRef = await _currentBranch({ fs, gitdir, fullname: true }); + if (fullRef === currentRef) { + // detach HEAD + const value = await GitRefManager.resolve({ fs, gitdir, ref: fullRef }); + await GitRefManager.writeRef({ fs, gitdir, ref: 'HEAD', value }); + } + + // Delete a specified branch + await GitRefManager.deleteRef({ fs, gitdir, ref: fullRef }); + + // Delete branch config entries + const abbrevRef = abbreviateRef(ref); + const config = await GitConfigManager.get({ fs, gitdir }); + await config.deleteSection('branch', abbrevRef); + await GitConfigManager.save({ fs, gitdir, config }); +} + +// @ts-check + +/** + * Delete a local branch + * + * > Note: This only deletes loose branches - it should be fixed in the future to delete packed branches as well. + * + * @param {Object} args + * @param {FsClient} args.fs - a file system implementation + * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path + * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path + * @param {string} args.ref - The branch to delete + * + * @returns {Promise} Resolves successfully when filesystem operations are complete + * + * @example + * await git.deleteBranch({ fs, dir: '/tutorial', ref: 'local-branch' }) + * console.log('done') + * + */ +async function deleteBranch({ + fs, + dir, + gitdir = join(dir, '.git'), + ref, +}) { + try { + assertParameter('fs', fs); + assertParameter('ref', ref); + return await _deleteBranch({ + fs: new FileSystem(fs), + gitdir, + ref, + }) + } catch (err) { + err.caller = 'git.deleteBranch'; + throw err + } +} + +// @ts-check + +/** + * Delete a local ref + * + * @param {Object} args + * @param {FsClient} args.fs - a file system implementation + * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path + * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path + * @param {string} args.ref - The ref to delete + * + * @returns {Promise} Resolves successfully when filesystem operations are complete + * + * @example + * await git.deleteRef({ fs, dir: '/tutorial', ref: 'refs/tags/test-tag' }) + * console.log('done') + * + */ +async function deleteRef({ fs, dir, gitdir = join(dir, '.git'), ref }) { + try { + assertParameter('fs', fs); + assertParameter('ref', ref); + await GitRefManager.deleteRef({ fs: new FileSystem(fs), gitdir, ref }); + } catch (err) { + err.caller = 'git.deleteRef'; + throw err + } +} + +// @ts-check + +/** + * @param {Object} args + * @param {import('../models/FileSystem.js').FileSystem} args.fs + * @param {string} args.gitdir + * @param {string} args.remote + * + * @returns {Promise} + */ +async function _deleteRemote({ fs, gitdir, remote }) { + const config = await GitConfigManager.get({ fs, gitdir }); + await config.deleteSection('remote', remote); + await GitConfigManager.save({ fs, gitdir, config }); +} + +// @ts-check + +/** + * Removes the local config entry for a given remote + * + * @param {Object} args + * @param {FsClient} args.fs - a file system implementation + * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path + * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path + * @param {string} args.remote - The 
name of the remote to delete
+ *
+ * @returns {Promise} Resolves successfully when filesystem operations are complete
+ *
+ * @example
+ * await git.deleteRemote({ fs, dir: '/tutorial', remote: 'upstream' })
+ * console.log('done')
+ *
+ */
+async function deleteRemote({
+  fs,
+  dir,
+  gitdir = join(dir, '.git'),
+  remote,
+}) {
+  try {
+    assertParameter('fs', fs);
+    assertParameter('remote', remote);
+    return await _deleteRemote({
+      fs: new FileSystem(fs),
+      gitdir,
+      remote,
+    })
+  } catch (err) {
+    err.caller = 'git.deleteRemote';
+    throw err
+  }
+}
+
+// @ts-check
+
+/**
+ * Delete a local tag ref
+ *
+ * @param {Object} args
+ * @param {import('../models/FileSystem.js').FileSystem} args.fs
+ * @param {string} args.gitdir
+ * @param {string} args.ref - The tag to delete
+ *
+ * @returns {Promise} Resolves successfully when filesystem operations are complete
+ *
+ * @example
+ * await git.deleteTag({ dir: '/', ref: 'test-tag' })
+ * console.log('done')
+ *
+ */
+async function _deleteTag({ fs, gitdir, ref }) {
+  ref = ref.startsWith('refs/tags/') ? ref : `refs/tags/${ref}`;
+  await GitRefManager.deleteRef({ fs, gitdir, ref });
+}
+
+// @ts-check
+
+/**
+ * Delete a local tag ref
+ *
+ * @param {Object} args
+ * @param {FsClient} args.fs - a file system implementation
+ * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
+ * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
+ * @param {string} args.ref - The tag to delete
+ *
+ * @returns {Promise} Resolves successfully when filesystem operations are complete
+ *
+ * @example
+ * await git.deleteTag({ fs, dir: '/tutorial', ref: 'test-tag' })
+ * console.log('done')
+ *
+ */
+async function deleteTag({ fs, dir, gitdir = join(dir, '.git'), ref }) {
+  try {
+    assertParameter('fs', fs);
+    assertParameter('ref', ref);
+    return await _deleteTag({
+      fs: new FileSystem(fs),
+      gitdir,
+      ref,
+    })
+  } catch (err) {
+    err.caller = 'git.deleteTag';
+    throw err
+  }
+}
+
+async function expandOidLoose({ fs, gitdir, oid: short }) {
+  const prefix = short.slice(0, 2);
+  const objectsSuffixes = await fs.readdir(`${gitdir}/objects/${prefix}`);
+  return objectsSuffixes
+    .map(suffix => `${prefix}${suffix}`)
+    .filter(_oid => _oid.startsWith(short))
+}
+
+async function expandOidPacked({
+  fs,
+  cache,
+  gitdir,
+  oid: short,
+  getExternalRefDelta,
+}) {
+  // Iterate through all the .pack files
+  const results = [];
+  let list = await fs.readdir(join(gitdir, 'objects/pack'));
+  list = list.filter(x => x.endsWith('.idx'));
+  for (const filename of list) {
+    const indexFile = `${gitdir}/objects/pack/${filename}`;
+    const p = await readPackIndex({
+      fs,
+      cache,
+      filename: indexFile,
+      getExternalRefDelta,
+    });
+    if (p.error) throw new InternalError(p.error)
+    // Search through the list of oids in the packfile
+    for (const oid of p.offsets.keys()) {
+      if (oid.startsWith(short)) results.push(oid);
+    }
+  }
+  return results
+}
+
+async function _expandOid({ fs, cache, gitdir, oid: short }) {
+  // Curry the current read method so that the packfile un-deltification
+  // process can acquire external ref-deltas.
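+  // (For context: a ref-delta is a packfile entry stored as a delta against
+  // a base object identified by oid; when that base object is not in the same
+  // pack, it must be looked up in the object store, which is the lookup this
+  // callback provides.)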
+ const getExternalRefDelta = oid => _readObject({ fs, cache, gitdir, oid }); + + const results = await expandOidLoose({ fs, gitdir, oid: short }); + const packedOids = await expandOidPacked({ + fs, + cache, + gitdir, + oid: short, + getExternalRefDelta, + }); + // Objects can exist in a pack file as well as loose, make sure we only get a list of unique oids. + for (const packedOid of packedOids) { + if (results.indexOf(packedOid) === -1) { + results.push(packedOid); + } + } + + if (results.length === 1) { + return results[0] + } + if (results.length > 1) { + throw new AmbiguousError('oids', short, results) + } + throw new NotFoundError(`an object matching "${short}"`) +} + +// @ts-check + +/** + * Expand and resolve a short oid into a full oid + * + * @param {Object} args + * @param {FsClient} args.fs - a file system implementation + * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path + * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path + * @param {string} args.oid - The shortened oid prefix to expand (like "0414d2a") + * @param {object} [args.cache] - a [cache](cache.md) object + * + * @returns {Promise} Resolves successfully with the full oid (like "0414d2a286d7bbc7a4a326a61c1f9f888a8ab87f") + * + * @example + * let oid = await git.expandOid({ fs, dir: '/tutorial', oid: '0414d2a'}) + * console.log(oid) + * + */ +async function expandOid({ + fs, + dir, + gitdir = join(dir, '.git'), + oid, + cache = {}, +}) { + try { + assertParameter('fs', fs); + assertParameter('gitdir', gitdir); + assertParameter('oid', oid); + return await _expandOid({ + fs: new FileSystem(fs), + cache, + gitdir, + oid, + }) + } catch (err) { + err.caller = 'git.expandOid'; + throw err + } +} + +// @ts-check + +/** + * Expand an abbreviated ref to its full name + * + * @param {Object} args + * @param {FsClient} args.fs - a file system implementation + * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path + * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path + * @param {string} args.ref - The ref to expand (like "v1.0.0") + * + * @returns {Promise} Resolves successfully with a full ref name ("refs/tags/v1.0.0") + * + * @example + * let fullRef = await git.expandRef({ fs, dir: '/tutorial', ref: 'main'}) + * console.log(fullRef) + * + */ +async function expandRef({ fs, dir, gitdir = join(dir, '.git'), ref }) { + try { + assertParameter('fs', fs); + assertParameter('gitdir', gitdir); + assertParameter('ref', ref); + return await GitRefManager.expand({ + fs: new FileSystem(fs), + gitdir, + ref, + }) + } catch (err) { + err.caller = 'git.expandRef'; + throw err + } +} + +// @ts-check + +/** + * @param {object} args + * @param {import('../models/FileSystem.js').FileSystem} args.fs + * @param {any} args.cache + * @param {string} args.gitdir + * @param {string[]} args.oids + * + */ +async function _findMergeBase({ fs, cache, gitdir, oids }) { + // Note: right now, the tests are geared so that the output should match that of + // `git merge-base --all --octopus` + // because without the --octopus flag, git's output seems to depend on the ORDER of the oids, + // and computing virtual merge bases is just too much for me to fathom right now. + + // If we start N independent walkers, one at each of the given `oids`, and walk backwards + // through ancestors, eventually we'll discover a commit where each one of these N walkers + // has passed through. 
So we just need to keep track of which walkers have visited each commit
+  // until we find a commit that N distinct walkers have visited.
+  const visits = {};
+  const passes = oids.length;
+  let heads = oids.map((oid, index) => ({ index, oid }));
+  while (heads.length) {
+    // Count how many times we've passed each commit
+    const result = new Set();
+    for (const { oid, index } of heads) {
+      if (!visits[oid]) visits[oid] = new Set();
+      visits[oid].add(index);
+      if (visits[oid].size === passes) {
+        result.add(oid);
+      }
+    }
+    if (result.size > 0) {
+      return [...result]
+    }
+    // We haven't found a common ancestor yet
+    const newheads = new Map();
+    for (const { oid, index } of heads) {
+      try {
+        const { object } = await _readObject({ fs, cache, gitdir, oid });
+        const commit = GitCommit.from(object);
+        const { parent } = commit.parseHeaders();
+        for (const oid of parent) {
+          if (!visits[oid] || !visits[oid].has(index)) {
+            newheads.set(oid + ':' + index, { oid, index });
+          }
+        }
+      } catch (err) {
+        // do nothing
+      }
+    }
+    heads = Array.from(newheads.values());
+  }
+  return []
+}
+
+const LINEBREAKS = /^.*(\r?\n|$)/gm;
+
+function mergeFile({ branches, contents }) {
+  const ourName = branches[1];
+  const theirName = branches[2];
+
+  const baseContent = contents[0];
+  const ourContent = contents[1];
+  const theirContent = contents[2];
+
+  const ours = ourContent.match(LINEBREAKS);
+  const base = baseContent.match(LINEBREAKS);
+  const theirs = theirContent.match(LINEBREAKS);
+
+  // Here we let the diff3 library do the heavy lifting.
+  const result = diff3Merge(ours, base, theirs);
+
+  const markerSize = 7;
+
+  // Here we note whether there are conflicts and format the results
+  let mergedText = '';
+  let cleanMerge = true;
+
+  for (const item of result) {
+    if (item.ok) {
+      mergedText += item.ok.join('');
+    }
+    if (item.conflict) {
+      cleanMerge = false;
+      mergedText += `${'<'.repeat(markerSize)} ${ourName}\n`;
+      mergedText += item.conflict.a.join('');
+
+      mergedText += `${'='.repeat(markerSize)}\n`;
+      mergedText += item.conflict.b.join('');
+      mergedText += `${'>'.repeat(markerSize)} ${theirName}\n`;
+    }
+  }
+  return { cleanMerge, mergedText }
+}
+
+// @ts-check
+
+/**
+ * Create a merged tree
+ *
+ * @param {Object} args
+ * @param {import('../models/FileSystem.js').FileSystem} args.fs
+ * @param {object} args.cache
+ * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
+ * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
+ * @param {object} args.index - the git index, used to record conflict entries when `abortOnConflict` is false
+ * @param {string} args.ourOid - The SHA-1 object id of our tree
+ * @param {string} args.baseOid - The SHA-1 object id of the base tree
+ * @param {string} args.theirOid - The SHA-1 object id of their tree
+ * @param {string} [args.ourName='ours'] - The name to use in conflicted files for our hunks
+ * @param {string} [args.baseName='base'] - The name to use in conflicted files (in diff3 format) for the base hunks
+ * @param {string} [args.theirName='theirs'] - The name to use in conflicted files for their hunks
+ * @param {boolean} [args.dryRun=false]
+ * @param {boolean} [args.abortOnConflict=true]
+ * @param {MergeDriverCallback} [args.mergeDriver]
+ *
+ * @returns {Promise} - The SHA-1 object id of the merged tree
+ *
+ */
+async function mergeTree({
+  fs,
+  cache,
+  dir,
+  gitdir = join(dir, '.git'),
+  index,
+  ourOid,
+  baseOid,
+  theirOid,
+  ourName = 'ours',
+  baseName = 'base',
+  theirName = 'theirs',
+  dryRun = false,
+  abortOnConflict = true,
+  mergeDriver,
+}) {
+  const ourTree = 
TREE({ ref: ourOid }); + const baseTree = TREE({ ref: baseOid }); + const theirTree = TREE({ ref: theirOid }); + + const unmergedFiles = []; + const bothModified = []; + const deleteByUs = []; + const deleteByTheirs = []; + + const results = await _walk({ + fs, + cache, + dir, + gitdir, + trees: [ourTree, baseTree, theirTree], + map: async function(filepath, [ours, base, theirs]) { + const path = basename(filepath); + // What we did, what they did + const ourChange = await modified(ours, base); + const theirChange = await modified(theirs, base); + switch (`${ourChange}-${theirChange}`) { + case 'false-false': { + return { + mode: await base.mode(), + path, + oid: await base.oid(), + type: await base.type(), + } + } + case 'false-true': { + return theirs + ? { + mode: await theirs.mode(), + path, + oid: await theirs.oid(), + type: await theirs.type(), + } + : undefined + } + case 'true-false': { + return ours + ? { + mode: await ours.mode(), + path, + oid: await ours.oid(), + type: await ours.type(), + } + : undefined + } + case 'true-true': { + // Modifications + if ( + ours && + base && + theirs && + (await ours.type()) === 'blob' && + (await base.type()) === 'blob' && + (await theirs.type()) === 'blob' + ) { + return mergeBlobs({ + fs, + gitdir, + path, + ours, + base, + theirs, + ourName, + baseName, + theirName, + mergeDriver, + }).then(async r => { + if (!r.cleanMerge) { + unmergedFiles.push(filepath); + bothModified.push(filepath); + if (!abortOnConflict) { + const baseOid = await base.oid(); + const ourOid = await ours.oid(); + const theirOid = await theirs.oid(); + + index.delete({ filepath }); + + index.insert({ filepath, oid: baseOid, stage: 1 }); + index.insert({ filepath, oid: ourOid, stage: 2 }); + index.insert({ filepath, oid: theirOid, stage: 3 }); + } + } else if (!abortOnConflict) { + index.insert({ filepath, oid: r.mergeResult.oid, stage: 0 }); + } + return r.mergeResult + }) + } + + // deleted by us + if ( + base && + !ours && + theirs && + (await base.type()) === 'blob' && + (await theirs.type()) === 'blob' + ) { + unmergedFiles.push(filepath); + deleteByUs.push(filepath); + if (!abortOnConflict) { + const baseOid = await base.oid(); + const theirOid = await theirs.oid(); + + index.delete({ filepath }); + + index.insert({ filepath, oid: baseOid, stage: 1 }); + index.insert({ filepath, oid: theirOid, stage: 3 }); + } + + return { + mode: await theirs.mode(), + oid: await theirs.oid(), + type: 'blob', + path, + } + } + + // deleted by theirs + if ( + base && + ours && + !theirs && + (await base.type()) === 'blob' && + (await ours.type()) === 'blob' + ) { + unmergedFiles.push(filepath); + deleteByTheirs.push(filepath); + if (!abortOnConflict) { + const baseOid = await base.oid(); + const ourOid = await ours.oid(); + + index.delete({ filepath }); + + index.insert({ filepath, oid: baseOid, stage: 1 }); + index.insert({ filepath, oid: ourOid, stage: 2 }); + } + + return { + mode: await ours.mode(), + oid: await ours.oid(), + type: 'blob', + path, + } + } + + // deleted by both + if (base && !ours && !theirs && (await base.type()) === 'blob') { + return undefined + } + + // all other types of conflicts fail + // TODO: Merge conflicts involving additions + throw new MergeNotSupportedError() + } + } + }, + /** + * @param {TreeEntry} [parent] + * @param {Array} children + */ + reduce: + unmergedFiles.length !== 0 && (!dir || abortOnConflict) + ? 
undefined + : async (parent, children) => { + const entries = children.filter(Boolean); // remove undefineds + + // if the parent was deleted, the children have to go + if (!parent) return + + // automatically delete directories if they have been emptied + if (parent && parent.type === 'tree' && entries.length === 0) return + + if (entries.length > 0) { + const tree = new GitTree(entries); + const object = tree.toObject(); + const oid = await _writeObject({ + fs, + gitdir, + type: 'tree', + object, + dryRun, + }); + parent.oid = oid; + } + return parent + }, + }); + + if (unmergedFiles.length !== 0) { + if (dir && !abortOnConflict) { + await _walk({ + fs, + cache, + dir, + gitdir, + trees: [TREE({ ref: results.oid })], + map: async function(filepath, [entry]) { + const path = `${dir}/${filepath}`; + if ((await entry.type()) === 'blob') { + const mode = await entry.mode(); + const content = new TextDecoder().decode(await entry.content()); + await fs.write(path, content, { mode }); + } + return true + }, + }); + } + return new MergeConflictError( + unmergedFiles, + bothModified, + deleteByUs, + deleteByTheirs + ) + } + + return results.oid +} + +/** + * + * @param {Object} args + * @param {import('../models/FileSystem').FileSystem} args.fs + * @param {string} args.gitdir + * @param {string} args.path + * @param {WalkerEntry} args.ours + * @param {WalkerEntry} args.base + * @param {WalkerEntry} args.theirs + * @param {string} [args.ourName] + * @param {string} [args.baseName] + * @param {string} [args.theirName] + * @param {boolean} [args.dryRun = false] + * @param {MergeDriverCallback} [args.mergeDriver] + * + */ +async function mergeBlobs({ + fs, + gitdir, + path, + ours, + base, + theirs, + ourName, + theirName, + baseName, + dryRun, + mergeDriver = mergeFile, +}) { + const type = 'blob'; + // Compute the new mode. + // Since there are ONLY two valid blob modes ('100755' and '100644') it boils down to this + const mode = + (await base.mode()) === (await ours.mode()) + ? await theirs.mode() + : await ours.mode(); + // The trivial case: nothing to merge except maybe mode + if ((await ours.oid()) === (await theirs.oid())) { + return { + cleanMerge: true, + mergeResult: { mode, path, oid: await ours.oid(), type }, + } + } + // if only one side made oid changes, return that side's oid + if ((await ours.oid()) === (await base.oid())) { + return { + cleanMerge: true, + mergeResult: { mode, path, oid: await theirs.oid(), type }, + } + } + if ((await theirs.oid()) === (await base.oid())) { + return { + cleanMerge: true, + mergeResult: { mode, path, oid: await ours.oid(), type }, + } + } + // if both sides made changes do a merge + const ourContent = Buffer.from(await ours.content()).toString('utf8'); + const baseContent = Buffer.from(await base.content()).toString('utf8'); + const theirContent = Buffer.from(await theirs.content()).toString('utf8'); + const { mergedText, cleanMerge } = await mergeDriver({ + branches: [baseName, ourName, theirName], + contents: [baseContent, ourContent, theirContent], + path, + }); + const oid = await _writeObject({ + fs, + gitdir, + type: 'blob', + object: Buffer.from(mergedText, 'utf8'), + dryRun, + }); + + return { cleanMerge, mergeResult: { mode, path, oid, type } } +} + +// @ts-check + +// import diff3 from 'node-diff3' +/** + * + * @typedef {Object} MergeResult - Returns an object with a schema like this: + * @property {string} [oid] - The SHA-1 object id that is now at the head of the branch. Absent only if `dryRun` was specified and `mergeCommit` is true. 
+ * @property {boolean} [alreadyMerged] - True if the branch was already merged so no changes were made
+ * @property {boolean} [fastForward] - True if it was a fast-forward merge
+ * @property {boolean} [mergeCommit] - True if merge resulted in a merge commit
+ * @property {string} [tree] - The SHA-1 object id of the tree resulting from a merge commit
+ *
+ */
+
+/**
+ * @param {object} args
+ * @param {import('../models/FileSystem.js').FileSystem} args.fs
+ * @param {object} args.cache
+ * @param {string} [args.dir]
+ * @param {string} args.gitdir
+ * @param {string} [args.ours]
+ * @param {string} args.theirs
+ * @param {boolean} args.fastForward
+ * @param {boolean} args.fastForwardOnly
+ * @param {boolean} args.dryRun
+ * @param {boolean} args.noUpdateBranch
+ * @param {boolean} args.abortOnConflict
+ * @param {string} [args.message]
+ * @param {Object} args.author
+ * @param {string} args.author.name
+ * @param {string} args.author.email
+ * @param {number} args.author.timestamp
+ * @param {number} args.author.timezoneOffset
+ * @param {Object} args.committer
+ * @param {string} args.committer.name
+ * @param {string} args.committer.email
+ * @param {number} args.committer.timestamp
+ * @param {number} args.committer.timezoneOffset
+ * @param {string} [args.signingKey]
+ * @param {SignCallback} [args.onSign] - a PGP signing implementation
+ * @param {MergeDriverCallback} [args.mergeDriver]
+ *
+ * @returns {Promise} Resolves to a description of the merge operation
+ *
+ */
+async function _merge({
+  fs,
+  cache,
+  dir,
+  gitdir,
+  ours,
+  theirs,
+  fastForward = true,
+  fastForwardOnly = false,
+  dryRun = false,
+  noUpdateBranch = false,
+  abortOnConflict = true,
+  message,
+  author,
+  committer,
+  signingKey,
+  onSign,
+  mergeDriver,
+}) {
+  if (ours === undefined) {
+    ours = await _currentBranch({ fs, gitdir, fullname: true });
+  }
+  ours = await GitRefManager.expand({
+    fs,
+    gitdir,
+    ref: ours,
+  });
+  theirs = await GitRefManager.expand({
+    fs,
+    gitdir,
+    ref: theirs,
+  });
+  const ourOid = await GitRefManager.resolve({
+    fs,
+    gitdir,
+    ref: ours,
+  });
+  const theirOid = await GitRefManager.resolve({
+    fs,
+    gitdir,
+    ref: theirs,
+  });
+  // find most recent common ancestor of ref a and ref b
+  const baseOids = await _findMergeBase({
+    fs,
+    cache,
+    gitdir,
+    oids: [ourOid, theirOid],
+  });
+  if (baseOids.length !== 1) {
+    // TODO: Recursive Merge strategy
+    throw new MergeNotSupportedError()
+  }
+  const baseOid = baseOids[0];
+  // handle fast-forward case
+  if (baseOid === theirOid) {
+    return {
+      oid: ourOid,
+      alreadyMerged: true,
+    }
+  }
+  if (fastForward && baseOid === ourOid) {
+    if (!dryRun && !noUpdateBranch) {
+      await GitRefManager.writeRef({ fs, gitdir, ref: ours, value: theirOid });
+    }
+    return {
+      oid: theirOid,
+      fastForward: true,
+    }
+  } else {
+    // not a simple fast-forward
+    if (fastForwardOnly) {
+      throw new FastForwardError()
+    }
+    // try a fancier merge
+    const tree = await GitIndexManager.acquire(
+      { fs, gitdir, cache, allowUnmerged: false },
+      async index => {
+        return mergeTree({
+          fs,
+          cache,
+          dir,
+          gitdir,
+          index,
+          ourOid,
+          theirOid,
+          baseOid,
+          ourName: abbreviateRef(ours),
+          baseName: 'base',
+          theirName: abbreviateRef(theirs),
+          dryRun,
+          abortOnConflict,
+          mergeDriver,
+        })
+      }
+    );
+
+    // Defer throwing the error until the index lock is relinquished and the
+    // index is written to the filesystem
+    if (tree instanceof MergeConflictError) throw tree
+
+    if (!message) {
+      message = `Merge branch '${abbreviateRef(theirs)}' into ${abbreviateRef(
+        ours
+      )}`;
+    }
+    const oid = 
await _commit({ + fs, + cache, + gitdir, + message, + ref: ours, + tree, + parent: [ourOid, theirOid], + author, + committer, + signingKey, + onSign, + dryRun, + noUpdateBranch, + }); + return { + oid, + tree, + mergeCommit: true, + } + } +} + +// @ts-check + +/** + * @param {object} args + * @param {import('../models/FileSystem.js').FileSystem} args.fs + * @param {object} args.cache + * @param {HttpClient} args.http + * @param {ProgressCallback} [args.onProgress] + * @param {MessageCallback} [args.onMessage] + * @param {AuthCallback} [args.onAuth] + * @param {AuthFailureCallback} [args.onAuthFailure] + * @param {AuthSuccessCallback} [args.onAuthSuccess] + * @param {string} args.dir + * @param {string} args.gitdir + * @param {string} args.ref + * @param {string} [args.url] + * @param {string} [args.remote] + * @param {string} [args.remoteRef] + * @param {boolean} [args.prune] + * @param {boolean} [args.pruneTags] + * @param {string} [args.corsProxy] + * @param {boolean} args.singleBranch + * @param {boolean} args.fastForward + * @param {boolean} args.fastForwardOnly + * @param {Object} [args.headers] + * @param {Object} args.author + * @param {string} args.author.name + * @param {string} args.author.email + * @param {number} args.author.timestamp + * @param {number} args.author.timezoneOffset + * @param {Object} args.committer + * @param {string} args.committer.name + * @param {string} args.committer.email + * @param {number} args.committer.timestamp + * @param {number} args.committer.timezoneOffset + * @param {string} [args.signingKey] + * + * @returns {Promise} Resolves successfully when pull operation completes + * + */ +async function _pull({ + fs, + cache, + http, + onProgress, + onMessage, + onAuth, + onAuthSuccess, + onAuthFailure, + dir, + gitdir, + ref, + url, + remote, + remoteRef, + prune, + pruneTags, + fastForward, + fastForwardOnly, + corsProxy, + singleBranch, + headers, + author, + committer, + signingKey, +}) { + try { + // If ref is undefined, use 'HEAD' + if (!ref) { + const head = await _currentBranch({ fs, gitdir }); + // TODO: use a better error. + if (!head) { + throw new MissingParameterError('ref') + } + ref = head; + } + + const { fetchHead, fetchHeadDescription } = await _fetch({ + fs, + cache, + http, + onProgress, + onMessage, + onAuth, + onAuthSuccess, + onAuthFailure, + gitdir, + corsProxy, + ref, + url, + remote, + remoteRef, + singleBranch, + headers, + prune, + pruneTags, + }); + // Merge the remote tracking branch into the local one. + await _merge({ + fs, + cache, + gitdir, + ours: ref, + theirs: fetchHead, + fastForward, + fastForwardOnly, + message: `Merge ${fetchHeadDescription}`, + author, + committer, + signingKey, + dryRun: false, + noUpdateBranch: false, + }); + await _checkout({ + fs, + cache, + onProgress, + dir, + gitdir, + ref, + remote, + noCheckout: false, + }); + } catch (err) { + err.caller = 'git.pull'; + throw err + } +} + +// @ts-check + +/** + * Like `pull`, but hard-coded with `fastForward: true` so there is no need for an `author` parameter. 
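+ *
+ * (As the implementation shows, this simply calls the same code path as `pull` with `fastForwardOnly: true` and placeholder author/committer details, since a fast-forward never creates a commit object.)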
+ *
+ * @param {object} args
+ * @param {FsClient} args.fs - a file system client
+ * @param {HttpClient} args.http - an HTTP client
+ * @param {ProgressCallback} [args.onProgress] - optional progress event callback
+ * @param {MessageCallback} [args.onMessage] - optional message event callback
+ * @param {AuthCallback} [args.onAuth] - optional auth fill callback
+ * @param {AuthFailureCallback} [args.onAuthFailure] - optional auth rejected callback
+ * @param {AuthSuccessCallback} [args.onAuthSuccess] - optional auth approved callback
+ * @param {string} args.dir - The [working tree](dir-vs-gitdir.md) directory path
+ * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
+ * @param {string} [args.ref] - Which branch to merge into. By default this is the currently checked out branch.
+ * @param {string} [args.url] - (Added in 1.1.0) The URL of the remote repository. The default is the value set in the git config for that remote.
+ * @param {string} [args.remote] - (Added in 1.1.0) If URL is not specified, determines which remote to use.
+ * @param {string} [args.remoteRef] - (Added in 1.1.0) The name of the branch on the remote to fetch. By default this is the configured remote tracking branch.
+ * @param {string} [args.corsProxy] - Optional [CORS proxy](https://www.npmjs.com/%40isomorphic-git/cors-proxy). Overrides value in repo config.
+ * @param {boolean} [args.singleBranch = false] - Instead of the default behavior of fetching all the branches, only fetch a single branch.
+ * @param {Object} [args.headers] - Additional headers to include in HTTP requests, similar to git's `extraHeader` config
+ * @param {object} [args.cache] - a [cache](cache.md) object
+ *
+ * @returns {Promise} Resolves successfully when pull operation completes
+ *
+ * @example
+ * await git.fastForward({
+ *   fs,
+ *   http,
+ *   dir: '/tutorial',
+ *   ref: 'main',
+ *   singleBranch: true
+ * })
+ * console.log('done')
+ *
+ */
+async function fastForward({
+  fs,
+  http,
+  onProgress,
+  onMessage,
+  onAuth,
+  onAuthSuccess,
+  onAuthFailure,
+  dir,
+  gitdir = join(dir, '.git'),
+  ref,
+  url,
+  remote,
+  remoteRef,
+  corsProxy,
+  singleBranch,
+  headers = {},
+  cache = {},
+}) {
+  try {
+    assertParameter('fs', fs);
+    assertParameter('http', http);
+    assertParameter('gitdir', gitdir);
+
+    const thisWillNotBeUsed = {
+      name: '',
+      email: '',
+      timestamp: Date.now(),
+      timezoneOffset: 0,
+    };
+
+    return await _pull({
+      fs: new FileSystem(fs),
+      cache,
+      http,
+      onProgress,
+      onMessage,
+      onAuth,
+      onAuthSuccess,
+      onAuthFailure,
+      dir,
+      gitdir,
+      ref,
+      url,
+      remote,
+      remoteRef,
+      fastForwardOnly: true,
+      corsProxy,
+      singleBranch,
+      headers,
+      author: thisWillNotBeUsed,
+      committer: thisWillNotBeUsed,
+    })
+  } catch (err) {
+    err.caller = 'git.fastForward';
+    throw err
+  }
+}
+
+// @ts-check
+
+/**
+ *
+ * @typedef {object} FetchResult - The object returned has the following schema:
+ * @property {string | null} defaultBranch - The branch that is cloned if no branch is specified
+ * @property {string | null} fetchHead - The SHA-1 object id of the fetched head commit
+ * @property {string | null} fetchHeadDescription - a textual description of the branch that was fetched
+ * @property {Object} [headers] - The HTTP response headers returned by the git server
+ * @property {string[]} [pruned] - A list of branches that were pruned, if you provided the `prune` parameter
+ *
+ */
+
+/**
+ * Fetch commits from a remote repository
+ *
+ * @param {object} args
+ * @param 
{FsClient} args.fs - a file system client + * @param {HttpClient} args.http - an HTTP client + * @param {ProgressCallback} [args.onProgress] - optional progress event callback + * @param {MessageCallback} [args.onMessage] - optional message event callback + * @param {AuthCallback} [args.onAuth] - optional auth fill callback + * @param {AuthFailureCallback} [args.onAuthFailure] - optional auth rejected callback + * @param {AuthSuccessCallback} [args.onAuthSuccess] - optional auth approved callback + * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path + * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path + * @param {string} [args.url] - The URL of the remote repository. The default is the value set in the git config for that remote. + * @param {string} [args.remote] - If URL is not specified, determines which remote to use. + * @param {boolean} [args.singleBranch = false] - Instead of the default behavior of fetching all the branches, only fetch a single branch. + * @param {string} [args.ref] - Which branch to fetch if `singleBranch` is true. By default this is the current branch or the remote's default branch. + * @param {string} [args.remoteRef] - The name of the branch on the remote to fetch if `singleBranch` is true. By default this is the configured remote tracking branch. + * @param {boolean} [args.tags = false] - Also fetch tags + * @param {number} [args.depth] - Integer. Determines how much of the git repository's history to retrieve + * @param {boolean} [args.relative = false] - Changes the meaning of `depth` to be measured from the current shallow depth rather than from the branch tip. + * @param {Date} [args.since] - Only fetch commits created after the given date. Mutually exclusive with `depth`. + * @param {string[]} [args.exclude = []] - A list of branches or tags. Instructs the remote server not to send us any commits reachable from these refs. + * @param {boolean} [args.prune = false] - Delete local remote-tracking branches that are not present on the remote + * @param {boolean} [args.pruneTags = false] - Prune local tags that don’t exist on the remote, and force-update those tags that differ + * @param {string} [args.corsProxy] - Optional [CORS proxy](https://www.npmjs.com/%40isomorphic-git/cors-proxy). Overrides value in repo config. 
+ * @param {Object} [args.headers] - Additional headers to include in HTTP requests, similar to git's `extraHeader` config + * @param {object} [args.cache] - a [cache](cache.md) object + * + * @returns {Promise} Resolves successfully when fetch completes + * @see FetchResult + * + * @example + * let result = await git.fetch({ + * fs, + * http, + * dir: '/tutorial', + * corsProxy: 'https://cors.isomorphic-git.org', + * url: 'https://github.com/isomorphic-git/isomorphic-git', + * ref: 'main', + * depth: 1, + * singleBranch: true, + * tags: false + * }) + * console.log(result) + * + */ +async function fetch({ + fs, + http, + onProgress, + onMessage, + onAuth, + onAuthSuccess, + onAuthFailure, + dir, + gitdir = join(dir, '.git'), + ref, + remote, + remoteRef, + url, + corsProxy, + depth = null, + since = null, + exclude = [], + relative = false, + tags = false, + singleBranch = false, + headers = {}, + prune = false, + pruneTags = false, + cache = {}, +}) { + try { + assertParameter('fs', fs); + assertParameter('http', http); + assertParameter('gitdir', gitdir); + + return await _fetch({ + fs: new FileSystem(fs), + cache, + http, + onProgress, + onMessage, + onAuth, + onAuthSuccess, + onAuthFailure, + gitdir, + ref, + remote, + remoteRef, + url, + corsProxy, + depth, + since, + exclude, + relative, + tags, + singleBranch, + headers, + prune, + pruneTags, + }) + } catch (err) { + err.caller = 'git.fetch'; + throw err + } +} + +// @ts-check + +/** + * Find the merge base for a set of commits + * + * @param {object} args + * @param {FsClient} args.fs - a file system client + * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path + * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path + * @param {string[]} args.oids - Which commits + * @param {object} [args.cache] - a [cache](cache.md) object + * + */ +async function findMergeBase({ + fs, + dir, + gitdir = join(dir, '.git'), + oids, + cache = {}, +}) { + try { + assertParameter('fs', fs); + assertParameter('gitdir', gitdir); + assertParameter('oids', oids); + + return await _findMergeBase({ + fs: new FileSystem(fs), + cache, + gitdir, + oids, + }) + } catch (err) { + err.caller = 'git.findMergeBase'; + throw err + } +} + +// @ts-check + +/** + * Find the root git directory + * + * Starting at `filepath`, walks upward until it finds a directory that contains a subdirectory called '.git'. + * + * @param {Object} args + * @param {import('../models/FileSystem.js').FileSystem} args.fs + * @param {string} args.filepath + * + * @returns {Promise} Resolves successfully with a root git directory path + */ +async function _findRoot({ fs, filepath }) { + if (await fs.exists(join(filepath, '.git'))) { + return filepath + } else { + const parent = dirname(filepath); + if (parent === filepath) { + throw new NotFoundError(`git root for ${filepath}`) + } + return _findRoot({ fs, filepath: parent }) + } +} + +// @ts-check + +/** + * Find the root git directory + * + * Starting at `filepath`, walks upward until it finds a directory that contains a subdirectory called '.git'. + * + * @param {Object} args + * @param {FsClient} args.fs - a file system client + * @param {string} args.filepath - The file directory to start searching in. 
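+ *   (The search walks upward and stops at the filesystem root; as `_findRoot` above shows, a NotFoundError is thrown if no `.git` directory is found along the way.)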
+ *
+ * @returns {Promise} Resolves successfully with a root git directory path
+ * @throws {NotFoundError}
+ *
+ * @example
+ * let gitroot = await git.findRoot({
+ *   fs,
+ *   filepath: '/tutorial/src/utils'
+ * })
+ * console.log(gitroot)
+ *
+ */
+async function findRoot({ fs, filepath }) {
+  try {
+    assertParameter('fs', fs);
+    assertParameter('filepath', filepath);
+
+    return await _findRoot({ fs: new FileSystem(fs), filepath })
+  } catch (err) {
+    err.caller = 'git.findRoot';
+    throw err
+  }
+}
+
+// @ts-check
+
+/**
+ * Read an entry from the git config files.
+ *
+ * *Caveats:*
+ * - Currently only the local `$GIT_DIR/config` file can be read or written. However support for the global `~/.gitconfig` and system `$(prefix)/etc/gitconfig` will be added in the future.
+ * - The current parser does not support the more exotic features of the git-config file format such as `[include]` and `[includeIf]`.
+ *
+ * @param {Object} args
+ * @param {FsClient} args.fs - a file system implementation
+ * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
+ * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
+ * @param {string} args.path - The key of the git config entry
+ *
+ * @returns {Promise} Resolves with the config value
+ *
+ * @example
+ * // Read config value
+ * let value = await git.getConfig({
+ *   fs,
+ *   dir: '/tutorial',
+ *   path: 'remote.origin.url'
+ * })
+ * console.log(value)
+ *
+ */
+async function getConfig({ fs, dir, gitdir = join(dir, '.git'), path }) {
+  try {
+    assertParameter('fs', fs);
+    assertParameter('gitdir', gitdir);
+    assertParameter('path', path);
+
+    return await _getConfig({
+      fs: new FileSystem(fs),
+      gitdir,
+      path,
+    })
+  } catch (err) {
+    err.caller = 'git.getConfig';
+    throw err
+  }
+}
+
+// @ts-check
+
+/**
+ * @param {Object} args
+ * @param {import('../models/FileSystem.js').FileSystem} args.fs
+ * @param {string} args.gitdir
+ * @param {string} args.path
+ *
+ * @returns {Promise<Array<any>>} Resolves with an array of the config value
+ *
+ */
+async function _getConfigAll({ fs, gitdir, path }) {
+  const config = await GitConfigManager.get({ fs, gitdir });
+  return config.getall(path)
+}
+
+// @ts-check
+
+/**
+ * Read a multi-valued entry from the git config files.
+ *
+ * *Caveats:*
+ * - Currently only the local `$GIT_DIR/config` file can be read or written. However support for the global `~/.gitconfig` and system `$(prefix)/etc/gitconfig` will be added in the future.
+ * - The current parser does not support the more exotic features of the git-config file format such as `[include]` and `[includeIf]`.
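+ *
+ * @example
+ * // A usage sketch: a multi-valued key such as `remote.origin.fetch`
+ * // (fetch refspecs) may have several entries; this returns all of them,
+ * // where `getConfig` would return only one.
+ * let refspecs = await git.getConfigAll({
+ *   fs,
+ *   dir: '/tutorial',
+ *   path: 'remote.origin.fetch'
+ * })
+ * console.log(refspecs)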
+ *
+ * @param {Object} args
+ * @param {FsClient} args.fs - a file system implementation
+ * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
+ * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
+ * @param {string} args.path - The key of the git config entry
+ *
+ * @returns {Promise<Array<any>>} Resolves with the config value
+ *
+ */
+async function getConfigAll({
+  fs,
+  dir,
+  gitdir = join(dir, '.git'),
+  path,
+}) {
+  try {
+    assertParameter('fs', fs);
+    assertParameter('gitdir', gitdir);
+    assertParameter('path', path);
+
+    return await _getConfigAll({
+      fs: new FileSystem(fs),
+      gitdir,
+      path,
+    })
+  } catch (err) {
+    err.caller = 'git.getConfigAll';
+    throw err
+  }
+}
+
+// @ts-check
+
+/**
+ *
+ * @typedef {Object} GetRemoteInfoResult - The object returned has the following schema:
+ * @property {string[]} capabilities - The list of capabilities returned by the server (part of the Git protocol)
+ * @property {Object} [refs]
+ * @property {string} [HEAD] - The default branch of the remote
+ * @property {Object<string, string>} [refs.heads] - The branches on the remote
+ * @property {Object<string, string>} [refs.pull] - The special branches representing pull requests (non-standard)
+ * @property {Object<string, string>} [refs.tags] - The tags on the remote
+ *
+ */
+
+/**
+ * List a remote server's branches, tags, and capabilities.
+ *
+ * This is a rare command that doesn't require an `fs`, `dir`, or even `gitdir` argument.
+ * It just communicates to a remote git server, using the first step of the `git-upload-pack` handshake, but stopping short of fetching the packfile.
+ *
+ * @param {object} args
+ * @param {HttpClient} args.http - an HTTP client
+ * @param {AuthCallback} [args.onAuth] - optional auth fill callback
+ * @param {AuthFailureCallback} [args.onAuthFailure] - optional auth rejected callback
+ * @param {AuthSuccessCallback} [args.onAuthSuccess] - optional auth approved callback
+ * @param {string} args.url - The URL of the remote repository. Will be gotten from gitconfig if absent.
+ * @param {string} [args.corsProxy] - Optional [CORS proxy](https://www.npmjs.com/%40isomorphic-git/cors-proxy). Overrides value in repo config.
+ * @param {boolean} [args.forPush = false] - By default, the command queries the 'fetch' capabilities. If true, it will ask for the 'push' capabilities.
+ * @param {Object} [args.headers] - Additional headers to include in HTTP requests, similar to git's `extraHeader` config
+ *
+ * @returns {Promise} Resolves successfully with an object listing the branches, tags, and capabilities of the remote.
+ * @see GetRemoteInfoResult
+ *
+ * @example
+ * let info = await git.getRemoteInfo({
+ *   http,
+ *   url:
+ *     "https://cors.isomorphic-git.org/github.com/isomorphic-git/isomorphic-git.git"
+ * });
+ * console.log(info);
+ *
+ */
+async function getRemoteInfo({
+  http,
+  onAuth,
+  onAuthSuccess,
+  onAuthFailure,
+  corsProxy,
+  url,
+  headers = {},
+  forPush = false,
+}) {
+  try {
+    assertParameter('http', http);
+    assertParameter('url', url);
+
+    const GitRemoteHTTP = GitRemoteManager.getRemoteHelperFor({ url });
+    const remote = await GitRemoteHTTP.discover({
+      http,
+      onAuth,
+      onAuthSuccess,
+      onAuthFailure,
+      corsProxy,
+      service: forPush ? 
'git-receive-pack' : 'git-upload-pack', + url, + headers, + protocolVersion: 1, + }); + + // Note: remote.capabilities, remote.refs, and remote.symrefs are Set and Map objects, + // but one of the objectives of the public API is to always return JSON-compatible objects + // so we must JSONify them. + const result = { + capabilities: [...remote.capabilities], + }; + // Convert the flat list into an object tree, because I figure 99% of the time + // that will be easier to use. + for (const [ref, oid] of remote.refs) { + const parts = ref.split('/'); + const last = parts.pop(); + let o = result; + for (const part of parts) { + o[part] = o[part] || {}; + o = o[part]; + } + o[last] = oid; + } + // Merge symrefs on top of refs to more closely match actual git repo layouts + for (const [symref, ref] of remote.symrefs) { + const parts = symref.split('/'); + const last = parts.pop(); + let o = result; + for (const part of parts) { + o[part] = o[part] || {}; + o = o[part]; + } + o[last] = ref; + } + return result + } catch (err) { + err.caller = 'git.getRemoteInfo'; + throw err + } +} + +// @ts-check + +/** + * @param {any} remote + * @param {string} prefix + * @param {boolean} symrefs + * @param {boolean} peelTags + * @returns {ServerRef[]} + */ +function formatInfoRefs(remote, prefix, symrefs, peelTags) { + const refs = []; + for (const [key, value] of remote.refs) { + if (prefix && !key.startsWith(prefix)) continue + + if (key.endsWith('^{}')) { + if (peelTags) { + const _key = key.replace('^{}', ''); + // Peeled tags are almost always listed immediately after the original tag + const last = refs[refs.length - 1]; + const r = last.ref === _key ? last : refs.find(x => x.ref === _key); + if (r === undefined) { + throw new Error('I did not expect this to happen') + } + r.peeled = value; + } + continue + } + /** @type ServerRef */ + const ref = { ref: key, oid: value }; + if (symrefs) { + if (remote.symrefs.has(key)) { + ref.target = remote.symrefs.get(key); + } + } + refs.push(ref); + } + return refs +} + +// @ts-check + +/** + * @typedef {Object} GetRemoteInfo2Result - This object has the following schema: + * @property {1 | 2} protocolVersion - Git protocol version the server supports + * @property {Object} capabilities - An object of capabilities represented as keys and values + * @property {ServerRef[]} [refs] - Server refs (they get returned by protocol version 1 whether you want them or not) + */ + +/** + * List a remote server's capabilities. + * + * This is a rare command that doesn't require an `fs`, `dir`, or even `gitdir` argument. + * It just communicates to a remote git server, determining what protocol version, commands, and features it supports. + * + * > The successor to [`getRemoteInfo`](./getRemoteInfo.md), this command supports Git Wire Protocol Version 2. + * > Therefore its return type is more complicated as either: + * > + * > - v1 capabilities (and refs) or + * > - v2 capabilities (and no refs) + * > + * > are returned. + * > If you just care about refs, use [`listServerRefs`](./listServerRefs.md) + * + * @param {object} args + * @param {HttpClient} args.http - an HTTP client + * @param {AuthCallback} [args.onAuth] - optional auth fill callback + * @param {AuthFailureCallback} [args.onAuthFailure] - optional auth rejected callback + * @param {AuthSuccessCallback} [args.onAuthSuccess] - optional auth approved callback + * @param {string} args.url - The URL of the remote repository. Will be gotten from gitconfig if absent. 
+ * @param {string} [args.corsProxy] - Optional [CORS proxy](https://www.npmjs.com/%40isomorphic-git/cors-proxy). Overrides value in repo config. + * @param {boolean} [args.forPush = false] - By default, the command queries the 'fetch' capabilities. If true, it will ask for the 'push' capabilities. + * @param {Object} [args.headers] - Additional headers to include in HTTP requests, similar to git's `extraHeader` config + * @param {1 | 2} [args.protocolVersion = 2] - Which version of the Git Protocol to use. + * + * @returns {Promise} Resolves successfully with an object listing the capabilities of the remote. + * @see GetRemoteInfo2Result + * @see ServerRef + * + * @example + * let info = await git.getRemoteInfo2({ + * http, + * corsProxy: "https://cors.isomorphic-git.org", + * url: "https://github.com/isomorphic-git/isomorphic-git.git" + * }); + * console.log(info); + * + */ +async function getRemoteInfo2({ + http, + onAuth, + onAuthSuccess, + onAuthFailure, + corsProxy, + url, + headers = {}, + forPush = false, + protocolVersion = 2, +}) { + try { + assertParameter('http', http); + assertParameter('url', url); + + const GitRemoteHTTP = GitRemoteManager.getRemoteHelperFor({ url }); + const remote = await GitRemoteHTTP.discover({ + http, + onAuth, + onAuthSuccess, + onAuthFailure, + corsProxy, + service: forPush ? 'git-receive-pack' : 'git-upload-pack', + url, + headers, + protocolVersion, + }); + + if (remote.protocolVersion === 2) { + /** @type GetRemoteInfo2Result */ + return { + protocolVersion: remote.protocolVersion, + capabilities: remote.capabilities2, + } + } + + // Note: remote.capabilities, remote.refs, and remote.symrefs are Set and Map objects, + // but one of the objectives of the public API is to always return JSON-compatible objects + // so we must JSONify them. + /** @type Object */ + const capabilities = {}; + for (const cap of remote.capabilities) { + const [key, value] = cap.split('='); + if (value) { + capabilities[key] = value; + } else { + capabilities[key] = true; + } + } + /** @type GetRemoteInfo2Result */ + return { + protocolVersion: 1, + capabilities, + refs: formatInfoRefs(remote, undefined, true, true), + } + } catch (err) { + err.caller = 'git.getRemoteInfo2'; + throw err + } +} + +async function hashObject({ + type, + object, + format = 'content', + oid = undefined, +}) { + if (format !== 'deflated') { + if (format !== 'wrapped') { + object = GitObject.wrap({ type, object }); + } + oid = await shasum(object); + } + return { oid, object } +} + +// @ts-check + +/** + * + * @typedef {object} HashBlobResult - The object returned has the following schema: + * @property {string} oid - The SHA-1 object id + * @property {'blob'} type - The type of the object + * @property {Uint8Array} object - The wrapped git object (the thing that is hashed) + * @property {'wrapped'} format - The format of the object + * + */ + +/** + * Compute what the SHA-1 object id of a file would be + * + * @param {object} args + * @param {Uint8Array|string} args.object - The object to write. If `object` is a String then it will be converted to a Uint8Array using UTF-8 encoding. + * + * @returns {Promise} Resolves successfully with the SHA-1 object id and the wrapped object Uint8Array. 
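+ *   (This oid matches what `git hash-object` computes for the same content, since the SHA-1 covers the `blob <length>` header, a NUL byte, and the content bytes.)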
+ * @see HashBlobResult
+ *
+ * @example
+ * let { oid, type, object, format } = await git.hashBlob({
+ *   object: 'Hello world!',
+ * })
+ *
+ * console.log('oid', oid)
+ * console.log('type', type)
+ * console.log('object', object)
+ * console.log('format', format)
+ *
+ */
+async function hashBlob({ object }) {
+  try {
+    assertParameter('object', object);
+
+    // Convert object to buffer
+    if (typeof object === 'string') {
+      object = Buffer.from(object, 'utf8');
+    } else {
+      object = Buffer.from(object);
+    }
+
+    const type = 'blob';
+    const { oid, object: _object } = await hashObject({
+      type: 'blob',
+      format: 'content',
+      object,
+    });
+    return { oid, type, object: new Uint8Array(_object), format: 'wrapped' }
+  } catch (err) {
+    err.caller = 'git.hashBlob';
+    throw err
+  }
+}
+
+// @ts-check
+
+/**
+ * @param {object} args
+ * @param {import('../models/FileSystem.js').FileSystem} args.fs
+ * @param {any} args.cache
+ * @param {ProgressCallback} [args.onProgress]
+ * @param {string} args.dir
+ * @param {string} args.gitdir
+ * @param {string} args.filepath
+ *
+ * @returns {Promise<{oids: string[]}>}
+ */
+async function _indexPack({
+  fs,
+  cache,
+  onProgress,
+  dir,
+  gitdir,
+  filepath,
+}) {
+  try {
+    filepath = join(dir, filepath);
+    const pack = await fs.read(filepath);
+    const getExternalRefDelta = oid => _readObject({ fs, cache, gitdir, oid });
+    const idx = await GitPackIndex.fromPack({
+      pack,
+      getExternalRefDelta,
+      onProgress,
+    });
+    await fs.write(filepath.replace(/\.pack$/, '.idx'), await idx.toBuffer());
+    return {
+      oids: [...idx.hashes],
+    }
+  } catch (err) {
+    err.caller = 'git.indexPack';
+    throw err
+  }
+}
+
+// @ts-check
+
+/**
+ * Create the .idx file for a given .pack file
+ *
+ * @param {object} args
+ * @param {FsClient} args.fs - a file system client
+ * @param {ProgressCallback} [args.onProgress] - optional progress event callback
+ * @param {string} args.dir - The [working tree](dir-vs-gitdir.md) directory path
+ * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
+ * @param {string} args.filepath - The path to the .pack file to index
+ * @param {object} [args.cache] - a [cache](cache.md) object
+ *
+ * @returns {Promise<{oids: string[]}>} Resolves with a list of the SHA-1 object ids contained in the packfile
+ *
+ * @example
+ * let packfiles = await fs.promises.readdir('/tutorial/.git/objects/pack')
+ * packfiles = packfiles.filter(name => name.endsWith('.pack'))
+ * console.log('packfiles', packfiles)
+ *
+ * const { oids } = await git.indexPack({
+ *   fs,
+ *   dir: '/tutorial',
+ *   filepath: `.git/objects/pack/${packfiles[0]}`,
+ *   async onProgress (evt) {
+ *     console.log(`${evt.phase}: ${evt.loaded} / ${evt.total}`)
+ *   }
+ * })
+ * console.log(oids)
+ *
+ */
+async function indexPack({
+  fs,
+  onProgress,
+  dir,
+  gitdir = join(dir, '.git'),
+  filepath,
+  cache = {},
+}) {
+  try {
+    assertParameter('fs', fs);
+    assertParameter('dir', dir);
+    assertParameter('gitdir', gitdir);
+    assertParameter('filepath', filepath);
+
+    return await _indexPack({
+      fs: new FileSystem(fs),
+      cache,
+      onProgress,
+      dir,
+      gitdir,
+      filepath,
+    })
+  } catch (err) {
+    err.caller = 'git.indexPack';
+    throw err
+  }
+}
+
+// @ts-check
+
+/**
+ * Initialize a new repository
+ *
+ * @param {object} args
+ * @param {FsClient} args.fs - a file system client
+ * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
+ * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git 
directory](dir-vs-gitdir.md) path
+ * @param {boolean} [args.bare = false] - Initialize a bare repository
+ * @param {string} [args.defaultBranch = 'master'] - The name of the default branch (might be changed to a required argument in 2.0.0)
+ * @returns {Promise} Resolves successfully when filesystem operations are complete
+ *
+ * @example
+ * await git.init({ fs, dir: '/tutorial' })
+ * console.log('done')
+ *
+ */
+async function init({
+  fs,
+  bare = false,
+  dir,
+  gitdir = bare ? dir : join(dir, '.git'),
+  defaultBranch = 'master',
+}) {
+  try {
+    assertParameter('fs', fs);
+    assertParameter('gitdir', gitdir);
+    if (!bare) {
+      assertParameter('dir', dir);
+    }
+
+    return await _init({
+      fs: new FileSystem(fs),
+      bare,
+      dir,
+      gitdir,
+      defaultBranch,
+    })
+  } catch (err) {
+    err.caller = 'git.init';
+    throw err
+  }
+}
+
+// @ts-check
+
+/**
+ * @param {object} args
+ * @param {import('../models/FileSystem.js').FileSystem} args.fs
+ * @param {any} args.cache
+ * @param {string} args.gitdir
+ * @param {string} args.oid
+ * @param {string} args.ancestor
+ * @param {number} args.depth - Maximum depth to search before giving up. -1 means no maximum depth.
+ *
+ * @returns {Promise}
+ */
+async function _isDescendent({
+  fs,
+  cache,
+  gitdir,
+  oid,
+  ancestor,
+  depth,
+}) {
+  const shallows = await GitShallowManager.read({ fs, gitdir });
+  if (!oid) {
+    throw new MissingParameterError('oid')
+  }
+  if (!ancestor) {
+    throw new MissingParameterError('ancestor')
+  }
+  // If you don't like this behavior, add your own check.
+  // Edge cases make a perfect solution hard to define.
+  if (oid === ancestor) return false
+  // We do not use recursion here, because that would lead to depth-first traversal,
+  // and we want to maintain a breadth-first traversal to avoid hitting shallow clone depth cutoffs.
+  const queue = [oid];
+  const visited = new Set();
+  let searchdepth = 0;
+  while (queue.length) {
+    if (searchdepth++ === depth) {
+      throw new MaxDepthError(depth)
+    }
+    const oid = queue.shift();
+    const { type, object } = await _readObject({
+      fs,
+      cache,
+      gitdir,
+      oid,
+    });
+    if (type !== 'commit') {
+      throw new ObjectTypeError(oid, type, 'commit')
+    }
+    const commit = GitCommit.from(object).parse();
+    // Are any of the parents the sought-after ancestor?
+    for (const parent of commit.parent) {
+      if (parent === ancestor) return true
+    }
+    // If not, add them to the queue (unless we know this is a shallow commit)
+    if (!shallows.has(oid)) {
+      for (const parent of commit.parent) {
+        if (!visited.has(parent)) {
+          queue.push(parent);
+          visited.add(parent);
+        }
+      }
+    }
+    // Eventually, we'll traverse the entire tree to the roots, where all the parents are empty arrays,
+    // or hit the shallow depth and throw an error. Excluding the possibility of grafts, or
+    // different branches cloned to different depths, you would hit this error at the same time
+    // for all parents, so trying to continue is futile.
+  }
+  return false
+}
+
+// @ts-check
+
+/**
+ * Check whether a git commit is descended from another
+ *
+ * @param {object} args
+ * @param {FsClient} args.fs - a file system client
+ * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
+ * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
+ * @param {string} args.oid - The descendent commit
+ * @param {string} args.ancestor - The (proposed) ancestor commit
+ * @param {number} [args.depth = -1] - Maximum depth to search before giving up. -1 means no maximum depth.
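+ *   (Per `_isDescendent` above, exceeding `depth` throws a MaxDepthError rather than resolving `false`.)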
+ * @param {object} [args.cache] - a [cache](cache.md) object
+ *
+ * @returns {Promise} Resolves to true if `oid` is a descendent of `ancestor`
+ *
+ * @example
+ * let oid = await git.resolveRef({ fs, dir: '/tutorial', ref: 'main' })
+ * let ancestor = await git.resolveRef({ fs, dir: '/tutorial', ref: 'v0.20.0' })
+ * console.log(oid, ancestor)
+ * await git.isDescendent({ fs, dir: '/tutorial', oid, ancestor, depth: -1 })
+ *
+ */
+async function isDescendent({
+  fs,
+  dir,
+  gitdir = join(dir, '.git'),
+  oid,
+  ancestor,
+  depth = -1,
+  cache = {},
+}) {
+  try {
+    assertParameter('fs', fs);
+    assertParameter('gitdir', gitdir);
+    assertParameter('oid', oid);
+    assertParameter('ancestor', ancestor);
+
+    return await _isDescendent({
+      fs: new FileSystem(fs),
+      cache,
+      gitdir,
+      oid,
+      ancestor,
+      depth,
+    })
+  } catch (err) {
+    err.caller = 'git.isDescendent';
+    throw err
+  }
+}
+
+// @ts-check
+
+/**
+ * Test whether a filepath should be ignored (because of .gitignore or .git/exclude)
+ *
+ * @param {object} args
+ * @param {FsClient} args.fs - a file system client
+ * @param {string} args.dir - The [working tree](dir-vs-gitdir.md) directory path
+ * @param {string} [args.gitdir=join(dir, '.git')] - [required] The [git directory](dir-vs-gitdir.md) path
+ * @param {string} args.filepath - The filepath to test
+ *
+ * @returns {Promise} Resolves to true if the file should be ignored
+ *
+ * @example
+ * await git.isIgnored({ fs, dir: '/tutorial', filepath: 'docs/add.md' })
+ *
+ */
+async function isIgnored({
+  fs,
+  dir,
+  gitdir = join(dir, '.git'),
+  filepath,
+}) {
+  try {
+    assertParameter('fs', fs);
+    assertParameter('dir', dir);
+    assertParameter('gitdir', gitdir);
+    assertParameter('filepath', filepath);
+
+    return GitIgnoreManager.isIgnored({
+      fs: new FileSystem(fs),
+      dir,
+      gitdir,
+      filepath,
+    })
+  } catch (err) {
+    err.caller = 'git.isIgnored';
+    throw err
+  }
+}
+
+// @ts-check
+
+/**
+ * List branches
+ *
+ * By default it lists local branches. If a 'remote' is specified, it lists the remote's branches. When listing remote branches, the HEAD branch is not filtered out, so it may be included in the list of results.
+ *
+ * Note that specifying a remote does not actually contact the server and update the list of branches.
+ * If you want an up-to-date list, first do a `fetch` to that remote.
+ * (Which branch you fetch doesn't matter - the list of branches available on the remote is updated during the fetch handshake.)
+ *
+ * Also note that a branch is a reference to a commit. If you initialize a new repository it has no commits, so the
+ * `listBranches` function will return an empty list until you create the first commit.
+ *
+ * @param {object} args
+ * @param {FsClient} args.fs - a file system client
+ * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
+ * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
+ * @param {string} [args.remote] - Instead of the branches in `refs/heads`, list the branches in `refs/remotes/${remote}`.
+ * + * @returns {Promise>} Resolves successfully with an array of branch names + * + * @example + * let branches = await git.listBranches({ fs, dir: '/tutorial' }) + * console.log(branches) + * let remoteBranches = await git.listBranches({ fs, dir: '/tutorial', remote: 'origin' }) + * console.log(remoteBranches) + * + */ +async function listBranches({ + fs, + dir, + gitdir = join(dir, '.git'), + remote, +}) { + try { + assertParameter('fs', fs); + assertParameter('gitdir', gitdir); + + return GitRefManager.listBranches({ + fs: new FileSystem(fs), + gitdir, + remote, + }) + } catch (err) { + err.caller = 'git.listBranches'; + throw err + } +} + +// @ts-check + +/** + * @param {object} args + * @param {import('../models/FileSystem.js').FileSystem} args.fs + * @param {object} args.cache + * @param {string} args.gitdir + * @param {string} [args.ref] + * + * @returns {Promise>} + */ +async function _listFiles({ fs, gitdir, ref, cache }) { + if (ref) { + const oid = await GitRefManager.resolve({ gitdir, fs, ref }); + const filenames = []; + await accumulateFilesFromOid({ + fs, + cache, + gitdir, + oid, + filenames, + prefix: '', + }); + return filenames + } else { + return GitIndexManager.acquire({ fs, gitdir, cache }, async function( + index + ) { + return index.entries.map(x => x.path) + }) + } +} + +async function accumulateFilesFromOid({ + fs, + cache, + gitdir, + oid, + filenames, + prefix, +}) { + const { tree } = await _readTree({ fs, cache, gitdir, oid }); + // TODO: Use `walk` to do this. Should be faster. + for (const entry of tree) { + if (entry.type === 'tree') { + await accumulateFilesFromOid({ + fs, + cache, + gitdir, + oid: entry.oid, + filenames, + prefix: join(prefix, entry.path), + }); + } else { + filenames.push(join(prefix, entry.path)); + } + } +} + +// @ts-check + +/** + * List all the files in the git index or a commit + * + * > Note: This function is efficient for listing the files in the staging area, but listing all the files in a commit requires recursively walking through the git object store. + * > If you do not require a complete list of every file, better performance can be achieved by using [walk](./walk) and ignoring subdirectories you don't care about. 
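+ * > For illustration, a rough sketch of that `walk`-based approach (assuming, per the walk docs, that returning `null` from `map` prunes the subtree; `node_modules` is just an example of a directory to skip):
+ * >
+ * > ```
+ * > const files = await git.walk({
+ * >   fs,
+ * >   dir: '/tutorial',
+ * >   trees: [git.TREE({ ref: 'HEAD' })],
+ * >   map: async (filepath, [entry]) => {
+ * >     if (filepath === 'node_modules') return null // skip this whole subtree
+ * >     if (entry && (await entry.type()) === 'blob') return filepath
+ * >   },
+ * > })
+ * > ```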
+ * + * @param {object} args + * @param {FsClient} args.fs - a file system client + * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path + * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path + * @param {string} [args.ref] - Return a list of all the files in the commit at `ref` instead of the files currently in the git index (aka staging area) + * @param {object} [args.cache] - a [cache](cache.md) object + * + * @returns {Promise>} Resolves successfully with an array of filepaths + * + * @example + * // All the files in the previous commit + * let files = await git.listFiles({ fs, dir: '/tutorial', ref: 'HEAD' }) + * console.log(files) + * // All the files in the current staging area + * files = await git.listFiles({ fs, dir: '/tutorial' }) + * console.log(files) + * + */ +async function listFiles({ + fs, + dir, + gitdir = join(dir, '.git'), + ref, + cache = {}, +}) { + try { + assertParameter('fs', fs); + assertParameter('gitdir', gitdir); + + return await _listFiles({ + fs: new FileSystem(fs), + cache, + gitdir, + ref, + }) + } catch (err) { + err.caller = 'git.listFiles'; + throw err + } +} + +// @ts-check + +/** + * List all the object notes + * + * @param {object} args + * @param {import('../models/FileSystem.js').FileSystem} args.fs + * @param {any} args.cache + * @param {string} args.gitdir + * @param {string} args.ref + * + * @returns {Promise>} + */ + +async function _listNotes({ fs, cache, gitdir, ref }) { + // Get the current note commit + let parent; + try { + parent = await GitRefManager.resolve({ gitdir, fs, ref }); + } catch (err) { + if (err instanceof NotFoundError) { + return [] + } + } + + // Create the current note tree + const result = await _readTree({ + fs, + cache, + gitdir, + oid: parent, + }); + + // Format the tree entries + const notes = result.tree.map(entry => ({ + target: entry.path, + note: entry.oid, + })); + return notes +} + +// @ts-check + +/** + * List all the object notes + * + * @param {object} args + * @param {FsClient} args.fs - a file system client + * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path + * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path + * @param {string} [args.ref] - The notes ref to look under + * @param {object} [args.cache] - a [cache](cache.md) object + * + * @returns {Promise>} Resolves successfully with an array of entries containing SHA-1 object ids of the note and the object the note targets + */ + +async function listNotes({ + fs, + dir, + gitdir = join(dir, '.git'), + ref = 'refs/notes/commits', + cache = {}, +}) { + try { + assertParameter('fs', fs); + assertParameter('gitdir', gitdir); + assertParameter('ref', ref); + + return await _listNotes({ + fs: new FileSystem(fs), + cache, + gitdir, + ref, + }) + } catch (err) { + err.caller = 'git.listNotes'; + throw err + } +} + +// @ts-check + +/** + * @param {object} args + * @param {import('../models/FileSystem.js').FileSystem} args.fs + * @param {string} args.gitdir + * + * @returns {Promise>} + */ +async function _listRemotes({ fs, gitdir }) { + const config = await GitConfigManager.get({ fs, gitdir }); + const remoteNames = await config.getSubsections('remote'); + const remotes = Promise.all( + remoteNames.map(async remote => { + const url = await config.get(`remote.${remote}.url`); + return { remote, url } + }) + ); + return remotes +} + +// @ts-check + +/** + * List remotes + * + * @param {object} args + 
* @param {FsClient} args.fs - a file system client + * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path + * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path + * + * @returns {Promise>} Resolves successfully with an array of `{remote, url}` objects + * + * @example + * let remotes = await git.listRemotes({ fs, dir: '/tutorial' }) + * console.log(remotes) + * + */ +async function listRemotes({ fs, dir, gitdir = join(dir, '.git') }) { + try { + assertParameter('fs', fs); + assertParameter('gitdir', gitdir); + + return await _listRemotes({ + fs: new FileSystem(fs), + gitdir, + }) + } catch (err) { + err.caller = 'git.listRemotes'; + throw err + } +} + +/** + * @typedef {Object} ServerRef - This object has the following schema: + * @property {string} ref - The name of the ref + * @property {string} oid - The SHA-1 object id the ref points to + * @property {string} [target] - The target ref pointed to by a symbolic ref + * @property {string} [peeled] - If the oid is the SHA-1 object id of an annotated tag, this is the SHA-1 object id that the annotated tag points to + */ + +async function parseListRefsResponse(stream) { + const read = GitPktLine.streamReader(stream); + + // TODO: when we re-write everything to minimize memory usage, + // we could make this a generator + const refs = []; + + let line; + while (true) { + line = await read(); + if (line === true) break + if (line === null) continue + line = line.toString('utf8').replace(/\n$/, ''); + const [oid, ref, ...attrs] = line.split(' '); + const r = { ref, oid }; + for (const attr of attrs) { + const [name, value] = attr.split(':'); + if (name === 'symref-target') { + r.target = value; + } else if (name === 'peeled') { + r.peeled = value; + } + } + refs.push(r); + } + + return refs +} + +/** + * @param {object} args + * @param {string} [args.prefix] - Only list refs that start with this prefix + * @param {boolean} [args.symrefs = false] - Include symbolic ref targets + * @param {boolean} [args.peelTags = false] - Include peeled tags values + * @returns {Uint8Array[]} + */ +async function writeListRefsRequest({ prefix, symrefs, peelTags }) { + const packstream = []; + // command + packstream.push(GitPktLine.encode('command=ls-refs\n')); + // capability-list + packstream.push(GitPktLine.encode(`agent=${pkg.agent}\n`)); + // [command-args] + if (peelTags || symrefs || prefix) { + packstream.push(GitPktLine.delim()); + } + if (peelTags) packstream.push(GitPktLine.encode('peel')); + if (symrefs) packstream.push(GitPktLine.encode('symrefs')); + if (prefix) packstream.push(GitPktLine.encode(`ref-prefix ${prefix}`)); + packstream.push(GitPktLine.flush()); + return packstream +} + +// @ts-check + +/** + * Fetch a list of refs (branches, tags, etc) from a server. + * + * This is a rare command that doesn't require an `fs`, `dir`, or even `gitdir` argument. + * It just requires an `http` argument. + * + * ### About `protocolVersion` + * + * There's a rather fun trade-off between Git Protocol Version 1 and Git Protocol Version 2. + * Version 2 actually requires 2 HTTP requests instead of 1, making it similar to fetch or push in that regard. + * However, version 2 supports server-side filtering by prefix, whereas that filtering is done client-side in version 1. + * Which protocol is most efficient therefore depends on the number of refs on the remote, the latency of the server, and speed of the network connection. 
+ * For small repos (or fast Internet connections), the requirement to make two trips to the server makes protocol 2 slower.
+ * But for large repos (or slow Internet connections), the decreased payload size of the second request makes up for the additional request.
+ *
+ * Hard numbers vary by situation, but here are some numbers from my machine:
+ *
+ * Using isomorphic-git in a browser, with a CORS proxy, listing only the branches (refs/heads) of https://github.com/isomorphic-git/isomorphic-git
+ * - Protocol Version 1 took ~300ms and transferred 84 KB.
+ * - Protocol Version 2 took ~500ms and transferred 4.1 KB.
+ *
+ * Using isomorphic-git in a browser, with a CORS proxy, listing only the branches (refs/heads) of https://gitlab.com/gitlab-org/gitlab
+ * - Protocol Version 1 took ~4900ms and transferred 9.41 MB.
+ * - Protocol Version 2 took ~1280ms and transferred 433 KB.
+ *
+ * Finally, there is a fun quirk regarding the `symrefs` parameter.
+ * Protocol Version 1 will generally only return the `HEAD` symref and not others.
+ * Historically, this meant that servers didn't use symbolic refs except for `HEAD`, which is used to point at the "default branch".
+ * However, Protocol Version 2 can return *all* the symbolic refs on the server.
+ * So if you are running your own git server, you could take advantage of that.
+ *
+ * #### TL;DR
+ * If you are _not_ taking advantage of `prefix`, I would recommend `protocolVersion: 1`.
+ * Otherwise, I recommend using the default, which is `protocolVersion: 2`.
+ *
+ * @param {object} args
+ * @param {HttpClient} args.http - an HTTP client
+ * @param {AuthCallback} [args.onAuth] - optional auth fill callback
+ * @param {AuthFailureCallback} [args.onAuthFailure] - optional auth rejected callback
+ * @param {AuthSuccessCallback} [args.onAuthSuccess] - optional auth approved callback
+ * @param {string} args.url - The URL of the remote repository.
+ * @param {string} [args.corsProxy] - Optional [CORS proxy](https://www.npmjs.com/%40isomorphic-git/cors-proxy). Overrides value in repo config.
+ * @param {boolean} [args.forPush = false] - By default, the command queries the 'fetch' capabilities. If true, it will ask for the 'push' capabilities.
+ * @param {Object} [args.headers] - Additional headers to include in HTTP requests, similar to git's `extraHeader` config
+ * @param {1 | 2} [args.protocolVersion = 2] - Which version of the Git Protocol to use.
+ * @param {string} [args.prefix] - Only list refs that start with this prefix + * @param {boolean} [args.symrefs = false] - Include symbolic ref targets + * @param {boolean} [args.peelTags = false] - Include annotated tag peeled targets + * + * @returns {Promise} Resolves successfully with an array of ServerRef objects + * @see ServerRef + * + * @example + * // List all the branches on a repo + * let refs = await git.listServerRefs({ + * http, + * corsProxy: "https://cors.isomorphic-git.org", + * url: "https://github.com/isomorphic-git/isomorphic-git.git", + * prefix: "refs/heads/", + * }); + * console.log(refs); + * + * @example + * // Get the default branch on a repo + * let refs = await git.listServerRefs({ + * http, + * corsProxy: "https://cors.isomorphic-git.org", + * url: "https://github.com/isomorphic-git/isomorphic-git.git", + * prefix: "HEAD", + * symrefs: true, + * }); + * console.log(refs); + * + * @example + * // List all the tags on a repo + * let refs = await git.listServerRefs({ + * http, + * corsProxy: "https://cors.isomorphic-git.org", + * url: "https://github.com/isomorphic-git/isomorphic-git.git", + * prefix: "refs/tags/", + * peelTags: true, + * }); + * console.log(refs); + * + * @example + * // List all the pull requests on a repo + * let refs = await git.listServerRefs({ + * http, + * corsProxy: "https://cors.isomorphic-git.org", + * url: "https://github.com/isomorphic-git/isomorphic-git.git", + * prefix: "refs/pull/", + * }); + * console.log(refs); + * + */ +async function listServerRefs({ + http, + onAuth, + onAuthSuccess, + onAuthFailure, + corsProxy, + url, + headers = {}, + forPush = false, + protocolVersion = 2, + prefix, + symrefs, + peelTags, +}) { + try { + assertParameter('http', http); + assertParameter('url', url); + + const remote = await GitRemoteHTTP.discover({ + http, + onAuth, + onAuthSuccess, + onAuthFailure, + corsProxy, + service: forPush ? 'git-receive-pack' : 'git-upload-pack', + url, + headers, + protocolVersion, + }); + + if (remote.protocolVersion === 1) { + return formatInfoRefs(remote, prefix, symrefs, peelTags) + } + + // Protocol Version 2 + const body = await writeListRefsRequest({ prefix, symrefs, peelTags }); + + const res = await GitRemoteHTTP.connect({ + http, + auth: remote.auth, + headers, + corsProxy, + service: forPush ? 
'git-receive-pack' : 'git-upload-pack', + url, + body, + }); + + return parseListRefsResponse(res.body) + } catch (err) { + err.caller = 'git.listServerRefs'; + throw err + } +} + +// @ts-check + +/** + * List tags + * + * @param {object} args + * @param {FsClient} args.fs - a file system client + * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path + * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path + * + * @returns {Promise>} Resolves successfully with an array of tag names + * + * @example + * let tags = await git.listTags({ fs, dir: '/tutorial' }) + * console.log(tags) + * + */ +async function listTags({ fs, dir, gitdir = join(dir, '.git') }) { + try { + assertParameter('fs', fs); + assertParameter('gitdir', gitdir); + return GitRefManager.listTags({ fs: new FileSystem(fs), gitdir }) + } catch (err) { + err.caller = 'git.listTags'; + throw err + } +} + +function compareAge(a, b) { + return a.committer.timestamp - b.committer.timestamp +} + +// @ts-check + +// the empty file content object id +const EMPTY_OID = 'e69de29bb2d1d6434b8b29ae775ad8c2e48c5391'; + +async function resolveFileIdInTree({ fs, cache, gitdir, oid, fileId }) { + if (fileId === EMPTY_OID) return + const _oid = oid; + let filepath; + const result = await resolveTree({ fs, cache, gitdir, oid }); + const tree = result.tree; + if (fileId === result.oid) { + filepath = result.path; + } else { + filepath = await _resolveFileId({ + fs, + cache, + gitdir, + tree, + fileId, + oid: _oid, + }); + if (Array.isArray(filepath)) { + if (filepath.length === 0) filepath = undefined; + else if (filepath.length === 1) filepath = filepath[0]; + } + } + return filepath +} + +async function _resolveFileId({ + fs, + cache, + gitdir, + tree, + fileId, + oid, + filepaths = [], + parentPath = '', +}) { + const walks = tree.entries().map(function(entry) { + let result; + if (entry.oid === fileId) { + result = join(parentPath, entry.path); + filepaths.push(result); + } else if (entry.type === 'tree') { + result = _readObject({ + fs, + cache, + gitdir, + oid: entry.oid, + }).then(function({ object }) { + return _resolveFileId({ + fs, + cache, + gitdir, + tree: GitTree.from(object), + fileId, + oid, + filepaths, + parentPath: join(parentPath, entry.path), + }) + }); + } + return result + }); + + await Promise.all(walks); + return filepaths +} + +// @ts-check + +/** + * Get commit descriptions from the git history + * + * @param {object} args + * @param {import('../models/FileSystem.js').FileSystem} args.fs + * @param {any} args.cache + * @param {string} args.gitdir + * @param {string=} args.filepath optional get the commit for the filepath only + * @param {string} args.ref + * @param {number|void} args.depth + * @param {boolean=} [args.force=false] do not throw error if filepath is not exist (works only for a single file). defaults to false + * @param {boolean=} [args.follow=false] Continue listing the history of a file beyond renames (works only for a single file). defaults to false + * @param {boolean=} args.follow Continue listing the history of a file beyond renames (works only for a single file). 
+ *
+ * @returns {Promise<Array<ReadCommitResult>>} Resolves to an array of ReadCommitResult objects
+ * @see ReadCommitResult
+ * @see CommitObject
+ *
+ * @example
+ * let commits = await git.log({ fs, dir: '/', depth: 5, ref: 'master' })
+ * console.log(commits)
+ *
+ */
+async function _log({
+  fs,
+  cache,
+  gitdir,
+  filepath,
+  ref,
+  depth,
+  since,
+  force,
+  follow,
+}) {
+  const sinceTimestamp =
+    typeof since === 'undefined'
+      ? undefined
+      : Math.floor(since.valueOf() / 1000);
+  // TODO: In the future, we may want to have an API where we return an
+  // async iterator that emits commits.
+  const commits = [];
+  const shallowCommits = await GitShallowManager.read({ fs, gitdir });
+  const oid = await GitRefManager.resolve({ fs, gitdir, ref });
+  const tips = [await _readCommit({ fs, cache, gitdir, oid })];
+  let lastFileOid;
+  let lastCommit;
+  let isOk;
+
+  function endCommit(commit) {
+    if (isOk && filepath) commits.push(commit);
+  }
+
+  while (tips.length > 0) {
+    const commit = tips.pop();
+
+    // Stop the log if we've hit the age limit
+    if (
+      sinceTimestamp !== undefined &&
+      commit.commit.committer.timestamp <= sinceTimestamp
+    ) {
+      break
+    }
+
+    if (filepath) {
+      let vFileOid;
+      try {
+        vFileOid = await resolveFilepath({
+          fs,
+          cache,
+          gitdir,
+          oid: commit.commit.tree,
+          filepath,
+        });
+        if (lastCommit && lastFileOid !== vFileOid) {
+          commits.push(lastCommit);
+        }
+        lastFileOid = vFileOid;
+        lastCommit = commit;
+        isOk = true;
+      } catch (e) {
+        if (e instanceof NotFoundError) {
+          let found = follow && lastFileOid;
+          if (found) {
+            found = await resolveFileIdInTree({
+              fs,
+              cache,
+              gitdir,
+              oid: commit.commit.tree,
+              fileId: lastFileOid,
+            });
+            if (found) {
+              if (Array.isArray(found)) {
+                if (lastCommit) {
+                  const lastFound = await resolveFileIdInTree({
+                    fs,
+                    cache,
+                    gitdir,
+                    oid: lastCommit.commit.tree,
+                    fileId: lastFileOid,
+                  });
+                  if (Array.isArray(lastFound)) {
+                    found = found.filter(p => lastFound.indexOf(p) === -1);
+                    if (found.length === 1) {
+                      found = found[0];
+                      filepath = found;
+                      if (lastCommit) commits.push(lastCommit);
+                    } else {
+                      found = false;
+                      if (lastCommit) commits.push(lastCommit);
+                      break
+                    }
+                  }
+                }
+              } else {
+                filepath = found;
+                if (lastCommit) commits.push(lastCommit);
+              }
+            }
+          }
+          if (!found) {
+            if (isOk && lastFileOid) {
+              commits.push(lastCommit);
+              if (!force) break
+            }
+            if (!force && !follow) throw e
+          }
+          lastCommit = commit;
+          isOk = false;
+        } else throw e
+      }
+    } else {
+      commits.push(commit);
+    }
+
+    // Stop the loop if we have enough commits now.
+    if (depth !== undefined && commits.length === depth) {
+      endCommit(commit);
+      break
+    }
+
+    // If this is not a shallow commit...
+    if (!shallowCommits.has(commit.oid)) {
+      // Add the parents of this commit to the queue
+      // Note: for the case of a commit with no parents, it will concat an empty array, having no net effect.
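+      // (Since `tips` is re-sorted by committer age after each iteration and
+      // `tips.pop()` takes the newest tip, commits are emitted newest-first.)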
+ for (const oid of commit.commit.parent) { + const commit = await _readCommit({ fs, cache, gitdir, oid }); + if (!tips.map(commit => commit.oid).includes(commit.oid)) { + tips.push(commit); + } + } + } + + // Stop the loop if there are no more commit parents + if (tips.length === 0) { + endCommit(commit); + } + + // Process tips in order by age + tips.sort((a, b) => compareAge(a.commit, b.commit)); + } + return commits +} + +// @ts-check + +/** + * Get commit descriptions from the git history + * + * @param {object} args + * @param {FsClient} args.fs - a file system client + * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path + * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path + * @param {string=} args.filepath optional get the commit for the filepath only + * @param {string} [args.ref = 'HEAD'] - The commit to begin walking backwards through the history from + * @param {number=} [args.depth] - Limit the number of commits returned. No limit by default. + * @param {Date} [args.since] - Return history newer than the given date. Can be combined with `depth` to get whichever is shorter. + * @param {boolean=} [args.force=false] do not throw error if filepath is not exist (works only for a single file). defaults to false + * @param {boolean=} [args.follow=false] Continue listing the history of a file beyond renames (works only for a single file). defaults to false + * @param {object} [args.cache] - a [cache](cache.md) object + * + * @returns {Promise>} Resolves to an array of ReadCommitResult objects + * @see ReadCommitResult + * @see CommitObject + * + * @example + * let commits = await git.log({ + * fs, + * dir: '/tutorial', + * depth: 5, + * ref: 'main' + * }) + * console.log(commits) + * + */ +async function log({ + fs, + dir, + gitdir = join(dir, '.git'), + filepath, + ref = 'HEAD', + depth, + since, // Date + force, + follow, + cache = {}, +}) { + try { + assertParameter('fs', fs); + assertParameter('gitdir', gitdir); + assertParameter('ref', ref); + + return await _log({ + fs: new FileSystem(fs), + cache, + gitdir, + filepath, + ref, + depth, + since, + force, + follow, + }) + } catch (err) { + err.caller = 'git.log'; + throw err + } +} + +// @ts-check + +/** + * + * @typedef {Object} MergeResult - Returns an object with a schema like this: + * @property {string} [oid] - The SHA-1 object id that is now at the head of the branch. Absent only if `dryRun` was specified and `mergeCommit` is true. + * @property {boolean} [alreadyMerged] - True if the branch was already merged so no changes were made + * @property {boolean} [fastForward] - True if it was a fast-forward merge + * @property {boolean} [mergeCommit] - True if merge resulted in a merge commit + * @property {string} [tree] - The SHA-1 object id of the tree resulting from a merge commit + * + */ + +/** + * Merge two branches + * + * Currently it will fail if multiple candidate merge bases are found. (It doesn't yet implement the recursive merge strategy.) + * + * Currently it does not support selecting alternative merge strategies. + * + * Currently it is not possible to abort an incomplete merge. To restore the worktree to a clean state, you will need to checkout an earlier commit. + * + * Currently it does not directly support the behavior of `git merge --continue`. To complete a merge after manual conflict resolution, you will need to add and commit the files manually, and specify the appropriate parent commits. 
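+ *
+ * For instance, a minimal sketch of the "checkout an earlier commit" workaround mentioned above (the branch name is a placeholder) might be:
+ *
+ * ```
+ * // Force-checkout the pre-merge branch to restore the worktree and index
+ * await git.checkout({ fs, dir, ref: 'main', force: true })
+ * ```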
+ *
+ * ## Manually resolving merge conflicts
+ * By default, if isomorphic-git encounters a merge conflict that it cannot resolve using the builtin diff3 algorithm or the provided merge driver, it will abort and throw a `MergeNotSupportedError`.
+ * This leaves the index and working tree untouched.
+ *
+ * When `abortOnConflict` is set to `false`, and a merge conflict cannot be automatically resolved, a `MergeConflictError` is thrown and the results of the incomplete merge will be written to the working directory.
+ * This includes conflict markers in files with unresolved merge conflicts.
+ *
+ * To complete the merge, edit the conflicting files as you see fit, and then add and commit the resolved merge.
+ *
+ * For a proper merge commit, be sure to specify the branches or commits you are merging in the `parent` argument to `git.commit`.
+ * For example, say we are merging the branch `feature` into the branch `main` and there is a conflict we want to resolve manually.
+ * The flow would look like this:
+ *
+ * ```
+ * await git.merge({
+ *   fs,
+ *   dir,
+ *   ours: 'main',
+ *   theirs: 'feature',
+ *   abortOnConflict: false,
+ * }).catch(e => {
+ *   if (e instanceof Errors.MergeConflictError) {
+ *     console.log(
+ *       'Automatic merge failed for the following files: '
+ *       + `${e.data}. `
+ *       + 'Resolve these conflicts and then commit your changes.'
+ *     )
+ *   } else throw e
+ * })
+ *
+ * // This is where we manually edit the files that have been written to the working directory
+ * // ...
+ * // Files have been edited and we are ready to commit
+ *
+ * await git.add({
+ *   fs,
+ *   dir,
+ *   filepath: '.',
+ * })
+ *
+ * await git.commit({
+ *   fs,
+ *   dir,
+ *   ref: 'main',
+ *   message: "Merge branch 'feature' into main",
+ *   parent: ['main', 'feature'], // Be sure to specify the parents when creating a merge commit
+ * })
+ * ```
+ *
+ * @param {object} args
+ * @param {FsClient} args.fs - a file system client
+ * @param {SignCallback} [args.onSign] - a PGP signing implementation
+ * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
+ * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
+ * @param {string} [args.ours] - The branch receiving the merge. If undefined, defaults to the current branch.
+ * @param {string} args.theirs - The branch to be merged
+ * @param {boolean} [args.fastForward = true] - If false, create a merge commit in all cases.
+ * @param {boolean} [args.fastForwardOnly = false] - If true, then non-fast-forward merges will throw an Error instead of performing a merge.
+ * @param {boolean} [args.dryRun = false] - If true, simulates a merge so you can test whether it would succeed.
+ * @param {boolean} [args.noUpdateBranch = false] - If true, does not update the branch pointer after creating the commit.
+ * @param {boolean} [args.abortOnConflict = true] - If true, merges with conflicts will not update the worktree or index.
+ * @param {string} [args.message] - Overrides the default auto-generated merge commit message
+ * @param {Object} [args.author] - passed to [commit](commit.md) when creating a merge commit
+ * @param {string} [args.author.name] - Default is `user.name` config.
+ * @param {string} [args.author.email] - Default is `user.email` config.
+ * @param {number} [args.author.timestamp=Math.floor(Date.now()/1000)] - Set the author timestamp field. This is the integer number of seconds since the Unix epoch (1970-01-01 00:00:00).
+ * @param {number} [args.author.timezoneOffset] - Set the author timezone offset field. This is the difference, in minutes, from the current timezone to UTC. Default is `(new Date()).getTimezoneOffset()`. + * @param {Object} [args.committer] - passed to [commit](commit.md) when creating a merge commit + * @param {string} [args.committer.name] - Default is `user.name` config. + * @param {string} [args.committer.email] - Default is `user.email` config. + * @param {number} [args.committer.timestamp=Math.floor(Date.now()/1000)] - Set the committer timestamp field. This is the integer number of seconds since the Unix epoch (1970-01-01 00:00:00). + * @param {number} [args.committer.timezoneOffset] - Set the committer timezone offset field. This is the difference, in minutes, from the current timezone to UTC. Default is `(new Date()).getTimezoneOffset()`. + * @param {string} [args.signingKey] - passed to [commit](commit.md) when creating a merge commit + * @param {object} [args.cache] - a [cache](cache.md) object + * @param {MergeDriverCallback} [args.mergeDriver] - a [merge driver](mergeDriver.md) implementation + * + * @returns {Promise} Resolves to a description of the merge operation + * @see MergeResult + * + * @example + * let m = await git.merge({ + * fs, + * dir: '/tutorial', + * ours: 'main', + * theirs: 'remotes/origin/main' + * }) + * console.log(m) + * + */ +async function merge({ + fs: _fs, + onSign, + dir, + gitdir = join(dir, '.git'), + ours, + theirs, + fastForward = true, + fastForwardOnly = false, + dryRun = false, + noUpdateBranch = false, + abortOnConflict = true, + message, + author: _author, + committer: _committer, + signingKey, + cache = {}, + mergeDriver, +}) { + try { + assertParameter('fs', _fs); + if (signingKey) { + assertParameter('onSign', onSign); + } + const fs = new FileSystem(_fs); + + const author = await normalizeAuthorObject({ fs, gitdir, author: _author }); + if (!author && (!fastForwardOnly || !fastForward)) { + throw new MissingNameError('author') + } + + const committer = await normalizeCommitterObject({ + fs, + gitdir, + author, + committer: _committer, + }); + if (!committer && (!fastForwardOnly || !fastForward)) { + throw new MissingNameError('committer') + } + + return await _merge({ + fs, + cache, + dir, + gitdir, + ours, + theirs, + fastForward, + fastForwardOnly, + dryRun, + noUpdateBranch, + abortOnConflict, + message, + author, + committer, + signingKey, + onSign, + mergeDriver, + }) + } catch (err) { + err.caller = 'git.merge'; + throw err + } +} + +/** + * @enum {number} + */ +const types = { + commit: 0b0010000, + tree: 0b0100000, + blob: 0b0110000, + tag: 0b1000000, + ofs_delta: 0b1100000, + ref_delta: 0b1110000, +}; + +/** + * @param {object} args + * @param {import('../models/FileSystem.js').FileSystem} args.fs + * @param {any} args.cache + * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path + * @param {string} [args.gitdir=join(dir, '.git')] - [required] The [git directory](dir-vs-gitdir.md) path + * @param {string[]} args.oids + */ +async function _pack({ + fs, + cache, + dir, + gitdir = join(dir, '.git'), + oids, +}) { + const hash = new Hash(); + const outputStream = []; + function write(chunk, enc) { + const buff = Buffer.from(chunk, enc); + outputStream.push(buff); + hash.update(buff); + } + async function writeObject({ stype, object }) { + // Object type is encoded in bits 654 + const type = types[stype]; + // The length encoding gets complicated. 
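+    // (A worked example, not from the original source: a 300-byte blob.
+    // 300 = 0b1_0010_1100, so the low nibble is 0b1100; combined with the
+    // blob type bits 0b011 and the continuation bit set, the first byte is
+    // 0b1011_1100 = 0xbc. The remaining bits, 300 >>> 4 = 18 = 0b001_0010,
+    // fit in a single following byte with no continuation bit: 0x12.)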
+ let length = object.length; + // Whether the next byte is part of the variable-length encoded number + // is encoded in bit 7 + let multibyte = length > 0b1111 ? 0b10000000 : 0b0; + // Last four bits of length is encoded in bits 3210 + const lastFour = length & 0b1111; + // Discard those bits + length = length >>> 4; + // The first byte is then (1-bit multibyte?), (3-bit type), (4-bit least sig 4-bits of length) + let byte = (multibyte | type | lastFour).toString(16); + write(byte, 'hex'); + // Now we keep chopping away at length 7-bits at a time until its zero, + // writing out the bytes in what amounts to little-endian order. + while (multibyte) { + multibyte = length > 0b01111111 ? 0b10000000 : 0b0; + byte = multibyte | (length & 0b01111111); + write(padHex(2, byte), 'hex'); + length = length >>> 7; + } + // Lastly, we can compress and write the object. + write(Buffer.from(await deflate(object))); + } + write('PACK'); + write('00000002', 'hex'); + // Write a 4 byte (32-bit) int + write(padHex(8, oids.length), 'hex'); + for (const oid of oids) { + const { type, object } = await _readObject({ fs, cache, gitdir, oid }); + await writeObject({ write, object, stype: type }); + } + // Write SHA1 checksum + const digest = hash.digest(); + outputStream.push(digest); + return outputStream +} + +// @ts-check + +/** + * + * @typedef {Object} PackObjectsResult The packObjects command returns an object with two properties: + * @property {string} filename - The suggested filename for the packfile if you want to save it to disk somewhere. It includes the packfile SHA. + * @property {Uint8Array} [packfile] - The packfile contents. Not present if `write` parameter was true, in which case the packfile was written straight to disk. + */ + +/** + * @param {object} args + * @param {import('../models/FileSystem.js').FileSystem} args.fs + * @param {any} args.cache + * @param {string} args.gitdir + * @param {string[]} args.oids + * @param {boolean} args.write + * + * @returns {Promise} + * @see PackObjectsResult + */ +async function _packObjects({ fs, cache, gitdir, oids, write }) { + const buffers = await _pack({ fs, cache, gitdir, oids }); + const packfile = Buffer.from(await collect(buffers)); + const packfileSha = packfile.slice(-20).toString('hex'); + const filename = `pack-${packfileSha}.pack`; + if (write) { + await fs.write(join(gitdir, `objects/pack/${filename}`), packfile); + return { filename } + } + return { + filename, + packfile: new Uint8Array(packfile), + } +} + +// @ts-check + +/** + * + * @typedef {Object} PackObjectsResult The packObjects command returns an object with two properties: + * @property {string} filename - The suggested filename for the packfile if you want to save it to disk somewhere. It includes the packfile SHA. + * @property {Uint8Array} [packfile] - The packfile contents. Not present if `write` parameter was true, in which case the packfile was written straight to disk. 
+ */
+
+/**
+ * Create a packfile from an array of SHA-1 object ids
+ *
+ * @param {object} args
+ * @param {FsClient} args.fs - a file system client
+ * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
+ * @param {string} [args.gitdir=join(dir, '.git')] - [required] The [git directory](dir-vs-gitdir.md) path
+ * @param {string[]} args.oids - An array of SHA-1 object ids to be included in the packfile
+ * @param {boolean} [args.write = false] - Whether to save the packfile to disk or not
+ * @param {object} [args.cache] - a [cache](cache.md) object
+ *
+ * @returns {Promise<PackObjectsResult>} Resolves successfully when the packfile is ready with the filename and buffer
+ * @see PackObjectsResult
+ *
+ * @example
+ * // Create a packfile containing only an empty tree
+ * let { packfile } = await git.packObjects({
+ *   fs,
+ *   dir: '/tutorial',
+ *   oids: ['4b825dc642cb6eb9a060e54bf8d69288fbee4904']
+ * })
+ * console.log(packfile)
+ *
+ */
+async function packObjects({
+  fs,
+  dir,
+  gitdir = join(dir, '.git'),
+  oids,
+  write = false,
+  cache = {},
+}) {
+  try {
+    assertParameter('fs', fs);
+    assertParameter('gitdir', gitdir);
+    assertParameter('oids', oids);
+
+    return await _packObjects({
+      fs: new FileSystem(fs),
+      cache,
+      gitdir,
+      oids,
+      write,
+    })
+  } catch (err) {
+    err.caller = 'git.packObjects';
+    throw err
+  }
+}
+
+// @ts-check
+
+/**
+ * Fetch and merge commits from a remote repository
+ *
+ * @param {object} args
+ * @param {FsClient} args.fs - a file system client
+ * @param {HttpClient} args.http - an HTTP client
+ * @param {ProgressCallback} [args.onProgress] - optional progress event callback
+ * @param {MessageCallback} [args.onMessage] - optional message event callback
+ * @param {AuthCallback} [args.onAuth] - optional auth fill callback
+ * @param {AuthFailureCallback} [args.onAuthFailure] - optional auth rejected callback
+ * @param {AuthSuccessCallback} [args.onAuthSuccess] - optional auth approved callback
+ * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
+ * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
+ * @param {string} [args.ref] - Which branch to merge into. By default this is the currently checked out branch.
+ * @param {string} [args.url] - (Added in 1.1.0) The URL of the remote repository. The default is the value set in the git config for that remote.
+ * @param {string} [args.remote] - (Added in 1.1.0) If URL is not specified, determines which remote to use.
+ * @param {string} [args.remoteRef] - (Added in 1.1.0) The name of the branch on the remote to fetch. By default this is the configured remote tracking branch.
+ * @param {boolean} [args.prune = false] - Delete local remote-tracking branches that are not present on the remote
+ * @param {boolean} [args.pruneTags = false] - Prune local tags that don’t exist on the remote, and force-update those tags that differ
+ * @param {string} [args.corsProxy] - Optional [CORS proxy](https://www.npmjs.com/%40isomorphic-git/cors-proxy). Overrides value in repo config.
+ * @param {boolean} [args.singleBranch = false] - Instead of the default behavior of fetching all the branches, only fetch a single branch.
+ * @param {boolean} [args.fastForward = true] - If false, only create merge commits.
+ * @param {boolean} [args.fastForwardOnly = false] - Only perform simple fast-forward merges. (Don't create merge commits.)
+ * @param {Object} [args.headers] - Additional headers to include in HTTP requests, similar to git's `extraHeader` config + * @param {Object} [args.author] - The details about the author. + * @param {string} [args.author.name] - Default is `user.name` config. + * @param {string} [args.author.email] - Default is `user.email` config. + * @param {number} [args.author.timestamp=Math.floor(Date.now()/1000)] - Set the author timestamp field. This is the integer number of seconds since the Unix epoch (1970-01-01 00:00:00). + * @param {number} [args.author.timezoneOffset] - Set the author timezone offset field. This is the difference, in minutes, from the current timezone to UTC. Default is `(new Date()).getTimezoneOffset()`. + * @param {Object} [args.committer = author] - The details about the commit committer, in the same format as the author parameter. If not specified, the author details are used. + * @param {string} [args.committer.name] - Default is `user.name` config. + * @param {string} [args.committer.email] - Default is `user.email` config. + * @param {number} [args.committer.timestamp=Math.floor(Date.now()/1000)] - Set the committer timestamp field. This is the integer number of seconds since the Unix epoch (1970-01-01 00:00:00). + * @param {number} [args.committer.timezoneOffset] - Set the committer timezone offset field. This is the difference, in minutes, from the current timezone to UTC. Default is `(new Date()).getTimezoneOffset()`. + * @param {string} [args.signingKey] - passed to [commit](commit.md) when creating a merge commit + * @param {object} [args.cache] - a [cache](cache.md) object + * + * @returns {Promise} Resolves successfully when pull operation completes + * + * @example + * await git.pull({ + * fs, + * http, + * dir: '/tutorial', + * ref: 'main', + * singleBranch: true + * }) + * console.log('done') + * + */ +async function pull({ + fs: _fs, + http, + onProgress, + onMessage, + onAuth, + onAuthSuccess, + onAuthFailure, + dir, + gitdir = join(dir, '.git'), + ref, + url, + remote, + remoteRef, + prune = false, + pruneTags = false, + fastForward = true, + fastForwardOnly = false, + corsProxy, + singleBranch, + headers = {}, + author: _author, + committer: _committer, + signingKey, + cache = {}, +}) { + try { + assertParameter('fs', _fs); + assertParameter('gitdir', gitdir); + + const fs = new FileSystem(_fs); + + const author = await normalizeAuthorObject({ fs, gitdir, author: _author }); + if (!author) throw new MissingNameError('author') + + const committer = await normalizeCommitterObject({ + fs, + gitdir, + author, + committer: _committer, + }); + if (!committer) throw new MissingNameError('committer') + + return await _pull({ + fs, + cache, + http, + onProgress, + onMessage, + onAuth, + onAuthSuccess, + onAuthFailure, + dir, + gitdir, + ref, + url, + remote, + remoteRef, + fastForward, + fastForwardOnly, + corsProxy, + singleBranch, + headers, + author, + committer, + signingKey, + prune, + pruneTags, + }) + } catch (err) { + err.caller = 'git.pull'; + throw err + } +} + +/** + * @param {object} args + * @param {import('../models/FileSystem.js').FileSystem} args.fs + * @param {any} args.cache + * @param {string} [args.dir] + * @param {string} args.gitdir + * @param {Iterable} args.start + * @param {Iterable} args.finish + * @returns {Promise>} + */ +async function listCommitsAndTags({ + fs, + cache, + dir, + gitdir = join(dir, '.git'), + start, + finish, +}) { + const shallows = await GitShallowManager.read({ fs, gitdir }); + const startingSet = new Set(); + const 
finishingSet = new Set(); + for (const ref of start) { + startingSet.add(await GitRefManager.resolve({ fs, gitdir, ref })); + } + for (const ref of finish) { + // We may not have these refs locally so we must try/catch + try { + const oid = await GitRefManager.resolve({ fs, gitdir, ref }); + finishingSet.add(oid); + } catch (err) {} + } + const visited = new Set(); + // Because git commits are named by their hash, there is no + // way to construct a cycle. Therefore we won't worry about + // setting a default recursion limit. + async function walk(oid) { + visited.add(oid); + const { type, object } = await _readObject({ fs, cache, gitdir, oid }); + // Recursively resolve annotated tags + if (type === 'tag') { + const tag = GitAnnotatedTag.from(object); + const commit = tag.headers().object; + return walk(commit) + } + if (type !== 'commit') { + throw new ObjectTypeError(oid, type, 'commit') + } + if (!shallows.has(oid)) { + const commit = GitCommit.from(object); + const parents = commit.headers().parent; + for (oid of parents) { + if (!finishingSet.has(oid) && !visited.has(oid)) { + await walk(oid); + } + } + } + } + // Let's go walking! + for (const oid of startingSet) { + await walk(oid); + } + return visited +} + +/** + * @param {object} args + * @param {import('../models/FileSystem.js').FileSystem} args.fs + * @param {any} args.cache + * @param {string} [args.dir] + * @param {string} args.gitdir + * @param {Iterable} args.oids + * @returns {Promise>} + */ +async function listObjects({ + fs, + cache, + dir, + gitdir = join(dir, '.git'), + oids, +}) { + const visited = new Set(); + // We don't do the purest simplest recursion, because we can + // avoid reading Blob objects entirely since the Tree objects + // tell us which oids are Blobs and which are Trees. + async function walk(oid) { + if (visited.has(oid)) return + visited.add(oid); + const { type, object } = await _readObject({ fs, cache, gitdir, oid }); + if (type === 'tag') { + const tag = GitAnnotatedTag.from(object); + const obj = tag.headers().object; + await walk(obj); + } else if (type === 'commit') { + const commit = GitCommit.from(object); + const tree = commit.headers().tree; + await walk(tree); + } else if (type === 'tree') { + const tree = GitTree.from(object); + for (const entry of tree) { + // add blobs to the set + // skip over submodules whose type is 'commit' + if (entry.type === 'blob') { + visited.add(entry.oid); + } + // recurse for trees + if (entry.type === 'tree') { + await walk(entry.oid); + } + } + } + } + // Let's go walking! 
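+  // (Everything reachable from `oids`, i.e. tags, commits, trees, and blob
+  // ids, ends up in `visited`: exactly the object set a packfile needs.)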
+ for (const oid of oids) { + await walk(oid); + } + return visited +} + +async function parseReceivePackResponse(packfile) { + /** @type PushResult */ + const result = {}; + let response = ''; + const read = GitPktLine.streamReader(packfile); + let line = await read(); + while (line !== true) { + if (line !== null) response += line.toString('utf8') + '\n'; + line = await read(); + } + + const lines = response.toString('utf8').split('\n'); + // We're expecting "unpack {unpack-result}" + line = lines.shift(); + if (!line.startsWith('unpack ')) { + throw new ParseError('unpack ok" or "unpack [error message]', line) + } + result.ok = line === 'unpack ok'; + if (!result.ok) { + result.error = line.slice('unpack '.length); + } + result.refs = {}; + for (const line of lines) { + if (line.trim() === '') continue + const status = line.slice(0, 2); + const refAndMessage = line.slice(3); + let space = refAndMessage.indexOf(' '); + if (space === -1) space = refAndMessage.length; + const ref = refAndMessage.slice(0, space); + const error = refAndMessage.slice(space + 1); + result.refs[ref] = { + ok: status === 'ok', + error, + }; + } + return result +} + +async function writeReceivePackRequest({ + capabilities = [], + triplets = [], +}) { + const packstream = []; + let capsFirstLine = `\x00 ${capabilities.join(' ')}`; + for (const trip of triplets) { + packstream.push( + GitPktLine.encode( + `${trip.oldoid} ${trip.oid} ${trip.fullRef}${capsFirstLine}\n` + ) + ); + capsFirstLine = ''; + } + packstream.push(GitPktLine.flush()); + return packstream +} + +// @ts-check + +/** + * @param {object} args + * @param {import('../models/FileSystem.js').FileSystem} args.fs + * @param {any} args.cache + * @param {HttpClient} args.http + * @param {ProgressCallback} [args.onProgress] + * @param {MessageCallback} [args.onMessage] + * @param {AuthCallback} [args.onAuth] + * @param {AuthFailureCallback} [args.onAuthFailure] + * @param {AuthSuccessCallback} [args.onAuthSuccess] + * @param {PrePushCallback} [args.onPrePush] + * @param {string} args.gitdir + * @param {string} [args.ref] + * @param {string} [args.remoteRef] + * @param {string} [args.remote] + * @param {boolean} [args.force = false] + * @param {boolean} [args.delete = false] + * @param {string} [args.url] + * @param {string} [args.corsProxy] + * @param {Object} [args.headers] + * + * @returns {Promise} + */ +async function _push({ + fs, + cache, + http, + onProgress, + onMessage, + onAuth, + onAuthSuccess, + onAuthFailure, + onPrePush, + gitdir, + ref: _ref, + remoteRef: _remoteRef, + remote, + url: _url, + force = false, + delete: _delete = false, + corsProxy, + headers = {}, +}) { + const ref = _ref || (await _currentBranch({ fs, gitdir })); + if (typeof ref === 'undefined') { + throw new MissingParameterError('ref') + } + const config = await GitConfigManager.get({ fs, gitdir }); + // Figure out what remote to use. + remote = + remote || + (await config.get(`branch.${ref}.pushRemote`)) || + (await config.get('remote.pushDefault')) || + (await config.get(`branch.${ref}.remote`)) || + 'origin'; + // Lookup the URL for the given remote. + const url = + _url || + (await config.get(`remote.${remote}.pushurl`)) || + (await config.get(`remote.${remote}.url`)); + if (typeof url === 'undefined') { + throw new MissingParameterError('remote OR url') + } + // Figure out what remote ref to use. 
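+  // (`branch.<name>.merge` is the standard git config key that records the
+  // upstream branch, e.g. `branch.main.merge = refs/heads/main` after a clone.)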
+ const remoteRef = _remoteRef || (await config.get(`branch.${ref}.merge`)); + if (typeof url === 'undefined') { + throw new MissingParameterError('remoteRef') + } + + if (corsProxy === undefined) { + corsProxy = await config.get('http.corsProxy'); + } + + const fullRef = await GitRefManager.expand({ fs, gitdir, ref }); + const oid = _delete + ? '0000000000000000000000000000000000000000' + : await GitRefManager.resolve({ fs, gitdir, ref: fullRef }); + + /** @type typeof import("../managers/GitRemoteHTTP").GitRemoteHTTP */ + const GitRemoteHTTP = GitRemoteManager.getRemoteHelperFor({ url }); + const httpRemote = await GitRemoteHTTP.discover({ + http, + onAuth, + onAuthSuccess, + onAuthFailure, + corsProxy, + service: 'git-receive-pack', + url, + headers, + protocolVersion: 1, + }); + const auth = httpRemote.auth; // hack to get new credentials from CredentialManager API + let fullRemoteRef; + if (!remoteRef) { + fullRemoteRef = fullRef; + } else { + try { + fullRemoteRef = await GitRefManager.expandAgainstMap({ + ref: remoteRef, + map: httpRemote.refs, + }); + } catch (err) { + if (err instanceof NotFoundError) { + // The remote reference doesn't exist yet. + // If it is fully specified, use that value. Otherwise, treat it as a branch. + fullRemoteRef = remoteRef.startsWith('refs/') + ? remoteRef + : `refs/heads/${remoteRef}`; + } else { + throw err + } + } + } + const oldoid = + httpRemote.refs.get(fullRemoteRef) || + '0000000000000000000000000000000000000000'; + + if (onPrePush) { + const hookCancel = await onPrePush({ + remote, + url, + localRef: { ref: _delete ? '(delete)' : fullRef, oid: oid }, + remoteRef: { ref: fullRemoteRef, oid: oldoid }, + }); + if (!hookCancel) throw new UserCanceledError() + } + + // Remotes can always accept thin-packs UNLESS they specify the 'no-thin' capability + const thinPack = !httpRemote.capabilities.has('no-thin'); + + let objects = new Set(); + if (!_delete) { + const finish = [...httpRemote.refs.values()]; + let skipObjects = new Set(); + + // If remote branch is present, look for a common merge base. + if (oldoid !== '0000000000000000000000000000000000000000') { + // trick to speed up common force push scenarios + const mergebase = await _findMergeBase({ + fs, + cache, + gitdir, + oids: [oid, oldoid], + }); + for (const oid of mergebase) finish.push(oid); + if (thinPack) { + skipObjects = await listObjects({ fs, cache, gitdir, oids: mergebase }); + } + } + + // If remote does not have the commit, figure out the objects to send + if (!finish.includes(oid)) { + const commits = await listCommitsAndTags({ + fs, + cache, + gitdir, + start: [oid], + finish, + }); + objects = await listObjects({ fs, cache, gitdir, oids: commits }); + } + + if (thinPack) { + // If there's a default branch for the remote lets skip those objects too. + // Since this is an optional optimization, we just catch and continue if there is + // an error (because we can't find a default branch, or can't find a commit, etc) + try { + // Sadly, the discovery phase with 'forPush' doesn't return symrefs, so we have to + // rely on existing ones. 
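+      // (Concretely: resolve the local `refs/remotes/<remote>/HEAD` ref that
+      // clone/fetch writes, to discover the remote's default branch.)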
+ const ref = await GitRefManager.resolve({ + fs, + gitdir, + ref: `refs/remotes/${remote}/HEAD`, + depth: 2, + }); + const { oid } = await GitRefManager.resolveAgainstMap({ + ref: ref.replace(`refs/remotes/${remote}/`, ''), + fullref: ref, + map: httpRemote.refs, + }); + const oids = [oid]; + for (const oid of await listObjects({ fs, cache, gitdir, oids })) { + skipObjects.add(oid); + } + } catch (e) {} + + // Remove objects that we know the remote already has + for (const oid of skipObjects) { + objects.delete(oid); + } + } + + if (oid === oldoid) force = true; + if (!force) { + // Is it a tag that already exists? + if ( + fullRef.startsWith('refs/tags') && + oldoid !== '0000000000000000000000000000000000000000' + ) { + throw new PushRejectedError('tag-exists') + } + // Is it a non-fast-forward commit? + if ( + oid !== '0000000000000000000000000000000000000000' && + oldoid !== '0000000000000000000000000000000000000000' && + !(await _isDescendent({ + fs, + cache, + gitdir, + oid, + ancestor: oldoid, + depth: -1, + })) + ) { + throw new PushRejectedError('not-fast-forward') + } + } + } + // We can only safely use capabilities that the server also understands. + // For instance, AWS CodeCommit aborts a push if you include the `agent`!!! + const capabilities = filterCapabilities( + [...httpRemote.capabilities], + ['report-status', 'side-band-64k', `agent=${pkg.agent}`] + ); + const packstream1 = await writeReceivePackRequest({ + capabilities, + triplets: [{ oldoid, oid, fullRef: fullRemoteRef }], + }); + const packstream2 = _delete + ? [] + : await _pack({ + fs, + cache, + gitdir, + oids: [...objects], + }); + const res = await GitRemoteHTTP.connect({ + http, + onProgress, + corsProxy, + service: 'git-receive-pack', + url, + auth, + headers, + body: [...packstream1, ...packstream2], + }); + const { packfile, progress } = await GitSideBand.demux(res.body); + if (onMessage) { + const lines = splitLines(progress); + forAwait(lines, async line => { + await onMessage(line); + }); + } + // Parse the response! + const result = await parseReceivePackResponse(packfile); + if (res.headers) { + result.headers = res.headers; + } + + // Update the local copy of the remote ref + if ( + remote && + result.ok && + result.refs[fullRemoteRef].ok && + !fullRef.startsWith('refs/tags') + ) { + // TODO: I think this should actually be using a refspec transform rather than assuming 'refs/remotes/{remote}' + const ref = `refs/remotes/${remote}/${fullRemoteRef.replace( + 'refs/heads', + '' + )}`; + if (_delete) { + await GitRefManager.deleteRef({ fs, gitdir, ref }); + } else { + await GitRefManager.writeRef({ fs, gitdir, ref, value: oid }); + } + } + if (result.ok && Object.values(result.refs).every(result => result.ok)) { + return result + } else { + const prettyDetails = Object.entries(result.refs) + .filter(([k, v]) => !v.ok) + .map(([k, v]) => `\n - ${k}: ${v.error}`) + .join(''); + throw new GitPushError(prettyDetails, result) + } +} + +// @ts-check + +/** + * Push a branch or tag + * + * The push command returns an object that describes the result of the attempted push operation. + * *Notes:* If there were no errors, then there will be no `errors` property. There can be a mix of `ok` messages and `errors` messages. 
+ *
+ * | param  | type [= default] | description                                                                                                                                                                                                      |
+ * | ------ | ---------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
+ * | ok     | Array<string>    | The first item is "unpack" if the overall operation was successful. The remaining items are the names of refs that were updated successfully.                                                                    |
+ * | errors | Array<string>    | If the overall operation threw an error, the first item will be "unpack {Overall error message}". The remaining items are individual refs that failed to be updated in the format "{ref name} {error message}". |
+ *
+ * @param {object} args
+ * @param {FsClient} args.fs - a file system client
+ * @param {HttpClient} args.http - an HTTP client
+ * @param {ProgressCallback} [args.onProgress] - optional progress event callback
+ * @param {MessageCallback} [args.onMessage] - optional message event callback
+ * @param {AuthCallback} [args.onAuth] - optional auth fill callback
+ * @param {AuthFailureCallback} [args.onAuthFailure] - optional auth rejected callback
+ * @param {AuthSuccessCallback} [args.onAuthSuccess] - optional auth approved callback
+ * @param {PrePushCallback} [args.onPrePush] - optional pre-push hook callback
+ * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
+ * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
+ * @param {string} [args.ref] - Which branch or tag to push. By default this is the currently checked out branch.
+ * @param {string} [args.url] - The URL of the remote repository. The default is the value set in the git config for that remote.
+ * @param {string} [args.remote] - If URL is not specified, determines which remote to use.
+ * @param {string} [args.remoteRef] - The name of the receiving branch on the remote. By default this is the configured remote tracking branch.
+ * @param {boolean} [args.force = false] - If true, behaves the same as `git push --force`
+ * @param {boolean} [args.delete = false] - If true, delete the remote ref
+ * @param {string} [args.corsProxy] - Optional [CORS proxy](https://www.npmjs.com/%40isomorphic-git/cors-proxy). Overrides value in repo config.
+ * @param {Object} [args.headers] - Additional headers to include in HTTP requests, similar to git's `extraHeader` config
+ * @param {object} [args.cache] - a [cache](cache.md) object
+ *
+ * @returns {Promise<PushResult>} Resolves successfully when push completes with a detailed description of the operation from the server.
+ * @see PushResult + * @see RefUpdateStatus + * + * @example + * let pushResult = await git.push({ + * fs, + * http, + * dir: '/tutorial', + * remote: 'origin', + * ref: 'main', + * onAuth: () => ({ username: process.env.GITHUB_TOKEN }), + * }) + * console.log(pushResult) + * + */ +async function push({ + fs, + http, + onProgress, + onMessage, + onAuth, + onAuthSuccess, + onAuthFailure, + onPrePush, + dir, + gitdir = join(dir, '.git'), + ref, + remoteRef, + remote = 'origin', + url, + force = false, + delete: _delete = false, + corsProxy, + headers = {}, + cache = {}, +}) { + try { + assertParameter('fs', fs); + assertParameter('http', http); + assertParameter('gitdir', gitdir); + + return await _push({ + fs: new FileSystem(fs), + cache, + http, + onProgress, + onMessage, + onAuth, + onAuthSuccess, + onAuthFailure, + onPrePush, + gitdir, + ref, + remoteRef, + remote, + url, + force, + delete: _delete, + corsProxy, + headers, + }) + } catch (err) { + err.caller = 'git.push'; + throw err + } +} + +async function resolveBlob({ fs, cache, gitdir, oid }) { + const { type, object } = await _readObject({ fs, cache, gitdir, oid }); + // Resolve annotated tag objects to whatever + if (type === 'tag') { + oid = GitAnnotatedTag.from(object).parse().object; + return resolveBlob({ fs, cache, gitdir, oid }) + } + if (type !== 'blob') { + throw new ObjectTypeError(oid, type, 'blob') + } + return { oid, blob: new Uint8Array(object) } +} + +// @ts-check + +/** + * + * @typedef {Object} ReadBlobResult - The object returned has the following schema: + * @property {string} oid + * @property {Uint8Array} blob + * + */ + +/** + * @param {object} args + * @param {import('../models/FileSystem.js').FileSystem} args.fs + * @param {any} args.cache + * @param {string} args.gitdir + * @param {string} args.oid + * @param {string} [args.filepath] + * + * @returns {Promise} Resolves successfully with a blob object description + * @see ReadBlobResult + */ +async function _readBlob({ + fs, + cache, + gitdir, + oid, + filepath = undefined, +}) { + if (filepath !== undefined) { + oid = await resolveFilepath({ fs, cache, gitdir, oid, filepath }); + } + const blob = await resolveBlob({ + fs, + cache, + gitdir, + oid, + }); + return blob +} + +// @ts-check + +/** + * + * @typedef {Object} ReadBlobResult - The object returned has the following schema: + * @property {string} oid + * @property {Uint8Array} blob + * + */ + +/** + * Read a blob object directly + * + * @param {object} args + * @param {FsClient} args.fs - a file system client + * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path + * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path + * @param {string} args.oid - The SHA-1 object id to get. Annotated tags, commits, and trees are peeled. + * @param {string} [args.filepath] - Don't return the object with `oid` itself, but resolve `oid` to a tree and then return the blob object at that filepath. + * @param {object} [args.cache] - a [cache](cache.md) object + * + * @returns {Promise} Resolves successfully with a blob object description + * @see ReadBlobResult + * + * @example + * // Get the contents of 'README.md' in the main branch. 
+ * let commitOid = await git.resolveRef({ fs, dir: '/tutorial', ref: 'main' }) + * console.log(commitOid) + * let { blob } = await git.readBlob({ + * fs, + * dir: '/tutorial', + * oid: commitOid, + * filepath: 'README.md' + * }) + * console.log(Buffer.from(blob).toString('utf8')) + * + */ +async function readBlob({ + fs, + dir, + gitdir = join(dir, '.git'), + oid, + filepath, + cache = {}, +}) { + try { + assertParameter('fs', fs); + assertParameter('gitdir', gitdir); + assertParameter('oid', oid); + + return await _readBlob({ + fs: new FileSystem(fs), + cache, + gitdir, + oid, + filepath, + }) + } catch (err) { + err.caller = 'git.readBlob'; + throw err + } +} + +// @ts-check + +/** + * Read a commit object directly + * + * @param {object} args + * @param {FsClient} args.fs - a file system client + * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path + * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path + * @param {string} args.oid - The SHA-1 object id to get. Annotated tags are peeled. + * @param {object} [args.cache] - a [cache](cache.md) object + * + * @returns {Promise} Resolves successfully with a git commit object + * @see ReadCommitResult + * @see CommitObject + * + * @example + * // Read a commit object + * let sha = await git.resolveRef({ fs, dir: '/tutorial', ref: 'main' }) + * console.log(sha) + * let commit = await git.readCommit({ fs, dir: '/tutorial', oid: sha }) + * console.log(commit) + * + */ +async function readCommit({ + fs, + dir, + gitdir = join(dir, '.git'), + oid, + cache = {}, +}) { + try { + assertParameter('fs', fs); + assertParameter('gitdir', gitdir); + assertParameter('oid', oid); + + return await _readCommit({ + fs: new FileSystem(fs), + cache, + gitdir, + oid, + }) + } catch (err) { + err.caller = 'git.readCommit'; + throw err + } +} + +// @ts-check + +/** + * Read the contents of a note + * + * @param {object} args + * @param {import('../models/FileSystem.js').FileSystem} args.fs + * @param {any} args.cache + * @param {string} args.gitdir + * @param {string} [args.ref] - The notes ref to look under + * @param {string} args.oid + * + * @returns {Promise} Resolves successfully with note contents as a Buffer. + */ + +async function _readNote({ + fs, + cache, + gitdir, + ref = 'refs/notes/commits', + oid, +}) { + const parent = await GitRefManager.resolve({ gitdir, fs, ref }); + const { blob } = await _readBlob({ + fs, + cache, + gitdir, + oid: parent, + filepath: oid, + }); + + return blob +} + +// @ts-check + +/** + * Read the contents of a note + * + * @param {object} args + * @param {FsClient} args.fs - a file system client + * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path + * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path + * @param {string} [args.ref] - The notes ref to look under + * @param {string} args.oid - The SHA-1 object id of the object to get the note for. + * @param {object} [args.cache] - a [cache](cache.md) object + * + * @returns {Promise} Resolves successfully with note contents as a Buffer. 
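+ *
+ * @example
+ * // A minimal sketch: read the note attached to the commit at the tip of 'main'
+ * // (assumes notes were written under the default 'refs/notes/commits' ref).
+ * let sha = await git.resolveRef({ fs, dir: '/tutorial', ref: 'main' })
+ * let note = await git.readNote({ fs, dir: '/tutorial', oid: sha })
+ * console.log(Buffer.from(note).toString('utf8'))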
+ */ + +async function readNote({ + fs, + dir, + gitdir = join(dir, '.git'), + ref = 'refs/notes/commits', + oid, + cache = {}, +}) { + try { + assertParameter('fs', fs); + assertParameter('gitdir', gitdir); + assertParameter('ref', ref); + assertParameter('oid', oid); + + return await _readNote({ + fs: new FileSystem(fs), + cache, + gitdir, + ref, + oid, + }) + } catch (err) { + err.caller = 'git.readNote'; + throw err + } +} + +// @ts-check + +/** + * + * @typedef {Object} DeflatedObject + * @property {string} oid + * @property {'deflated'} type + * @property {'deflated'} format + * @property {Uint8Array} object + * @property {string} [source] + * + */ + +/** + * + * @typedef {Object} WrappedObject + * @property {string} oid + * @property {'wrapped'} type + * @property {'wrapped'} format + * @property {Uint8Array} object + * @property {string} [source] + * + */ + +/** + * + * @typedef {Object} RawObject + * @property {string} oid + * @property {'blob'|'commit'|'tree'|'tag'} type + * @property {'content'} format + * @property {Uint8Array} object + * @property {string} [source] + * + */ + +/** + * + * @typedef {Object} ParsedBlobObject + * @property {string} oid + * @property {'blob'} type + * @property {'parsed'} format + * @property {string} object + * @property {string} [source] + * + */ + +/** + * + * @typedef {Object} ParsedCommitObject + * @property {string} oid + * @property {'commit'} type + * @property {'parsed'} format + * @property {CommitObject} object + * @property {string} [source] + * + */ + +/** + * + * @typedef {Object} ParsedTreeObject + * @property {string} oid + * @property {'tree'} type + * @property {'parsed'} format + * @property {TreeObject} object + * @property {string} [source] + * + */ + +/** + * + * @typedef {Object} ParsedTagObject + * @property {string} oid + * @property {'tag'} type + * @property {'parsed'} format + * @property {TagObject} object + * @property {string} [source] + * + */ + +/** + * + * @typedef {ParsedBlobObject | ParsedCommitObject | ParsedTreeObject | ParsedTagObject} ParsedObject + */ + +/** + * + * @typedef {DeflatedObject | WrappedObject | RawObject | ParsedObject } ReadObjectResult + */ + +/** + * Read a git object directly by its SHA-1 object id + * + * Regarding `ReadObjectResult`: + * + * - `oid` will be the same as the `oid` argument unless the `filepath` argument is provided, in which case it will be the oid of the tree or blob being returned. + * - `type` of deflated objects is `'deflated'`, and `type` of wrapped objects is `'wrapped'` + * - `format` is usually, but not always, the format you requested. Packfiles do not store each object individually compressed so if you end up reading the object from a packfile it will be returned in format 'content' even if you requested 'deflated' or 'wrapped'. + * - `object` will be an actual Object if format is 'parsed' and the object is a commit, tree, or annotated tag. Blobs are still formatted as Buffers unless an encoding is provided in which case they'll be strings. If format is anything other than 'parsed', object will be a Buffer. + * - `source` is the name of the packfile or loose object file where the object was found. 
+ * + * The `format` parameter can have the following values: + * + * | param | description | + * | ---------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | + * | 'deflated' | Return the raw deflate-compressed buffer for an object if possible. Useful for efficiently shuffling around loose objects when you don't care about the contents and can save time by not inflating them. | + * | 'wrapped' | Return the inflated object buffer wrapped in the git object header if possible. This is the raw data used when calculating the SHA-1 object id of a git object. | + * | 'content' | Return the object buffer without the git header. | + * | 'parsed' | Returns a parsed representation of the object. | + * + * The result will be in one of the following schemas: + * + * ## `'deflated'` format + * + * {@link DeflatedObject typedef} + * + * ## `'wrapped'` format + * + * {@link WrappedObject typedef} + * + * ## `'content'` format + * + * {@link RawObject typedef} + * + * ## `'parsed'` format + * + * ### parsed `'blob'` type + * + * {@link ParsedBlobObject typedef} + * + * ### parsed `'commit'` type + * + * {@link ParsedCommitObject typedef} + * {@link CommitObject typedef} + * + * ### parsed `'tree'` type + * + * {@link ParsedTreeObject typedef} + * {@link TreeObject typedef} + * {@link TreeEntry typedef} + * + * ### parsed `'tag'` type + * + * {@link ParsedTagObject typedef} + * {@link TagObject typedef} + * + * @deprecated + * > This command is overly complicated. + * > + * > If you know the type of object you are reading, use [`readBlob`](./readBlob.md), [`readCommit`](./readCommit.md), [`readTag`](./readTag.md), or [`readTree`](./readTree.md). + * + * @param {object} args + * @param {FsClient} args.fs - a file system client + * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path + * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path + * @param {string} args.oid - The SHA-1 object id to get + * @param {'deflated' | 'wrapped' | 'content' | 'parsed'} [args.format = 'parsed'] - What format to return the object in. The choices are described in more detail below. + * @param {string} [args.filepath] - Don't return the object with `oid` itself, but resolve `oid` to a tree and then return the object at that filepath. To return the root directory of a tree set filepath to `''` + * @param {string} [args.encoding] - A convenience argument that only affects blobs. Instead of returning `object` as a buffer, it returns a string parsed using the given encoding. 
+ * @param {object} [args.cache] - a [cache](cache.md) object + * + * @returns {Promise} Resolves successfully with a git object description + * @see ReadObjectResult + * + * @example + * // Given a ransom SHA-1 object id, figure out what it is + * let { type, object } = await git.readObject({ + * fs, + * dir: '/tutorial', + * oid: '0698a781a02264a6f37ba3ff41d78067eaf0f075' + * }) + * switch (type) { + * case 'commit': { + * console.log(object) + * break + * } + * case 'tree': { + * console.log(object) + * break + * } + * case 'blob': { + * console.log(object) + * break + * } + * case 'tag': { + * console.log(object) + * break + * } + * } + * + */ +async function readObject({ + fs: _fs, + dir, + gitdir = join(dir, '.git'), + oid, + format = 'parsed', + filepath = undefined, + encoding = undefined, + cache = {}, +}) { + try { + assertParameter('fs', _fs); + assertParameter('gitdir', gitdir); + assertParameter('oid', oid); + + const fs = new FileSystem(_fs); + if (filepath !== undefined) { + oid = await resolveFilepath({ + fs, + cache, + gitdir, + oid, + filepath, + }); + } + // GitObjectManager does not know how to parse content, so we tweak that parameter before passing it. + const _format = format === 'parsed' ? 'content' : format; + const result = await _readObject({ + fs, + cache, + gitdir, + oid, + format: _format, + }); + result.oid = oid; + if (format === 'parsed') { + result.format = 'parsed'; + switch (result.type) { + case 'commit': + result.object = GitCommit.from(result.object).parse(); + break + case 'tree': + result.object = GitTree.from(result.object).entries(); + break + case 'blob': + // Here we consider returning a raw Buffer as the 'content' format + // and returning a string as the 'parsed' format + if (encoding) { + result.object = result.object.toString(encoding); + } else { + result.object = new Uint8Array(result.object); + result.format = 'content'; + } + break + case 'tag': + result.object = GitAnnotatedTag.from(result.object).parse(); + break + default: + throw new ObjectTypeError( + result.oid, + result.type, + 'blob|commit|tag|tree' + ) + } + } else if (result.format === 'deflated' || result.format === 'wrapped') { + result.type = result.format; + } + return result + } catch (err) { + err.caller = 'git.readObject'; + throw err + } +} + +// @ts-check + +/** + * + * @typedef {Object} ReadTagResult - The object returned has the following schema: + * @property {string} oid - SHA-1 object id of this tag + * @property {TagObject} tag - the parsed tag object + * @property {string} payload - PGP signing payload + */ + +/** + * @param {object} args + * @param {import('../models/FileSystem.js').FileSystem} args.fs + * @param {any} args.cache + * @param {string} args.gitdir + * @param {string} args.oid + * + * @returns {Promise} + */ +async function _readTag({ fs, cache, gitdir, oid }) { + const { type, object } = await _readObject({ + fs, + cache, + gitdir, + oid, + format: 'content', + }); + if (type !== 'tag') { + throw new ObjectTypeError(oid, type, 'tag') + } + const tag = GitAnnotatedTag.from(object); + const result = { + oid, + tag: tag.parse(), + payload: tag.payload(), + }; + // @ts-ignore + return result +} + +/** + * + * @typedef {Object} ReadTagResult - The object returned has the following schema: + * @property {string} oid - SHA-1 object id of this tag + * @property {TagObject} tag - the parsed tag object + * @property {string} payload - PGP signing payload + */ + +/** + * Read an annotated tag object directly + * + * @param {object} args + * @param {FsClient} 
args.fs - a file system client + * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path + * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path + * @param {string} args.oid - The SHA-1 object id to get + * @param {object} [args.cache] - a [cache](cache.md) object + * + * @returns {Promise} Resolves successfully with a git object description + * @see ReadTagResult + * @see TagObject + * + */ +async function readTag({ + fs, + dir, + gitdir = join(dir, '.git'), + oid, + cache = {}, +}) { + try { + assertParameter('fs', fs); + assertParameter('gitdir', gitdir); + assertParameter('oid', oid); + + return await _readTag({ + fs: new FileSystem(fs), + cache, + gitdir, + oid, + }) + } catch (err) { + err.caller = 'git.readTag'; + throw err + } +} + +// @ts-check + +/** + * + * @typedef {Object} ReadTreeResult - The object returned has the following schema: + * @property {string} oid - SHA-1 object id of this tree + * @property {TreeObject} tree - the parsed tree object + */ + +/** + * Read a tree object directly + * + * @param {object} args + * @param {FsClient} args.fs - a file system client + * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path + * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path + * @param {string} args.oid - The SHA-1 object id to get. Annotated tags and commits are peeled. + * @param {string} [args.filepath] - Don't return the object with `oid` itself, but resolve `oid` to a tree and then return the tree object at that filepath. + * @param {object} [args.cache] - a [cache](cache.md) object + * + * @returns {Promise} Resolves successfully with a git tree object + * @see ReadTreeResult + * @see TreeObject + * @see TreeEntry + * + */ +async function readTree({ + fs, + dir, + gitdir = join(dir, '.git'), + oid, + filepath = undefined, + cache = {}, +}) { + try { + assertParameter('fs', fs); + assertParameter('gitdir', gitdir); + assertParameter('oid', oid); + + return await _readTree({ + fs: new FileSystem(fs), + cache, + gitdir, + oid, + filepath, + }) + } catch (err) { + err.caller = 'git.readTree'; + throw err + } +} + +// @ts-check + +/** + * Remove a file from the git index (aka staging area) + * + * Note that this does NOT delete the file in the working directory. 
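+ * To stage a deletion, first delete the file from the working directory using your `fs` implementation (for example `fs.promises.unlink` with a Node-style fs), then call `remove` to drop it from the index.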
+ * + * @param {object} args + * @param {FsClient} args.fs - a file system client + * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path + * @param {string} [args.gitdir=join(dir, '.git')] - [required] The [git directory](dir-vs-gitdir.md) path + * @param {string} args.filepath - The path to the file to remove from the index + * @param {object} [args.cache] - a [cache](cache.md) object + * + * @returns {Promise} Resolves successfully once the git index has been updated + * + * @example + * await git.remove({ fs, dir: '/tutorial', filepath: 'README.md' }) + * console.log('done') + * + */ +async function remove({ + fs: _fs, + dir, + gitdir = join(dir, '.git'), + filepath, + cache = {}, +}) { + try { + assertParameter('fs', _fs); + assertParameter('gitdir', gitdir); + assertParameter('filepath', filepath); + + await GitIndexManager.acquire( + { fs: new FileSystem(_fs), gitdir, cache }, + async function(index) { + index.delete({ filepath }); + } + ); + } catch (err) { + err.caller = 'git.remove'; + throw err + } +} + +// @ts-check + +/** + * @param {object} args + * @param {import('../models/FileSystem.js').FileSystem} args.fs + * @param {object} args.cache + * @param {SignCallback} [args.onSign] + * @param {string} [args.dir] + * @param {string} [args.gitdir=join(dir,'.git')] + * @param {string} [args.ref] + * @param {string} args.oid + * @param {Object} args.author + * @param {string} args.author.name + * @param {string} args.author.email + * @param {number} args.author.timestamp + * @param {number} args.author.timezoneOffset + * @param {Object} args.committer + * @param {string} args.committer.name + * @param {string} args.committer.email + * @param {number} args.committer.timestamp + * @param {number} args.committer.timezoneOffset + * @param {string} [args.signingKey] + * + * @returns {Promise} + */ + +async function _removeNote({ + fs, + cache, + onSign, + gitdir, + ref = 'refs/notes/commits', + oid, + author, + committer, + signingKey, +}) { + // Get the current note commit + let parent; + try { + parent = await GitRefManager.resolve({ gitdir, fs, ref }); + } catch (err) { + if (!(err instanceof NotFoundError)) { + throw err + } + } + + // I'm using the "empty tree" magic number here for brevity + const result = await _readTree({ + fs, + gitdir, + oid: parent || '4b825dc642cb6eb9a060e54bf8d69288fbee4904', + }); + let tree = result.tree; + + // Remove the note blob entry from the tree + tree = tree.filter(entry => entry.path !== oid); + + // Create the new note tree + const treeOid = await _writeTree({ + fs, + gitdir, + tree, + }); + + // Create the new note commit + const commitOid = await _commit({ + fs, + cache, + onSign, + gitdir, + ref, + tree: treeOid, + parent: parent && [parent], + message: `Note removed by 'isomorphic-git removeNote'\n`, + author, + committer, + signingKey, + }); + + return commitOid +} + +// @ts-check + +/** + * Remove an object note + * + * @param {object} args + * @param {FsClient} args.fs - a file system client + * @param {SignCallback} [args.onSign] - a PGP signing implementation + * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path + * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path + * @param {string} [args.ref] - The notes ref to look under + * @param {string} args.oid - The SHA-1 object id of the object to remove the note from. + * @param {Object} [args.author] - The details about the author. 
+ * @param {string} [args.author.name] - Default is `user.name` config. + * @param {string} [args.author.email] - Default is `user.email` config. + * @param {number} [args.author.timestamp=Math.floor(Date.now()/1000)] - Set the author timestamp field. This is the integer number of seconds since the Unix epoch (1970-01-01 00:00:00). + * @param {number} [args.author.timezoneOffset] - Set the author timezone offset field. This is the difference, in minutes, from the current timezone to UTC. Default is `(new Date()).getTimezoneOffset()`. + * @param {Object} [args.committer = author] - The details about the note committer, in the same format as the author parameter. If not specified, the author details are used. + * @param {string} [args.committer.name] - Default is `user.name` config. + * @param {string} [args.committer.email] - Default is `user.email` config. + * @param {number} [args.committer.timestamp=Math.floor(Date.now()/1000)] - Set the committer timestamp field. This is the integer number of seconds since the Unix epoch (1970-01-01 00:00:00). + * @param {number} [args.committer.timezoneOffset] - Set the committer timezone offset field. This is the difference, in minutes, from the current timezone to UTC. Default is `(new Date()).getTimezoneOffset()`. + * @param {string} [args.signingKey] - Sign the tag object using this private PGP key. + * @param {object} [args.cache] - a [cache](cache.md) object + * + * @returns {Promise} Resolves successfully with the SHA-1 object id of the commit object for the note removal. + */ + +async function removeNote({ + fs: _fs, + onSign, + dir, + gitdir = join(dir, '.git'), + ref = 'refs/notes/commits', + oid, + author: _author, + committer: _committer, + signingKey, + cache = {}, +}) { + try { + assertParameter('fs', _fs); + assertParameter('gitdir', gitdir); + assertParameter('oid', oid); + + const fs = new FileSystem(_fs); + + const author = await normalizeAuthorObject({ fs, gitdir, author: _author }); + if (!author) throw new MissingNameError('author') + + const committer = await normalizeCommitterObject({ + fs, + gitdir, + author, + committer: _committer, + }); + if (!committer) throw new MissingNameError('committer') + + return await _removeNote({ + fs, + cache, + onSign, + gitdir, + ref, + oid, + author, + committer, + signingKey, + }) + } catch (err) { + err.caller = 'git.removeNote'; + throw err + } +} + +// @ts-check + +/** + * Rename a branch + * + * @param {object} args + * @param {import('../models/FileSystem.js').FileSystem} args.fs + * @param {string} args.gitdir + * @param {string} args.ref - The name of the new branch + * @param {string} args.oldref - The name of the old branch + * @param {boolean} [args.checkout = false] + * + * @returns {Promise} Resolves successfully when filesystem operations are complete + */ +async function _renameBranch({ + fs, + gitdir, + oldref, + ref, + checkout = false, +}) { + if (ref !== cleanGitRef.clean(ref)) { + throw new InvalidRefNameError(ref, cleanGitRef.clean(ref)) + } + + if (oldref !== cleanGitRef.clean(oldref)) { + throw new InvalidRefNameError(oldref, cleanGitRef.clean(oldref)) + } + + const fulloldref = `refs/heads/${oldref}`; + const fullnewref = `refs/heads/${ref}`; + + const newexist = await GitRefManager.exists({ fs, gitdir, ref: fullnewref }); + + if (newexist) { + throw new AlreadyExistsError('branch', ref, false) + } + + const value = await GitRefManager.resolve({ + fs, + gitdir, + ref: fulloldref, + depth: 1, + }); + + await GitRefManager.writeRef({ fs, gitdir, ref: fullnewref, value }); 
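+ // Delete the old ref only after the new one has been written, so a failure part-way through cannot lose the branch tip.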
+ await GitRefManager.deleteRef({ fs, gitdir, ref: fulloldref }); + + const fullCurrentBranchRef = await _currentBranch({ + fs, + gitdir, + fullname: true, + }); + const isCurrentBranch = fullCurrentBranchRef === fulloldref; + + if (checkout || isCurrentBranch) { + // Update HEAD + await GitRefManager.writeSymbolicRef({ + fs, + gitdir, + ref: 'HEAD', + value: fullnewref, + }); + } +} + +// @ts-check + +/** + * Rename a branch + * + * @param {object} args + * @param {FsClient} args.fs - a file system implementation + * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path + * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path + * @param {string} args.ref - What to name the branch + * @param {string} args.oldref - What the name of the branch was + * @param {boolean} [args.checkout = false] - Update `HEAD` to point at the newly created branch + * + * @returns {Promise} Resolves successfully when filesystem operations are complete + * + * @example + * await git.renameBranch({ fs, dir: '/tutorial', ref: 'main', oldref: 'master' }) + * console.log('done') + * + */ +async function renameBranch({ + fs, + dir, + gitdir = join(dir, '.git'), + ref, + oldref, + checkout = false, +}) { + try { + assertParameter('fs', fs); + assertParameter('gitdir', gitdir); + assertParameter('ref', ref); + assertParameter('oldref', oldref); + return await _renameBranch({ + fs: new FileSystem(fs), + gitdir, + ref, + oldref, + checkout, + }) + } catch (err) { + err.caller = 'git.renameBranch'; + throw err + } +} + +async function hashObject$1({ gitdir, type, object }) { + return shasum(GitObject.wrap({ type, object })) +} + +// @ts-check + +/** + * Reset a file in the git index (aka staging area) + * + * Note that this does NOT modify the file in the working directory. + * + * @param {object} args + * @param {FsClient} args.fs - a file system client + * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path + * @param {string} [args.gitdir=join(dir, '.git')] - [required] The [git directory](dir-vs-gitdir.md) path + * @param {string} args.filepath - The path to the file to reset in the index + * @param {string} [args.ref = 'HEAD'] - A ref to the commit to use + * @param {object} [args.cache] - a [cache](cache.md) object + * + * @returns {Promise} Resolves successfully once the git index has been updated + * + * @example + * await git.resetIndex({ fs, dir: '/tutorial', filepath: 'README.md' }) + * console.log('done') + * + */ +async function resetIndex({ + fs: _fs, + dir, + gitdir = join(dir, '.git'), + filepath, + ref, + cache = {}, +}) { + try { + assertParameter('fs', _fs); + assertParameter('gitdir', gitdir); + assertParameter('filepath', filepath); + + const fs = new FileSystem(_fs); + + let oid; + let workdirOid; + + try { + // Resolve commit + oid = await GitRefManager.resolve({ fs, gitdir, ref: ref || 'HEAD' }); + } catch (e) { + if (ref) { + // Only throw the error if a ref is explicitly provided + throw e + } + } + + // Not having an oid at this point means `resetIndex()` was called without explicit `ref` on a new git + // repository. If that happens, we can skip resolving the file path. 
+ if (oid) { + try { + // Resolve blob + oid = await resolveFilepath({ + fs, + cache, + gitdir, + oid, + filepath, + }); + } catch (e) { + // This means we're resetting the file to a "deleted" state + oid = null; + } + } + + // For files that aren't in the workdir use zeros + let stats = { + ctime: new Date(0), + mtime: new Date(0), + dev: 0, + ino: 0, + mode: 0, + uid: 0, + gid: 0, + size: 0, + }; + // If the file exists in the workdir... + const object = dir && (await fs.read(join(dir, filepath))); + if (object) { + // ... and has the same hash as the desired state... + workdirOid = await hashObject$1({ + gitdir, + type: 'blob', + object, + }); + if (oid === workdirOid) { + // ... use the workdir Stats object + stats = await fs.lstat(join(dir, filepath)); + } + } + await GitIndexManager.acquire({ fs, gitdir, cache }, async function(index) { + index.delete({ filepath }); + if (oid) { + index.insert({ filepath, stats, oid }); + } + }); + } catch (err) { + err.caller = 'git.reset'; + throw err + } +} + +// @ts-check + +/** + * Get the value of a symbolic ref or resolve a ref to its SHA-1 object id + * + * @param {object} args + * @param {FsClient} args.fs - a file system client + * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path + * @param {string} [args.gitdir=join(dir, '.git')] - [required] The [git directory](dir-vs-gitdir.md) path + * @param {string} args.ref - The ref to resolve + * @param {number} [args.depth = undefined] - How many symbolic references to follow before returning + * + * @returns {Promise} Resolves successfully with a SHA-1 object id or the value of a symbolic ref + * + * @example + * let currentCommit = await git.resolveRef({ fs, dir: '/tutorial', ref: 'HEAD' }) + * console.log(currentCommit) + * let currentBranch = await git.resolveRef({ fs, dir: '/tutorial', ref: 'HEAD', depth: 2 }) + * console.log(currentBranch) + * + */ +async function resolveRef({ + fs, + dir, + gitdir = join(dir, '.git'), + ref, + depth, +}) { + try { + assertParameter('fs', fs); + assertParameter('gitdir', gitdir); + assertParameter('ref', ref); + + const oid = await GitRefManager.resolve({ + fs: new FileSystem(fs), + gitdir, + ref, + depth, + }); + return oid + } catch (err) { + err.caller = 'git.resolveRef'; + throw err + } +} + +// @ts-check + +/** + * Write an entry to the git config files. + * + * *Caveats:* + * - Currently only the local `$GIT_DIR/config` file can be read or written. However support for the global `~/.gitconfig` and system `$(prefix)/etc/gitconfig` will be added in the future. + * - The current parser does not support the more exotic features of the git-config file format such as `[include]` and `[includeIf]`. + * + * @param {Object} args + * @param {FsClient} args.fs - a file system implementation + * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path + * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path + * @param {string} args.path - The key of the git config entry + * @param {string | boolean | number | void} args.value - A value to store at that path. (Use `undefined` as the value to delete a config entry.) + * @param {boolean} [args.append = false] - If true, will append rather than replace when setting (use with multi-valued config options). + * + * @returns {Promise} Resolves successfully when operation completed + * + * @example + * // Write config value + * await git.setConfig({ + * fs, + * dir: '/tutorial', + * path: 'user.name', + * value: 'Mr. 
Test' + * }) + * + * // Print out config file + * let file = await fs.promises.readFile('/tutorial/.git/config', 'utf8') + * console.log(file) + * + * // Delete a config entry + * await git.setConfig({ + * fs, + * dir: '/tutorial', + * path: 'user.name', + * value: undefined + * }) + * + * // Print out config file + * file = await fs.promises.readFile('/tutorial/.git/config', 'utf8') + * console.log(file) + */ +async function setConfig({ + fs: _fs, + dir, + gitdir = join(dir, '.git'), + path, + value, + append = false, +}) { + try { + assertParameter('fs', _fs); + assertParameter('gitdir', gitdir); + assertParameter('path', path); + // assertParameter('value', value) // We actually allow 'undefined' as a value to unset/delete + + const fs = new FileSystem(_fs); + const config = await GitConfigManager.get({ fs, gitdir }); + if (append) { + await config.append(path, value); + } else { + await config.set(path, value); + } + await GitConfigManager.save({ fs, gitdir, config }); + } catch (err) { + err.caller = 'git.setConfig'; + throw err + } +} + +// @ts-check + +/** + * Tell whether a file has been changed + * + * The possible resolve values are: + * + * | status | description | + * | --------------------- | ------------------------------------------------------------------------------------- | + * | `"ignored"` | file ignored by a .gitignore rule | + * | `"unmodified"` | file unchanged from HEAD commit | + * | `"*modified"` | file has modifications, not yet staged | + * | `"*deleted"` | file has been removed, but the removal is not yet staged | + * | `"*added"` | file is untracked, not yet staged | + * | `"absent"` | file not present in HEAD commit, staging area, or working dir | + * | `"modified"` | file has modifications, staged | + * | `"deleted"` | file has been removed, staged | + * | `"added"` | previously untracked file, staged | + * | `"*unmodified"` | working dir and HEAD commit match, but index differs | + * | `"*absent"` | file not present in working dir or HEAD commit, but present in the index | + * | `"*undeleted"` | file was deleted from the index, but is still in the working dir | + * | `"*undeletemodified"` | file was deleted from the index, but is present with modifications in the working dir | + * + * @param {object} args + * @param {FsClient} args.fs - a file system client + * @param {string} args.dir - The [working tree](dir-vs-gitdir.md) directory path + * @param {string} [args.gitdir=join(dir, '.git')] - [required] The [git directory](dir-vs-gitdir.md) path + * @param {string} args.filepath - The path to the file to query + * @param {object} [args.cache] - a [cache](cache.md) object + * + * @returns {Promise<'ignored'|'unmodified'|'*modified'|'*deleted'|'*added'|'absent'|'modified'|'deleted'|'added'|'*unmodified'|'*absent'|'*undeleted'|'*undeletemodified'>} Resolves successfully with the file's git status + * + * @example + * let status = await git.status({ fs, dir: '/tutorial', filepath: 'README.md' }) + * console.log(status) + * + */ +async function status({ + fs: _fs, + dir, + gitdir = join(dir, '.git'), + filepath, + cache = {}, +}) { + try { + assertParameter('fs', _fs); + assertParameter('gitdir', gitdir); + assertParameter('filepath', filepath); + + const fs = new FileSystem(_fs); + const ignored = await GitIgnoreManager.isIgnored({ + fs, + gitdir, + dir, + filepath, + }); + if (ignored) { + return 'ignored' + } + const headTree = await getHeadTree({ fs, cache, gitdir }); + const treeOid = await getOidAtPath({ + fs, + cache, + gitdir, + tree: headTree, + path: 
filepath, + }); + const indexEntry = await GitIndexManager.acquire( + { fs, gitdir, cache }, + async function(index) { + for (const entry of index) { + if (entry.path === filepath) return entry + } + return null + } + ); + const stats = await fs.lstat(join(dir, filepath)); + + const H = treeOid !== null; // head + const I = indexEntry !== null; // index + const W = stats !== null; // working dir + + const getWorkdirOid = async () => { + if (I && !compareStats(indexEntry, stats)) { + return indexEntry.oid + } else { + const object = await fs.read(join(dir, filepath)); + const workdirOid = await hashObject$1({ + gitdir, + type: 'blob', + object, + }); + // If the oid in the index === working dir oid but stats differed update cache + if (I && indexEntry.oid === workdirOid) { + // and as long as our fs.stats aren't bad. + // size of -1 happens over a BrowserFS HTTP Backend that doesn't serve Content-Length headers + // (like the Karma webserver) because BrowserFS HTTP Backend uses HTTP HEAD requests to do fs.stat + if (stats.size !== -1) { + // We don't await this so we can return faster for one-off cases. + GitIndexManager.acquire({ fs, gitdir, cache }, async function( + index + ) { + index.insert({ filepath, stats, oid: workdirOid }); + }); + } + } + return workdirOid + } + }; + + if (!H && !W && !I) return 'absent' // --- + if (!H && !W && I) return '*absent' // -A- + if (!H && W && !I) return '*added' // --A + if (!H && W && I) { + const workdirOid = await getWorkdirOid(); + // @ts-ignore + return workdirOid === indexEntry.oid ? 'added' : '*added' // -AA : -AB + } + if (H && !W && !I) return 'deleted' // A-- + if (H && !W && I) { + // @ts-ignore + return treeOid === indexEntry.oid ? '*deleted' : '*deleted' // AA- : AB- + } + if (H && W && !I) { + const workdirOid = await getWorkdirOid(); + return workdirOid === treeOid ? '*undeleted' : '*undeletemodified' // A-A : A-B + } + if (H && W && I) { + const workdirOid = await getWorkdirOid(); + if (workdirOid === treeOid) { + // @ts-ignore + return workdirOid === indexEntry.oid ? 'unmodified' : '*unmodified' // AAA : ABA + } else { + // @ts-ignore + return workdirOid === indexEntry.oid ? 'modified' : '*modified' // ABB : AAB + } + } + /* + --- + -A- + --A + -AA + -AB + A-- + AA- + AB- + A-A + A-B + AAA + ABA + ABB + AAB + */ + } catch (err) { + err.caller = 'git.status'; + throw err + } +} + +async function getOidAtPath({ fs, cache, gitdir, tree, path }) { + if (typeof path === 'string') path = path.split('/'); + const dirname = path.shift(); + for (const entry of tree) { + if (entry.path === dirname) { + if (path.length === 0) { + return entry.oid + } + const { type, object } = await _readObject({ + fs, + cache, + gitdir, + oid: entry.oid, + }); + if (type === 'tree') { + const tree = GitTree.from(object); + return getOidAtPath({ fs, cache, gitdir, tree, path }) + } + if (type === 'blob') { + throw new ObjectTypeError(entry.oid, type, 'blob', path.join('/')) + } + } + } + return null +} + +async function getHeadTree({ fs, cache, gitdir }) { + // Get the tree from the HEAD commit. + let oid; + try { + oid = await GitRefManager.resolve({ fs, gitdir, ref: 'HEAD' }); + } catch (e) { + // Handle fresh branches with no commits + if (e instanceof NotFoundError) { + return [] + } + } + const { tree } = await _readTree({ fs, cache, gitdir, oid }); + return tree +} + +// @ts-check + +/** + * Efficiently get the status of multiple files at once. + * + * The returned `StatusMatrix` is admittedly not the easiest format to read. 
+ * However it conveys a large amount of information in dense format that should make it easy to create reports about the current state of the repository; + * without having to do multiple, time-consuming isomorphic-git calls. + * My hope is that the speed and flexibility of the function will make up for the learning curve of interpreting the return value. + * + * ```js live + * // get the status of all the files in 'src' + * let status = await git.statusMatrix({ + * fs, + * dir: '/tutorial', + * filter: f => f.startsWith('src/') + * }) + * console.log(status) + * ``` + * + * ```js live + * // get the status of all the JSON and Markdown files + * let status = await git.statusMatrix({ + * fs, + * dir: '/tutorial', + * filter: f => f.endsWith('.json') || f.endsWith('.md') + * }) + * console.log(status) + * ``` + * + * The result is returned as a 2D array. + * The outer array represents the files and/or blobs in the repo, in alphabetical order. + * The inner arrays describe the status of the file: + * the first value is the filepath, and the next three are integers + * representing the HEAD status, WORKDIR status, and STAGE status of the entry. + * + * ```js + * // example StatusMatrix + * [ + * ["a.txt", 0, 2, 0], // new, untracked + * ["b.txt", 0, 2, 2], // added, staged + * ["c.txt", 0, 2, 3], // added, staged, with unstaged changes + * ["d.txt", 1, 1, 1], // unmodified + * ["e.txt", 1, 2, 1], // modified, unstaged + * ["f.txt", 1, 2, 2], // modified, staged + * ["g.txt", 1, 2, 3], // modified, staged, with unstaged changes + * ["h.txt", 1, 0, 1], // deleted, unstaged + * ["i.txt", 1, 0, 0], // deleted, staged + * ["j.txt", 1, 2, 0], // deleted, staged, with unstaged-modified changes (new file of the same name) + * ["k.txt", 1, 1, 0], // deleted, staged, with unstaged changes (new file of the same name) + * ] + * ``` + * + * - The HEAD status is either absent (0) or present (1). + * - The WORKDIR status is either absent (0), identical to HEAD (1), or different from HEAD (2). + * - The STAGE status is either absent (0), identical to HEAD (1), identical to WORKDIR (2), or different from WORKDIR (3). + * + * ```ts + * type Filename = string + * type HeadStatus = 0 | 1 + * type WorkdirStatus = 0 | 1 | 2 + * type StageStatus = 0 | 1 | 2 | 3 + * + * type StatusRow = [Filename, HeadStatus, WorkdirStatus, StageStatus] + * + * type StatusMatrix = StatusRow[] + * ``` + * + * > Think of the natural progression of file modifications as being from HEAD (previous) -> WORKDIR (current) -> STAGE (next). + * > Then HEAD is "version 1", WORKDIR is "version 2", and STAGE is "version 3". + * > Then, imagine a "version 0" which is before the file was created. + * > Then the status value in each column corresponds to the oldest version of the file it is identical to. + * > (For a file to be identical to "version 0" means the file is deleted.) + * + * Here are some examples of queries you can answer using the result: + * + * #### Q: What files have been deleted? + * ```js + * const FILE = 0, WORKDIR = 2 + * + * const filenames = (await statusMatrix({ dir })) + * .filter(row => row[WORKDIR] === 0) + * .map(row => row[FILE]) + * ``` + * + * #### Q: What files have unstaged changes? + * ```js + * const FILE = 0, WORKDIR = 2, STAGE = 3 + * + * const filenames = (await statusMatrix({ dir })) + * .filter(row => row[WORKDIR] !== row[STAGE]) + * .map(row => row[FILE]) + * ``` + * + * #### Q: What files have been modified since the last commit? 
+ * ```js + * const FILE = 0, HEAD = 1, WORKDIR = 2 + * + * const filenames = (await statusMatrix({ dir })) + * .filter(row => row[HEAD] !== row[WORKDIR]) + * .map(row => row[FILE]) + * ``` + * + * #### Q: What files will NOT be changed if I commit right now? + * ```js + * const FILE = 0, HEAD = 1, STAGE = 3 + * + * const filenames = (await statusMatrix({ dir })) + * .filter(row => row[HEAD] === row[STAGE]) + * .map(row => row[FILE]) + * ``` + * + * For reference, here are all possible combinations: + * + * | HEAD | WORKDIR | STAGE | `git status --short` equivalent | + * | ---- | ------- | ----- | ------------------------------- | + * | 0 | 0 | 0 | `` | + * | 0 | 0 | 3 | `AD` | + * | 0 | 2 | 0 | `??` | + * | 0 | 2 | 2 | `A ` | + * | 0 | 2 | 3 | `AM` | + * | 1 | 0 | 0 | `D ` | + * | 1 | 0 | 1 | ` D` | + * | 1 | 0 | 3 | `MD` | + * | 1 | 1 | 0 | `D ` + `??` | + * | 1 | 1 | 1 | `` | + * | 1 | 1 | 3 | `MM` | + * | 1 | 2 | 0 | `D ` + `??` | + * | 1 | 2 | 1 | ` M` | + * | 1 | 2 | 2 | `M ` | + * | 1 | 2 | 3 | `MM` | + * + * @param {object} args + * @param {FsClient} args.fs - a file system client + * @param {string} args.dir - The [working tree](dir-vs-gitdir.md) directory path + * @param {string} [args.gitdir=join(dir, '.git')] - [required] The [git directory](dir-vs-gitdir.md) path + * @param {string} [args.ref = 'HEAD'] - Optionally specify a different commit to compare against the workdir and stage instead of the HEAD + * @param {string[]} [args.filepaths = ['.']] - Limit the query to the given files and directories + * @param {function(string): boolean} [args.filter] - Filter the results to only those whose filepath matches a function. + * @param {object} [args.cache] - a [cache](cache.md) object + * @param {boolean} [args.ignored = false] - include ignored files in the result + * + * @returns {Promise>} Resolves with a status matrix, described below. + * @see StatusRow + */ +async function statusMatrix({ + fs: _fs, + dir, + gitdir = join(dir, '.git'), + ref = 'HEAD', + filepaths = ['.'], + filter, + cache = {}, + ignored: shouldIgnore = false, +}) { + try { + assertParameter('fs', _fs); + assertParameter('gitdir', gitdir); + assertParameter('ref', ref); + + const fs = new FileSystem(_fs); + return await _walk({ + fs, + cache, + dir, + gitdir, + trees: [TREE({ ref }), WORKDIR(), STAGE()], + map: async function(filepath, [head, workdir, stage]) { + // Ignore ignored files, but only if they are not already tracked. 
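+ // (a file counts as "untracked" here when it appears in the workdir but in neither HEAD nor the index)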
+ if (!head && !stage && workdir) { + if (!shouldIgnore) { + const isIgnored = await GitIgnoreManager.isIgnored({ + fs, + dir, + filepath, + }); + if (isIgnored) { + return null + } + } + } + // match against base paths + if (!filepaths.some(base => worthWalking(filepath, base))) { + return null + } + // Late filter against file names + if (filter) { + if (!filter(filepath)) return + } + + const [headType, workdirType, stageType] = await Promise.all([ + head && head.type(), + workdir && workdir.type(), + stage && stage.type(), + ]); + + const isBlob = [headType, workdirType, stageType].includes('blob'); + + // For now, bail on directories unless the file is also a blob in another tree + if ((headType === 'tree' || headType === 'special') && !isBlob) return + if (headType === 'commit') return null + + if ((workdirType === 'tree' || workdirType === 'special') && !isBlob) + return + + if (stageType === 'commit') return null + if ((stageType === 'tree' || stageType === 'special') && !isBlob) return + + // Figure out the oids for files, using the staged oid for the working dir oid if the stats match. + const headOid = headType === 'blob' ? await head.oid() : undefined; + const stageOid = stageType === 'blob' ? await stage.oid() : undefined; + let workdirOid; + if ( + headType !== 'blob' && + workdirType === 'blob' && + stageType !== 'blob' + ) { + // We don't actually NEED the sha. Any sha will do + // TODO: update this logic to handle N trees instead of just 3. + workdirOid = '42'; + } else if (workdirType === 'blob') { + workdirOid = await workdir.oid(); + } + const entry = [undefined, headOid, workdirOid, stageOid]; + const result = entry.map(value => entry.indexOf(value)); + result.shift(); // remove leading undefined entry + return [filepath, ...result] + }, + }) + } catch (err) { + err.caller = 'git.statusMatrix'; + throw err + } +} + +// @ts-check + +/** + * Create a lightweight tag + * + * @param {object} args + * @param {FsClient} args.fs - a file system client + * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path + * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path + * @param {string} args.ref - What to name the tag + * @param {string} [args.object = 'HEAD'] - What oid the tag refers to. (Will resolve to oid if value is a ref.) By default, the commit object which is referred by the current `HEAD` is used. + * @param {boolean} [args.force = false] - Instead of throwing an error if a tag named `ref` already exists, overwrite the existing tag. + * + * @returns {Promise} Resolves successfully when filesystem operations are complete + * + * @example + * await git.tag({ fs, dir: '/tutorial', ref: 'test-tag' }) + * console.log('done') + * + */ +async function tag({ + fs: _fs, + dir, + gitdir = join(dir, '.git'), + ref, + object, + force = false, +}) { + try { + assertParameter('fs', _fs); + assertParameter('gitdir', gitdir); + assertParameter('ref', ref); + + const fs = new FileSystem(_fs); + + if (ref === undefined) { + throw new MissingParameterError('ref') + } + + ref = ref.startsWith('refs/tags/') ? 
ref : `refs/tags/${ref}`; + + // Resolve passed object + const value = await GitRefManager.resolve({ + fs, + gitdir, + ref: object || 'HEAD', + }); + + if (!force && (await GitRefManager.exists({ fs, gitdir, ref }))) { + throw new AlreadyExistsError('tag', ref) + } + + await GitRefManager.writeRef({ fs, gitdir, ref, value }); + } catch (err) { + err.caller = 'git.tag'; + throw err + } +} + +// @ts-check + +/** + * Register file contents in the working tree or object database to the git index (aka staging area). + * + * @param {object} args + * @param {FsClient} args.fs - a file system client + * @param {string} args.dir - The [working tree](dir-vs-gitdir.md) directory path + * @param {string} [args.gitdir=join(dir, '.git')] - [required] The [git directory](dir-vs-gitdir.md) path + * @param {string} args.filepath - File to act upon. + * @param {string} [args.oid] - OID of the object in the object database to add to the index with the specified filepath. + * @param {number} [args.mode = 100644] - The file mode to add the file to the index. + * @param {boolean} [args.add] - Adds the specified file to the index if it does not yet exist in the index. + * @param {boolean} [args.remove] - Remove the specified file from the index if it does not exist in the workspace anymore. + * @param {boolean} [args.force] - Remove the specified file from the index, even if it still exists in the workspace. + * @param {object} [args.cache] - a [cache](cache.md) object + * + * @returns {Promise} Resolves successfully with the SHA-1 object id of the object written or updated in the index, or nothing if the file was removed. + * + * @example + * await git.updateIndex({ + * fs, + * dir: '/tutorial', + * filepath: 'readme.md' + * }) + * + * @example + * // Manually create a blob in the object database. + * let oid = await git.writeBlob({ + * fs, + * dir: '/tutorial', + * blob: new Uint8Array([]) + * }) + * + * // Write the object in the object database to the index. 
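+ * // (`oid` here is the value returned by writeBlob above)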
+ * await git.updateIndex({ + * fs, + * dir: '/tutorial', + * add: true, + * filepath: 'readme.md', + * oid + * }) + */ +async function updateIndex({ + fs: _fs, + dir, + gitdir = join(dir, '.git'), + cache = {}, + filepath, + oid, + mode, + add, + remove, + force, +}) { + try { + assertParameter('fs', _fs); + assertParameter('gitdir', gitdir); + assertParameter('filepath', filepath); + + const fs = new FileSystem(_fs); + + if (remove) { + return await GitIndexManager.acquire( + { fs, gitdir, cache }, + async function(index) { + let fileStats; + + if (!force) { + // Check if the file is still present in the working directory + fileStats = await fs.lstat(join(dir, filepath)); + + if (fileStats) { + if (fileStats.isDirectory()) { + // Removing directories should not work + throw new InvalidFilepathError('directory') + } + + // Do nothing if we don't force and the file still exists in the workdir + return + } + } + + // Directories are not allowed, so we make sure the provided filepath exists in the index + if (index.has({ filepath })) { + index.delete({ + filepath, + }); + } + } + ) + } + + // Test if it is a file and exists on disk if `remove` is not provided, only of no oid is provided + let fileStats; + + if (!oid) { + fileStats = await fs.lstat(join(dir, filepath)); + + if (!fileStats) { + throw new NotFoundError( + `file at "${filepath}" on disk and "remove" not set` + ) + } + + if (fileStats.isDirectory()) { + throw new InvalidFilepathError('directory') + } + } + + return await GitIndexManager.acquire({ fs, gitdir, cache }, async function( + index + ) { + if (!add && !index.has({ filepath })) { + // If the index does not contain the filepath yet and `add` is not set, we should throw + throw new NotFoundError( + `file at "${filepath}" in index and "add" not set` + ) + } + + // By default we use 0 for the stats of the index file + let stats = { + ctime: new Date(0), + mtime: new Date(0), + dev: 0, + ino: 0, + mode, + uid: 0, + gid: 0, + size: 0, + }; + + if (!oid) { + stats = fileStats; + + // Write the file to the object database + const object = stats.isSymbolicLink() + ? await fs.readlink(join(dir, filepath)) + : await fs.read(join(dir, filepath)); + + oid = await _writeObject({ + fs, + gitdir, + type: 'blob', + format: 'content', + object, + }); + } + + index.insert({ + filepath, + oid: oid, + stats, + }); + + return oid + }) + } catch (err) { + err.caller = 'git.updateIndex'; + throw err + } +} + +// @ts-check + +/** + * Return the version number of isomorphic-git + * + * I don't know why you might need this. I added it just so I could check that I was getting + * the correct version of the library and not a cached version. + * + * @returns {string} the version string taken from package.json at publication time + * + * @example + * console.log(git.version()) + * + */ +function version() { + try { + return pkg.version + } catch (err) { + err.caller = 'git.version'; + throw err + } +} + +// @ts-check + +/** + * @callback WalkerMap + * @param {string} filename + * @param {Array} entries + * @returns {Promise} + */ + +/** + * @callback WalkerReduce + * @param {any} parent + * @param {any[]} children + * @returns {Promise} + */ + +/** + * @callback WalkerIterateCallback + * @param {WalkerEntry[]} entries + * @returns {Promise} + */ + +/** + * @callback WalkerIterate + * @param {WalkerIterateCallback} walk + * @param {IterableIterator} children + * @returns {Promise} + */ + +/** + * A powerful recursive tree-walking utility. 
+ *
+ * The `walk` API simplifies gathering detailed information about a tree or comparing all the filepaths in two or more trees.
+ * Trees can be git commits, the working directory, or the git index (staging area).
+ * As long as a file or directory is present in at least one of the trees, it will be traversed.
+ * Entries are traversed in alphabetical order.
+ *
+ * The arguments to `walk` are the `trees` you want to traverse, and 3 optional transform functions:
+ * `map`, `reduce`, and `iterate`.
+ *
+ * ## `TREE`, `WORKDIR`, and `STAGE`
+ *
+ * Tree walkers are represented by three separate functions that can be imported:
+ *
+ * ```js
+ * import { TREE, WORKDIR, STAGE } from 'isomorphic-git'
+ * ```
+ *
+ * These functions return opaque handles called `Walker`s.
+ * The only thing that `Walker` objects are good for is passing into `walk`.
+ * Here are the three `Walker`s passed into `walk` by the `statusMatrix` command for example:
+ *
+ * ```js
+ * let ref = 'HEAD'
+ *
+ * let trees = [TREE({ ref }), WORKDIR(), STAGE()]
+ * ```
+ *
+ * For the arguments, see the doc pages for [TREE](./TREE.md), [WORKDIR](./WORKDIR.md), and [STAGE](./STAGE.md).
+ *
+ * `map`, `reduce`, and `iterate` allow you to control the recursive walk by pruning and transforming `WalkerEntry`s into the desired result.
+ *
+ * ## WalkerEntry
+ *
+ * {@link WalkerEntry typedef}
+ *
+ * `map` receives an array of `WalkerEntry[]` as its main argument, one `WalkerEntry` for each `Walker` in the `trees` argument.
+ * The methods are memoized per `WalkerEntry` so calling them multiple times in a `map` function does not adversely impact performance.
+ * By only computing these values if needed, you can build lean, mean, efficient walking machines.
+ *
+ * ### WalkerEntry#type()
+ *
+ * Returns the kind as a string. This is normally either `tree` or `blob`.
+ *
+ * `TREE`, `STAGE`, and `WORKDIR` walkers all return a string.
+ *
+ * Possible values:
+ *
+ * - `'tree'` directory
+ * - `'blob'` file
+ * - `'special'` used by `WORKDIR` to represent irregular files like sockets and FIFOs
+ * - `'commit'` used by `TREE` to represent submodules
+ *
+ * ```js
+ * await entry.type()
+ * ```
+ *
+ * ### WalkerEntry#mode()
+ *
+ * Returns the file mode as a number. Use this to distinguish between regular files, symlinks, and executable files.
+ *
+ * `TREE`, `STAGE`, and `WORKDIR` walkers all return a number for all `type`s of entries.
+ *
+ * It has been normalized to one of the 4 values that are allowed in git commits:
+ *
+ * - `0o40000` directory
+ * - `0o100644` file
+ * - `0o100755` file (executable)
+ * - `0o120000` symlink
+ *
+ * Tip: to make modes more readable, you can print them in octal using `.toString(8)`.
+ *
+ * ```js
+ * await entry.mode()
+ * ```
+ *
+ * ### WalkerEntry#oid()
+ *
+ * Returns the SHA-1 object id for blobs and trees.
+ *
+ * `TREE` walkers return a string for `blob` and `tree` entries.
+ *
+ * `STAGE` and `WORKDIR` walkers return a string for `blob` entries and `undefined` for `tree` entries.
+ *
+ * ```js
+ * await entry.oid()
+ * ```
+ *
+ * ### WalkerEntry#content()
+ *
+ * Returns the file contents as a Buffer.
+ *
+ * `TREE` and `WORKDIR` walkers return a Buffer for `blob` entries and `undefined` for `tree` entries.
+ *
+ * `STAGE` walkers always return `undefined` since the file contents are never stored in the stage.
+ *
+ * ```js
+ * await entry.content()
+ * ```
+ *
+ * ### WalkerEntry#stat()
+ *
+ * Returns a normalized subset of filesystem Stat data.
+ *
+ * `WORKDIR` walkers return a `Stat` for `blob` and `tree` entries.
+ *
+ * `STAGE` walkers return a `Stat` for `blob` entries and `undefined` for `tree` entries.
+ *
+ * `TREE` walkers return `undefined` for all entry types.
+ *
+ * ```js
+ * await entry.stat()
+ * ```
+ *
+ * {@link Stat typedef}
+ *
+ * ## map(string, Array<WalkerEntry>) => Promise<any>
+ *
+ * {@link WalkerMap typedef}
+ *
+ * This is the function that is called once per entry BEFORE visiting the children of that node.
+ *
+ * If you return `null` for a `tree` entry, then none of the children of that `tree` entry will be walked.
+ *
+ * This is a good place for query logic, such as examining the contents of a file.
+ * Ultimately, compare all the entries and return any values you are interested in.
+ * If you do not return a value (or return undefined) that entry will be filtered from the results.
+ *
+ * Example 1: Find all the files containing the word 'foo'.
+ * ```js
+ * async function map(filepath, [head, workdir]) {
+ * let content = (await workdir.content()).toString('utf8')
+ * if (content.includes('foo')) {
+ * return {
+ * filepath,
+ * content
+ * }
+ * }
+ * }
+ * ```
+ *
+ * Example 2: Return the difference between the working directory and the HEAD commit
+ * ```js
+ * const map = async (filepath, [head, workdir]) => {
+ * return {
+ * filepath,
+ * oid: await head?.oid(),
+ * diff: diff(
+ * (await head?.content())?.toString('utf8') || '',
+ * (await workdir?.content())?.toString('utf8') || ''
+ * )
+ * }
+ * }
+ * ```
+ *
+ * Example 3:
+ * ```js
+ * let path = require('path')
+ * // Only examine files in the directory `cwd`
+ * let cwd = 'src/app'
+ * async function map (filepath, [head, workdir, stage]) {
+ * if (
+ * // don't skip the root directory
+ * head.fullpath !== '.' &&
+ * // return true for 'src' and 'src/app'
+ * !cwd.startsWith(filepath) &&
+ * // return true for 'src/app/*'
+ * path.dirname(filepath) !== cwd
+ * ) {
+ * return null
+ * } else {
+ * return filepath
+ * }
+ * }
+ * ```
+ *
+ * ## reduce(parent, children)
+ *
+ * {@link WalkerReduce typedef}
+ *
+ * This is the function that is called once per entry AFTER visiting the children of that node.
+ *
+ * Default: `async (parent, children) => parent === undefined ? children.flat() : [parent, children].flat()`
+ *
+ * The default implementation of this function returns all directories and children in a giant flat array.
+ * You can define a different accumulation method though.
+ *
+ * Example: Return a hierarchical structure
+ * ```js
+ * async function reduce (parent, children) {
+ * return Object.assign(parent, { children })
+ * }
+ * ```
+ *
+ * ## iterate(walk, children)
+ *
+ * {@link WalkerIterate typedef}
+ *
+ * {@link WalkerIterateCallback typedef}
+ *
+ * Default: `(walk, children) => Promise.all([...children].map(walk))`
+ *
+ * The default implementation recurses all children concurrently using Promise.all.
+ * However, you could use a custom function to traverse children serially or use a global queue to throttle recursion.
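+ *
+ * Example: a sketch of a custom `iterate` that walks children serially instead of concurrently (same signature as the default shown above)
+ * ```js
+ * const iterate = async (walk, children) => {
+ * const results = []
+ * for (const child of children) {
+ * results.push(await walk(child))
+ * }
+ * return results
+ * }
+ * ```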
+ *
+ * @param {object} args
+ * @param {FsClient} args.fs - a file system client
+ * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
+ * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
+ * @param {Walker[]} args.trees - The trees you want to traverse
+ * @param {WalkerMap} [args.map] - Transform `WalkerEntry`s into a result form
+ * @param {WalkerReduce} [args.reduce] - Control how mapped entries are combined with their parent result
+ * @param {WalkerIterate} [args.iterate] - Fine-tune how entries within a tree are iterated over
+ * @param {object} [args.cache] - a [cache](cache.md) object
+ *
+ * @returns {Promise<any>} The finished tree-walking result
+ */
+async function walk({
+  fs,
+  dir,
+  gitdir = join(dir, '.git'),
+  trees,
+  map,
+  reduce,
+  iterate,
+  cache = {},
+}) {
+  try {
+    assertParameter('fs', fs);
+    assertParameter('gitdir', gitdir);
+    assertParameter('trees', trees);
+
+    return await _walk({
+      fs: new FileSystem(fs),
+      cache,
+      dir,
+      gitdir,
+      trees,
+      map,
+      reduce,
+      iterate,
+    })
+  } catch (err) {
+    err.caller = 'git.walk';
+    throw err
+  }
+}
+
+// @ts-check
+
+/**
+ * Write a blob object directly
+ *
+ * @param {object} args
+ * @param {FsClient} args.fs - a file system client
+ * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
+ * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
+ * @param {Uint8Array} args.blob - The blob object to write
+ *
+ * @returns {Promise<string>} Resolves successfully with the SHA-1 object id of the newly written object
+ *
+ * @example
+ * // Manually create a blob.
+ * let oid = await git.writeBlob({
+ *   fs,
+ *   dir: '/tutorial',
+ *   blob: new Uint8Array([])
+ * })
+ *
+ * console.log('oid', oid) // should be 'e69de29bb2d1d6434b8b29ae775ad8c2e48c5391'
+ *
+ */
+async function writeBlob({ fs, dir, gitdir = join(dir, '.git'), blob }) {
+  try {
+    assertParameter('fs', fs);
+    assertParameter('gitdir', gitdir);
+    assertParameter('blob', blob);
+
+    return await _writeObject({
+      fs: new FileSystem(fs),
+      gitdir,
+      type: 'blob',
+      object: blob,
+      format: 'content',
+    })
+  } catch (err) {
+    err.caller = 'git.writeBlob';
+    throw err
+  }
+}
+
+// @ts-check
+
+/**
+ * @param {object} args
+ * @param {import('../models/FileSystem.js').FileSystem} args.fs
+ * @param {string} args.gitdir
+ * @param {CommitObject} args.commit
+ *
+ * @returns {Promise<string>}
+ * @see CommitObject
+ *
+ */
+async function _writeCommit({ fs, gitdir, commit }) {
+  // Convert object to buffer
+  const object = GitCommit.from(commit).toObject();
+  const oid = await _writeObject({
+    fs,
+    gitdir,
+    type: 'commit',
+    object,
+    format: 'content',
+  });
+  return oid
+}
+
+// @ts-check
+
+/**
+ * Write a commit object directly
+ *
+ * @param {object} args
+ * @param {FsClient} args.fs - a file system client
+ * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
+ * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
+ * @param {CommitObject} args.commit - The object to write
+ *
+ * @returns {Promise<string>} Resolves successfully with the SHA-1 object id of the newly written object
+ * @see CommitObject
+ *
+ */
+async function writeCommit({
+  fs,
+  dir,
+  gitdir = join(dir, '.git'),
+  commit,
+}) {
+  try {
+    assertParameter('fs', fs);
+    assertParameter('gitdir', gitdir);
+    assertParameter('commit', commit);
+
+    return await _writeCommit({
+      fs: new FileSystem(fs),
+      gitdir,
+      commit,
+    })
+  } catch (err) {
+    err.caller = 'git.writeCommit';
+    throw err
+  }
+}
+
+// @ts-check
+
+/**
+ * Write a git object directly
+ *
+ * `format` can have the following values:
+ *
+ * | param      | description                                                                                                                                                       |
+ * | ---------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------- |
+ * | 'deflated' | Treat `object` as the raw deflate-compressed buffer for an object, meaning it can be written to `.git/objects/**` as-is.                                           |
+ * | 'wrapped'  | Treat `object` as the inflated object buffer wrapped in the git object header. This is the raw buffer used when calculating the SHA-1 object id of a git object.   |
+ * | 'content'  | Treat `object` as the object buffer without the git header.                                                                                                        |
+ * | 'parsed'   | Treat `object` as a parsed representation of the object.                                                                                                           |
+ *
+ * If `format` is `'parsed'`, then `object` must match one of the schemas for `CommitObject`, `TreeObject`, `TagObject`, or a `string` (for blobs).
+ *
+ * {@link CommitObject typedef}
+ *
+ * {@link TreeObject typedef}
+ *
+ * {@link TagObject typedef}
+ *
+ * If `format` is `'content'`, `'wrapped'`, or `'deflated'`, `object` should be a `Uint8Array`.
+ *
+ * @deprecated
+ * > This command is overly complicated.
+ * >
+ * > If you know the type of object you are writing, use [`writeBlob`](./writeBlob.md), [`writeCommit`](./writeCommit.md), [`writeTag`](./writeTag.md), or [`writeTree`](./writeTree.md).
+ *
+ * @param {object} args
+ * @param {FsClient} args.fs - a file system client
+ * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
+ * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
+ * @param {string | Uint8Array | CommitObject | TreeObject | TagObject} args.object - The object to write.
+ * @param {'blob'|'tree'|'commit'|'tag'} [args.type] - The kind of object to write.
+ * @param {'deflated' | 'wrapped' | 'content' | 'parsed'} [args.format = 'parsed'] - What format the object is in. The possible choices are listed below.
+ * @param {string} [args.oid] - If `format` is `'deflated'` then this param is required. Otherwise it is calculated.
+ * @param {string} [args.encoding] - If `type` is `'blob'` then `object` will be converted to a Uint8Array using `encoding`.
+ *
+ * @returns {Promise<string>} Resolves successfully with the SHA-1 object id of the newly written object.
+ *
+ * @example
+ * // Manually create an annotated tag.
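+ * // (Since `format` defaults to 'parsed', the `object` below is a plain `TagObject`.)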
+ * let sha = await git.resolveRef({ fs, dir: '/tutorial', ref: 'HEAD' }) + * console.log('commit', sha) + * + * let oid = await git.writeObject({ + * fs, + * dir: '/tutorial', + * type: 'tag', + * object: { + * object: sha, + * type: 'commit', + * tag: 'my-tag', + * tagger: { + * name: 'your name', + * email: 'email@example.com', + * timestamp: Math.floor(Date.now()/1000), + * timezoneOffset: new Date().getTimezoneOffset() + * }, + * message: 'Optional message' + * } + * }) + * + * console.log('tag', oid) + * + */ +async function writeObject({ + fs: _fs, + dir, + gitdir = join(dir, '.git'), + type, + object, + format = 'parsed', + oid, + encoding = undefined, +}) { + try { + const fs = new FileSystem(_fs); + // Convert object to buffer + if (format === 'parsed') { + switch (type) { + case 'commit': + object = GitCommit.from(object).toObject(); + break + case 'tree': + object = GitTree.from(object).toObject(); + break + case 'blob': + object = Buffer.from(object, encoding); + break + case 'tag': + object = GitAnnotatedTag.from(object).toObject(); + break + default: + throw new ObjectTypeError(oid || '', type, 'blob|commit|tag|tree') + } + // GitObjectManager does not know how to serialize content, so we tweak that parameter before passing it. + format = 'content'; + } + oid = await _writeObject({ + fs, + gitdir, + type, + object, + oid, + format, + }); + return oid + } catch (err) { + err.caller = 'git.writeObject'; + throw err + } +} + +// @ts-check + +/** + * Write a ref which refers to the specified SHA-1 object id, or a symbolic ref which refers to the specified ref. + * + * @param {object} args + * @param {FsClient} args.fs - a file system client + * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path + * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path + * @param {string} args.ref - The name of the ref to write + * @param {string} args.value - When `symbolic` is false, a ref or an SHA-1 object id. When true, a ref starting with `refs/`. + * @param {boolean} [args.force = false] - Instead of throwing an error if a ref named `ref` already exists, overwrite the existing ref. + * @param {boolean} [args.symbolic = false] - Whether the ref is symbolic or not. 
+ *
+ * @returns {Promise<void>} Resolves successfully when filesystem operations are complete
+ *
+ * @example
+ * // Create a branch ref that points at the commit HEAD currently resolves to
+ * await git.writeRef({
+ *   fs,
+ *   dir: '/tutorial',
+ *   ref: 'refs/heads/another-branch',
+ *   value: 'HEAD'
+ * })
+ * // Then make HEAD a symbolic ref to that branch
+ * await git.writeRef({
+ *   fs,
+ *   dir: '/tutorial',
+ *   ref: 'HEAD',
+ *   value: 'refs/heads/another-branch',
+ *   force: true,
+ *   symbolic: true
+ * })
+ * console.log('done')
+ *
+ */
+async function writeRef({
+  fs: _fs,
+  dir,
+  gitdir = join(dir, '.git'),
+  ref,
+  value,
+  force = false,
+  symbolic = false,
+}) {
+  try {
+    assertParameter('fs', _fs);
+    assertParameter('gitdir', gitdir);
+    assertParameter('ref', ref);
+    assertParameter('value', value);
+
+    const fs = new FileSystem(_fs);
+
+    if (ref !== cleanGitRef.clean(ref)) {
+      throw new InvalidRefNameError(ref, cleanGitRef.clean(ref))
+    }
+
+    if (!force && (await GitRefManager.exists({ fs, gitdir, ref }))) {
+      throw new AlreadyExistsError('ref', ref)
+    }
+
+    if (symbolic) {
+      await GitRefManager.writeSymbolicRef({
+        fs,
+        gitdir,
+        ref,
+        value,
+      });
+    } else {
+      value = await GitRefManager.resolve({
+        fs,
+        gitdir,
+        ref: value,
+      });
+      await GitRefManager.writeRef({
+        fs,
+        gitdir,
+        ref,
+        value,
+      });
+    }
+  } catch (err) {
+    err.caller = 'git.writeRef';
+    throw err
+  }
+}
+
+// @ts-check
+
+/**
+ * @param {object} args
+ * @param {import('../models/FileSystem.js').FileSystem} args.fs
+ * @param {string} args.gitdir
+ * @param {TagObject} args.tag
+ *
+ * @returns {Promise<string>}
+ */
+async function _writeTag({ fs, gitdir, tag }) {
+  // Convert object to buffer
+  const object = GitAnnotatedTag.from(tag).toObject();
+  const oid = await _writeObject({
+    fs,
+    gitdir,
+    type: 'tag',
+    object,
+    format: 'content',
+  });
+  return oid
+}
+
+// @ts-check
+
+/**
+ * Write an annotated tag object directly
+ *
+ * @param {object} args
+ * @param {FsClient} args.fs - a file system client
+ * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
+ * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
+ * @param {TagObject} args.tag - The object to write
+ *
+ * @returns {Promise<string>} Resolves successfully with the SHA-1 object id of the newly written object
+ * @see TagObject
+ *
+ * @example
+ * // Manually create an annotated tag.
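+ * // (Unlike `writeObject`, `writeTag` always expects a parsed `TagObject`, so no `format` is needed.)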
+ * let sha = await git.resolveRef({ fs, dir: '/tutorial', ref: 'HEAD' })
+ * console.log('commit', sha)
+ *
+ * let oid = await git.writeTag({
+ *   fs,
+ *   dir: '/tutorial',
+ *   tag: {
+ *     object: sha,
+ *     type: 'commit',
+ *     tag: 'my-tag',
+ *     tagger: {
+ *       name: 'your name',
+ *       email: 'email@example.com',
+ *       timestamp: Math.floor(Date.now()/1000),
+ *       timezoneOffset: new Date().getTimezoneOffset()
+ *     },
+ *     message: 'Optional message'
+ *   }
+ * })
+ *
+ * console.log('tag', oid)
+ *
+ */
+async function writeTag({ fs, dir, gitdir = join(dir, '.git'), tag }) {
+  try {
+    assertParameter('fs', fs);
+    assertParameter('gitdir', gitdir);
+    assertParameter('tag', tag);
+
+    return await _writeTag({
+      fs: new FileSystem(fs),
+      gitdir,
+      tag,
+    })
+  } catch (err) {
+    err.caller = 'git.writeTag';
+    throw err
+  }
+}
+
+// @ts-check
+
+/**
+ * Write a tree object directly
+ *
+ * @param {object} args
+ * @param {FsClient} args.fs - a file system client
+ * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
+ * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
+ * @param {TreeObject} args.tree - The object to write
+ *
+ * @returns {Promise<string>} Resolves successfully with the SHA-1 object id of the newly written object.
+ * @see TreeObject
+ * @see TreeEntry
+ *
+ */
+async function writeTree({ fs, dir, gitdir = join(dir, '.git'), tree }) {
+  try {
+    assertParameter('fs', fs);
+    assertParameter('gitdir', gitdir);
+    assertParameter('tree', tree);
+
+    return await _writeTree({
+      fs: new FileSystem(fs),
+      gitdir,
+      tree,
+    })
+  } catch (err) {
+    err.caller = 'git.writeTree';
+    throw err
+  }
+}
+
+// default export
+var index = {
+  Errors,
+  STAGE,
+  TREE,
+  WORKDIR,
+  add,
+  abortMerge,
+  addNote,
+  addRemote,
+  annotatedTag,
+  branch,
+  checkout,
+  clone,
+  commit,
+  getConfig,
+  getConfigAll,
+  setConfig,
+  currentBranch,
+  deleteBranch,
+  deleteRef,
+  deleteRemote,
+  deleteTag,
+  expandOid,
+  expandRef,
+  fastForward,
+  fetch,
+  findMergeBase,
+  findRoot,
+  getRemoteInfo,
+  getRemoteInfo2,
+  hashBlob,
+  indexPack,
+  init,
+  isDescendent,
+  isIgnored,
+  listBranches,
+  listFiles,
+  listNotes,
+  listRemotes,
+  listServerRefs,
+  listTags,
+  log,
+  merge,
+  packObjects,
+  pull,
+  push,
+  readBlob,
+  readCommit,
+  readNote,
+  readObject,
+  readTag,
+  readTree,
+  remove,
+  removeNote,
+  renameBranch,
+  resetIndex,
+  updateIndex,
+  resolveRef,
+  status,
+  statusMatrix,
+  tag,
+  version,
+  walk,
+  writeBlob,
+  writeCommit,
+  writeObject,
+  writeRef,
+  writeTag,
+  writeTree,
+};
+
+exports.Errors = Errors;
+exports.STAGE = STAGE;
+exports.TREE = TREE;
+exports.WORKDIR = WORKDIR;
+exports.abortMerge = abortMerge;
+exports.add = add;
+exports.addNote = addNote;
+exports.addRemote = addRemote;
+exports.annotatedTag = annotatedTag;
+exports.branch = branch;
+exports.checkout = checkout;
+exports.clone = clone;
+exports.commit = commit;
+exports.currentBranch = currentBranch;
+exports["default"] = index;
+exports.deleteBranch = deleteBranch;
+exports.deleteRef = deleteRef;
+exports.deleteRemote = deleteRemote;
+exports.deleteTag = deleteTag;
+exports.expandOid = expandOid;
+exports.expandRef = expandRef;
+exports.fastForward = fastForward;
+exports.fetch = fetch;
+exports.findMergeBase = findMergeBase;
+exports.findRoot = findRoot;
+exports.getConfig = getConfig;
+exports.getConfigAll = getConfigAll;
+exports.getRemoteInfo = getRemoteInfo;
+exports.getRemoteInfo2 = getRemoteInfo2;
+exports.hashBlob = hashBlob;
+exports.indexPack = 
indexPack; +exports.init = init; +exports.isDescendent = isDescendent; +exports.isIgnored = isIgnored; +exports.listBranches = listBranches; +exports.listFiles = listFiles; +exports.listNotes = listNotes; +exports.listRemotes = listRemotes; +exports.listServerRefs = listServerRefs; +exports.listTags = listTags; +exports.log = log; +exports.merge = merge; +exports.packObjects = packObjects; +exports.pull = pull; +exports.push = push; +exports.readBlob = readBlob; +exports.readCommit = readCommit; +exports.readNote = readNote; +exports.readObject = readObject; +exports.readTag = readTag; +exports.readTree = readTree; +exports.remove = remove; +exports.removeNote = removeNote; +exports.renameBranch = renameBranch; +exports.resetIndex = resetIndex; +exports.resolveRef = resolveRef; +exports.setConfig = setConfig; +exports.status = status; +exports.statusMatrix = statusMatrix; +exports.tag = tag; +exports.updateIndex = updateIndex; +exports.version = version; +exports.walk = walk; +exports.writeBlob = writeBlob; +exports.writeCommit = writeCommit; +exports.writeObject = writeObject; +exports.writeRef = writeRef; +exports.writeTag = writeTag; +exports.writeTree = writeTree; + + +/***/ }), + +/***/ 67034: +/***/ ((module) => { + +"use strict"; +module.exports = {"i8":"16.15.0"}; + +/***/ }), + +/***/ 1383: +/***/ ((module) => { + +"use strict"; +module.exports = JSON.parse('{"$schema":"http://json-schema.org/draft-07/schema#","title":"release-please manifest config schema","description":"Schema for defining manifest config file","type":"object","additionalProperties":false,"definitions":{"ReleaserConfigOptions":{"type":"object","properties":{"release-type":{"description":"The strategy to use for this component.","type":"string"},"bump-minor-pre-major":{"description":"Breaking changes only bump semver minor if version < 1.0.0","type":"boolean"},"bump-patch-for-minor-pre-major":{"description":"Feature changes only bump semver patch if version < 1.0.0","type":"boolean"},"prerelease-type":{"description":"Configuration option for the prerelease versioning strategy. If prerelease strategy used and type set, will set the prerelease part of the version to the provided value in case prerelease part is not present.","type":"string"},"versioning":{"description":"Versioning strategy. Defaults to `default`","type":"string"},"changelog-sections":{"description":"Override the Changelog configuration sections","type":"array","items":{"type":"object","properties":{"type":{"description":"Semantic commit type (e.g. `feat`, `chore`)","type":"string"},"section":{"description":"Changelog section title","type":"string"},"hidden":{"description":"Skip displaying this type of commit. Defaults to `false`.","type":"boolean"}},"required":["type","section"]}},"release-as":{"description":"[DEPRECATED] Override the next version of this package. Consider using a `Release-As` commit instead.","type":"string"},"skip-github-release":{"description":"Skip tagging GitHub releases for this package. Release-Please still requires releases to be tagged, so this option should only be used if you have existing infrastructure to tag these releases.Defaults to `false`.","type":"boolean"},"draft":{"description":"Create the GitHub release in draft mode. Defaults to `false`.","type":"boolean"},"prerelease":{"description":"Create the GitHub release as prerelease. Defaults to `false`.","type":"boolean"},"draft-pull-request":{"description":"Open the release pull request in draft mode. 
Defaults to `false`.","type":"boolean"},"extra-label":{"description":"Comma-separated list of labels to add to a newly opened pull request","type":"string"},"include-component-in-tag":{"description":"When tagging a release, include the component name as part of the tag. Defaults to `true`.","type":"boolean"},"include-v-in-tag":{"description":"When tagging a release, include `v` in the tag. Defaults to `false`.","type":"boolean"},"changelog-type":{"description":"The type of changelog to use. Defaults to `default`.","type":"string","enum":["default","github"]},"changelog-host":{"description":"Generate changelog links to this GitHub host. Useful for running against GitHub Enterprise.","type":"string"},"changelog-path":{"description":"Path to the file that tracks release note changes. Defaults to `CHANGELOG.md`.","type":"string"},"pull-request-title-pattern":{"description":"Customize the release pull request title.","type":"string"},"pull-request-header":{"description":"Customize the release pull request header.","type":"string"},"pull-request-footer":{"description":"Customize the release pull request footer.","type":"string"},"separate-pull-requests":{"description":"Open a separate release pull request for each component. Defaults to `false`.","type":"boolean"},"always-update":{"description":"Always update the pull request with the latest changes. Defaults to `false`.","type":"boolean"},"tag-separator":{"description":"Customize the separator between the component and version in the GitHub tag.","type":"string"},"extra-files":{"description":"Specify extra generic files to replace versions.","type":"array","items":{"anyOf":[{"description":"The path to the file. The `Generic` updater uses annotations to replace versions.","type":"string"},{"description":"An extra JSON, YAML, or TOML file with a targeted update via jsonpath.","type":"object","properties":{"type":{"description":"The file format type.","enum":["json","toml","yaml"]},"path":{"description":"The path to the file.","type":"string"},"glob":{"description":"Whether to treat the path as a glob. Defaults to `false`.","type":"boolean"},"jsonpath":{"description":"The jsonpath to the version entry in the file.","type":"string"}},"required":["type","path","jsonpath"]},{"description":"An extra XML file with a targeted update via xpath.","type":"object","properties":{"type":{"description":"The file format type.","enum":["xml"]},"path":{"description":"The path to the file.","type":"string"},"glob":{"description":"Whether to treat the path as a glob. Defaults to `false`.","type":"boolean"},"xpath":{"description":"The xpath to the version entry in the file.","type":"string"}},"required":["type","path","xpath"]},{"description":"An extra pom.xml file.","type":"object","properties":{"type":{"description":"The file format type.","enum":["pom"]},"path":{"description":"The path to the file.","type":"string"},"glob":{"description":"Whether to treat the path as a glob. Defaults to `false`.","type":"boolean"}},"required":["type","path"]},{"description":"An extra arbitrary file that includes release-please generic updater\'s annotation.","type":"object","properties":{"type":{"description":"The file format type.","enum":["generic"]},"path":{"description":"The path to the file.","type":"string"},"glob":{"description":"Whether to treat the path as a glob. Defaults to `false`.","type":"boolean"}},"required":["type","path"]}]}},"exclude-paths":{"description":"Path of commits to be excluded from parsing. 
If all files from commit belong to one of the paths it will be skipped","type":"array","items":{"type":"string"}},"version-file":{"description":"Path to the specialize version file. Used by `ruby` and `simple` strategies.","type":"string"},"snapshot-label":{"description":"Label to add to snapshot pull request. Used by `java` strategies.","type":"string"},"skip-snapshot":{"description":"If set, do not propose snapshot pull requests. Used by `java` strategies.","type":"boolean"},"initial-version":{"description":"Releases the initial library with a specified version","type":"string"},"component-no-space":{"description":"release-please automatically adds ` ` (space) in front of parsed ${component}. This option indicates whether that behaviour should be disabled. Defaults to `false`","type":"boolean"}}}},"allOf":[{"$ref":"#/definitions/ReleaserConfigOptions"},{"properties":{"$schema":{"description":"Path to the release-please manifest config schema","type":"string","format":"uri-reference"},"packages":{"description":"Per-path component configuration.","type":"object","additionalProperties":{"$ref":"#/definitions/ReleaserConfigOptions"}},"bootstrap-sha":{"description":"For the initial release of a library, only consider as far back as this commit SHA. This is an uncommon use case and should generally be avoided.","type":"string"},"last-release-sha":{"description":"For any release, only consider as far back as this commit SHA. This is an uncommon use case and should generally be avoided.","type":"string"},"always-link-local":{"description":"When using the `node-workspace` plugin, force all local dependencies to be linked.","type":"boolean"},"plugins":{"description":"Plugins to apply to pull requests. Plugins can be added to perform extra release processing that cannot be achieved by an individual release strategy.","type":"array","items":{"anyOf":[{"description":"The plugin name for plugins that do not require other options.","type":"string"},{"description":"Configuration for the `linked-versions` plugin.","type":"object","properties":{"type":{"description":"The name of the plugin.","type":"string","enum":["linked-versions"]},"groupName":{"description":"The name of the group of components.","type":"string"},"components":{"description":"List of component names that are part of this group.","type":"array","items":{"type":"string"}},"merge":{"description":"Whether to merge in-scope pull requests into a combined release pull request. Defaults to `true`.","type":"boolean"},"specialWords":{"description":"Words that sentence casing logic will not be applied to","type":"array","items":{"type":"string"}}},"required":["type","groupName","components"]},{"description":"Configuration for various `workspace` plugins.","type":"object","properties":{"type":{"description":"The name of the plugin.","type":"string","enum":["cargo-workspace","maven-workspace"]},"updateAllPackages":{"description":"Whether to force updating all packages regardless of the dependency tree. Defaults to `false`.","type":"boolean"},"merge":{"description":"Whether to merge in-scope pull requests into a combined release pull request. Defaults to `true`.","type":"boolean"},"considerAllArtifacts":{"description":"Whether to analyze all packages in the workspace for cross-component version bumping. This currently only works for the maven-workspace plugin. 
Defaults to `true`.","type":"boolean"}}},{"description":"Configuration for various `workspace` plugins.","type":"object","properties":{"type":{"description":"The name of the plugin.","type":"string","enum":["node-workspace"]},"updateAllPackages":{"description":"Whether to force updating all packages regardless of the dependency tree. Defaults to `false`.","type":"boolean"},"merge":{"description":"Whether to merge in-scope pull requests into a combined release pull request. Defaults to `true`.","type":"boolean"},"considerAllArtifacts":{"description":"Whether to analyze all packages in the workspace for cross-component version bumping. This currently only works for the maven-workspace plugin. Defaults to `true`.","type":"boolean"},"updatePeerDependencies":{"description":"Also bump peer dependency versions if they are modified. Defaults to `false`.","type":"boolean"}}},{"description":"Configuration for various `group-priority` plugin","type":"object","properties":{"type":{"description":"The name of the plugin.","type":"string","enum":["group-priority"]},"groups":{"description":"Group names ordered with highest priority first.","type":"array","items":{"type":"string"}}}},{"description":"Other plugins","type":"object","properties":{"type":{"description":"The name of the plugin.","type":"string"}}}]}},"signoff":{"description":"Text to be used as Signed-off-by in the commit.","type":"string"},"group-pull-request-title-pattern":{"description":"When grouping multiple release pull requests use this pattern for the title.","type":"string"},"release-search-depth":{"description":"When considering previously releases, only look this deep.","type":"number"},"commit-search-depth":{"description":"When considering commit history, only look this many commits deep.","type":"number"},"sequential-calls":{"description":"Whether to open pull requests/releases sequentially rather than concurrently. If you have many components, you may want to set this to avoid secondary rate limits.","type":"boolean"},"label":{"description":"Comma-separated list of labels to add to newly opened pull request. These are used to identify release pull requests.","type":"string"},"release-label":{"description":"Comma-separated list of labels to add to a pull request that has been released/tagged","type":"string"},"component-no-space":{"description":"release-please automatically adds ` ` (space) in front of parsed ${component}. This option indicates whether that behaviour should be disabled. 
Defaults to `false`","type":"boolean"}},"required":["packages"]}],"properties":{"$schema":true,"packages":true,"bootstrap-sha":true,"last-release-sha":true,"always-link-local":true,"plugins":true,"signoff":true,"group-pull-request-title-pattern":true,"release-search-depth":true,"commit-search-depth":true,"sequential-calls":true,"release-type":true,"bump-minor-pre-major":true,"bump-patch-for-minor-pre-major":true,"versioning":true,"changelog-sections":true,"release-as":true,"skip-github-release":true,"draft":true,"prerelease":true,"draft-pull-request":true,"label":true,"release-label":true,"extra-label":true,"include-component-in-tag":true,"include-v-in-tag":true,"changelog-type":true,"changelog-host":true,"changelog-path":true,"pull-request-title-pattern":true,"pull-request-header":true,"pull-request-footer":true,"separate-pull-requests":true,"always-update":true,"tag-separator":true,"extra-files":true,"version-file":true,"snapshot-label":true,"initial-version":true,"exclude-paths":true,"component-no-space":false}}'); + +/***/ }), + +/***/ 94592: +/***/ ((module) => { + +"use strict"; +module.exports = JSON.parse('{"$schema":"http://json-schema.org/draft-07/schema#","title":"release-please manifest config versions","description":"Schema for defining manifest versions file","type":"object","additionalProperties":{"type":"string"}}'); + /***/ }), /***/ 53768: @@ -110512,30 +136573,6 @@ module.exports = JSON.parse('[["0","\\u0000",128],["a1","。",62],["8140","  /***/ }), -/***/ 15833: -/***/ ((module) => { - -"use strict"; -module.exports = {"i8":"16.14.1"}; - -/***/ }), - -/***/ 38623: -/***/ ((module) => { - -"use strict"; -module.exports = JSON.parse('{"$schema":"http://json-schema.org/draft-07/schema#","title":"release-please manifest config schema","description":"Schema for defining manifest config file","type":"object","additionalProperties":false,"definitions":{"ReleaserConfigOptions":{"type":"object","properties":{"release-type":{"description":"The strategy to use for this component.","type":"string"},"bump-minor-pre-major":{"description":"Breaking changes only bump semver minor if version < 1.0.0","type":"boolean"},"bump-patch-for-minor-pre-major":{"description":"Feature changes only bump semver patch if version < 1.0.0","type":"boolean"},"prerelease-type":{"description":"Configuration option for the prerelease versioning strategy. If prerelease strategy used and type set, will set the prerelease part of the version to the provided value in case prerelease part is not present.","type":"string"},"versioning":{"description":"Versioning strategy. Defaults to `default`","type":"string"},"changelog-sections":{"description":"Override the Changelog configuration sections","type":"array","items":{"type":"object","properties":{"type":{"description":"Semantic commit type (e.g. `feat`, `chore`)","type":"string"},"section":{"description":"Changelog section title","type":"string"},"hidden":{"description":"Skip displaying this type of commit. Defaults to `false`.","type":"boolean"}},"required":["type","section"]}},"release-as":{"description":"[DEPRECATED] Override the next version of this package. Consider using a `Release-As` commit instead.","type":"string"},"skip-github-release":{"description":"Skip tagging GitHub releases for this package. Release-Please still requires releases to be tagged, so this option should only be used if you have existing infrastructure to tag these releases.Defaults to `false`.","type":"boolean"},"draft":{"description":"Create the GitHub release in draft mode. 
Defaults to `false`.","type":"boolean"},"prerelease":{"description":"Create the GitHub release as prerelease. Defaults to `false`.","type":"boolean"},"draft-pull-request":{"description":"Open the release pull request in draft mode. Defaults to `false`.","type":"boolean"},"extra-label":{"description":"Comma-separated list of labels to add to a newly opened pull request","type":"string"},"include-component-in-tag":{"description":"When tagging a release, include the component name as part of the tag. Defaults to `true`.","type":"boolean"},"include-v-in-tag":{"description":"When tagging a release, include `v` in the tag. Defaults to `false`.","type":"boolean"},"changelog-type":{"description":"The type of changelog to use. Defaults to `default`.","type":"string","enum":["default","github"]},"changelog-host":{"description":"Generate changelog links to this GitHub host. Useful for running against GitHub Enterprise.","type":"string"},"changelog-path":{"description":"Path to the file that tracks release note changes. Defaults to `CHANGELOG.md`.","type":"string"},"pull-request-title-pattern":{"description":"Customize the release pull request title.","type":"string"},"pull-request-header":{"description":"Customize the release pull request header.","type":"string"},"pull-request-footer":{"description":"Customize the release pull request footer.","type":"string"},"separate-pull-requests":{"description":"Open a separate release pull request for each component. Defaults to `false`.","type":"boolean"},"tag-separator":{"description":"Customize the separator between the component and version in the GitHub tag.","type":"string"},"extra-files":{"description":"Specify extra generic files to replace versions.","type":"array","items":{"anyOf":[{"description":"The path to the file. The `Generic` updater uses annotations to replace versions.","type":"string"},{"description":"An extra JSON, YAML, or TOML file with a targeted update via jsonpath.","type":"object","properties":{"type":{"description":"The file format type.","enum":["json","toml","yaml"]},"path":{"description":"The path to the file.","type":"string"},"glob":{"description":"Whether to treat the path as a glob. Defaults to `false`.","type":"boolean"},"jsonpath":{"description":"The jsonpath to the version entry in the file.","type":"string"}},"required":["type","path","jsonpath"]},{"description":"An extra XML file with a targeted update via xpath.","type":"object","properties":{"type":{"description":"The file format type.","enum":["xml"]},"path":{"description":"The path to the file.","type":"string"},"glob":{"description":"Whether to treat the path as a glob. Defaults to `false`.","type":"boolean"},"xpath":{"description":"The xpath to the version entry in the file.","type":"string"}},"required":["type","path","xpath"]},{"description":"An extra pom.xml file.","type":"object","properties":{"type":{"description":"The file format type.","enum":["pom"]},"path":{"description":"The path to the file.","type":"string"},"glob":{"description":"Whether to treat the path as a glob. Defaults to `false`.","type":"boolean"}},"required":["type","path"]},{"description":"An extra arbitrary file that includes release-please generic updater\'s annotation.","type":"object","properties":{"type":{"description":"The file format type.","enum":["generic"]},"path":{"description":"The path to the file.","type":"string"},"glob":{"description":"Whether to treat the path as a glob. 
Defaults to `false`.","type":"boolean"}},"required":["type","path"]}]}},"exclude-paths":{"description":"Path of commits to be excluded from parsing. If all files from commit belong to one of the paths it will be skipped","type":"array","items":{"type":"string"}},"version-file":{"description":"Path to the specialize version file. Used by `ruby` and `simple` strategies.","type":"string"},"snapshot-label":{"description":"Label to add to snapshot pull request. Used by `java` strategies.","type":"string"},"skip-snapshot":{"description":"If set, do not propose snapshot pull requests. Used by `java` strategies.","type":"boolean"},"initial-version":{"description":"Releases the initial library with a specified version","type":"string"},"component-no-space":{"description":"release-please automatically adds ` ` (space) in front of parsed ${component}. This option indicates whether that behaviour should be disabled. Defaults to `false`","type":"boolean"}}}},"allOf":[{"$ref":"#/definitions/ReleaserConfigOptions"},{"properties":{"$schema":{"description":"Path to the release-please manifest config schema","type":"string","format":"uri-reference"},"packages":{"description":"Per-path component configuration.","type":"object","additionalProperties":{"$ref":"#/definitions/ReleaserConfigOptions"}},"bootstrap-sha":{"description":"For the initial release of a library, only consider as far back as this commit SHA. This is an uncommon use case and should generally be avoided.","type":"string"},"last-release-sha":{"description":"For any release, only consider as far back as this commit SHA. This is an uncommon use case and should generally be avoided.","type":"string"},"always-link-local":{"description":"When using the `node-workspace` plugin, force all local dependencies to be linked.","type":"boolean"},"plugins":{"description":"Plugins to apply to pull requests. Plugins can be added to perform extra release processing that cannot be achieved by an individual release strategy.","type":"array","items":{"anyOf":[{"description":"The plugin name for plugins that do not require other options.","type":"string"},{"description":"Configuration for the `linked-versions` plugin.","type":"object","properties":{"type":{"description":"The name of the plugin.","type":"string","enum":["linked-versions"]},"groupName":{"description":"The name of the group of components.","type":"string"},"components":{"description":"List of component names that are part of this group.","type":"array","items":{"type":"string"}},"merge":{"description":"Whether to merge in-scope pull requests into a combined release pull request. Defaults to `true`.","type":"boolean"},"specialWords":{"description":"Words that sentence casing logic will not be applied to","type":"array","items":{"type":"string"}}},"required":["type","groupName","components"]},{"description":"Configuration for various `workspace` plugins.","type":"object","properties":{"type":{"description":"The name of the plugin.","type":"string","enum":["cargo-workspace","maven-workspace"]},"updateAllPackages":{"description":"Whether to force updating all packages regardless of the dependency tree. Defaults to `false`.","type":"boolean"},"merge":{"description":"Whether to merge in-scope pull requests into a combined release pull request. Defaults to `true`.","type":"boolean"},"considerAllArtifacts":{"description":"Whether to analyze all packages in the workspace for cross-component version bumping. This currently only works for the maven-workspace plugin. 
Defaults to `true`.","type":"boolean"}}},{"description":"Configuration for various `workspace` plugins.","type":"object","properties":{"type":{"description":"The name of the plugin.","type":"string","enum":["node-workspace"]},"updateAllPackages":{"description":"Whether to force updating all packages regardless of the dependency tree. Defaults to `false`.","type":"boolean"},"merge":{"description":"Whether to merge in-scope pull requests into a combined release pull request. Defaults to `true`.","type":"boolean"},"considerAllArtifacts":{"description":"Whether to analyze all packages in the workspace for cross-component version bumping. This currently only works for the maven-workspace plugin. Defaults to `true`.","type":"boolean"},"updatePeerDependencies":{"description":"Also bump peer dependency versions if they are modified. Defaults to `false`.","type":"boolean"}}},{"description":"Configuration for various `group-priority` plugin","type":"object","properties":{"type":{"description":"The name of the plugin.","type":"string","enum":["group-priority"]},"groups":{"description":"Group names ordered with highest priority first.","type":"array","items":{"type":"string"}}}},{"description":"Other plugins","type":"object","properties":{"type":{"description":"The name of the plugin.","type":"string"}}}]}},"signoff":{"description":"Text to be used as Signed-off-by in the commit.","type":"string"},"group-pull-request-title-pattern":{"description":"When grouping multiple release pull requests use this pattern for the title.","type":"string"},"release-search-depth":{"description":"When considering previously releases, only look this deep.","type":"number"},"commit-search-depth":{"description":"When considering commit history, only look this many commits deep.","type":"number"},"sequential-calls":{"description":"Whether to open pull requests/releases sequentially rather than concurrently. If you have many components, you may want to set this to avoid secondary rate limits.","type":"boolean"},"label":{"description":"Comma-separated list of labels to add to newly opened pull request. These are used to identify release pull requests.","type":"string"},"release-label":{"description":"Comma-separated list of labels to add to a pull request that has been released/tagged","type":"string"},"component-no-space":{"description":"release-please automatically adds ` ` (space) in front of parsed ${component}. This option indicates whether that behaviour should be disabled. 
Defaults to `false`","type":"boolean"}},"required":["packages"]}],"properties":{"$schema":true,"packages":true,"bootstrap-sha":true,"last-release-sha":true,"always-link-local":true,"plugins":true,"signoff":true,"group-pull-request-title-pattern":true,"release-search-depth":true,"commit-search-depth":true,"sequential-calls":true,"release-type":true,"bump-minor-pre-major":true,"bump-patch-for-minor-pre-major":true,"versioning":true,"changelog-sections":true,"release-as":true,"skip-github-release":true,"draft":true,"prerelease":true,"draft-pull-request":true,"label":true,"release-label":true,"extra-label":true,"include-component-in-tag":true,"include-v-in-tag":true,"changelog-type":true,"changelog-host":true,"changelog-path":true,"pull-request-title-pattern":true,"pull-request-header":true,"pull-request-footer":true,"separate-pull-requests":true,"tag-separator":true,"extra-files":true,"version-file":true,"snapshot-label":true,"initial-version":true,"exclude-paths":true,"component-no-space":false}}'); - -/***/ }), - -/***/ 45314: -/***/ ((module) => { - -"use strict"; -module.exports = JSON.parse('{"$schema":"http://json-schema.org/draft-07/schema#","title":"release-please manifest config versions","description":"Schema for defining manifest versions file","type":"object","additionalProperties":{"type":"string"}}'); - -/***/ }), - /***/ 72020: /***/ ((module) => { @@ -110616,7 +136653,7 @@ var exports = __webpack_exports__; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.main = void 0; const core = __nccwpck_require__(42186); -const release_please_1 = __nccwpck_require__(24363); +const release_please_1 = __nccwpck_require__(75833); const DEFAULT_CONFIG_FILE = 'release-please-config.json'; const DEFAULT_MANIFEST_FILE = '.release-please-manifest.json'; const DEFAULT_GITHUB_API_URL = 'https://api.github.com';