diff --git a/README.md b/README.md
index e425c68..c593377 100644
--- a/README.md
+++ b/README.md
@@ -21,18 +21,29 @@ Using
 If the addon does not exist on the target branch, it will assist the user
 in the migration, following the OCA migration guide.
 
-If the addon already exists on the target branch, it will retrieve missing
-commits to port. If a Pull Request exists for a missing commit, it will be
-ported with all its commits if they were not yet (fully) ported.
+If the addon already exists on the target branch, it will retrieve the commits
+not yet (fully) ported, grouped by Pull Request, and propose to port them.
+
+Syntax:
+
+    $ oca-port <source> <target> <addon> [options]
+    $ oca-port --help
 
 To check if an addon could be migrated or to get eligible commits to port:
 
     $ export GITHUB_TOKEN=
-    $ oca-port 14.0 15.0 shopfloor --verbose
+    $ cd <repository>
+    $ oca-port origin/14.0 origin/16.0 <addon> --verbose --dry-run
+
+To effectively migrate the addon or port its commits, remove the `--dry-run`
+option so the tool automatically creates a local working branch (the destination)
+from the `<target>` branch:
+
+    $ oca-port origin/14.0 origin/16.0 <addon>
 
-To effectively migrate the addon or port its commits, use the `--fork` option:
+You can control the destination with the `--destination` option:
 
-    $ oca-port 14.0 15.0 shopfloor --fork camptocamp
+    $ oca-port origin/14.0 origin/16.0 <addon> --destination camptocamp/16.0-port-things
 
 You can also directly blacklist a bunch of PRs on a given branch thanks to
 the `oca-port-pr` tool:
@@ -88,15 +99,14 @@ You can also use `oca-port` as a Python package:
 ```python
 >>> import oca_port
 >>> app = oca_port.App(
-...     from_branch="14.0",
-...     to_branch="16.0",
+...     source="origin/14.0",
+...     target="origin/16.0",
 ...     addon="stock_move_auto_assign",
-...     from_org="OCA",
-...     from_remote="origin",
+...     upstream_org="OCA",
 ...     repo_path="/home/odoo/OCA/stock-logistics-warehouse",
 ...     output="json",
 ...     fetch=True,
-...     github_token="ghp_sheeXai3xu1yoopheiquoo3ohch0AefooSob"
+...     github_token=""
 ... )
 >>> json_data = app.run()
 >>> data = json.loads(json_data)
diff --git a/oca_port/app.py b/oca_port/app.py
index 9572a0b..8272bdd 100644
--- a/oca_port/app.py
+++ b/oca_port/app.py
@@ -3,16 +3,17 @@
 import os
 import pathlib
 from dataclasses import dataclass
+import re
 
 import git
 
 from . import utils
-from .exceptions import ForkValueError, RemoteBranchValueError
+from .exceptions import RemoteBranchValueError
 from .migrate_addon import MigrateAddon
 from .port_addon_pr import PortAddonPullRequest
 from .utils.git import Branch
 from .utils.github import GitHub
-from .utils.misc import Output, bcolors as bc
+from .utils.misc import Output, bcolors as bc, extract_ref_info
 
 
 @dataclass
@@ -21,24 +22,23 @@ class App(Output):
 
     Parameters:
 
-    from_branch:
-        the source branch (e.g. '15.0')
-    to_branch:
-        the source branch (e.g. '16.0')
+    source:
+        string representation of the source branch, e.g. 'origin/15.0'
+    target:
+        string representation of the target branch, e.g. 'origin/16.0'
     addon:
        the name of the module to process
+    destination:
+        string representation of the destination branch,
+        e.g. 'camptocamp/16.0-addon-dev'
+    source_version:
+        Source Odoo version. To set if it cannot be detected from 'source'.
+    target_version:
+        Target Odoo version. To set if it cannot be detected from 'target'.
     repo_path:
        local path to the Git repository
-    fork:
-        name of the Git remote used as fork
     repo_name:
        name of the repository on the upstream organization (e.g.
'server-tools') - user_org: - name of the user's GitHub organization where the fork is hosted - from_org: - name of the upstream GitHub organization (default = 'OCA') - from_remote: - name of the Git remote considered as the upstream (default = 'origin') verbose: returns more details to the user non_interactive: @@ -60,17 +60,18 @@ class App(Output): to not trigger the "API rate limit exceeded" error). """ - from_branch: str - to_branch: str + source: str + target: str addon: str - repo_path: str - fork: str = None + destination: str = None + source_version: str = None + target_version: str = None + repo_path: str = "" repo_name: str = None - user_org: str = None - from_org: str = "OCA" - from_remote: str = "origin" + upstream_org: str = "OCA" verbose: bool = False non_interactive: bool = False + dry_run: bool = False output: str = None fetch: bool = False no_cache: bool = False @@ -81,35 +82,7 @@ class App(Output): _available_outputs = ("json",) def __post_init__(self): - # Handle with repo_path and repo_name - if self.repo_path: - self.repo_path = pathlib.Path(self.repo_path) - else: - raise ValueError("'repo_path' has to be set.") - if not self.repo_name: - self.repo_name = self.repo_path.name - # Handle Git repository - self.repo = git.Repo(self.repo_path) - if self.repo.is_dirty(untracked_files=True): - raise ValueError("changes not committed detected in this repository.") - # Handle user's organization and fork - if not self.user_org: - # Assume that the fork remote has the same name than the user organization - self.user_org = self.fork - if self.fork: - if self.fork not in self.repo.remotes: - raise ForkValueError(self.repo_name, self.fork) - # Transform branch strings to Branch objects - try: - self.from_branch = Branch( - self.repo, self.from_branch, default_remote=self.from_remote - ) - self.to_branch = Branch( - self.repo, self.to_branch, default_remote=self.from_remote - ) - except ValueError as exc: - if exc.args[1] not in self.repo.remotes: - raise RemoteBranchValueError(self.repo_name, exc.args[1]) from exc + self._prepare_parameters() # Force non-interactive mode: # - if we are not in CLI mode if not self.cli: @@ -132,12 +105,145 @@ def __post_init__(self): or (self.to_branch.remote and self.to_branch.ref() not in remote_branches) ): self.fetch_branches() + # Check if source & target branches exist + self._check_branch_exists(self.source.ref, raise_exc=True) + self._check_branch_exists(self.target.ref, raise_exc=True) # GitHub API helper self.github = GitHub(self.github_token or os.environ.get("GITHUB_TOKEN")) # Initialize storage & cache self.storage = utils.storage.InputStorage(self.to_branch, self.addon) self.cache = utils.cache.UserCacheFactory(self).build() + def _handle_odoo_versions(self): + odoo_version_pattern = r"^[0-9]+\.[0-9]$" + source_version = re.search(odoo_version_pattern, self.source.branch) + target_version = re.search(odoo_version_pattern, self.target.branch) + source_param = "--source-version" if self.cli else "source_version" + target_param = "--target-version" if self.cli else "target_version" + # Check Odoo versions from branches + if not source_version and not self.source_version: + raise ValueError( + f"Unable to identify Odoo source version from {self.source.branch}.\n" + f"Use {source_param} parameter to identify Odoo source version." 
+ ) + if not target_version and not self.target_version: + raise ValueError( + f"Unable to identify Odoo target version from {self.target.branch}.\n" + f"Use {target_param} parameter to identify target Odoo version." + ) + # Check source_version and target_version parameters + if self.source_version and not re.search( + odoo_version_pattern, self.source_version + ): + raise ValueError(f"Unable to identify Odoo version from {source_param}.") + if self.target_version and not re.search( + odoo_version_pattern, self.target_version + ): + raise ValueError(f"Unable to identify Odoo version from {target_param}.") + self.source_version = self.source_version or source_version.string + self.target_version = self.target_version or target_version.string + + def _prepare_parameters(self): + # Handle Git repository + self.repo = git.Repo(self.repo_path) + if self.repo.is_dirty(untracked_files=True): + raise ValueError("changes not committed detected in this repository.") + + # Convert them to full remote info if needed + for key in ("source", "target", "destination"): + value = getattr(self, key) + if value and isinstance(value, str): + setattr(self, key, extract_ref_info(self.repo, key, value)) + # Check Odoo versions from source and target branches and parameters + self._handle_odoo_versions() + + # Always provide a destination: + if not self.destination: + self.destination = extract_ref_info(self.repo, "destination", "") + # Unset org that could have been taken from 'origin'. + self.destination.org = None + # If target.org is different than upstream_org, generate the + # destination from target so we get the remote+org for free (if any) + if self.target.org != self.upstream_org: + self.destination = extract_ref_info( + self.repo, "destination", self.target.ref + ) + # If destination is not a local one, or not an Odoo version, + # it will be generated by the specific tool + if ( + self.destination.remote + or self.destination.branch == self.target_version + ): + self.destination.branch = None + + # Handle with repo_path and repo_name + self.repo_path = pathlib.Path(self.repo_path) + self.repo_name = ( + self.repo_name + or self.source.repo + or self.target.repo + or self.repo_path.absolute().name + ) + if not self.repo_path: + raise ValueError("'repo_path' has to be set.") + + # Transform branch strings to Branch objects + self.from_branch = self._prepare_branch(self.source) + self.to_branch = self._prepare_branch(self.target) + self.dest_branch = self.to_branch + if self.destination.branch: + self.dest_branch = self._prepare_branch(self.destination) + + # Print a warning if the repository in the destination remote URL differs + # from the repository of the target as we probably want to push under the + # same repository name. + fishy_parameters = False + if not self.dry_run and self.destination.remote and self.target.repo: + dest_remote_url = self.repo.remotes[self.destination.remote].url + if self.target.repo not in dest_remote_url: + fishy_parameters = True + self._print( + "⚠️ Destination repository does not match the target. " + "Destination remote is perhaps not the right one." 
+ ) + # Print a summary of source/target/destination + if self.verbose or fishy_parameters: + source_remote_url = ( + self.repo.remotes[self.source.remote].url if self.source.remote else "" + ) + msg = f"{bc.BOLD}Source{bc.END}: {self.source.ref}" + if source_remote_url: + msg += f", remote {bc.BOLD}{self.source.remote} {source_remote_url}{bc.END}" + self._print(msg) + target_remote_url = ( + self.repo.remotes[self.target.remote].url if self.target.remote else "" + ) + msg = f"{bc.BOLD}Target{bc.END}: {self.target.ref}" + if target_remote_url: + msg += f", remote {bc.BOLD}{self.target.remote} {target_remote_url}{bc.END}" + self._print(msg) + if not self.dry_run: + dest_remote_url = ( + self.repo.remotes[self.destination.remote].url + if self.destination.remote + else "" + ) + msg = f"{bc.BOLD}Destination{bc.END}: {self.destination.ref}" + if dest_remote_url: + msg += ( + f", remote {bc.BOLD}{self.destination.remote} " + f"{dest_remote_url}{bc.END}" + ) + self._print(msg) + + def _prepare_branch(self, info): + try: + return Branch(self.repo, info.branch, default_remote=info.remote) + except ValueError as exc: + remote = exc.args[1] + if remote not in self.repo.remotes: + raise RemoteBranchValueError(info) from exc + def fetch_branches(self): for branch in (self.from_branch, self.to_branch): if not branch.remote: @@ -147,6 +253,14 @@ def fetch_branches(self): self._print(f"Fetch {bc.BOLD}{branch.ref()}{bc.END} from {remote_url}") branch.repo.remotes[branch.remote].fetch(branch.name) + def _check_branch_exists(self, branch, raise_exc=False): + for ref in self.repo.refs: + if branch == ref.name: + return True + if raise_exc: + raise ValueError(f"Ref {branch} doesn't exist.") + return False + def _check_addon_exists(self, branch, raise_exc=False): repo = self.repo addon = self.addon diff --git a/oca_port/cli/main.py b/oca_port/cli/main.py index 312a607..3616c26 100644 --- a/oca_port/cli/main.py +++ b/oca_port/cli/main.py @@ -13,12 +13,25 @@ To check if an addon could be migrated or to get eligible commits to port: $ export GITHUB_TOKEN= - $ oca-port 13.0 14.0 shopfloor --verbose + $ oca-port origin/13.0 origin/14.0 shopfloor --verbose -To effectively migrate the addon or port its commits, use the `--fork` option: +To effectively migrate the addon or port its commits, use the `--destination` option: - $ oca-port 13.0 14.0 shopfloor --fork camptocamp + $ oca-port origin/13.0 origin/14.0 shopfloor --destination camptocamp/14-mig-shopfloor +Note that you can omit the remote to work on local branches if needed: + + $ oca-port 13.0 camptocamp/14-dev shopfloor + +NOTE: if the source branch is a local one, cache will be readonly and +won't be updated as commits data coming from such branch cannot be trust. + +The organization used to perform API requests to look for Pull Requests data +is the one defined through `--upstream-org` parameter (defaults to 'OCA'). +So you can work on local source and target branches while performing API requests +on the relevant upstream organization: + + $ oca-port 14.0 16.0 shopfloor --upstream-org camptocamp Migration of addon ------------------ @@ -38,7 +51,6 @@ in one branch and creating a draft PR against the upstream repository with all of them. 
""" -import os import click @@ -48,31 +60,33 @@ @click.command() -@click.argument("from_branch", required=True) -@click.argument("to_branch", required=True) +@click.argument("source", required=True) +@click.argument("target", required=True) @click.argument("addon", required=True) @click.option( - "--from-org", - default="OCA", - show_default=True, - help="Upstream organization name.", + "--destination", + help=("Git reference where work will be pushed, e.g. 'camptocamp/16.0-dev'."), ) @click.option( - "--from-remote", - default="origin", - show_default=True, - required=True, - help="Git remote from which source branches are fetched by default.", + "--source-version", + help="Source Odoo version. To set if it cannot be detected from 'source'.", ) -@click.option("--repo-name", help="Repository name, eg. server-tools.") @click.option( - "--fork", help="Git remote where branches with ported commits are pushed." + "--target-version", + help="Target Odoo version. To set if it cannot be detected from 'target'.", +) +@click.option("--repo-name", help="Repository name, e.g. 'server-tools'.") +@click.option( + "--upstream-org", + default="OCA", + show_default=True, + help="Upstream organization name. Used for API requests.", ) -@click.option("--user-org", show_default="--fork", help="User organization name.") @click.option("--verbose", is_flag=True, help="List the commits of Pull Requests.") @click.option( "--non-interactive", is_flag=True, help="Disable all interactive prompts." ) +@click.option("--dry-run", is_flag=True, help="Print results, no nothing.") @click.option( "--output", help=( @@ -85,23 +99,27 @@ @click.option("--no-cache", is_flag=True, help="Disable user's cache.") @click.option("--clear-cache", is_flag=True, help="Clear the user's cache.") def main( - from_branch: str, - to_branch: str, addon: str, - from_org: str, - from_remote: str, + source: str, + target: str, + destination: str, + source_version: str, + target_version: str, repo_name: str, - fork: str, - user_org: str, + upstream_org: str, verbose: bool, non_interactive: bool, output: str, fetch: bool, no_cache: bool, clear_cache: bool, + dry_run: bool, ): - """Migrate ADDON from FROM_BRANCH to TO_BRANCH or list Pull Requests to port - if ADDON already exists on TO_BRANCH. + """Migrate ADDON from SOURCE to TARGET or list Pull Requests to port. + + E.g.: + + $ oca-port origin/14.0 origin/16.0 auditlog Migration: @@ -109,29 +127,26 @@ def main( Port of Pull Requests (missing commits): - The PRs are found from FROM_BRANCH commits that do not exist in TO_BRANCH. + The PRs are found from SOURCE commits that do not exist in TARGET. The user will be asked if he wants to port them. - - To start the migration process, the `--fork` option must be provided in - order to push the resulting branch on the user's remote. 
""" try: app = App( addon=addon, - from_branch=from_branch, - to_branch=to_branch, - from_org=from_org, - from_remote=from_remote, - repo_path=os.getcwd(), + source=source, + target=target, + destination=destination, + source_version=source_version, + target_version=target_version, repo_name=repo_name, - fork=fork, - user_org=user_org, + upstream_org=upstream_org, verbose=verbose, non_interactive=non_interactive, output=output, fetch=fetch, no_cache=no_cache, clear_cache=clear_cache, + dry_run=dry_run, cli=True, ) except ForkValueError as exc: diff --git a/oca_port/cli/pr.py b/oca_port/cli/pr.py index 748c316..0f86c36 100644 --- a/oca_port/cli/pr.py +++ b/oca_port/cli/pr.py @@ -44,8 +44,6 @@ def blacklist( # TODO: we assume you are in the right repo folder when you run this repo = git.Repo(os.getcwd()) - if repo.is_dirty(untracked_files=True): - raise ValueError("changes not committed detected in this repository.") # Transform branch strings to Branch objects try: branch = Branch(repo, target_branch, default_remote=remote) diff --git a/oca_port/migrate_addon.py b/oca_port/migrate_addon.py index b50fbb1..599c3f9 100644 --- a/oca_port/migrate_addon.py +++ b/oca_port/migrate_addon.py @@ -16,27 +16,27 @@ "https://github.com/OCA/maintainer-tools/wiki/Merge-commits-in-pull-requests" ) MIG_TASKS_URL = ( - "https://github.com/OCA/maintainer-tools/wiki/Migration-to-version-{branch}" + "https://github.com/OCA/maintainer-tools/wiki/Migration-to-version-{version}" "#tasks-to-do-in-the-migration" ) -MIG_NEW_PR_TITLE = "[{to_branch}][MIG] {addon}" +MIG_NEW_PR_TITLE = "[{version}][MIG] {addon}" MIG_NEW_PR_URL = ( "https://github.com/{from_org}/{repo_name}/compare/" - "{to_branch}...{user_org}:{mig_branch}?expand=1&title={title}" + "{to_branch}...{to_org}:{mig_branch}?expand=1&title={title}" ) MIG_TIPS = "\n".join( [ f"\n{bc.BOLD}{bc.OKCYAN}The next steps are:{bc.END}", ("\t1) Reduce the number of commits " f"('{bc.DIM}OCA Transbot...{bc.END}'):"), f"\t\t=> {bc.BOLD}{MIG_MERGE_COMMITS_URL}{bc.END}", - "\t2) Adapt the module to the {to_branch} version:", + "\t2) Adapt the module to the {version} version:", f"\t\t=> {bc.BOLD}" "{mig_tasks_url}" f"{bc.END}", ( "\t3) On a shell command, type this for uploading the content to GitHub:\n" f"{bc.DIM}" "\t\t$ git add --all\n" - '\t\t$ git commit -m "[MIG] {addon}: Migration to {to_branch}"\n' - "\t\t$ git push {fork} {mig_branch} --set-upstream" + '\t\t$ git commit -m "[MIG] {addon}: Migration to {version}"\n' + "\t\t$ git push {remote} {mig_branch} --set-upstream" f"{bc.END}" ), "\t4) Create the PR against {from_org}/{repo_name}:", @@ -49,7 +49,7 @@ ( "\t1) On a shell command, type this for uploading the content to GitHub:\n" f"{bc.DIM}" - "\t\t$ git push {fork} {mig_branch} --set-upstream" + "\t\t$ git push {remote} {mig_branch} --set-upstream" f"{bc.END}" ), "\t2) Create the PR against {from_org}/{repo_name}:", @@ -64,8 +64,11 @@ def __init__(self, app): self._results = {"process": "migrate", "results": {}} self.mig_branch = g.Branch( self.app.repo, - MIG_BRANCH_NAME.format( - branch=self.app.to_branch.name[:4], addon=self.app.addon + ( + self.app.destination.branch + or MIG_BRANCH_NAME.format( + branch=self.app.target_version, addon=self.app.addon + ) ), ) @@ -85,11 +88,11 @@ def run(self): return False, None # Looking for an existing PR to review existing_pr = None - if self.app.from_org and self.app.repo_name: + if self.app.upstream_org and self.app.repo_name: existing_pr = self.app.github.search_migration_pr( - from_org=self.app.from_org, + 
from_org=self.app.upstream_org, repo_name=self.app.repo_name, - branch=self.app.to_branch.name, + branch=self.app.target.branch, addon=self.app.addon, ) if existing_pr: @@ -101,7 +104,12 @@ def run(self): "Thank you!" ) self._results["results"]["existing_pr"] = existing_pr.to_dict(number=True) - if self.app.non_interactive: + if self.app.non_interactive or self.app.dry_run: + self._print( + f"ℹ️ {bc.BOLD}{self.app.addon}{bc.END} can be migrated " + f"from {bc.BOLD}{self.app.source_version}{bc.END} " + f"to {bc.BOLD}{self.app.target_version}{bc.END}." + ) # If an output is defined we return the result in the expected format if self.app.output: return True, self._render_output(self.app.output, self._results) @@ -113,17 +121,13 @@ def run(self): return True, None confirm = ( f"Migrate {bc.BOLD}{self.app.addon}{bc.END} " - f"from {bc.BOLD}{self.app.from_branch.name}{bc.END} " - f"to {bc.BOLD}{self.app.to_branch.name}{bc.END}?" + f"from {bc.BOLD}{self.app.source_version}{bc.END} " + f"to {bc.BOLD}{self.app.target_version}{bc.END}?" ) if not click.confirm(confirm): self.app.storage.blacklist_addon(confirm=True) if not self.app.storage.dirty: return False, None - # Check if a migration PR already exists - # TODO - if not self.app.fork: - raise click.UsageError("Please set the '--fork' option") if self.app.repo.untracked_files: raise click.ClickException("Untracked files detected, abort") self._checkout_base_branch() @@ -139,7 +143,7 @@ def run(self): g.run_pre_commit(self.app.repo, self.app.addon) # Check if the addon has commits that update neighboring addons to # make it work properly - PortAddonPullRequest(self.app, create_branch=False, push_branch=False).run() + PortAddonPullRequest(self.app, push_branch=False).run() self._print_tips() return True, None @@ -201,36 +205,36 @@ def _apply_patches(self, patches_dir): ) def _print_tips(self, blacklisted=False): - mig_tasks_url = MIG_TASKS_URL.format(branch=self.app.to_branch.name) + mig_tasks_url = MIG_TASKS_URL.format(version=self.app.target_version) pr_title_encoded = urllib.parse.quote( MIG_NEW_PR_TITLE.format( - to_branch=self.app.to_branch.name[:4], addon=self.app.addon + version=self.app.target_version, addon=self.app.addon ) ) new_pr_url = MIG_NEW_PR_URL.format( - from_org=self.app.from_org, + from_org=self.app.upstream_org, repo_name=self.app.repo_name, to_branch=self.app.to_branch.name, - user_org=self.app.user_org, + to_org=self.app.destination.org or "YOUR_ORG", mig_branch=self.mig_branch.name, title=pr_title_encoded, ) if blacklisted: tips = BLACKLIST_TIPS.format( - from_org=self.app.from_org, + from_org=self.app.upstream_org, repo_name=self.app.repo_name, - fork=self.app.fork, + remote=self.app.destination.remote, mig_branch=self.mig_branch.name, new_pr_url=new_pr_url, ) print(tips) return tips = MIG_TIPS.format( - from_org=self.app.from_org, + from_org=self.app.upstream_org, repo_name=self.app.repo_name, addon=self.app.addon, - to_branch=self.app.to_branch.name, - fork=self.app.fork, + version=self.app.target_version, + remote=self.app.destination.remote or "YOUR_REMOTE", mig_branch=self.mig_branch.name, mig_tasks_url=mig_tasks_url, new_pr_url=new_pr_url, diff --git a/oca_port/port_addon_pr.py b/oca_port/port_addon_pr.py index 1913b24..dfc336e 100644 --- a/oca_port/port_addon_pr.py +++ b/oca_port/port_addon_pr.py @@ -2,14 +2,18 @@ # License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl) import os +import hashlib +import itertools import shutil import tempfile +import urllib.parse from collections import defaultdict import click 
import git from .utils import git as g, misc +from .utils.session import Session from .utils.misc import Output, bcolors as bc AUTHOR_EMAILS_TO_SKIP = [ @@ -26,7 +30,7 @@ "Added translation using Weblate", ] -PR_BRANCH_NAME = "oca-port-from-{from_branch}-to-{to_branch}-pr-{pr_number}" +PR_BRANCH_NAME = "oca-port-{addon}-{source_version}-to-{target_version}-{key}" FOLDERS_TO_SKIP = [ "setup", @@ -44,6 +48,11 @@ "static/description/index.html", ] +NEW_PR_URL = ( + "https://github.com/{from_org}/{repo_name}/compare/" + "{to_branch}...{to_org}:{pr_branch}?expand=1&title={title}" +) + # Fake PR for commits w/o any PR (used as fallback) FAKE_PR = g.PullRequest(*[""] * 6) @@ -59,11 +68,11 @@ def path_to_skip(commit_path): class PortAddonPullRequest(Output): - def __init__(self, app, create_branch=True, push_branch=True): + def __init__(self, app, push_branch=True): """Port pull requests of an addon.""" self.app = app - self.create_branch = create_branch - self.push_branch = push_branch + self.push_branch = push_branch and bool(self.app.destination.remote) + self.open_pr = bool(self.app.destination.org) self._results = {"process": "port_commits", "results": {}} def run(self): @@ -74,7 +83,7 @@ def run(self): return False, None self._print( f"{bc.BOLD}{self.app.addon}{bc.END} already exists " - f"on {bc.BOLD}{self.app.to_branch.name}{bc.END}, " + f"on {bc.BOLD}{self.app.to_branch.ref()}{bc.END}, " "checking PRs to port..." ) branches_diff = BranchesDiff(self.app) @@ -95,42 +104,120 @@ def run(self): # Nothing to port -> return an empty output return False, self._render_output(self.app.output, {}) return False, None - if self.app.fork: + if not self.app.dry_run: self._print() - self._port_pull_requests(branches_diff) + # Set a destination branch (automatically generated if not already provided) + self.app.destination.branch = self._get_dest_branch_name(branches_diff) + # Launch the porting session + porting_done = self._port_pull_requests(branches_diff) + if porting_done: + self._commit_blacklist() + self._push_and_open_pr() return True, None + def _get_dest_branch_name(self, branches_diff): + dest_branch_name = self.app.destination.branch + # Define a destination branch if not set + if branches_diff.commits_diff and not dest_branch_name: + commits_to_port = [ + commit.hexsha + for commit in itertools.chain.from_iterable( + branches_diff.commits_diff.values() + ) + ] + h = hashlib.shake_256("-".join(commits_to_port).encode()) + key = h.hexdigest(3) + dest_branch_name = PR_BRANCH_NAME.format( + addon=self.app.addon, + source_version=self.app.source_version, + target_version=self.app.target_version, + key=key, + ) + return dest_branch_name + def _port_pull_requests(self, branches_diff): """Open new Pull Requests (if it doesn't exist) on the GitHub repository.""" - base_ref = branches_diff.app.to_branch # e.g. 'origin/14.0' - previous_pr = previous_pr_branch = None - processed_prs = [] + # Now we have a destination branch, check if there is ongoing work on it + wip = self._print_wip_session() + dest_branch_name = self.app.destination.branch + # Nothing to port + if not branches_diff.commits_diff or not dest_branch_name: + # Nothing to port while having WIP means the porting is done + return wip + # Check if destination branch exists, and create it if not + dest_branch_exists = dest_branch_name in self.app.repo.heads + base_ref = self.app.to_branch # e.g. 
'origin/14.0' + if dest_branch_exists: + target_commit = self.app.repo.commit(self.app.target.ref) + dest_commit = self.app.repo.commit(dest_branch_name) + # If target and destination branches are on the same commit we don't care + if target_commit != dest_commit: + # If the local branch already exists, ask the user if he wants + # to recreate it + confirm = ( + f"Branch {bc.BOLD}{dest_branch_name}{bc.END} already exists, " + f"recreate it from {bc.BOLD}{self.app.to_branch.ref()}{bc.END}?\n" + "(⚠️ you will lose ongoing work)" + ) + if not click.confirm(confirm): + msg = "ℹ️ To resume the work from this branch, relaunch with:\n\n" + cmd = ( + f"\t{bc.DIM}oca-port {self.app.source.ref} " + f"{dest_branch_name} {self.app.addon} %s{bc.END}" + ) + opts = [] + if self.app.source.branch != self.app.source_version: + opts.append(f"--source-version={self.app.source_version}") + if dest_branch_name != self.app.target_version: + opts.append(f"--target-version={self.app.target_version}") + if self.app.verbose: + opts.append("--verbose") + cmd = cmd % " ".join(opts) + self._print(msg + cmd) + return False + # Delete the local branch + if self.app.repo.active_branch.name == dest_branch_name: + # We cannot delete an active branch, checkout the underlying + # commit instead + self.app.repo.git.checkout(dest_commit) + self.app.repo.delete_head(dest_branch_name, "-f") + dest_branch_exists = False + # Clear any ongoing work from the session + session = self._init_session() + session.clear() + if not dest_branch_exists: + self.app.repo.git.checkout( + "--no-track", "-b", dest_branch_name, base_ref.ref() + ) + # Checkout the destination branch before porting PRs + dest_branch = g.Branch(self.app.repo, dest_branch_name) + self.app.repo.heads[dest_branch.name].checkout() last_pr = ( list(branches_diff.commits_diff.keys())[-1] if branches_diff.commits_diff else None ) for pr, commits in branches_diff.commits_diff.items(): - current_commit = self.app.repo.commit(self.app.to_branch.ref()) - pr_branch, based_on_previous = self._port_pull_request_commits( + # Check if PR has been blacklisted in user's session + if self._is_pr_blacklisted(pr): + if self._confirm_pr_blacklisted(pr): + continue + # Port PR + current_commit = self.app.repo.commit(dest_branch.ref()) + pr_ported = self._port_pull_request_commits( pr, commits, base_ref, - previous_pr, - previous_pr_branch, + dest_branch, ) - if pr_branch: + if pr_ported: # Check if commits have been ported. # If none has been ported, blacklist automatically the current PR. - if self.app.repo.commit(pr_branch.ref()) == current_commit: + if self.app.repo.commit(dest_branch.ref()) == current_commit: self._print("\tℹ️ Nothing has been ported, skipping") - self.app.storage.blacklist_pr( - pr.number, - confirm=True, - reason=f"(auto) Nothing to port from PR #{pr.number}", + self._handle_pr_blacklist( + pr, reason=f"(auto) Nothing to port from PR #{pr.number}" ) - if self.app.storage.dirty: - self.app.storage.commit() msg = ( f"\t{bc.DIM}PR #{pr.number} has been" if pr.number @@ -138,97 +225,170 @@ def _port_pull_requests(self, branches_diff): ) + f" automatically blacklisted{bc.ENDD}" self._print(msg) continue - previous_pr = pr - previous_pr_branch = pr_branch - if based_on_previous: - processed_prs.append(pr) - else: - processed_prs = [pr] + self._handle_pr_ported(pr) if pr == last_pr: self._print("\t🎉 Last PR processed! 
🎉") - is_pushed = self._push_branch_to_remote(pr_branch) - if not is_pushed: - continue - pr_data = self._prepare_pull_request_data(processed_prs, pr_branch) - pr_url = self._search_pull_request(pr_data["base"], pr_data["title"]) - if pr_url: - self._print(f"\tExisting PR has been refreshed => {pr_url}") - else: - self._create_pull_request(pr_branch, pr_data, processed_prs) - - def _port_pull_request_commits( - self, - pr, - commits, - base_ref, - previous_pr=None, - previous_pr_branch=None, - ): + return True + + def _get_session_name(self): + return f"{self.app.addon}-{self.app.destination.branch}" + + def _init_session(self): + session = Session(self.app, self._get_session_name()) + data = session.get_data() + data.setdefault("addon", self.app.addon) + data.setdefault("repo_name", self.app.repo_name) + data.setdefault("pull_requests", {}) + session.set_data(data) + return session + + def _is_pr_blacklisted(self, pr): + """Check if PR is blacklisted in current user's session.""" + session = self._init_session() + data = session.get_data() + return bool(data["pull_requests"]["blacklisted"][pr.ref]) + + def _confirm_pr_blacklisted(self, pr): + """Ask the user if PR should still be blacklisted.""" + self._print( + f"- {bc.BOLD}{bc.WARNING}PR #{pr.number}{bc.END} " + f"is blacklisted in current user's session" + ) + if not click.confirm("\tKeep it blacklisted?"): + # Remove the PR from the session + session = self._init_session() + data = session.get_data() + if pr.ref in data["pull_requests"]["blacklisted"]: + del data["pull_requests"]["blacklisted"][pr.ref] + session.set_data(data) + return False + return True + + def _handle_pr_blacklisted(self, pr): + """Check if PR is blacklisted in current user's session. + + Return True if workflow + """ + + def _handle_pr_blacklist(self, pr, reason=None): + if not click.confirm("\tBlacklist this PR?"): + return False + if not reason: + reason = click.prompt("\tReason", type=str) + session = self._init_session() + data = session.get_data() + blacklisted = data["pull_requests"].setdefault("blacklisted", {}) + if pr.ref not in blacklisted: + pr_data = pr.to_dict(number=True) + pr_data["reason"] = reason + data["pull_requests"]["blacklisted"][pr.ref] = pr_data + session.set_data(data) + return True + + def _handle_pr_ported(self, pr): + session = self._init_session() + data = session.get_data() + ported = data["pull_requests"].setdefault("ported", {}) + if pr.ref not in ported: + data["pull_requests"]["ported"][pr.ref] = pr.to_dict(number=True) + session.set_data(data) + + def _commit_blacklist(self): + session = self._init_session() + data = session.get_data() + blacklisted = data["pull_requests"].setdefault("blacklisted", {}) + for pr in blacklisted.values(): + if self.app.storage.is_pr_blacklisted(pr["number"]): + continue + self.app.storage.blacklist_pr(pr["number"], reason=pr["reason"]) + if self.app.storage.dirty: + pr_refs = ", ".join([str(pr["number"]) for pr in blacklisted.values()]) + self.app.storage.commit( + msg=f"oca-port: blacklist PR(s) {pr_refs} for {self.app.addon}" + ) + + def _print_wip_session(self): + session = self._init_session() + data = session.get_data() + wip = False + if data["pull_requests"]: + self._print( + f"ℹ️ Existing session for branch " + f"{bc.BOLD}{self.app.destination.branch}{bc.END}:" + ) + wip = True + # Ported PRs + if data["pull_requests"]["ported"]: + self._print("\t✅ Ported PRs:") + for pr_data in data["pull_requests"]["ported"].values(): + self._print( + f"\t- {bc.BOLD}{bc.OKBLUE}{pr_data['ref']}{bc.END} " + 
f"{bc.OKBLUE}{pr_data['title']}{bc.ENDC}:" + ) + # Blacklisted PRs + if data["pull_requests"]["blacklisted"]: + self._print("\t⛔ Blacklisted PRs:") + for pr_data in data["pull_requests"]["blacklisted"].values(): + self._print( + f"\t- {bc.BOLD}{bc.WARNING}{pr_data['ref']}{bc.END} " + f"{bc.WARNING}{pr_data['title']}{bc.ENDC}:" + ) + # if data["pull_requests"]: + # self._print() + return wip + + def _push_and_open_pr(self): + session = self._init_session() + data = session.get_data() + processed_prs = data["pull_requests"]["ported"] + blacklisted_prs = data["pull_requests"]["blacklisted"] + if not processed_prs and not blacklisted_prs: + self._print("ℹ️ Nothing has been ported or blacklisted.") + return False + pr_data = self._prepare_pull_request_data(processed_prs, blacklisted_prs) + # Try to push and open PR against remote repository + is_pushed = self._push_branch_to_remote() + if not is_pushed: + self._print( + f"\nℹ️ Branch {bc.BOLD}{self.app.destination.branch}{bc.END} couldn't " + "be pushed (no remote defined)" + ) + self._print_tips(pr_data) + return False + if not self.open_pr: + self._print( + f"\nℹ️ PR based on {bc.BOLD}{self.app.destination.branch}{bc.END} couldn't " + "be open (no remote defined)" + ) + self._print_tips(pr_data) + return False + pr_url = self._search_pull_request(pr_data["base"], pr_data["title"]) + if pr_url: + self._print(f"Existing PR has been refreshed => {pr_url}") + else: + self._create_pull_request(pr_data, processed_prs) + + def _print_tips(self, pr_data): + self._print("Here is the default PR content that would have been used:") + self._print(f"\n{bc.BOLD}Title:{bc.END}") + self._print(pr_data["title"]) + self._print(f"\n{bc.BOLD}Description:{bc.END}") + self._print(pr_data["body"]) + + def _port_pull_request_commits(self, pr, commits, base_ref, branch): """Port commits of a Pull Request in a new branch.""" if pr.number: self._print( - f"- {bc.BOLD}{bc.OKCYAN}Port PR #{pr.number}{bc.END} " - f"({pr.url}) {bc.OKCYAN}{pr.title}{bc.ENDC}..." + f"- {bc.BOLD}{bc.OKCYAN}Port PR {pr.ref}{bc.END} " + f"{bc.OKCYAN}{pr.title}{bc.ENDC}..." ) + self._print(f"\t{pr.url}") else: self._print(f"- {bc.BOLD}{bc.OKCYAN}Port commits w/o PR{bc.END}...") - based_on_previous = False - # Ensure to not start to work from a working branch - if self.app.to_branch.name in self.app.repo.heads: - self.app.repo.heads[self.app.to_branch.name].checkout() - else: - self.app.repo.git.checkout( - "--no-track", - "-b", - self.app.to_branch.name, - self.app.to_branch.ref(), - ) # Ask the user if he wants to port the PR (or orphaned commits) if not click.confirm("\tPort it?" 
if pr.number else "\tPort them?"): - self.app.storage.blacklist_pr(pr.ref, confirm=True) - if not self.app.storage.dirty: - return None, based_on_previous - # Create a local branch based on upstream - if self.create_branch: - branch_name = PR_BRANCH_NAME.format( - from_branch=self.app.from_branch.name, - to_branch=self.app.to_branch.name, - pr_number=pr.number, - ) - if branch_name in self.app.repo.heads: - # If the local branch already exists, ask the user if he wants - # to recreate it + check if this existing branch is based on - # the previous PR branch - if previous_pr_branch: - based_on_previous = self.app.repo.is_ancestor( - previous_pr_branch.name, branch_name - ) - confirm = ( - f"\tBranch {bc.BOLD}{branch_name}{bc.END} already exists, " - "recreate it?\n\t(⚠️ you will lose the existing branch)" - ) - if not click.confirm(confirm): - return g.Branch(self.app.repo, branch_name), based_on_previous - self.app.repo.delete_head(branch_name, "-f") - if previous_pr and click.confirm( - f"\tUse the previous {bc.BOLD}PR #{previous_pr.number}{bc.END} " - "branch as base?" - ): - base_ref = previous_pr_branch - based_on_previous = True - # Set the new branch name the same than the previous one - # but with the PR number as suffix. - branch_name = f"{previous_pr_branch.name}-{pr.number}" - self._print( - f"\tCreate branch {bc.BOLD}{branch_name}{bc.END} from {base_ref.ref()}..." - ) - self.app.repo.git.checkout("--no-track", "-b", branch_name, base_ref.ref()) - else: - branch_name = self.app.to_branch.name - # If the PR has been blacklisted we need to commit this information - if self.app.storage.dirty: - self.app.storage.commit() - return g.Branch(self.app.repo, branch_name), based_on_previous + self._handle_pr_blacklist(pr) + return False # Cherry-pick commits of the source PR for commit in commits: @@ -277,7 +437,7 @@ def _port_pull_request_commits( ): self.app.repo.git.am("--abort") continue - return g.Branch(self.app.repo, branch_name), based_on_previous + return True @staticmethod def _skip_diff(commit, diff): @@ -327,45 +487,65 @@ def _skip_diff(commit, diff): ) return False, "" - def _push_branch_to_remote(self, branch): - """Force push the local branch to remote fork.""" + def _push_branch_to_remote(self): + """Force push the local branch to remote destination fork.""" if not self.push_branch: return False confirm = ( - f"\tPush branch '{bc.BOLD}{branch.name}{bc.END}' " - f"to remote '{bc.BOLD}{self.app.fork}{bc.END}'?" + f"Push branch '{bc.BOLD}{self.app.destination.branch}{bc.END}' " + f"to remote '{bc.BOLD}{self.app.destination.remote}{bc.END}'?" 
) if click.confirm(confirm): - branch.repo.git.push(self.app.fork, branch.name, "--force-with-lease") - branch.remote = self.app.fork + self.app.repo.git.push( + self.app.destination.remote, + self.app.destination.branch, + "--force-with-lease", + ) return True return False - def _prepare_pull_request_data(self, processed_prs, pr_branch): + def _prepare_pull_request_data(self, processed_prs, blacklisted_prs): + # Adapt the content depending on the number of ported PRs + title = body = "" if len(processed_prs) > 1: title = ( - f"[{self.app.to_branch.name}][FW] {self.app.addon}: multiple ports " - f"from {self.app.from_branch.name}" + f"[{self.app.target_version}][FW] {self.app.addon}: multiple ports " + f"from {self.app.source_version}" ) - lines = [f"- #{pr.number}" for pr in processed_prs] + lines = [f"- #{pr['number']}" for pr in processed_prs.values()] body = "\n".join( [ - f"Port of the following PRs from {self.app.from_branch.name} " - f"to {self.app.to_branch.name}:" + f"Port of the following PRs from {self.app.source_version} " + f"to {self.app.target_version}:" ] + lines ) - else: - pr = processed_prs[0] - title = f"[{self.app.to_branch.name}][FW] {pr.title}" + if len(processed_prs) == 1: + pr = list(processed_prs.values())[0] + title = f"[{self.app.target_version}][FW] {pr['title']}" body = ( - f"Port of #{pr.number} from {self.app.from_branch.name} " - f"to {self.app.to_branch.name}." + f"Port of #{pr['number']} from {self.app.source_version} " + f"to {self.app.target_version}." ) + # Handle blacklisted PRs + if blacklisted_prs: + if not title: + title = ( + f"[{self.app.target_version}][FW] Blacklist of some PRs " + f"from {self.app.source_version}" + ) + lines2 = [ + f"- #{pr['number']}: {pr['reason']}" for pr in blacklisted_prs.values() + ] + body2 = "\n".join(["The following PRs have been blacklisted:"] + lines2) + if body: + body = "\n\n".join([body, body2]) + else: + body = body2 return { "draft": True, "title": title, - "head": f"{self.app.user_org}:{pr_branch.name}", + "head": f"{self.app.destination.org}:{self.app.destination.branch}", "base": self.app.to_branch.name, "body": body, } @@ -374,7 +554,7 @@ def _search_pull_request(self, base_branch, title): params = { "q": ( f"is:pr " - f"repo:{self.app.from_org}/{self.app.repo_name} " + f"repo:{self.app.upstream_org}/{self.app.repo_name} " f"base:{base_branch} " f"state:open {title} in:title" ), @@ -383,29 +563,43 @@ def _search_pull_request(self, base_branch, title): if response["items"]: return response["items"][0]["html_url"] - def _create_pull_request(self, pr_branch, pr_data, processed_prs): + def _create_pull_request(self, pr_data, processed_prs): if len(processed_prs) > 1: self._print( - "\tPR(s) ported locally:", + "PR(s) ported locally:", ", ".join( [f"{bc.OKCYAN}#{pr.number}{bc.ENDC}" for pr in processed_prs] ), ) if click.confirm( - f"\tCreate a draft PR from '{bc.BOLD}{pr_branch.name}{bc.END}' " + f"Create a draft PR from '{bc.BOLD}{self.app.destination.branch}{bc.END}' " f"to '{bc.BOLD}{self.app.to_branch.name}{bc.END}' " - f"against {bc.BOLD}{self.app.from_org}/{self.app.repo_name}{bc.END}?" + f"against {bc.BOLD}{self.app.upstream_org}/{self.app.repo_name}{bc.END}?" 
): response = self.app.github.request( - f"repos/{self.app.from_org}/{self.app.repo_name}/pulls", + f"repos/{self.app.upstream_org}/{self.app.repo_name}/pulls", method="post", json=pr_data, ) pr_url = response["html_url"] self._print( - f"\t\t{bc.BOLD}{bc.OKCYAN}PR created =>" f"{bc.ENDC} {pr_url}{bc.END}" + f"\t{bc.BOLD}{bc.OKCYAN}PR created =>" f"{bc.ENDC} {pr_url}{bc.END}" ) return pr_url + # Invite the user to open the PR on its own + pr_title_encoded = urllib.parse.quote(pr_data["title"]) + new_pr_url = NEW_PR_URL.format( + from_org=self.app.upstream_org, + repo_name=self.app.repo_name, + to_branch=self.app.to_branch.name, + to_org=self.app.destination.org, + pr_branch=self.app.destination.branch, + title=pr_title_encoded, + ) + self._print( + "\nℹ️ You can still open the PR yourself there:\n" f"\t{new_pr_url}\n" + ) + self._print_tips(pr_data) class BranchesDiff(Output): @@ -482,14 +676,14 @@ def _skip_commit(commit): ) def print_diff(self, verbose=False): - lines_to_print = [""] + lines_to_print = [] fake_pr = None i = 0 for i, pr in enumerate(self.commits_diff, 1): if pr.number: lines_to_print.append( - f"{i}) {bc.BOLD}{bc.OKBLUE}PR #{pr.number}{bc.END} " - f"({pr.url or 'w/o PR'}) {bc.OKBLUE}{pr.title}{bc.ENDC}:" + f"{i}) {bc.BOLD}{bc.OKBLUE}{pr.ref}{bc.END} " + f"{bc.OKBLUE}{pr.title}{bc.ENDC}:" ) lines_to_print.append(f"\tBy {pr.author}, merged at {pr.merged_at}") else: @@ -507,6 +701,8 @@ def print_diff(self, verbose=False): f"\t=> {bc.BOLD}{bc.OKBLUE}{len(self.commits_diff[pr])} " f"commit(s){bc.END} not (fully) ported" ) + if pr.number: + lines_to_print.append(f"\t=> {pr.url}") if verbose or not pr.number: for commit in self.commits_diff[pr]: lines_to_print.append( @@ -530,6 +726,8 @@ def print_diff(self, verbose=False): f"{self.app.from_branch.ref()} to {self.app.to_branch.ref()}" ) lines_to_print.insert(0, message) + if self.commits_diff: + lines_to_print.insert(1, "") self._print("\n".join(lines_to_print)) def get_commits_diff(self): @@ -642,7 +840,7 @@ def _get_original_pr(self, commit: g.Commit): if not any("github.com" in remote.url for remote in self.app.repo.remotes): return raw_data = self.app.github.get_original_pr( - self.app.from_org, + self.app.upstream_org, self.app.repo_name, self.app.from_branch.name, commit.hexsha, @@ -653,7 +851,7 @@ def _get_original_pr(self, commit: g.Commit): # NOTE: commits fetched from PR are already in the right order pr_number = raw_data["number"] pr_commits_data = self.app.github.request( - f"repos/{self.app.from_org}/{self.app.repo_name}" + f"repos/{self.app.upstream_org}/{self.app.repo_name}" f"/pulls/{pr_number}/commits?per_page=100" ) pr_commits = [pr["sha"] for pr in pr_commits_data] diff --git a/oca_port/tests/common.py b/oca_port/tests/common.py index 07fc347..4aec9be 100644 --- a/oca_port/tests/common.py +++ b/oca_port/tests/common.py @@ -12,26 +12,25 @@ class CommonCase(unittest.TestCase): - _settings = { - "branch1": "15.0", - "remote_branch1": "origin/15.0", - "branch2": "16.0", - "remote_branch2": "origin/16.0", - "branch3": "17.0", - "remote_branch3": "origin/17.0", - "addon": "my_module", - "from_org": None, - "from_remote": None, # We're testing locally without any remote - "user_org": "OCA-test", - "no_cache": True, - } + @classmethod + def setUpClass(cls): + super().setUpClass() + cls.upstream_org = "ORG" + cls.fork_org = "FORK" + cls.repo_name = "test" + cls.source1 = "origin/15.0" + cls.source2 = "origin/16.0" + cls.target1 = "origin/16.0" + cls.target2 = "origin/17.0" + cls.dest_branch = "dev" + cls.destination = 
f"{cls.fork_org}/{cls.dest_branch}" + cls.addon = "my_module" + cls.no_cache = True def setUp(self): # Create a temporary Git repository self.repo_upstream_path = self._create_tmp_git_repository() - self.module_path = os.path.join( - self.repo_upstream_path, self._settings["addon"] - ) + self.module_path = os.path.join(self.repo_upstream_path, self.addon) self.manifest_path = os.path.join(self.module_path, "__manifest__.py") self._fill_git_repository(self.repo_upstream_path) # By cloning the first repository this will set an 'origin' remote @@ -57,6 +56,9 @@ def _clone_tmp_git_repository(self, upstream_path): git.Repo.clone_from(upstream_path, repo_path) return repo_path + def _git_repo(self, repo_path): + return git.Repo(repo_path) + def _fill_git_repository(self, repo_path): """Create branches with some content in the Git repository.""" repo = git.Repo(repo_path) @@ -68,28 +70,31 @@ def _fill_git_repository(self, repo_path): with open(tpl_manifest_path) as tpl_manifest: tpl_manifest_lines = tpl_manifest.readlines() # Commit a file in '15.0' - repo.git.checkout("--orphan", self._settings["branch1"]) + branch1 = self.source1.split("/")[1] + repo.git.checkout("--orphan", branch1) os.makedirs(self.module_path, exist_ok=True) with open(self.manifest_path, "w") as manifest: manifest.writelines(tpl_manifest_lines) repo.index.add(self.manifest_path) - commit = repo.index.commit(f"[ADD] {self._settings['addon']}") + commit = repo.index.commit(f"[ADD] {self.addon}") # Port the commit from 'branch1' to 'branch2' - repo.git.checkout("--orphan", self._settings["branch2"]) + branch2 = self.source2.split("/")[1] + repo.git.checkout("--orphan", branch2) repo.git.reset("--hard") # FIXME without a delay, both branches are targeting the same commit, # no idea why. time.sleep(1) repo.git.cherry_pick(commit.hexsha) # Create an empty 'branch3' - repo.git.checkout("--orphan", self._settings["branch3"]) + branch3 = self.target2.split("/")[1] + repo.git.checkout("--orphan", branch3) repo.git.reset("--hard") repo.git.commit("-m", "Init", "--allow-empty") def _add_fork_remote(self, repo_path): repo = git.Repo(repo_path) # We do not really care about the remote URL here, re-use origin one - repo.create_remote(self._settings["user_org"], repo.remotes.origin.url) + repo.create_remote(self.fork_org, repo.remotes.origin.url) def _commit_change_on_branch(self, repo_path, branch): """Commit a change that can be ported to another branch.""" @@ -102,20 +107,20 @@ def _commit_change_on_branch(self, repo_path, branch): manifest.seek(0) manifest.write(content) repo.index.add(self.manifest_path) - commit = repo.index.commit(f"[FIX] {self._settings['addon']}: fix dependency") + commit = repo.index.commit(f"[FIX] {self.addon}: fix dependency") return commit.hexsha - def _create_app(self, from_branch, to_branch, **kwargs): + def _create_app(self, source, target, destination=None, **kwargs): params = { - "from_branch": from_branch, - "to_branch": to_branch, - "addon": self._settings["addon"], - "from_org": self._settings["from_org"], - "from_remote": self._settings["from_remote"], + "source": source, + "target": target, + "destination": destination, + "addon": self.addon, + "source_version": None, + "target_version": None, "repo_path": self.repo_path, - "repo_name": "test", - "user_org": self._settings["user_org"], - "no_cache": self._settings["no_cache"], + "repo_name": self.repo_name, + "no_cache": self.no_cache, } params.update(kwargs) return App(**params) diff --git a/oca_port/tests/test_app.py b/oca_port/tests/test_app.py 
index 1423665..d8a7194 100644 --- a/oca_port/tests/test_app.py +++ b/oca_port/tests/test_app.py @@ -1,13 +1,71 @@ import json +from oca_port.utils.misc import extract_ref_info + from . import common class TestApp(common.CommonCase): + def test_app_init(self): + # Simple case: call oca-port only with SOURCE, TARGET and ADDON parameters + # Check with --dry-run option + # $ oca-port origin/15.0 origin/16.0 my_module --dry-run + app = self._create_app(self.source1, self.target1, dry_run=True) + # Check without --dry-run option + # $ oca-port origin/15.0 origin/16.0 my_module + app = self._create_app(self.source1, self.target1) + self.assertEqual(app.destination.org, app.target.org) + self.assertEqual(app.destination.remote, app.target.remote) + self.assertEqual(app.destination.repo, app.target.repo) + self.assertEqual(app.destination.branch, None) # Is automatically set later + # Check without --dry-run option and with a different destination + # $ oca-port origin/15.0 origin/16.0 --destination FORK/dev my_module + app = self._create_app(self.source1, self.target1, destination=self.destination) + self.assertFalse(app.destination.org) + self.assertEqual(app.destination.remote, self.fork_org) + self.assertEqual(app.destination.branch, self.dest_branch) + + # Check with a branch that doesn't exist + # $ oca-port origin/15.0 14.0 + error_msg = "Ref 14.0 doesn't exist." + with self.assertRaisesRegex(ValueError, error_msg): + self._create_app(self.source1, "14.0") + # $ oca-port 14.0 origin/15.0 + with self.assertRaisesRegex(ValueError, error_msg): + self._create_app("14.0", self.target1) + + # Check with a branch that doesn't match an Odoo version + # $ oca-port origin/15.0 dev + error_msg = "Unable to identify Odoo target version from dev." + with self.assertRaisesRegex(ValueError, error_msg): + self._create_app(self.source1, "dev") + # $ oca-port dev origin/15.0 + error_msg = "Unable to identify Odoo source version from dev." 
+        with self.assertRaisesRegex(ValueError, error_msg):
+            self._create_app("dev", self.target1)
+
+        # Check with a local branch matching an Odoo version as target
+        #   $ oca-port origin/15.0 16.0 my_module
+        # NOTE: ensure the local branch exists first
+        repo = self._git_repo(self.repo_path)
+        repo.remotes["origin"].fetch("16.0:16.0")
+        app = self._create_app(self.source1, "16.0")
+        self.assertFalse(app.target.org)
+        self.assertEqual(app.target.branch, "16.0")
+        self.assertFalse(app.destination.org)
+        self.assertFalse(app.destination.remote)
+        self.assertFalse(app.destination.branch)
+
+    def test_check_addon_exists(self):
+        app = self._create_app(self.source1, self.target2)
+        # source
+        self.assertTrue(app.check_addon_exists_from_branch())
+        # target
+        self.assertFalse(app.check_addon_exists_to_branch())
+
     def test_app_nothing_to_port(self):
-        app = self._create_app(
-            self._settings["remote_branch1"], self._settings["remote_branch2"]
-        )
+        app = self._create_app(self.source1, self.target1)
         try:
             app.run()
         except SystemExit as exc:
@@ -15,14 +73,10 @@ def test_app_nothing_to_port(self):
             self.assertEqual(exc.args[0], 0)
 
     def test_app_commit_to_port(self):
-        self._commit_change_on_branch(
-            self.repo_upstream_path, self._settings["branch1"]
-        )
-        app = self._create_app(
-            self._settings["remote_branch1"],
-            self._settings["remote_branch2"],
-            fetch=True,
-        )
+        repo = self._git_repo(self.repo_path)
+        source1 = extract_ref_info(repo, "source", self.source1)
+        self._commit_change_on_branch(self.repo_upstream_path, source1.branch)
+        app = self._create_app(self.source1, self.target1, fetch=True)
         try:
             app.run()
         except SystemExit as exc:
@@ -30,16 +84,12 @@ def test_app_commit_to_port(self):
             self.assertEqual(exc.args[0], 110)
         # The other way around, no commit to backport (no exception)
         # (with CLI, the returned exit code is then 0)
-        app = self._create_app(
-            self._settings["remote_branch2"], self._settings["remote_branch1"]
-        )
+        app = self._create_app(self.target1, self.source1)
         res = app.run()
         self.assertFalse(res)
 
     def test_app_module_to_migrate(self):
-        app = self._create_app(
-            self._settings["remote_branch2"], self._settings["remote_branch3"]
-        )
+        app = self._create_app(self.source2, self.target2)
         try:
             app.run()
         except SystemExit as exc:
@@ -47,20 +97,18 @@ def test_app_module_to_migrate(self):
             self.assertEqual(exc.args[0], 100)
         # The other way around, nothing to migrate as the module doesn't exist
         # (with CLI, the returned exit code is then 1)
-        app = self._create_app(
-            self._settings["remote_branch3"],
-            self._settings["remote_branch2"]
-        )
+        app = self._create_app(self.target2, self.source2)
         error_msg = "my_module does not exist on origin/17.0"
         with self.assertRaisesRegex(ValueError, error_msg):
             app.run()
 
     def test_app_commit_to_port_non_interactive(self):
-        self._commit_change_on_branch(
-            self.repo_upstream_path, self._settings["branch1"]
-        )
+        repo = self._git_repo(self.repo_path)
+        source1 = extract_ref_info(repo, "source", self.source1)
+        self._commit_change_on_branch(self.repo_upstream_path, source1.branch)
         app = self._create_app(
-            self._settings["remote_branch1"],
-            self._settings["remote_branch2"],
+            self.source1,
+            self.target1,
             non_interactive=True,
             fetch=True,
         )
@@ -68,30 +116,22 @@ def test_app_commit_to_port_non_interactive(self):
         self.assertTrue(result)
         self.assertIsInstance(result, bool)
         # The other way around, no commit to backport
-        app = self._create_app(
-            self._settings["remote_branch2"],
-            self._settings["remote_branch1"],
-            non_interactive=True,
-        )
+        app = self._create_app(self.target1, self.source1, non_interactive=True)
         result = app.run()
         self.assertFalse(result)
         self.assertIsInstance(result, bool)
 
     def test_app_module_to_migrate_non_interactive(self):
         app = self._create_app(
-            self._settings["remote_branch2"],
-            self._settings["remote_branch3"],
+            self.source2,
+            self.target2,
             non_interactive=True,
         )
         result = app.run()
         self.assertTrue(result)
         self.assertIsInstance(result, bool)
         # The other way around, nothing to migrate as the module doesn't exist
-        app = self._create_app(
-            self._settings["remote_branch3"],
-            self._settings["remote_branch2"],
-            non_interactive=True,
-        )
+        app = self._create_app(self.target2, self.source2, non_interactive=True)
         error_msg = "my_module does not exist on origin/17.0"
         with self.assertRaisesRegex(ValueError, error_msg):
             app.run()
@@ -99,18 +139,20 @@
     def test_app_wrong_output(self):
         with self.assertRaisesRegex(ValueError, "Supported outputs are"):
             self._create_app(
-                self._settings["remote_branch2"],
-                self._settings["remote_branch3"],
+                self.source2,
+                self.target2,
                 output="wrong_format",
             )
 
     def test_app_commit_to_port_output_json(self):
+        repo = self._git_repo(self.repo_path)
+        source1 = extract_ref_info(repo, "source", self.source1)
         commit_sha = self._commit_change_on_branch(
-            self.repo_upstream_path, self._settings["branch1"]
+            self.repo_upstream_path, source1.branch
        )
         app = self._create_app(
-            self._settings["remote_branch1"],
-            self._settings["remote_branch2"],
+            self.source1,
+            self.target1,
             output="json",
             fetch=True,
         )
@@ -133,11 +175,7 @@ def test_app_commit_to_port_output_json(self):
             },
         )
         # The other way around, no commit to backport
-        app = self._create_app(
-            self._settings["remote_branch2"],
-            self._settings["remote_branch1"],
-            output="json",
-        )
+        app = self._create_app(self.target1, self.source1, output="json")
         output = app.run()
         self.assertTrue(output)
         self.assertIsInstance(output, str)
@@ -146,8 +184,8 @@ def test_app_commit_to_port_output_json(self):
 
     def test_app_module_to_migrate_output_json(self):
         app = self._create_app(
-            self._settings["remote_branch2"],
-            self._settings["remote_branch3"],
+            self.source2,
+            self.target2,
             output="json",
         )
         output = app.run()
@@ -157,11 +195,7 @@
         self.assertEqual(output["process"], "migrate")
         self.assertEqual(output["results"], {})
         # The other way around, nothing to migrate as the module doesn't exist
-        app = self._create_app(
-            self._settings["remote_branch3"],
-            self._settings["remote_branch2"],
-            output="json",
-        )
+        app = self._create_app(self.target2, self.source2, output="json")
         error_msg = "my_module does not exist on origin/17.0"
         with self.assertRaisesRegex(ValueError, error_msg):
             app.run()
diff --git a/oca_port/tests/test_utils_cache.py b/oca_port/tests/test_utils_cache.py
index 9d0a254..32ad57f 100644
--- a/oca_port/tests/test_utils_cache.py
+++ b/oca_port/tests/test_utils_cache.py
@@ -9,12 +9,17 @@ class TestUserCache(common.CommonCase):
     def setUp(self):
         super().setUp()
-        app = self._create_app(
-            self._settings["remote_branch1"],
-            self._settings["remote_branch2"],
-            from_org="TEST",
-        )
+        app = self._create_app(self.source1, self.target1, upstream_org="TEST")
         self.cache = cache.UserCache(app)
+        # As the source branch has no organization, the cache is read-only
+        # by default. Unset this flag to run the tests.
+        self.cache.readonly = False
+        # Clear the cache before each test
+        self.cache.clear()
+
+    def tearDown(self):
+        # Clear the cache after each test
+        self.cache.clear()
 
     def test_commit_ported(self):
         sha = "TEST"
diff --git a/oca_port/tests/test_utils_misc.py b/oca_port/tests/test_utils_misc.py
index c30bddc..1bc88ea 100644
--- a/oca_port/tests/test_utils_misc.py
+++ b/oca_port/tests/test_utils_misc.py
@@ -11,6 +11,8 @@
 
 from oca_port.utils import misc
 
+from . import common
+
 
 @contextmanager
 def make_tmp_addon(name, manifest_name="__manifest__.py"):
@@ -30,35 +32,67 @@ def capture_stdout():
     sys.stdout = sys.__stdout__
 
 
-def test_get_manifest_path():
-    with make_tmp_addon("foo") as addons_path:
-        expected = f"{addons_path}/foo/__manifest__.py"
-        assert misc.get_manifest_path(addons_path / "foo") == expected
-
+class TestMisc(common.CommonCase):
+    def test_get_manifest_path(self):
+        with make_tmp_addon("foo") as addons_path:
+            expected = f"{addons_path}/foo/__manifest__.py"
+            assert misc.get_manifest_path(addons_path / "foo") == expected
 
-def test_clean_text():
-    assert misc.clean_text("[13.0] foo 13.0") == "foo"
+    def test_clean_text(self):
+        assert misc.clean_text("[13.0] foo 13.0") == "foo"
 
+    def test_default_dict_from_dict(self):
+        res = misc.defaultdict_from_dict({"a": 1})
+        # original values preserved
+        assert res["a"] == 1
+        # any key is a dict by default
+        assert isinstance(res["b"], defaultdict)
+        assert isinstance(res["b"]["c"], defaultdict)
+        assert isinstance(res["b"]["c"]["d"], defaultdict)
 
-def test_default_dict_from_dict():
-    res = misc.defaultdict_from_dict({"a": 1})
-    # original values preserved
-    assert res["a"] == 1
-    # any key is a dict by default
-    assert isinstance(res["b"], defaultdict)
-    assert isinstance(res["b"]["c"], defaultdict)
-    assert isinstance(res["b"]["c"]["d"], defaultdict)
+    def test_output_cli_mode(self):
+        output = misc.Output()
+        output.cli = True
+        output.output = None
+        with capture_stdout() as captured:
+            output._print("foo")
+            output._print("baz")
+            output._print("bar")
+        captured.seek(0)
+        assert captured.read() == "foo\nbaz\nbar\n"
+        assert output._render_output("json", {"a": 1}) == '{"a": 1}'
 
-def test_output_cli_mode():
-    output = misc.Output()
-    output.cli = True
-    output.output = None
-    with capture_stdout() as captured:
-        output._print("foo")
-        output._print("baz")
-        output._print("bar")
-    captured.seek(0)
-    assert captured.read() == "foo\nbaz\nbar\n"
+    def test_extract_ref_info_http(self):
+        repo = self._git_repo(self.repo_path)
+        org, repo_name, remote, branch = "Example", "test_repo", "test_ref", "16.0"
+        repo.create_remote(remote, f"https://user:pwd@github.com/{org}/{repo_name}.git")
+        ref = f"{remote}/{branch}"
+        expected_info = {
+            "ref": ref,
+            "kind": "source",
+            "branch": branch,
+            "remote": remote,
+            "repo": repo_name,
+            "org": org,
+            "platform": "github",
+        }
+        info = misc.extract_ref_info(repo, "source", ref)
+        self.assertDictEqual(info, expected_info)
 
-    assert output._render_output("json", {"a": 1}) == '{"a": 1}'
+    def test_extract_ref_info_ssh(self):
+        repo = self._git_repo(self.repo_path)
+        org, repo_name, remote, branch = "Example", "test_repo", "test_ref", "16.0"
+        repo.create_remote(remote, f"git@github.com:{org}/{repo_name}.git")
+        ref = f"{remote}/{branch}"
+        expected_info = {
+            "ref": ref,
+            "kind": "source",
+            "branch": branch,
+            "remote": remote,
+            "repo": repo_name,
+            "org": org,
+            "platform": "github",
+        }
+        info = misc.extract_ref_info(repo, "source", ref)
+        self.assertDictEqual(info, expected_info)
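The two `extract_ref_info` tests above assert that HTTPS and SSH remote URLs resolve to the same org/repo/platform triple. As a side note, not part of the patch itself, the field extraction they rely on comes from the `giturlparse` dependency added at the bottom of this diff; a minimal sketch with illustrative URLs:

```python
# Minimal sketch (assumption: giturlparse is installed; the URLs are illustrative).
import giturlparse

for url in (
    "https://github.com/OCA/server-tools.git",
    "git@github.com:OCA/server-tools.git",
):
    parsed = giturlparse.parse(url)
    # Both URL forms yield the same owner/repo/platform triple,
    # which is what extract_ref_info() stores as org/repo/platform.
    print(parsed.owner, parsed.repo, parsed.platform)  # OCA server-tools github
```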
diff --git a/oca_port/tests/test_utils_session.py b/oca_port/tests/test_utils_session.py
new file mode 100644
index 0000000..c725b90
--- /dev/null
+++ b/oca_port/tests/test_utils_session.py
@@ -0,0 +1,30 @@
+# Copyright 2023 Camptocamp SA
+# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl)
+
+from . import common
+
+from oca_port.utils import session
+
+
+class TestSession(common.CommonCase):
+    def setUp(self):
+        super().setUp()
+        app = self._create_app(self.source1, self.target1, upstream_org="TEST")
+        self.session_name = "test"
+        self.session = session.Session(app, self.session_name)
+        # Clear the session before each test
+        self.session.clear()
+
+    def tearDown(self):
+        # Clear the session after each test
+        self.session.clear()
+
+    def test_session(self):
+        key = "test"
+        value = ["a", "b"]
+        data = self.session.get_data()
+        self.assertFalse(data)
+        data[key] = value
+        self.session.set_data(data)
+        data2 = self.session.get_data()
+        self.assertEqual(data2, data)
diff --git a/oca_port/utils/__init__.py b/oca_port/utils/__init__.py
index b827abb..b9c8cb7 100644
--- a/oca_port/utils/__init__.py
+++ b/oca_port/utils/__init__.py
@@ -1,7 +1,8 @@
-__all__ = ["cache", "git", "github", "misc", "storage"]
+__all__ = ["cache", "git", "github", "misc", "session", "storage"]
 
 from . import cache
 from . import git
 from . import github
 from . import misc
+from . import session
 from . import storage
diff --git a/oca_port/utils/cache.py b/oca_port/utils/cache.py
index 37c1a60..cdd2983 100644
--- a/oca_port/utils/cache.py
+++ b/oca_port/utils/cache.py
@@ -5,7 +5,6 @@
 import logging
 import os
 import pathlib
-import shutil
 from collections import defaultdict
 
 from . import misc
@@ -83,7 +82,8 @@ class UserCache:
     """Manage the user's cache, in respect to XDG conventions.
 
     This class manages the following data:
-    - a list of already ported commits from one branch to another.
+    - a list of already ported commits from one branch to another
+    - some commit data, like impacted file paths
 
     It allows to speed up further commit scans on a given module.
     """
@@ -96,6 +96,11 @@ class UserCache:
     def __init__(self, app):
         """Initialize user's cache manager."""
         self.app = app
+        # NOTE: set the cache as read-only if the source branch is not linked
+        # to an organization (local branch): we cannot cache commits data
+        # coming from such a branch on behalf of the upstream organization/repo,
+        # as that could produce wrong cache results for later runs.
+        self.readonly = not self.app.source.org
         self.dir_path = self._get_dir_path()
         self._ported_commits_path = self._get_ported_commits_path()
         self._ported_commits = self._get_ported_commits()
@@ -120,7 +125,7 @@ def _get_ported_commits_path(self):
         )
         return self.dir_path.joinpath(
             self._ported_dirname,
-            self.app.from_org,
+            self.app.upstream_org,
             self.app.repo_name,
             file_name,
         )
@@ -133,7 +138,7 @@ def _get_commits_to_port_path(self):
         )
         return self.dir_path.joinpath(
             self._to_port_dirname,
-            self.app.from_org,
+            self.app.upstream_org,
             self.app.repo_name,
             file_name,
         )
@@ -143,7 +148,7 @@ def _get_commits_data_path(self):
         file_name = f"{self.app.repo_name}.json"
         return self.dir_path.joinpath(
             self._commits_data_dirname,
-            self.app.from_org,
+            self.app.upstream_org,
             file_name,
         )
@@ -180,6 +185,8 @@ def _get_commits_data(self):
 
     def mark_commit_as_ported(self, commit_sha: str):
         """Mark commit as ported."""
+        if self.readonly:
+            return
         if self.is_commit_ported(commit_sha):
             return
         self._ported_commits.append(commit_sha)
@@ -192,6 +199,8 @@ def is_commit_ported(self, commit_sha: str):
 
     def store_commit_pr(self, commit_sha: str, data):
         """Store the original PR data of a commit."""
+        if self.readonly:
+            return
         pr_number = data["number"]
         self._commits_to_port["pull_requests"][str(pr_number)] = data
         self._commits_to_port["commits"][commit_sha]["pr"] = pr_number
@@ -209,12 +218,28 @@ def get_commit_files(self, commit_sha: str):
 
     def set_commit_files(self, commit_sha: str, files: list):
         """Set file paths modified by a commit."""
+        if self.readonly:
+            return
         self._commits_data[commit_sha]["files"] = list(files)
+        if os.environ.get("OCA_PORT_AGRESSIVE_CACHE_WRITE"):
+            # IO can be very slow on some filesystems (like checking the
+            # modified paths of a commit). Saving the cache after each
+            # analyzed commit helps if the current oca-port process is killed
+            # before it writes its cache to disk: the next run will be faster.
+            self._save_commits_data()
 
     def save(self):
         """Save cache files."""
+        if self.readonly:
+            return
+        self._save_commits_to_port()
+        self._save_commits_data()
+
+    def _save_commits_to_port(self):
         # commits/PRs to port
         self._save_cache(self._commits_to_port, self._commits_to_port_path)
+
+    def _save_commits_data(self):
         # commits data file
         self._save_cache(self._commits_data, self._commits_data_path)
@@ -226,6 +251,12 @@ def _save_cache(self, cache, path):
             pass
 
     def clear(self):
-        """Clear the cache by removing the content of the cache directory."""
-        if self._cache_dirname and str(self.dir_path).endswith(self._cache_dirname):
-            shutil.rmtree(self.dir_path)
+        """Clear the cache files."""
+        paths = [
+            self._ported_commits_path,
+            self._commits_to_port_path,
+            self._commits_data_path,
+        ]
+        for path in paths:
+            if path and path.exists():
+                path.unlink()
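The `UserCache` changes above turn every write helper into a no-op when the source branch is not tied to an upstream organization, so nothing gets cached on behalf of the upstream repository from a purely local branch. A simplified sketch of that read-only guard pattern (the class below is invented for illustration and is not oca-port code):

```python
# Illustration only: a stripped-down version of the read-only guard used above.
class ReadOnlyAwareCache:
    def __init__(self, source_org):
        # No upstream organization means the cache must not be written to.
        self.readonly = not source_org
        self._data = {}

    def store(self, key, value):
        if self.readonly:
            return  # silently skip writes for local/unknown sources
        self._data[key] = value


cache = ReadOnlyAwareCache(source_org=None)  # e.g. a plain local branch
cache.store("sha123", {"files": ["addon/models.py"]})
assert cache._data == {}  # nothing was cached
```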
diff --git a/oca_port/utils/git.py b/oca_port/utils/git.py
index ff7561b..230931c 100644
--- a/oca_port/utils/git.py
+++ b/oca_port/utils/git.py
@@ -290,7 +290,7 @@ def run_pre_commit(repo, addon, commit=True, hook=None):
     repo.git.add("-A")
     if commit:
         repo.git.commit(
-            "-m", f"[IMP] {addon}: apply pre-commit auto fixes", "--no-verify"
+            "-m", f"[IMP] {addon}: pre-commit auto fixes", "--no-verify"
         )
diff --git a/oca_port/utils/misc.py b/oca_port/utils/misc.py
index 395861f..b1a510e 100644
--- a/oca_port/utils/misc.py
+++ b/oca_port/utils/misc.py
@@ -1,6 +1,7 @@
 # Copyright 2022 Camptocamp SA
 # License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl)
 
+import giturlparse
 import json
 import os
 import re
@@ -73,24 +74,48 @@ def __getattr__(self, attrib):
         return self.__class__(val) if isinstance(val, dict) else val
 
 
-REF_REGEX = r"((?P<org>[\w-]+)/)?((?P<repo>[\w-]+)#)?(?P<branch>.*)"
+REF_REGEX = r"((?P<remote>[\w-]+)/)?(?P<branch>.*)"
 
 
-def parse_gh_ref(ref):
-    """Parse github reference in the form [org/][repo#]branch"""
+def parse_ref(ref):
-    """Parse reference in the form '[remote/]branch'."""
     group = re.match(REF_REGEX, ref)
     return SmartDict(group.groupdict()) if group else None
 
 
-def make_gh_info(kind, ref, remote=None):
-    info = parse_gh_ref(ref)
+def extract_ref_info(repo, kind, ref, remote=None):
+    """Extract info from `ref`.
+
+    >>> extract_ref_info(repo, "source", "origin/16.0")
+    {'ref': 'origin/16.0', 'remote': 'origin', 'repo': 'server-tools', 'platform': 'github', 'branch': '16.0', 'kind': 'source', 'org': 'OCA'}
+    """
+    info = parse_ref(ref)
     if not info:
-        # FIXME
         raise ValueError(f"No valid {kind}")
-    info["_kind"] = kind
-    info["remote"] = remote or info.org
-    if not info.org:
-        info["org"] = "OCA"
+    info["ref"] = ref
+    info["kind"] = kind
+    info["remote"] = info["remote"] or remote
+    info.update({"org": None, "platform": None})
+    if info["remote"]:
+        remote_url = repo.remotes[info["remote"]].url
+        p = giturlparse.parse(remote_url)
+        try:
+            info["repo"] = p.repo
+        except AttributeError:
+            pass
+        info["platform"] = p.platform
+        info["org"] = p.owner
+    else:
+        # Fallback on 'origin' to grab info like the platform and repository name
+        if "origin" in repo.remotes:
+            remote_url = repo.remotes["origin"].url
+            p = giturlparse.parse(remote_url)
+            try:
+                info["repo"] = p.repo
+            except AttributeError:
+                pass
+            info["platform"] = p.platform
+            info["org"] = p.owner
     return info
diff --git a/oca_port/utils/session.py b/oca_port/utils/session.py
new file mode 100644
index 0000000..9c3ae40
--- /dev/null
+++ b/oca_port/utils/session.py
@@ -0,0 +1,85 @@
+# Copyright 2024 Camptocamp SA
+# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl)
+
+import hashlib
+import json
+import os
+import pathlib
+from collections import defaultdict
+
+from . import misc
+
+
+class Session:
+    """Manage the user's session data, with respect to XDG conventions.
+
+    This class is used to store the list of processed/blacklisted PRs or modules
+    during a porting session.
+    """
+
+    _cache_dirname = "oca-port"
+    _sessions_dirname = "sessions"
+
+    def __init__(self, app, name):
+        """Initialize user's session manager."""
+        self.app = app
+        self.dir_path = self._get_dir_path()
+        self._sessions_dir_path = self._get_sessions_dir_path()
+        self._key = hashlib.shake_256(name.encode()).hexdigest(3)
+        session_file = (
+            f"{self.app.addon}"
+            f"-{self.app.source_version}-{self.app.target_version}"
+            f"-{self._key}.json"
+        )
+        self._session_path = self._sessions_dir_path.joinpath(session_file)
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, type, value, traceback):
+        self.clear()
+
+    @classmethod
+    def _get_dir_path(cls):
+        """Return the path of the session directory."""
+        default_cache_dir_path = pathlib.Path.home().joinpath(".cache")
+        return pathlib.Path(
+            os.environ.get("XDG_CACHE_HOME", default_cache_dir_path),
+            cls._cache_dirname,
+        )
+
+    def _get_sessions_dir_path(self):
+        """Return the dir path storing sessions data."""
+        return self.dir_path.joinpath(
+            self._sessions_dirname,
+            self.app.upstream_org,
+        )
+
+    def get_data(self):
+        """Return the data of the session."""
+        try:
+            with self._session_path.open() as file_:
+                return json.load(file_, object_hook=misc.defaultdict_from_dict)
+        except (json.JSONDecodeError, FileNotFoundError):
+            # Mainly to handle empty files (first initialization of the session),
+            # but also to avoid crashing if the JSON file gets corrupted.
+            # Return a "nested dict" object so callers don't have to check keys.
+            nested_dict = lambda: defaultdict(nested_dict)  # noqa
+            return nested_dict()
+
+    def set_data(self, data):
+        """Store `data` for the session."""
+        self._sessions_dir_path.mkdir(parents=True, exist_ok=True)
+        self._save_data(data, self._session_path)
+
+    def _save_data(self, data, path):
+        try:
+            with path.open(mode="w") as file_:
+                json.dump(data, file_, indent=2)
+        except Exception:
+            pass
+
+    def clear(self):
+        """Clear the session file."""
+        if self._session_path and self._session_path.exists():
+            self._session_path.unlink()
diff --git a/oca_port/utils/storage.py b/oca_port/utils/storage.py
index 1422c57..54a62b4 100644
--- a/oca_port/utils/storage.py
+++ b/oca_port/utils/storage.py
@@ -82,9 +82,7 @@ def is_pr_blacklisted(self, pr_ref):
         pr_ref = str(pr_ref or "orphaned_commits")
         return self._data.get("pull_requests", {}).get(pr_ref, False)
 
-    def blacklist_pr(self, pr_ref, confirm=False, reason=None):
-        if confirm and not click.confirm("\tBlacklist this PR?"):
-            return
+    def blacklist_pr(self, pr_ref, reason=None):
         if not reason:
             reason = click.prompt("\tReason", type=str)
         pr_ref = str(pr_ref or "orphaned_commits")
@@ -117,11 +115,6 @@ def commit(self, msg=None):
             raise click.ClickException(
                 "changes not committed detected in this repository."
             )
-        # Ensure to be on a dedicated branch
-        if self.repo.active_branch.name == self.to_branch.name:
-            raise click.ClickException(
-                "performing commit on upstream branch is not allowed."
-            )
         # Commit all changes under ./.oca-port
         self.repo.index.add(self.storage_dirname)
         if self.repo.is_dirty():
diff --git a/pyproject.toml b/pyproject.toml
index d13baed..29325c2 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -18,6 +18,7 @@ dependencies = [
     "click",
     "gitpython",
     "requests",
+    "giturlparse",
 ]
 requires-python = ">=3.8"
 dynamic = ["version"]
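As a closing note, not part of the patch, the new `Session` helper above can be used as a context manager, since `__exit__()` clears the session file once porting is done. A hypothetical usage sketch, assuming `app` is an already configured `oca_port.App` instance and that `"processed_prs"` is just an illustrative key:

```python
# Hypothetical usage sketch of oca_port.utils.session.Session.
from oca_port.utils import session

with session.Session(app, "port-things") as porting_session:
    data = porting_session.get_data()     # empty nested dict on the first run
    data["processed_prs"]["1234"] = True  # remember an already handled PR
    porting_session.set_data(data)        # written under the XDG cache dir
# On exit, the context manager removes the session file (see __exit__ above).
```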