2024-03-28 nightly release (a197412)
pytorchbot committed Mar 28, 2024
1 parent ba976e5 commit c75c3f5
Showing 132 changed files with 4,314 additions and 640 deletions.
6 changes: 6 additions & 0 deletions .ci/scripts/setup-macos.sh
@@ -104,6 +104,12 @@ print_cmake_info() {
codesign -f -s - "${CMAKE_EXEC}" || true
}

setup_macos_env_variables() {
CMAKE_PREFIX_PATH=$(python -c 'from distutils.sysconfig import get_python_lib; print(get_python_lib())')
export CMAKE_PREFIX_PATH
}

setup_macos_env_variables
# NB: we need buck2 in all cases because cmake build also depends on calling
# buck2 atm
install_buck
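The new setup_macos_env_variables helper exports CMAKE_PREFIX_PATH so CMake can locate packages installed into the active Python environment. As a hedged sanity check (not part of the change), the same path can be printed directly:

# Sketch: print the site-packages directory that CMAKE_PREFIX_PATH will point to
python -c 'from distutils.sysconfig import get_python_lib; print(get_python_lib())'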
23 changes: 21 additions & 2 deletions .ci/scripts/test_llama.sh
@@ -12,7 +12,11 @@ source "$(dirname "${BASH_SOURCE[0]}")/utils.sh"
MODEL_NAME=$1 # stories110M.pt
BUILD_TOOL=$2 # buck2 or cmake
DTYPE=$3 # fp16 or fp32

MODE=${4:-"xnnpack"} # portable or xnnpack
if [[ $# -lt 4 ]]; then # Assuming 4 mandatory args
echo "Expecting atleast 4 positional arguments"
echo "Usage: [...]"
fi
if [[ -z "${MODEL_NAME:-}" ]]; then
echo "Missing model name, exiting..."
exit 1
@@ -28,6 +32,11 @@ if [[ -z "${DTYPE:-}" ]]; then
exit 1
fi

if [[ -z "${MODE:-}" ]]; then
echo "Missing mode, choose portable or xnnpack, exiting..."
exit 1
fi

if [[ -z "${BUCK:-}" ]]; then
BUCK=buck2
fi
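
With the new optional MODE positional, a run of this script takes up to four arguments. A hypothetical invocation (the model, build tool, and dtype values follow the comments at the top of the script; actual CI jobs may pass different ones):

# Hypothetical invocation: model, build tool, dtype, and the new MODE argument
bash .ci/scripts/test_llama.sh stories110M.pt cmake fp32 xnnpack
# MODE defaults to "xnnpack" when the fourth argument is omitted
bash .ci/scripts/test_llama.sh stories110M.pt buck2 fp32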
@@ -42,12 +51,18 @@ which "${PYTHON_EXECUTABLE}"
cmake_install_executorch_libraries() {
echo "Installing libexecutorch.a, libextension_module.so, libportable_ops_lib.a"
rm -rf cmake-out
if [[ "${MODE}" == "xnnpack" ]]; then
XNNPACK=ON
else
XNNPACK=OFF
fi
retry cmake -DBUCK2="$BUCK" \
-DCMAKE_INSTALL_PREFIX=cmake-out \
-DCMAKE_BUILD_TYPE=Release \
-DEXECUTORCH_BUILD_EXTENSION_MODULE=ON \
-DEXECUTORCH_BUILD_EXTENSION_DATA_LOADER=ON \
-DEXECUTORCH_BUILD_OPTIMIZED=ON \
-DEXECUTORCH_BUILD_XNNPACK="$XNNPACK" \
-DPYTHON_EXECUTABLE="$PYTHON_EXECUTABLE" \
-Bcmake-out .
cmake --build cmake-out -j9 --target install --config Release
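
The MODE value is translated into a single CMake switch before the configure step. A minimal sketch of the mapping, with a hypothetical MODE value:

# Sketch: MODE maps onto the new EXECUTORCH_BUILD_XNNPACK option
MODE=portable
if [[ "${MODE}" == "xnnpack" ]]; then XNNPACK=ON; else XNNPACK=OFF; fi
echo "-DEXECUTORCH_BUILD_XNNPACK=${XNNPACK}"  # prints -DEXECUTORCH_BUILD_XNNPACK=OFF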
@@ -101,7 +116,11 @@ fi
# Export model.
EXPORTED_MODEL_NAME="${EXPORTED_MODEL_NAME}.pte"
echo "Exporting ${EXPORTED_MODEL_NAME}"
$PYTHON_EXECUTABLE -m examples.models.llama2.export_llama -c stories110M.pt -p "${PARAMS}" -d "${DTYPE}"
EXPORT_ARGS="-c stories110M.pt -p ${PARAMS} -d ${DTYPE} -n ${EXPORTED_MODEL_NAME}"
if [[ "${MODE}" == "xnnpack" ]]; then
EXPORT_ARGS="${EXPORT_ARGS} --pt2e_quantize xnnpack_dynamic"
fi
$PYTHON_EXECUTABLE -m examples.models.llama2.export_llama ${EXPORT_ARGS}

# Create tokenizer.bin.
echo "Creating tokenizer.bin"
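The export step above now passes an explicit output name and, in xnnpack mode, a dynamic quantization flag. A hypothetical expansion of the final command (assuming PARAMS resolves to the params.json created in utils.sh and the output name is stories110M.pte; both are placeholders):

# Hypothetical expansion of the export command in xnnpack mode
python -m examples.models.llama2.export_llama \
  -c stories110M.pt -p params.json -d fp32 \
  -n stories110M.pte --pt2e_quantize xnnpack_dynamic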
4 changes: 2 additions & 2 deletions .ci/scripts/utils.sh
@@ -134,8 +134,8 @@ cmake_install_executorch_lib() {

download_stories_model_artifacts() {
# Download stories110M.pt and tokenizer from Github
wget "https://huggingface.co/karpathy/tinyllamas/resolve/main/stories110M.pt"
wget "https://raw.githubusercontent.com/karpathy/llama2.c/master/tokenizer.model"
curl -Ls "https://huggingface.co/karpathy/tinyllamas/resolve/main/stories110M.pt" --output stories110M.pt
curl -Ls "https://raw.githubusercontent.com/karpathy/llama2.c/master/tokenizer.model" --output tokenizer.model
# Create params.json file
touch params.json
echo '{"dim": 768, "multiple_of": 32, "n_heads": 12, "n_layers": 12, "norm_eps": 1e-05, "vocab_size": 32000}' > params.json
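The downloads now use curl: -L follows the Hugging Face redirect, -s suppresses progress output, and --output names the destination file explicitly. A hedged follow-up check (not part of the change) that the artifacts landed:

# Sketch: confirm the downloaded artifacts exist and are non-empty
for f in stories110M.pt tokenizer.model params.json; do
  [[ -s "$f" ]] || { echo "missing or empty: $f"; exit 1; }
done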
228 changes: 228 additions & 0 deletions .github/scripts/cherry_pick.py
@@ -0,0 +1,228 @@
#!/usr/bin/env python3
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

import json
import os
import re
from typing import Any, Optional

from urllib.error import HTTPError

from github_utils import gh_fetch_url, gh_post_pr_comment

from gitutils import get_git_remote_name, get_git_repo_dir, GitRepo
from trymerge import get_pr_commit_sha, GitHubPR


# This is only a suggestion for now, not a strict requirement
REQUIRES_ISSUE = {
"regression",
"critical",
"fixnewfeature",
}


def parse_args() -> Any:
from argparse import ArgumentParser

parser = ArgumentParser("cherry pick a landed PR onto a release branch")
parser.add_argument(
"--onto-branch", type=str, required=True, help="the target release branch"
)
parser.add_argument(
"--github-actor", type=str, required=True, help="all the world’s a stage"
)
parser.add_argument(
"--classification",
choices=["regression", "critical", "fixnewfeature", "docs", "release"],
required=True,
help="the cherry pick category",
)
parser.add_argument("pr_num", type=int)
parser.add_argument(
"--fixes",
type=str,
default="",
help="the GitHub issue that the cherry pick fixes",
)
parser.add_argument("--dry-run", action="store_true")

return parser.parse_args()


def get_merge_commit_sha(repo: GitRepo, pr: GitHubPR) -> Optional[str]:
"""
Return the merge commit SHA iff the PR has been merged. For simplicity, we
will only cherry pick PRs that have been merged into main
"""
commit_sha = get_pr_commit_sha(repo, pr)
return commit_sha if pr.is_closed() else None


def cherry_pick(
github_actor: str,
repo: GitRepo,
pr: GitHubPR,
commit_sha: str,
onto_branch: str,
classification: str,
fixes: str,
dry_run: bool = False,
) -> None:
"""
Create a local branch to cherry pick the commit and submit it as a pull request
"""
current_branch = repo.current_branch()
cherry_pick_branch = create_cherry_pick_branch(
github_actor, repo, pr, commit_sha, onto_branch
)

try:
if not dry_run:
org, project = repo.gh_owner_and_name()
cherry_pick_pr = submit_pr(repo, pr, cherry_pick_branch, onto_branch)

msg = f"The cherry pick PR is at {cherry_pick_pr}"
if fixes:
msg += f" and it is linked with issue {fixes}"
elif classification in REQUIRES_ISSUE:
msg += f" and it is recommended to link a {classification} cherry pick PR with an issue"

post_comment(org, project, pr.pr_num, msg)

finally:
if current_branch:
repo.checkout(branch=current_branch)


def create_cherry_pick_branch(
github_actor: str, repo: GitRepo, pr: GitHubPR, commit_sha: str, onto_branch: str
) -> str:
"""
Create a local branch and cherry pick the commit. Return the name of the local
cherry picking branch.
"""
repo.checkout(branch=onto_branch)
repo._run_git("submodule", "update", "--init", "--recursive")

# Remove all special characters if we want to include the actor in the branch name
github_actor = re.sub("[^0-9a-zA-Z]+", "_", github_actor)

cherry_pick_branch = f"cherry-pick-{pr.pr_num}-by-{github_actor}"
repo.create_branch_and_checkout(branch=cherry_pick_branch)

# We might want to support ghstack later
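# `-x` appends a "(cherry picked from commit ...)" line to the new commit message,
# and `-X theirs` resolves any conflicts in favor of the commit being picked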
repo._run_git("cherry-pick", "-x", "-X", "theirs", commit_sha)
repo.push(branch=cherry_pick_branch, dry_run=False)

return cherry_pick_branch


def submit_pr(
repo: GitRepo,
pr: GitHubPR,
cherry_pick_branch: str,
onto_branch: str,
) -> str:
"""
Submit the cherry pick PR and return the link to the PR
"""
org, project = repo.gh_owner_and_name()

default_msg = f"Cherry pick #{pr.pr_num} onto {onto_branch} branch"
title = pr.info.get("title", default_msg)
body = pr.info.get("body", default_msg)

try:
response = gh_fetch_url(
f"https://api.github.com/repos/{org}/{project}/pulls",
method="POST",
data={
"title": title,
"body": body,
"head": cherry_pick_branch,
"base": onto_branch,
},
headers={"Accept": "application/vnd.github.v3+json"},
reader=json.load,
)

cherry_pick_pr = response.get("html_url", "")
if not cherry_pick_pr:
raise RuntimeError(
f"Fail to find the cherry pick PR: {json.dumps(response)}"
)

return str(cherry_pick_pr)

except HTTPError as error:
msg = f"Fail to submit the cherry pick PR: {error}"
raise RuntimeError(msg) from error


def post_comment(org: str, project: str, pr_num: int, msg: str) -> None:
"""
Post a comment on the original PR pointing to the cherry pick PR on success,
or containing the error message on failure
"""
internal_debugging = ""

run_url = os.getenv("GH_RUN_URL")
# Post a comment to tell folks that the PR is being cherry picked
if run_url is not None:
internal_debugging = "\n".join(
line
for line in (
"<details><summary>Details for Dev Infra team</summary>",
f'Raised by <a href="{run_url}">workflow job</a>\n',
"</details>",
)
if line
)

comment = "\n".join(
(f"### Cherry picking #{pr_num}", f"{msg}", "", f"{internal_debugging}")
)
gh_post_pr_comment(org, project, pr_num, comment)


def main() -> None:
args = parse_args()
pr_num = args.pr_num

repo = GitRepo(get_git_repo_dir(), get_git_remote_name())
org, project = repo.gh_owner_and_name()

pr = GitHubPR(org, project, pr_num)

try:
commit_sha = get_merge_commit_sha(repo, pr)
if not commit_sha:
raise RuntimeError(
f"Refuse to cherry pick #{pr_num} because it hasn't been merged yet"
)

cherry_pick(
args.github_actor,
repo,
pr,
commit_sha,
args.onto_branch,
args.classification,
args.fixes,
args.dry_run,
)

except RuntimeError as error:
if not args.dry_run:
post_comment(org, project, pr_num, str(error))
else:
raise error


if __name__ == "__main__":
main()
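
For reference, a hypothetical dry-run invocation of the new script (the PR number, branch name, and actor are placeholders; the script expects the supporting github_utils, gitutils, and trymerge modules to be importable):

# Hypothetical dry run: cherry-pick PR 1234 onto a release branch
python .github/scripts/cherry_pick.py 1234 \
  --onto-branch release/0.2 \
  --github-actor pytorchbot \
  --classification critical \
  --dry-run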