Commit 7d8e22d: 2024-03-13 nightly release (3507412)
Committed by pytorchbot on Mar 13, 2024
1 parent: 862f755
Showing 427 changed files with 18,621 additions and 12,868 deletions.
@@ -1 +1 @@
-f5b99976adcbb01fd71bd0a39ea15bdac6c9e48a
+6ca9ae4f8693639c395544327f7e362441a58c79
@@ -2,3 +2,5 @@
 ciflow_push_tags:
 - ciflow/nightly
 - ciflow/trunk
+- ciflow/binaries
+- ciflow/binaries/all
@@ -0,0 +1,57 @@
# From https://github.com/pytorch/test-infra/wiki/Using-Nova-Reusable-Build-Workflows
name: Build Linux Wheels

on:
  pull_request:
    paths:
      - build/packaging/**
      - .github/workflows/build-wheels-linux.yml
  push:
    branches:
      - nightly
      - release/*
    tags:
      # NOTE: Binary build pipelines should only get triggered on release candidate builds
      # Release candidate tags look like: v1.11.0-rc1
      - v[0-9]+.[0-9]+.[0-9]+-rc[0-9]+
      - ciflow/binaries/*
  workflow_dispatch:

jobs:
  generate-matrix:
    uses: pytorch/test-infra/.github/workflows/generate_binary_build_matrix.yml@main
    with:
      package-type: wheel
      os: linux
      test-infra-repository: pytorch/test-infra
      test-infra-ref: main
      with-cuda: disabled
      with-rocm: disabled

  build:
    needs: generate-matrix
    permissions:
      id-token: write
      contents: read
    strategy:
      fail-fast: false
      matrix:
        include:
          - repository: pytorch/executorch
            pre-script: build/packaging/pre_build_script.sh
            post-script: build/packaging/post_build_script.sh
            smoke-test-script: build/packaging/smoke_test.py
            package-name: executorch
    name: ${{ matrix.repository }}
    uses: pytorch/test-infra/.github/workflows/build_wheels_linux.yml@main
    with:
      repository: ${{ matrix.repository }}
      ref: ""
      test-infra-repository: pytorch/test-infra
      test-infra-ref: main
      build-matrix: ${{ needs.generate-matrix.outputs.matrix }}
      pre-script: ${{ matrix.pre-script }}
      post-script: ${{ matrix.post-script }}
      package-name: ${{ matrix.package-name }}
      smoke-test-script: ${{ matrix.smoke-test-script }}
      trigger-event: ${{ github.event_name }}
@@ -0,0 +1,58 @@
# From https://github.com/pytorch/test-infra/wiki/Using-Nova-Reusable-Build-Workflows
name: Build M1 Wheels

on:
  pull_request:
    paths:
      - build/packaging/**
      - .github/workflows/build-wheels-m1.yml
  push:
    branches:
      - nightly
      - release/*
    tags:
      # NOTE: Binary build pipelines should only get triggered on release candidate builds
      # Release candidate tags look like: v1.11.0-rc1
      - v[0-9]+.[0-9]+.[0-9]+-rc[0-9]+
      - ciflow/binaries/*
  workflow_dispatch:

jobs:
  generate-matrix:
    uses: pytorch/test-infra/.github/workflows/generate_binary_build_matrix.yml@main
    with:
      package-type: wheel
      os: macos-arm64
      test-infra-repository: pytorch/test-infra
      test-infra-ref: main
      with-cuda: disabled
      with-rocm: disabled

  build:
    needs: generate-matrix
    permissions:
      id-token: write
      contents: read
    strategy:
      fail-fast: false
      matrix:
        include:
          - repository: pytorch/executorch
            pre-script: build/packaging/pre_build_script.sh
            post-script: build/packaging/post_build_script.sh
            smoke-test-script: build/packaging/smoke_test.py
            package-name: executorch
    name: ${{ matrix.repository }}
    uses: pytorch/test-infra/.github/workflows/build_wheels_macos.yml@main
    with:
      repository: ${{ matrix.repository }}
      ref: ""
      test-infra-repository: pytorch/test-infra
      test-infra-ref: main
      build-matrix: ${{ needs.generate-matrix.outputs.matrix }}
      pre-script: ${{ matrix.pre-script }}
      post-script: ${{ matrix.post-script }}
      package-name: ${{ matrix.package-name }}
      runner-type: macos-m1-stable
      smoke-test-script: ${{ matrix.smoke-test-script }}
      trigger-event: ${{ github.event_name }}
@@ -0,0 +1,5 @@
# Copyright © 2024 Apple Inc. All rights reserved.
#
# Please refer to the license found in the LICENSE file in the root directory of the source tree.

from coremltools.optimize.torch.quantization._coreml_quantizer import CoreMLQuantizer
@@ -0,0 +1,107 @@
# Copyright © 2024 Apple Inc. All rights reserved.
#
# Please refer to the license found in the LICENSE file in the root directory of the source tree.

import numpy as np
import pytest
from typing import Tuple

import torch
from torch._export import capture_pre_autograd_graph
from torch.ao.quantization.quantize_pt2e import convert_pt2e, prepare_pt2e, prepare_qat_pt2e

from executorch.backends.apple.coreml.quantizer.coreml_quantizer import CoreMLQuantizer

from coremltools.optimize.torch.quantization.quantization_config import (
    LinearQuantizerConfig,
    QuantizationScheme,
)


class TestCoreMLQuantizer:
    @staticmethod
    def quantize_and_compare(
        model,
        example_inputs: Tuple[torch.Tensor],
        quantization_type: str,
    ) -> None:
        assert quantization_type in {"PTQ", "QAT"}

        pre_autograd_aten_dialect = capture_pre_autograd_graph(model, example_inputs)

        quantization_config = LinearQuantizerConfig.from_dict(
            {
                "global_config": {
                    "quantization_scheme": QuantizationScheme.symmetric,
                    "milestones": [0, 0, 10, 10],
                    "activation_dtype": torch.quint8,
                    "weight_dtype": torch.qint8,
                    "weight_per_channel": True,
                }
            }
        )
        quantizer = CoreMLQuantizer(quantization_config)

        if quantization_type == "PTQ":
            prepared_graph = prepare_pt2e(pre_autograd_aten_dialect, quantizer)
        elif quantization_type == "QAT":
            prepared_graph = prepare_qat_pt2e(pre_autograd_aten_dialect, quantizer)

        prepared_graph(*example_inputs)
        converted_graph = convert_pt2e(prepared_graph)

        model_output = model(*example_inputs).detach().numpy()
        quantized_output = converted_graph(*example_inputs).detach().numpy()
        np.testing.assert_allclose(quantized_output, model_output, rtol=5e-2, atol=5e-2)

    @pytest.mark.parametrize("quantization_type", ("PTQ", "QAT"))
    def test_conv_relu(self, quantization_type):
        SHAPE = (1, 3, 256, 256)

        class Model(torch.nn.Module):
            def __init__(self) -> None:
                super().__init__()
                self.conv = torch.nn.Conv2d(
                    in_channels=3, out_channels=16, kernel_size=3, padding=1
                )
                self.relu = torch.nn.ReLU()

            def forward(self, x: torch.Tensor) -> torch.Tensor:
                a = self.conv(x)
                return self.relu(a)

        model = Model()

        example_inputs = (torch.randn(SHAPE),)
        self.quantize_and_compare(
            model,
            example_inputs,
            quantization_type,
        )

    @pytest.mark.parametrize("quantization_type", ("PTQ", "QAT"))
    def test_linear(self, quantization_type):
        SHAPE = (1, 5)

        class Model(torch.nn.Module):
            def __init__(self) -> None:
                super().__init__()
                self.linear = torch.nn.Linear(5, 10)

            def forward(self, x: torch.Tensor) -> torch.Tensor:
                return self.linear(x)

        model = Model()

        example_inputs = (torch.randn(SHAPE),)
        self.quantize_and_compare(
            model,
            example_inputs,
            quantization_type,
        )


if __name__ == "__main__":
    test_runner = TestCoreMLQuantizer()
    test_runner.test_conv_relu("PTQ")
    test_runner.test_linear("QAT")
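For context, the test above only round-trips the quantized graph in eager mode. A graph quantized this way would typically be exported and lowered to an ExecuTorch program afterwards. The following is a minimal sketch of that downstream step, not part of this commit: it reuses the PTQ flow from the test, assumes the torch.export and executorch.exir.to_edge path available around this release, omits CoreML partitioning/delegation, and the output file name is purely illustrative.

import torch
from torch._export import capture_pre_autograd_graph
from torch.ao.quantization.quantize_pt2e import convert_pt2e, prepare_pt2e

from executorch.exir import to_edge
from executorch.backends.apple.coreml.quantizer.coreml_quantizer import CoreMLQuantizer
from coremltools.optimize.torch.quantization.quantization_config import (
    LinearQuantizerConfig,
    QuantizationScheme,
)

# Small eager model and example inputs, mirroring the linear test above.
model = torch.nn.Linear(5, 10).eval()
example_inputs = (torch.randn(1, 5),)

# PTQ flow as in quantize_and_compare: capture, prepare, calibrate, convert.
captured = capture_pre_autograd_graph(model, example_inputs)
config = LinearQuantizerConfig.from_dict(
    {
        "global_config": {
            "quantization_scheme": QuantizationScheme.symmetric,
            "milestones": [0, 0, 10, 10],
            "activation_dtype": torch.quint8,
            "weight_dtype": torch.qint8,
            "weight_per_channel": True,
        }
    }
)
quantizer = CoreMLQuantizer(config)
prepared = prepare_pt2e(captured, quantizer)
prepared(*example_inputs)  # single calibration pass with the example inputs
converted = convert_pt2e(prepared)

# Export the quantized graph and lower it to an ExecuTorch program
# (CoreML delegation is omitted in this sketch).
exported_program = torch.export.export(converted, example_inputs)
executorch_program = to_edge(exported_program).to_executorch()

# Serialize the program so the ExecuTorch runtime can load it.
with open("linear_quantized.pte", "wb") as f:
    f.write(executorch_program.buffer)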