diff --git a/.noir-sync-commit b/.noir-sync-commit index 07ca104bbc0..ca337fe3b35 100644 --- a/.noir-sync-commit +++ b/.noir-sync-commit @@ -1 +1 @@ -130d99125a09110a3ee3e877d88d83b5aa37f369 +819a53a7db921f40febc0e480539df3bfaf917a2 diff --git a/noir/noir-repo/.github/actions/download-nargo/action.yml b/noir/noir-repo/.github/actions/download-nargo/action.yml new file mode 100644 index 00000000000..b727520978a --- /dev/null +++ b/noir/noir-repo/.github/actions/download-nargo/action.yml @@ -0,0 +1,18 @@ +name: Download Nargo +description: Downloads the nargo binary from an artifact and adds it to the path + +runs: + using: composite + steps: + - name: Download nargo binary + uses: actions/download-artifact@v4 + with: + name: nargo + path: ./nargo + + - name: Set nargo on PATH + shell: bash + run: | + nargo_binary="${{ github.workspace }}/nargo/nargo" + chmod +x $nargo_binary + echo "$(dirname $nargo_binary)" >> $GITHUB_PATH diff --git a/noir/noir-repo/.github/critical_libraries_status/noir-lang/sha256/.failures.jsonl b/noir/noir-repo/.github/critical_libraries_status/noir-lang/sha256/.failures.jsonl new file mode 100644 index 00000000000..e69de29bb2d diff --git a/noir/noir-repo/.github/scripts/merge-bench-reports.sh b/noir/noir-repo/.github/scripts/merge-bench-reports.sh deleted file mode 100755 index 23a62874148..00000000000 --- a/noir/noir-repo/.github/scripts/merge-bench-reports.sh +++ /dev/null @@ -1,27 +0,0 @@ -#!/bin/bash -set -eu - -echo "Merging reports" - -REPORT_NAME=$1 -NAME_PLURAL=""$REPORT_NAME"s" - -combined_reports="[]" - -# Iterate over each report and merge them -for report in ./reports/*; do - # The report is saved under ./$REPORT_NAME_{ matrix_report }/$REPORT_NAME_{ matrix_report }.json - FILE_PATH=$(echo $(ls $report)) - - # Extract the $NAME_PLURAL array from each report and merge it - combined_reports=$(jq '[."'"$NAME_PLURAL"'"[]] + '"$combined_reports" <<< "$(cat "$report/$FILE_PATH")") -done - -combined_reports=$(jq '[."'$NAME_PLURAL'"[]] + '"$combined_reports" <<< "$(cat ./$REPORT_NAME.json)") - -# Wrap the merged memory reports into a new object as to keep the $NAME_PLURAL key -final_report="{\"$NAME_PLURAL\": $combined_reports}" - -echo "$final_report" > $REPORT_NAME.json - -cat $REPORT_NAME.json \ No newline at end of file diff --git a/noir/noir-repo/.github/workflows/formatting.yml b/noir/noir-repo/.github/workflows/formatting.yml index 4e836ef2493..34216c22e01 100644 --- a/noir/noir-repo/.github/workflows/formatting.yml +++ b/noir/noir-repo/.github/workflows/formatting.yml @@ -123,18 +123,7 @@ jobs: uses: actions/checkout@v4 - name: Download nargo binary - uses: actions/download-artifact@v4 - with: - name: nargo - path: ./nargo - - - name: Set nargo on PATH - run: | - nargo_binary="${{ github.workspace }}/nargo/nargo" - chmod +x $nargo_binary - echo "$(dirname $nargo_binary)" >> $GITHUB_PATH - export PATH="$PATH:$(dirname $nargo_binary)" - nargo -V + uses: ./.github/actions/download-nargo - name: Format stdlib working-directory: ./noir_stdlib diff --git a/noir/noir-repo/.github/workflows/reports.yml b/noir/noir-repo/.github/workflows/reports.yml index 2a1dbc078ac..ceb29e18ef3 100644 --- a/noir/noir-repo/.github/workflows/reports.yml +++ b/noir/noir-repo/.github/workflows/reports.yml @@ -54,18 +54,7 @@ jobs: echo "$HOME/.bb/" >> $GITHUB_PATH - name: Download nargo binary - uses: actions/download-artifact@v4 - with: - name: nargo - path: ./nargo - - - name: Set nargo on PATH - run: | - nargo_binary="${{ github.workspace }}/nargo/nargo" - chmod +x $nargo_binary - 
echo "$(dirname $nargo_binary)" >> $GITHUB_PATH - export PATH="$PATH:$(dirname $nargo_binary)" - nargo -V + uses: ./.github/actions/download-nargo - name: Generate gates report working-directory: ./test_programs @@ -100,18 +89,7 @@ jobs: - uses: actions/checkout@v4 - name: Download nargo binary - uses: actions/download-artifact@v4 - with: - name: nargo - path: ./nargo - - - name: Set nargo on PATH - run: | - nargo_binary="${{ github.workspace }}/nargo/nargo" - chmod +x $nargo_binary - echo "$(dirname $nargo_binary)" >> $GITHUB_PATH - export PATH="$PATH:$(dirname $nargo_binary)" - nargo -V + uses: ./.github/actions/download-nargo - name: Generate Brillig bytecode size report working-directory: ./test_programs @@ -160,18 +138,7 @@ jobs: - uses: actions/checkout@v4 - name: Download nargo binary - uses: actions/download-artifact@v4 - with: - name: nargo - path: ./nargo - - - name: Set nargo on PATH - run: | - nargo_binary="${{ github.workspace }}/nargo/nargo" - chmod +x $nargo_binary - echo "$(dirname $nargo_binary)" >> $GITHUB_PATH - export PATH="$PATH:$(dirname $nargo_binary)" - nargo -V + uses: ./.github/actions/download-nargo - name: Generate Brillig execution report working-directory: ./test_programs @@ -220,18 +187,7 @@ jobs: - uses: actions/checkout@v4 - name: Download nargo binary - uses: actions/download-artifact@v4 - with: - name: nargo - path: ./nargo - - - name: Set nargo on PATH - run: | - nargo_binary="${{ github.workspace }}/nargo/nargo" - chmod +x $nargo_binary - echo "$(dirname $nargo_binary)" >> $GITHUB_PATH - export PATH="$PATH:$(dirname $nargo_binary)" - nargo -V + uses: ./.github/actions/download-nargo - name: Generate Memory report working-directory: ./test_programs @@ -272,18 +228,7 @@ jobs: - uses: actions/checkout@v4 - name: Download nargo binary - uses: actions/download-artifact@v4 - with: - name: nargo - path: ./nargo - - - name: Set nargo on PATH - run: | - nargo_binary="${{ github.workspace }}/nargo/nargo" - chmod +x $nargo_binary - echo "$(dirname $nargo_binary)" >> $GITHUB_PATH - export PATH="$PATH:$(dirname $nargo_binary)" - nargo -V + uses: ./.github/actions/download-nargo - name: Generate Compilation report working-directory: ./test_programs @@ -336,29 +281,19 @@ jobs: name: External repo compilation and execution reports - ${{ matrix.project.repo }}/${{ matrix.project.path }} steps: - - name: Download nargo binary - uses: actions/download-artifact@v4 - with: - name: nargo - path: ./nargo - - - name: Set nargo on PATH - run: | - nargo_binary="${{ github.workspace }}/nargo/nargo" - chmod +x $nargo_binary - echo "$(dirname $nargo_binary)" >> $GITHUB_PATH - export PATH="$PATH:$(dirname $nargo_binary)" - nargo -V - - uses: actions/checkout@v4 with: path: scripts sparse-checkout: | + .github/actions/download-nargo/action.yml test_programs/compilation_report.sh test_programs/execution_report.sh test_programs/parse_time.sh sparse-checkout-cone-mode: false + - name: Download nargo binary + uses: ./scripts/.github/actions/download-nargo + - name: Checkout uses: actions/checkout@v4 with: @@ -451,27 +386,17 @@ jobs: name: External repo memory report - ${{ matrix.project.repo }}/${{ matrix.project.path }} steps: - - name: Download nargo binary - uses: actions/download-artifact@v4 - with: - name: nargo - path: ./nargo - - - name: Set nargo on PATH - run: | - nargo_binary="${{ github.workspace }}/nargo/nargo" - chmod +x $nargo_binary - echo "$(dirname $nargo_binary)" >> $GITHUB_PATH - export PATH="$PATH:$(dirname $nargo_binary)" - nargo -V - - uses: actions/checkout@v4 
with: path: scripts sparse-checkout: | + .github/actions/download-nargo/action.yml test_programs/memory_report.sh test_programs/parse_memory.sh sparse-checkout-cone-mode: false + + - name: Download nargo binary + uses: ./scripts/.github/actions/download-nargo - name: Checkout uses: actions/checkout@v4 @@ -552,21 +477,22 @@ jobs: uses: actions/download-artifact@v4 with: name: in_progress_compilation_report + path: ./reports - name: Download matrix compilation reports uses: actions/download-artifact@v4 with: pattern: compilation_report_* path: ./reports + merge-multiple: true - name: Merge compilation reports using jq run: | - mv ./.github/scripts/merge-bench-reports.sh merge-bench-reports.sh - ./merge-bench-reports.sh compilation_report - jq ".compilation_reports | map({name: .artifact_name, value: (.time[:-1] | tonumber), unit: \"s\"}) " ./compilation_report.json > time_bench.json + # Github actions seems to not expand "**" in globs by default. + shopt -s globstar + jq --slurp '. | flatten' ./reports/* | tee time_bench.json - name: Store benchmark result - continue-on-error: true uses: benchmark-action/github-action-benchmark@4de1bed97a47495fc4c5404952da0499e31f5c29 with: name: "Compilation Time" @@ -607,15 +533,15 @@ jobs: with: pattern: compilation_mem_report_* path: ./reports + merge-multiple: true - name: Merge memory reports using jq run: | - mv ./.github/scripts/merge-bench-reports.sh merge-bench-reports.sh - ./merge-bench-reports.sh memory_report - jq ".memory_reports | map({name: .artifact_name, value: (.peak_memory | tonumber), unit: \"MB\"}) " ./memory_report.json > memory_bench.json + # Github actions seems to not expand "**" in globs by default. + shopt -s globstar + jq --slurp '. | flatten' ./reports/* | tee memory_bench.json - name: Store benchmark result - continue-on-error: true uses: benchmark-action/github-action-benchmark@4de1bed97a47495fc4c5404952da0499e31f5c29 with: name: "Compilation Memory" @@ -656,17 +582,15 @@ jobs: with: pattern: execution_mem_report_* path: ./reports + merge-multiple: true - name: Merge memory reports using jq run: | - mv ./.github/scripts/merge-bench-reports.sh merge-bench-reports.sh - ./merge-bench-reports.sh memory_report - # Rename the memory report as to not clash with the compilation memory report file name - cp memory_report.json execution_memory_report.json - jq ".memory_reports | map({name: .artifact_name, value: (.peak_memory | tonumber), unit: \"MB\"}) " ./execution_memory_report.json > memory_bench.json + # Github actions seems to not expand "**" in globs by default. + shopt -s globstar + jq --slurp '. | flatten' ./reports/* | tee memory_bench.json - name: Store benchmark result - continue-on-error: true uses: benchmark-action/github-action-benchmark@4de1bed97a47495fc4c5404952da0499e31f5c29 with: name: "Execution Memory" @@ -708,15 +632,15 @@ jobs: with: pattern: execution_report_* path: ./reports + merge-multiple: true - name: Merge execution reports using jq run: | - mv ./.github/scripts/merge-bench-reports.sh merge-bench-reports.sh - ./merge-bench-reports.sh execution_report - jq ".execution_reports | map({name: .artifact_name, value: (.time[:-1] | tonumber), unit: \"s\"}) " ./execution_report.json > time_bench.json + # Github actions seems to not expand "**" in globs by default. + shopt -s globstar + jq --slurp '. 
| flatten' ./reports/* | tee time_bench.json - name: Store benchmark result - continue-on-error: true uses: benchmark-action/github-action-benchmark@4de1bed97a47495fc4c5404952da0499e31f5c29 with: name: "Execution Time" diff --git a/noir/noir-repo/.github/workflows/test-js-packages.yml b/noir/noir-repo/.github/workflows/test-js-packages.yml index 6b40a06e0a2..d71fbb0cfd5 100644 --- a/noir/noir-repo/.github/workflows/test-js-packages.yml +++ b/noir/noir-repo/.github/workflows/test-js-packages.yml @@ -13,6 +13,25 @@ concurrency: cancel-in-progress: true jobs: + critical-library-list: + name: Load critical library list + runs-on: ubuntu-22.04 + outputs: + libraries: ${{ steps.get_critical_libraries.outputs.libraries }} + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Build list of libraries + id: get_critical_libraries + run: | + LIBRARIES=$(yq ./EXTERNAL_NOIR_LIBRARIES.yml -o json | jq -c '.libraries | map({ repo, path: (.path // ""), timeout:(.timeout // 2), nargo_args })') + echo "libraries=$LIBRARIES" + echo "libraries=$LIBRARIES" >> $GITHUB_OUTPUT + env: + GH_TOKEN: ${{ github.token }} + yarn-lock: runs-on: ubuntu-22.04 timeout-minutes: 30 @@ -244,10 +263,7 @@ jobs: uses: actions/checkout@v4 - name: Download nargo binary - uses: actions/download-artifact@v4 - with: - name: nargo - path: ./nargo + uses: ./.github/actions/download-nargo - name: Download artifact uses: actions/download-artifact@v4 @@ -261,14 +277,6 @@ jobs: name: noirc_abi_wasm path: ./tooling/noirc_abi_wasm - - name: Set nargo on PATH - run: | - nargo_binary="${{ github.workspace }}/nargo/nargo" - chmod +x $nargo_binary - echo "$(dirname $nargo_binary)" >> $GITHUB_PATH - export PATH="$PATH:$(dirname $nargo_binary)" - nargo -V - - name: Install Yarn dependencies uses: ./.github/actions/setup @@ -300,17 +308,7 @@ jobs: uses: ./.github/actions/setup - name: Download nargo binary - uses: actions/download-artifact@v4 - with: - name: nargo - path: ./nargo - - - name: Set nargo on PATH - run: | - nargo_binary="${{ github.workspace }}/nargo/nargo" - chmod +x $nargo_binary - echo "$(dirname $nargo_binary)" >> $GITHUB_PATH - export PATH="$PATH:$(dirname $nargo_binary)" + uses: ./.github/actions/download-nargo - name: Build fixtures run: yarn workspace @noir-lang/noir_wasm test:build_fixtures @@ -335,10 +333,7 @@ jobs: uses: actions/checkout@v4 - name: Download nargo binary - uses: actions/download-artifact@v4 - with: - name: nargo - path: ./nargo + uses: ./.github/actions/download-nargo - name: Download acvm_js package artifact uses: actions/download-artifact@v4 @@ -352,14 +347,6 @@ jobs: name: noirc_abi_wasm path: ./tooling/noirc_abi_wasm - - name: Set nargo on PATH - run: | - nargo_binary="${{ github.workspace }}/nargo/nargo" - chmod +x $nargo_binary - echo "$(dirname $nargo_binary)" >> $GITHUB_PATH - export PATH="$PATH:$(dirname $nargo_binary)" - nargo -V - - name: Install Yarn dependencies uses: ./.github/actions/setup @@ -388,10 +375,7 @@ jobs: echo "$HOME/.bb/" >> $GITHUB_PATH - name: Download nargo binary - uses: actions/download-artifact@v4 - with: - name: nargo - path: ./nargo + uses: ./.github/actions/download-nargo - name: Download acvm_js package artifact uses: actions/download-artifact@v4 @@ -411,14 +395,6 @@ jobs: name: noirc_abi_wasm path: ./tooling/noirc_abi_wasm - - name: Set nargo on PATH - run: | - nargo_binary="${{ github.workspace }}/nargo/nargo" - chmod +x $nargo_binary - echo "$(dirname $nargo_binary)" >> $GITHUB_PATH - export PATH="$PATH:$(dirname $nargo_binary)" - nargo -V - - name: 
Install Yarn dependencies uses: ./.github/actions/setup @@ -493,25 +469,13 @@ jobs: with: version: nightly-8660e5b941fe7f4d67e246cfd3dafea330fb53b1 - - name: Install `bb` run: | ./scripts/install_bb.sh echo "$HOME/.bb/" >> $GITHUB_PATH - name: Download nargo binary - uses: actions/download-artifact@v4 - with: - name: nargo - path: ./nargo - - - name: Set nargo on PATH - run: | - nargo_binary="${{ github.workspace }}/nargo/nargo" - chmod +x $nargo_binary - echo "$(dirname $nargo_binary)" >> $GITHUB_PATH - export PATH="$PATH:$(dirname $nargo_binary)" - nargo -V + uses: ./.github/actions/download-nargo - name: Run `prove_and_verify` working-directory: ./examples/prove_and_verify @@ -521,25 +485,6 @@ jobs: working-directory: ./examples/codegen_verifier run: ./test.sh - critical-library-list: - name: Load critical library list - runs-on: ubuntu-22.04 - outputs: - libraries: ${{ steps.get_critical_libraries.outputs.libraries }} - - steps: - - name: Checkout - uses: actions/checkout@v4 - - - name: Build list of libraries - id: get_critical_libraries - run: | - LIBRARIES=$(grep -Po "^https://github.com/\K.+" ./CRITICAL_NOIR_LIBRARIES | jq -R -s -c 'split("\n") | map(select(. != "")) | map({ repo: ., path: ""})') - echo "libraries=$LIBRARIES" - echo "libraries=$LIBRARIES" >> $GITHUB_OUTPUT - env: - GH_TOKEN: ${{ github.token }} - external-repo-checks: needs: [build-nargo, critical-library-list] runs-on: ubuntu-22.04 @@ -548,17 +493,7 @@ jobs: fail-fast: false matrix: project: ${{ fromJson( needs.critical-library-list.outputs.libraries )}} - include: - - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/aztec-nr } - - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-contracts } - - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-protocol-circuits/crates/blob } - - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-protocol-circuits/crates/parity-lib } - - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-protocol-circuits/crates/private-kernel-lib } - - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-protocol-circuits/crates/reset-kernel-lib } - - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-protocol-circuits/crates/types } - # Use 1 test threads for rollup-lib because each test requires a lot of memory, and multiple ones in parallel exceed the maximum memory limit. 
- - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-protocol-circuits/crates/rollup-lib, nargo_args: "--test-threads 1" } - + name: Check external repo - ${{ matrix.project.repo }}/${{ matrix.project.path }} steps: - name: Checkout @@ -574,18 +509,7 @@ jobs: ref: ${{ matrix.project.ref }} - name: Download nargo binary - uses: actions/download-artifact@v4 - with: - name: nargo - path: ./nargo - - - name: Set nargo on PATH - run: | - nargo_binary="${{ github.workspace }}/nargo/nargo" - chmod +x $nargo_binary - echo "$(dirname $nargo_binary)" >> $GITHUB_PATH - export PATH="$PATH:$(dirname $nargo_binary)" - nargo -V + uses: ./noir-repo/.github/actions/download-nargo - name: Remove requirements on compiler version working-directory: ./test-repo @@ -598,19 +522,24 @@ jobs: id: test_report working-directory: ./test-repo/${{ matrix.project.path }} run: | - output_file=${{ github.workspace }}/noir-repo/.github/critical_libraries_status/${{ matrix.project.repo }}/${{ matrix.project.path }}.actual.jsonl BEFORE=$SECONDS nargo test --silence-warnings --skip-brillig-constraints-check --format json ${{ matrix.project.nargo_args }} | tee $output_file TIME=$(($SECONDS-$BEFORE)) + if [ "$TIME" -gt "${{ matrix.project.timeout }}" ]; then + # Library testing time has regressed past the set timeout. + # Don't bump this timeout without understanding why this has happened and confirming that you're not the cause. + exit 1 + fi + NAME=${{ matrix.project.repo }}/${{ matrix.project.path }} # Replace any slashes with underscores NAME=${NAME//\//_} TEST_REPORT_NAME=test_report_$NAME echo "test_report_name=$TEST_REPORT_NAME" >> $GITHUB_OUTPUT - jq --null-input "{ test_reports: [{ name: \"$NAME\", value: (\"$TIME\" | tonumber), unit: \"s\" }]}" > $TEST_REPORT_NAME.json + jq --null-input "[{ name: \"$NAME\", value: (\"$TIME\" | tonumber), unit: \"s\" }]" > $TEST_REPORT_NAME.json if [ !
-s $output_file ]; then # The file is empty so we delete it to signal that `nargo test` failed before it could run any tests @@ -618,12 +547,13 @@ jobs: fi env: NARGO_IGNORE_TEST_FAILURES_FROM_FOREIGN_CALLS: true - + - name: Compare test results working-directory: ./noir-repo run: .github/scripts/check_test_results.sh .github/critical_libraries_status/${{ matrix.project.repo }}/${{ matrix.project.path }}.failures.jsonl .github/critical_libraries_status/${{ matrix.project.repo }}/${{ matrix.project.path }}.actual.jsonl - name: Upload test report + if: ${{ matrix.project.timeout > 10 }} # We want to avoid recording benchmarking for a ton of tiny libraries, these should be covered with aggressive timeouts uses: actions/upload-artifact@v4 with: name: ${{ steps.test_report.outputs.test_report_name }} @@ -637,6 +567,11 @@ jobs: timeout-minutes: 30 name: Compile `noir-contracts` zero inliner aggressiveness steps: + - name: Checkout + uses: actions/checkout@v4 + with: + path: noir-repo + - name: Checkout uses: actions/checkout@v4 with: @@ -644,18 +579,7 @@ jobs: path: test-repo - name: Download nargo binary - uses: actions/download-artifact@v4 - with: - name: nargo - path: ./nargo - - - name: Set nargo on PATH - run: | - nargo_binary="${{ github.workspace }}/nargo/nargo" - chmod +x $nargo_binary - echo "$(dirname $nargo_binary)" >> $GITHUB_PATH - export PATH="$PATH:$(dirname $nargo_binary)" - nargo -V + uses: ./noir-repo/.github/actions/download-nargo - name: Remove requirements on compiler version working-directory: ./test-repo @@ -689,16 +613,15 @@ jobs: with: pattern: test_report_* path: ./reports + merge-multiple: true - name: Merge test reports using jq run: | - jq --null-input "{ test_reports: [] }" > test_report.json - mv ./.github/scripts/merge-bench-reports.sh merge-bench-reports.sh - ./merge-bench-reports.sh test_report - jq ".test_reports" < ./test_report.json > test_bench.json + # Github actions seems to not expand "**" in globs by default. + shopt -s globstar + jq --slurp '. 
| flatten' ./reports/* | tee test_bench.json - name: Store benchmark result - continue-on-error: true uses: benchmark-action/github-action-benchmark@4de1bed97a47495fc4c5404952da0499e31f5c29 with: name: "Test Suite Duration" diff --git a/noir/noir-repo/EXTERNAL_NOIR_LIBRARIES.yml b/noir/noir-repo/EXTERNAL_NOIR_LIBRARIES.yml new file mode 100644 index 00000000000..5e95d435f3e --- /dev/null +++ b/noir/noir-repo/EXTERNAL_NOIR_LIBRARIES.yml @@ -0,0 +1,79 @@ + +libraries: + noir_check_shuffle: + repo: noir-lang/noir_check_shuffle + timeout: 2 + ec: + repo: noir-lang/ec + timeout: 2 + eddsa: + repo: noir-lang/eddsa + timeout: 2 + mimc: + repo: noir-lang/mimc + timeout: 2 + schnorr: + repo: noir-lang/schnorr + timeout: 2 + noir_sort: + repo: noir-lang/noir_sort + timeout: 2 + noir-edwards: + repo: noir-lang/noir-edwards + timeout: 2 + noir-bignum: + repo: noir-lang/noir-bignum + noir_bigcurve: + repo: noir-lang/noir_bigcurve + noir_base64: + repo: noir-lang/noir_base64 + timeout: 2 + noir_string_search: + repo: noir-lang/noir_string_search + timeout: 2 + sparse_array: + repo: noir-lang/sparse_array + timeout: 2 + noir_rsa: + repo: noir-lang/noir_rsa + timeout: 2 + noir_json_parser: + repo: noir-lang/noir_json_parser + timeout: 10 + sha256: + repo: noir-lang/sha256 + timeout: 3 + aztec_nr: + repo: AztecProtocol/aztec-packages + path: noir-projects/aztec-nr + timeout: 60 + noir_contracts: + repo: AztecProtocol/aztec-packages + path: noir-projects/noir-contracts + timeout: 60 + blob: + repo: AztecProtocol/aztec-packages + path: noir-projects/noir-protocol-circuits/crates/blob + timeout: 70 + protocol_circuits_parity_lib: + repo: AztecProtocol/aztec-packages + path: noir-projects/noir-protocol-circuits/crates/parity-lib + timeout: 4 + protocol_circuits_private_kernel_lib: + repo: AztecProtocol/aztec-packages + path: noir-projects/noir-protocol-circuits/crates/private-kernel-lib + timeout: 250 + protocol_circuits_reset_kernel_lib: + repo: AztecProtocol/aztec-packages + path: noir-projects/noir-protocol-circuits/crates/reset-kernel-lib + timeout: 15 + protocol_circuits_types: + repo: AztecProtocol/aztec-packages + path: noir-projects/noir-protocol-circuits/crates/types + timeout: 60 + protocol_circuits_rollup_lib: + repo: AztecProtocol/aztec-packages + path: noir-projects/noir-protocol-circuits/crates/rollup-lib + timeout: 300 + # Use 1 test threads for rollup-lib because each test requires a lot of memory, and multiple ones in parallel exceed the maximum memory limit. + nargo_args: "--test-threads 1" diff --git a/noir/noir-repo/acvm-repo/acvm_js/build.sh b/noir/noir-repo/acvm-repo/acvm_js/build.sh index c07d2d8a4c1..16fb26e55db 100755 --- a/noir/noir-repo/acvm-repo/acvm_js/build.sh +++ b/noir/noir-repo/acvm-repo/acvm_js/build.sh @@ -25,7 +25,7 @@ function run_if_available { require_command jq require_command cargo require_command wasm-bindgen -#require_command wasm-opt +require_command wasm-opt self_path=$(dirname "$(readlink -f "$0")") pname=$(cargo read-manifest | jq -r '.name') diff --git a/noir/noir-repo/compiler/integration-tests/package.json b/noir/noir-repo/compiler/integration-tests/package.json index e33179f31e7..053e9efeed2 100644 --- a/noir/noir-repo/compiler/integration-tests/package.json +++ b/noir/noir-repo/compiler/integration-tests/package.json @@ -13,7 +13,7 @@ "lint": "NODE_NO_WARNINGS=1 eslint . 
--ext .ts --ignore-path ./.eslintignore --max-warnings 0" }, "dependencies": { - "@aztec/bb.js": "portal:../../../../barretenberg/ts", + "@aztec/bb.js": "0.72.1", "@noir-lang/noir_js": "workspace:*", "@noir-lang/noir_wasm": "workspace:*", "@nomicfoundation/hardhat-chai-matchers": "^2.0.0", diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/acir/acir_variable.rs b/noir/noir-repo/compiler/noirc_evaluator/src/acir/acir_variable.rs index 41e2c2dad1e..ea8edc2e754 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/acir/acir_variable.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/acir/acir_variable.rs @@ -549,12 +549,12 @@ impl> AcirContext { &mut self, lhs: AcirVar, rhs: AcirVar, + predicate: AcirVar, assert_message: Option>, ) -> Result<(), RuntimeError> { let diff_var = self.sub_var(lhs, rhs)?; - let one = self.add_constant(F::one()); - let _ = self.inv_var(diff_var, one)?; + let _ = self.inv_var(diff_var, predicate)?; if let Some(payload) = assert_message { self.acir_ir .assertion_payloads @@ -613,7 +613,7 @@ impl> AcirContext { } NumericType::Signed { bit_size } => { let (quotient_var, _remainder_var) = - self.signed_division_var(lhs, rhs, bit_size)?; + self.signed_division_var(lhs, rhs, bit_size, predicate)?; Ok(quotient_var) } } @@ -1050,6 +1050,7 @@ impl> AcirContext { lhs: AcirVar, rhs: AcirVar, bit_size: u32, + predicate: AcirVar, ) -> Result<(AcirVar, AcirVar), RuntimeError> { // We derive the signed division from the unsigned euclidean division. // note that this is not euclidean division! @@ -1079,7 +1080,7 @@ impl> AcirContext { // Performs the division using the unsigned values of lhs and rhs let (q1, r1) = - self.euclidean_division_var(unsigned_lhs, unsigned_rhs, bit_size - 1, one)?; + self.euclidean_division_var(unsigned_lhs, unsigned_rhs, bit_size - 1, predicate)?; // Unsigned to signed: derive q and r from q1,r1 and the signs of lhs and rhs // Quotient sign is lhs sign * rhs sign, whose resulting sign bit is the XOR of the sign bits @@ -1117,7 +1118,9 @@ impl> AcirContext { }; let (_, remainder_var) = match numeric_type { - NumericType::Signed { bit_size } => self.signed_division_var(lhs, rhs, bit_size)?, + NumericType::Signed { bit_size } => { + self.signed_division_var(lhs, rhs, bit_size, predicate)? 
+ } _ => self.euclidean_division_var(lhs, rhs, bit_size, predicate)?, }; Ok(remainder_var) diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/acir/mod.rs b/noir/noir-repo/compiler/noirc_evaluator/src/acir/mod.rs index 46c9681ea8d..63a5a382299 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/acir/mod.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/acir/mod.rs @@ -762,7 +762,12 @@ impl<'a> Context<'a> { None }; - self.acir_context.assert_neq_var(lhs, rhs, assert_payload)?; + self.acir_context.assert_neq_var( + lhs, + rhs, + self.current_side_effects_enabled_var, + assert_payload, + )?; } Instruction::Cast(value_id, _) => { let acir_var = self.convert_numeric_value(*value_id, dfg)?; diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_globals.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_globals.rs index 30709f2a6b2..5e7f250a6b0 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_globals.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_globals.rs @@ -313,6 +313,7 @@ mod tests { 2, "Expected just a `Return`, but got more than a single opcode" ); + // TODO: Bring this back (https://github.com/noir-lang/noir/issues/7306) // assert!(matches!(&artifact.byte_code[0], Opcode::Return)); } else if func_id.to_u32() == 2 { assert_eq!( @@ -430,6 +431,7 @@ mod tests { 30, "Expected enough opcodes to initialize the globals" ); + // TODO: Bring this back (https://github.com/noir-lang/noir/issues/7306) // let Opcode::Const { destination, bit_size, value } = &artifact.byte_code[0] else { // panic!("First opcode is expected to be `Const`"); // }; diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa.rs index c17fc2d0b7a..d916cd534a7 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa.rs @@ -494,6 +494,11 @@ impl SsaBuilder { } fn print(mut self, msg: &str) -> Self { + // Always normalize if we are going to print at least one of the passes + if !matches!(self.ssa_logging, SsaLogging::None) { + self.ssa.normalize_ids(); + } + let print_ssa_pass = match &self.ssa_logging { SsaLogging::None => false, SsaLogging::All => true, @@ -505,7 +510,6 @@ impl SsaBuilder { } }; if print_ssa_pass { - self.ssa.normalize_ids(); println!("After {msg}:\n{}", self.ssa); } self diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction.rs index a44226096e4..a0bab3e8e55 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction.rs @@ -595,13 +595,17 @@ impl Instruction { match binary.operator { BinaryOp::Add { unchecked: false } | BinaryOp::Sub { unchecked: false } - | BinaryOp::Mul { unchecked: false } - | BinaryOp::Div - | BinaryOp::Mod => { + | BinaryOp::Mul { unchecked: false } => { // Some binary math can overflow or underflow, but this is only the case // for unsigned types (here we assume the type of binary.lhs is the same) dfg.type_of_value(binary.rhs).is_unsigned() } + BinaryOp::Div | BinaryOp::Mod => { + // Div and Mod require a predicate if the RHS may be zero. 
+ dfg.get_numeric_constant(binary.rhs) + .map(|rhs| !rhs.is_zero()) + .unwrap_or(true) + } BinaryOp::Add { unchecked: true } | BinaryOp::Sub { unchecked: true } | BinaryOp::Mul { unchecked: true } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs index aea6eda193b..092b11ca3ee 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs @@ -98,49 +98,6 @@ impl Ssa { function.constant_fold(false, brillig_info); } - // It could happen that we inlined all calls to a given brillig function. - // In that case it's unused so we can remove it. This is what we check next. - self.remove_unused_brillig_functions(brillig_functions) - } - - fn remove_unused_brillig_functions( - mut self, - mut brillig_functions: BTreeMap, - ) -> Ssa { - // Remove from the above map functions that are called - for function in self.functions.values() { - for block_id in function.reachable_blocks() { - for instruction_id in function.dfg[block_id].instructions() { - let instruction = &function.dfg[*instruction_id]; - let Instruction::Call { func: func_id, arguments: _ } = instruction else { - continue; - }; - - let func_value = &function.dfg[*func_id]; - let Value::Function(func_id) = func_value else { continue }; - - if function.runtime().is_acir() { - brillig_functions.remove(func_id); - } - } - } - } - - // The ones that remain are never called: let's remove them. - for (func_id, func) in &brillig_functions { - // We never want to remove the main function (it could be `unconstrained` or it - // could have been turned into brillig if `--force-brillig` was given). - // We also don't want to remove entry points. 
- let runtime = func.runtime(); - if self.main_id == *func_id - || (runtime.is_entry_point() && matches!(runtime, RuntimeType::Acir(_))) - { - continue; - } - - self.functions.remove(func_id); - } - self } } @@ -1767,4 +1724,65 @@ mod test { let ssa = ssa.purity_analysis().fold_constants_using_constraints(); assert_normalized_ssa_equals(ssa, expected); } + + #[test] + fn does_not_deduplicate_field_divisions_under_different_predicates() { + // Regression test for https://github.com/noir-lang/noir/issues/7283 + let src = " + acir(inline) fn main f0 { + b0(v0: Field, v1: Field, v2: u1): + enable_side_effects v2 + v3 = div v1, v0 + v4 = mul v3, v0 + v5 = not v2 + enable_side_effects v5 + v6 = div v1, v0 + return + } + "; + + let ssa = Ssa::from_str(src).unwrap(); + let ssa = ssa.fold_constants(); + assert_normalized_ssa_equals(ssa, src); + } + + #[test] + fn does_not_deduplicate_unsigned_divisions_under_different_predicates() { + // Regression test for https://github.com/noir-lang/noir/issues/7283 + let src = " + acir(inline) fn main f0 { + b0(v0: u32, v1: u32, v2: u1): + enable_side_effects v2 + v3 = div v1, v0 + v4 = not v2 + enable_side_effects v4 + v5 = div v1, v0 + return + } + "; + + let ssa = Ssa::from_str(src).unwrap(); + let ssa = ssa.fold_constants(); + assert_normalized_ssa_equals(ssa, src); + } + + #[test] + fn does_not_deduplicate_signed_divisions_under_different_predicates() { + // Regression test for https://github.com/noir-lang/noir/issues/7283 + let src = " + acir(inline) fn main f0 { + b0(v0: i32, v1: i32, v2: u1): + enable_side_effects v2 + v3 = div v1, v0 + v4 = not v2 + enable_side_effects v4 + v5 = div v1, v0 + return + } + "; + + let ssa = Ssa::from_str(src).unwrap(); + let ssa = ssa.fold_constants(); + assert_normalized_ssa_equals(ssa, src); + } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/inlining.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/inlining.rs index a8a309a9f12..0b5e37e817e 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/inlining.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/inlining.rs @@ -2,7 +2,7 @@ //! The purpose of this pass is to inline the instructions of each function call //! within the function caller. If all function calls are known, there will only //! be a single function remaining when the pass finishes. -use std::collections::{BTreeMap, BTreeSet, HashSet, VecDeque}; +use std::collections::{BTreeSet, HashSet, VecDeque}; use acvm::acir::AcirField; use im::HashMap; @@ -21,6 +21,10 @@ use crate::ssa::{ ssa_gen::Ssa, }; +pub(super) mod inline_info; + +pub(super) use inline_info::{compute_inline_infos, InlineInfo, InlineInfos}; + /// An arbitrary limit to the maximum number of recursive call /// frames at any point in time. const RECURSION_LIMIT: u32 = 1000; @@ -206,367 +210,6 @@ fn called_functions(func: &Function) -> BTreeSet { called_functions_vec(func).into_iter().collect() } -/// Information about a function to aid the decision about whether to inline it or not. -/// The final decision depends on what we're inlining it into. -#[derive(Default, Debug)] -pub(super) struct InlineInfo { - is_brillig_entry_point: bool, - is_acir_entry_point: bool, - is_recursive: bool, - pub(super) should_inline: bool, - weight: i64, - cost: i64, -} - -impl InlineInfo { - /// Functions which are to be retained, not inlined. 
- pub(super) fn is_inline_target(&self) -> bool { - self.is_brillig_entry_point - || self.is_acir_entry_point - || self.is_recursive - || !self.should_inline - } - - pub(super) fn should_inline(inline_infos: &InlineInfos, called_func_id: FunctionId) -> bool { - inline_infos.get(&called_func_id).map(|info| info.should_inline).unwrap_or_default() - } -} - -type InlineInfos = BTreeMap; - -/// The functions we should inline into (and that should be left in the final program) are: -/// - main -/// - Any Brillig function called from Acir -/// - Some Brillig functions depending on aggressiveness and some metrics -/// - Any Acir functions with a [fold inline type][InlineType::Fold], -/// -/// The returned `InlineInfos` won't have every function in it, only the ones which the algorithm visited. -pub(super) fn compute_inline_infos( - ssa: &Ssa, - inline_no_predicates_functions: bool, - aggressiveness: i64, -) -> InlineInfos { - let mut inline_infos = InlineInfos::default(); - - inline_infos.insert( - ssa.main_id, - InlineInfo { - is_acir_entry_point: ssa.main().runtime().is_acir(), - is_brillig_entry_point: ssa.main().runtime().is_brillig(), - ..Default::default() - }, - ); - - // Handle ACIR functions. - for (func_id, function) in ssa.functions.iter() { - if function.runtime().is_brillig() { - continue; - } - - // If we have not already finished the flattening pass, functions marked - // to not have predicates should be preserved. - let preserve_function = !inline_no_predicates_functions && function.is_no_predicates(); - if function.runtime().is_entry_point() || preserve_function { - inline_infos.entry(*func_id).or_default().is_acir_entry_point = true; - } - - // Any Brillig function called from ACIR is an entry into the Brillig VM. - for called_func_id in called_functions(function) { - if ssa.functions[&called_func_id].runtime().is_brillig() { - inline_infos.entry(called_func_id).or_default().is_brillig_entry_point = true; - } - } - } - - let callers = compute_callers(ssa); - let times_called = compute_times_called(&callers); - - mark_brillig_functions_to_retain( - ssa, - inline_no_predicates_functions, - aggressiveness, - ×_called, - &mut inline_infos, - ); - - inline_infos -} - -/// Compute the time each function is called from any other function. -fn compute_times_called( - callers: &BTreeMap>, -) -> HashMap { - callers - .iter() - .map(|(callee, callers)| { - let total_calls = callers.values().sum(); - (*callee, total_calls) - }) - .collect() -} - -/// Compute for each function the set of functions that call it, and how many times they do so. -fn compute_callers(ssa: &Ssa) -> BTreeMap> { - ssa.functions - .iter() - .flat_map(|(caller_id, function)| { - let called_functions = called_functions_vec(function); - called_functions.into_iter().map(|callee_id| (*caller_id, callee_id)) - }) - .fold( - // Make sure an entry exists even for ones that don't get called. - ssa.functions.keys().map(|id| (*id, BTreeMap::new())).collect(), - |mut acc, (caller_id, callee_id)| { - let callers = acc.entry(callee_id).or_default(); - *callers.entry(caller_id).or_default() += 1; - acc - }, - ) -} - -/// Compute for each function the set of functions called by it, and how many times it does so. -fn compute_callees(ssa: &Ssa) -> BTreeMap> { - ssa.functions - .iter() - .flat_map(|(caller_id, function)| { - let called_functions = called_functions_vec(function); - called_functions.into_iter().map(|callee_id| (*caller_id, callee_id)) - }) - .fold( - // Make sure an entry exists even for ones that don't call anything. 
- ssa.functions.keys().map(|id| (*id, BTreeMap::new())).collect(), - |mut acc, (caller_id, callee_id)| { - let callees = acc.entry(caller_id).or_default(); - *callees.entry(callee_id).or_default() += 1; - acc - }, - ) -} - -/// Compute something like a topological order of the functions, starting with the ones -/// that do not call any other functions, going towards the entry points. When cycles -/// are detected, take the one which are called by the most to break the ties. -/// -/// This can be used to simplify the most often called functions first. -/// -/// Returns the functions paired with their own as well as transitive weight, -/// which accumulates the weight of all the functions they call, as well as own. -pub(super) fn compute_bottom_up_order(ssa: &Ssa) -> Vec<(FunctionId, (usize, usize))> { - let mut order = Vec::new(); - let mut visited = HashSet::new(); - - // Call graph which we'll repeatedly prune to find the "leaves". - let mut callees = compute_callees(ssa); - let callers = compute_callers(ssa); - - // Number of times a function is called, used to break cycles in the call graph by popping the next candidate. - let mut times_called = compute_times_called(&callers).into_iter().collect::>(); - times_called.sort_by_key(|(id, cnt)| { - // Sort by called the *least* by others, as these are less likely to cut the graph when removed. - let called_desc = -(*cnt as i64); - // Sort entries first (last to be popped). - let is_entry_asc = -called_desc.signum(); - // Finally break ties by ID. - (is_entry_asc, called_desc, *id) - }); - - // Start with the weight of the functions in isolation, then accumulate as we pop off the ones they call. - let own_weights = ssa - .functions - .iter() - .map(|(id, f)| (*id, compute_function_own_weight(f))) - .collect::>(); - let mut weights = own_weights.clone(); - - // Seed the queue with functions that don't call anything. - let mut queue = callees - .iter() - .filter_map(|(id, callees)| callees.is_empty().then_some(*id)) - .collect::>(); - - loop { - while let Some(id) = queue.pop_front() { - // Pull the current weight of yet-to-be emitted callees (a nod to mutual recursion). - for (callee, cnt) in &callees[&id] { - if *callee != id { - weights[&id] = weights[&id].saturating_add(cnt.saturating_mul(weights[callee])); - } - } - // Own weight plus the weights accumulated from callees. - let weight = weights[&id]; - let own_weight = own_weights[&id]; - - // Emit the function. - order.push((id, (own_weight, weight))); - visited.insert(id); - - // Update the callers of this function. - for (caller, cnt) in &callers[&id] { - // Update the weight of the caller with the weight of this function. - weights[caller] = weights[caller].saturating_add(cnt.saturating_mul(weight)); - // Remove this function from the callees of the caller. - let callees = callees.get_mut(caller).unwrap(); - callees.remove(&id); - // If the caller doesn't call any other function, enqueue it, - // unless it's the entry function, which is never called by anything, so it should be last. - if callees.is_empty() && !visited.contains(caller) && !callers[caller].is_empty() { - queue.push_back(*caller); - } - } - } - // If we ran out of the queue, maybe there is a cycle; take the next most called function. 
- while let Some((id, _)) = times_called.pop() { - if !visited.contains(&id) { - queue.push_back(id); - break; - } - } - if times_called.is_empty() && queue.is_empty() { - assert_eq!(order.len(), callers.len()); - return order; - } - } -} - -/// Traverse the call graph starting from a given function, marking function to be retained if they are: -/// * recursive functions, or -/// * the cost of inlining outweighs the cost of not doing so -fn mark_functions_to_retain_recursive( - ssa: &Ssa, - inline_no_predicates_functions: bool, - aggressiveness: i64, - times_called: &HashMap, - inline_infos: &mut InlineInfos, - mut explored_functions: im::HashSet, - func: FunctionId, -) { - // Check if we have set any of the fields this method touches. - let decided = |inline_infos: &InlineInfos| { - inline_infos - .get(&func) - .map(|info| info.is_recursive || info.should_inline || info.weight != 0) - .unwrap_or_default() - }; - - // Check if we have already decided on this function - if decided(inline_infos) { - return; - } - - // If recursive, this function won't be inlined - if explored_functions.contains(&func) { - inline_infos.entry(func).or_default().is_recursive = true; - return; - } - explored_functions.insert(func); - - // Decide on dependencies first, so we know their weight. - let called_functions = called_functions_vec(&ssa.functions[&func]); - for callee in &called_functions { - mark_functions_to_retain_recursive( - ssa, - inline_no_predicates_functions, - aggressiveness, - times_called, - inline_infos, - explored_functions.clone(), - *callee, - ); - } - - // We could have decided on this function while deciding on dependencies - // if the function is recursive. - if decided(inline_infos) { - return; - } - - // We'll use some heuristics to decide whether to inline or not. - // We compute the weight (roughly the number of instructions) of the function after inlining - // And the interface cost of the function (the inherent cost at the callsite, roughly the number of args and returns) - // We then can compute an approximation of the cost of inlining vs the cost of retaining the function - // We do this computation using saturating i64s to avoid overflows, - // and because we want to calculate a difference which can be negative. - - // Total weight of functions called by this one, unless we decided not to inline them. - // Callees which appear multiple times would be inlined multiple times. - let inlined_function_weights: i64 = called_functions.iter().fold(0, |acc, callee| { - let info = &inline_infos[callee]; - // If the callee is not going to be inlined then we can ignore its cost. 
- if info.should_inline { - acc.saturating_add(info.weight) - } else { - acc - } - }); - - let this_function_weight = inlined_function_weights - .saturating_add(compute_function_own_weight(&ssa.functions[&func]) as i64); - - let interface_cost = compute_function_interface_cost(&ssa.functions[&func]) as i64; - - let times_called = times_called[&func] as i64; - - let inline_cost = times_called.saturating_mul(this_function_weight); - let retain_cost = times_called.saturating_mul(interface_cost) + this_function_weight; - let net_cost = inline_cost.saturating_sub(retain_cost); - - let runtime = ssa.functions[&func].runtime(); - // We inline if the aggressiveness is higher than inline cost minus the retain cost - // If aggressiveness is infinite, we'll always inline - // If aggressiveness is 0, we'll inline when the inline cost is lower than the retain cost - // If aggressiveness is minus infinity, we'll never inline (other than in the mandatory cases) - let should_inline = (net_cost < aggressiveness) - || runtime.is_inline_always() - || (runtime.is_no_predicates() && inline_no_predicates_functions); - - let info = inline_infos.entry(func).or_default(); - info.should_inline = should_inline; - info.weight = this_function_weight; - info.cost = net_cost; -} - -/// Mark Brillig functions that should not be inlined because they are recursive or expensive. -fn mark_brillig_functions_to_retain( - ssa: &Ssa, - inline_no_predicates_functions: bool, - aggressiveness: i64, - times_called: &HashMap, - inline_infos: &mut InlineInfos, -) { - let brillig_entry_points = inline_infos - .iter() - .filter_map(|(id, info)| info.is_brillig_entry_point.then_some(*id)) - .collect::>(); - - for entry_point in brillig_entry_points { - mark_functions_to_retain_recursive( - ssa, - inline_no_predicates_functions, - aggressiveness, - times_called, - inline_infos, - im::HashSet::default(), - entry_point, - ); - } -} - -/// Compute a weight of a function based on the number of instructions in its reachable blocks. -fn compute_function_own_weight(func: &Function) -> usize { - let mut weight = 0; - for block_id in func.reachable_blocks() { - weight += func.dfg[block_id].instructions().len() + 1; // We add one for the terminator - } - // We use an approximation of the average increase in instruction ratio from SSA to Brillig - // In order to get the actual weight we'd need to codegen this function to brillig. - weight -} - -/// Compute interface cost of a function based on the number of inputs and outputs. -fn compute_function_interface_cost(func: &Function) -> usize { - func.parameters().len() + func.returns().len() -} - impl InlineContext { /// Create a new context object for the function inlining pass. /// This starts off with an empty mapping of instructions for main's parameters. 
@@ -1148,12 +791,10 @@ mod test { map::Id, types::{NumericType, Type}, }, - opt::assert_normalized_ssa_equals, + opt::{assert_normalized_ssa_equals, inlining::inline_info::compute_bottom_up_order}, Ssa, }; - use super::compute_bottom_up_order; - #[test] fn basic_inlining() { // fn foo { diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/inlining/inline_info.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/inlining/inline_info.rs new file mode 100644 index 00000000000..26bb2cad675 --- /dev/null +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/inlining/inline_info.rs @@ -0,0 +1,371 @@ +use std::collections::{BTreeMap, HashSet, VecDeque}; + +use im::HashMap; + +use crate::ssa::{ + ir::function::{Function, FunctionId}, + ssa_gen::Ssa, +}; + +use super::{called_functions, called_functions_vec}; + +/// Information about a function to aid the decision about whether to inline it or not. +/// The final decision depends on what we're inlining it into. +#[derive(Default, Debug)] +pub(crate) struct InlineInfo { + is_brillig_entry_point: bool, + is_acir_entry_point: bool, + is_recursive: bool, + pub(crate) should_inline: bool, + weight: i64, + cost: i64, +} + +impl InlineInfo { + /// Functions which are to be retained, not inlined. + pub(crate) fn is_inline_target(&self) -> bool { + self.is_brillig_entry_point + || self.is_acir_entry_point + || self.is_recursive + || !self.should_inline + } + + pub(crate) fn should_inline(inline_infos: &InlineInfos, called_func_id: FunctionId) -> bool { + inline_infos.get(&called_func_id).map(|info| info.should_inline).unwrap_or_default() + } +} + +pub(crate) type InlineInfos = BTreeMap; + +/// The functions we should inline into (and that should be left in the final program) are: +/// - main +/// - Any Brillig function called from Acir +/// - Some Brillig functions depending on aggressiveness and some metrics +/// - Any Acir functions with a [fold inline type][InlineType::Fold], +/// +/// The returned `InlineInfos` won't have every function in it, only the ones which the algorithm visited. +pub(crate) fn compute_inline_infos( + ssa: &Ssa, + inline_no_predicates_functions: bool, + aggressiveness: i64, +) -> InlineInfos { + let mut inline_infos = InlineInfos::default(); + + inline_infos.insert( + ssa.main_id, + InlineInfo { + is_acir_entry_point: ssa.main().runtime().is_acir(), + is_brillig_entry_point: ssa.main().runtime().is_brillig(), + ..Default::default() + }, + ); + + // Handle ACIR functions. + for (func_id, function) in ssa.functions.iter() { + if function.runtime().is_brillig() { + continue; + } + + // If we have not already finished the flattening pass, functions marked + // to not have predicates should be preserved. + let preserve_function = !inline_no_predicates_functions && function.is_no_predicates(); + if function.runtime().is_entry_point() || preserve_function { + inline_infos.entry(*func_id).or_default().is_acir_entry_point = true; + } + + // Any Brillig function called from ACIR is an entry into the Brillig VM. 
+ for called_func_id in called_functions(function) { + if ssa.functions[&called_func_id].runtime().is_brillig() { + inline_infos.entry(called_func_id).or_default().is_brillig_entry_point = true; + } + } + } + + let callers = compute_callers(ssa); + let times_called = compute_times_called(&callers); + + mark_brillig_functions_to_retain( + ssa, + inline_no_predicates_functions, + aggressiveness, + ×_called, + &mut inline_infos, + ); + + inline_infos +} + +/// Compute the time each function is called from any other function. +fn compute_times_called( + callers: &BTreeMap>, +) -> HashMap { + callers + .iter() + .map(|(callee, callers)| { + let total_calls = callers.values().sum(); + (*callee, total_calls) + }) + .collect() +} + +/// Compute for each function the set of functions that call it, and how many times they do so. +fn compute_callers(ssa: &Ssa) -> BTreeMap> { + ssa.functions + .iter() + .flat_map(|(caller_id, function)| { + let called_functions = called_functions_vec(function); + called_functions.into_iter().map(|callee_id| (*caller_id, callee_id)) + }) + .fold( + // Make sure an entry exists even for ones that don't get called. + ssa.functions.keys().map(|id| (*id, BTreeMap::new())).collect(), + |mut acc, (caller_id, callee_id)| { + let callers = acc.entry(callee_id).or_default(); + *callers.entry(caller_id).or_default() += 1; + acc + }, + ) +} + +/// Compute for each function the set of functions called by it, and how many times it does so. +fn compute_callees(ssa: &Ssa) -> BTreeMap> { + ssa.functions + .iter() + .flat_map(|(caller_id, function)| { + let called_functions = called_functions_vec(function); + called_functions.into_iter().map(|callee_id| (*caller_id, callee_id)) + }) + .fold( + // Make sure an entry exists even for ones that don't call anything. + ssa.functions.keys().map(|id| (*id, BTreeMap::new())).collect(), + |mut acc, (caller_id, callee_id)| { + let callees = acc.entry(caller_id).or_default(); + *callees.entry(callee_id).or_default() += 1; + acc + }, + ) +} + +/// Compute something like a topological order of the functions, starting with the ones +/// that do not call any other functions, going towards the entry points. When cycles +/// are detected, take the one which are called by the most to break the ties. +/// +/// This can be used to simplify the most often called functions first. +/// +/// Returns the functions paired with their own as well as transitive weight, +/// which accumulates the weight of all the functions they call, as well as own. +pub(crate) fn compute_bottom_up_order(ssa: &Ssa) -> Vec<(FunctionId, (usize, usize))> { + let mut order = Vec::new(); + let mut visited = HashSet::new(); + + // Call graph which we'll repeatedly prune to find the "leaves". + let mut callees = compute_callees(ssa); + let callers = compute_callers(ssa); + + // Number of times a function is called, used to break cycles in the call graph by popping the next candidate. + let mut times_called = compute_times_called(&callers).into_iter().collect::>(); + times_called.sort_by_key(|(id, cnt)| { + // Sort by called the *least* by others, as these are less likely to cut the graph when removed. + let called_desc = -(*cnt as i64); + // Sort entries first (last to be popped). + let is_entry_asc = -called_desc.signum(); + // Finally break ties by ID. + (is_entry_asc, called_desc, *id) + }); + + // Start with the weight of the functions in isolation, then accumulate as we pop off the ones they call. 
+ let own_weights = ssa + .functions + .iter() + .map(|(id, f)| (*id, compute_function_own_weight(f))) + .collect::>(); + let mut weights = own_weights.clone(); + + // Seed the queue with functions that don't call anything. + let mut queue = callees + .iter() + .filter_map(|(id, callees)| callees.is_empty().then_some(*id)) + .collect::>(); + + loop { + while let Some(id) = queue.pop_front() { + // Pull the current weight of yet-to-be emitted callees (a nod to mutual recursion). + for (callee, cnt) in &callees[&id] { + if *callee != id { + weights[&id] = weights[&id].saturating_add(cnt.saturating_mul(weights[callee])); + } + } + // Own weight plus the weights accumulated from callees. + let weight = weights[&id]; + let own_weight = own_weights[&id]; + + // Emit the function. + order.push((id, (own_weight, weight))); + visited.insert(id); + + // Update the callers of this function. + for (caller, cnt) in &callers[&id] { + // Update the weight of the caller with the weight of this function. + weights[caller] = weights[caller].saturating_add(cnt.saturating_mul(weight)); + // Remove this function from the callees of the caller. + let callees = callees.get_mut(caller).unwrap(); + callees.remove(&id); + // If the caller doesn't call any other function, enqueue it, + // unless it's the entry function, which is never called by anything, so it should be last. + if callees.is_empty() && !visited.contains(caller) && !callers[caller].is_empty() { + queue.push_back(*caller); + } + } + } + // If we ran out of the queue, maybe there is a cycle; take the next most called function. + while let Some((id, _)) = times_called.pop() { + if !visited.contains(&id) { + queue.push_back(id); + break; + } + } + if times_called.is_empty() && queue.is_empty() { + assert_eq!(order.len(), callers.len()); + return order; + } + } +} + +/// Compute a weight of a function based on the number of instructions in its reachable blocks. +fn compute_function_own_weight(func: &Function) -> usize { + let mut weight = 0; + for block_id in func.reachable_blocks() { + weight += func.dfg[block_id].instructions().len() + 1; // We add one for the terminator + } + // We use an approximation of the average increase in instruction ratio from SSA to Brillig + // In order to get the actual weight we'd need to codegen this function to brillig. + weight +} + +/// Compute interface cost of a function based on the number of inputs and outputs. +fn compute_function_interface_cost(func: &Function) -> usize { + func.parameters().len() + func.returns().len() +} + +/// Traverse the call graph starting from a given function, marking function to be retained if they are: +/// * recursive functions, or +/// * the cost of inlining outweighs the cost of not doing so +fn mark_functions_to_retain_recursive( + ssa: &Ssa, + inline_no_predicates_functions: bool, + aggressiveness: i64, + times_called: &HashMap, + inline_infos: &mut InlineInfos, + mut explored_functions: im::HashSet, + func: FunctionId, +) { + // Check if we have set any of the fields this method touches. 
+ let decided = |inline_infos: &InlineInfos| { + inline_infos + .get(&func) + .map(|info| info.is_recursive || info.should_inline || info.weight != 0) + .unwrap_or_default() + }; + + // Check if we have already decided on this function + if decided(inline_infos) { + return; + } + + // If recursive, this function won't be inlined + if explored_functions.contains(&func) { + inline_infos.entry(func).or_default().is_recursive = true; + return; + } + explored_functions.insert(func); + + // Decide on dependencies first, so we know their weight. + let called_functions = called_functions_vec(&ssa.functions[&func]); + for callee in &called_functions { + mark_functions_to_retain_recursive( + ssa, + inline_no_predicates_functions, + aggressiveness, + times_called, + inline_infos, + explored_functions.clone(), + *callee, + ); + } + + // We could have decided on this function while deciding on dependencies + // if the function is recursive. + if decided(inline_infos) { + return; + } + + // We'll use some heuristics to decide whether to inline or not. + // We compute the weight (roughly the number of instructions) of the function after inlining + // And the interface cost of the function (the inherent cost at the callsite, roughly the number of args and returns) + // We then can compute an approximation of the cost of inlining vs the cost of retaining the function + // We do this computation using saturating i64s to avoid overflows, + // and because we want to calculate a difference which can be negative. + + // Total weight of functions called by this one, unless we decided not to inline them. + // Callees which appear multiple times would be inlined multiple times. + let inlined_function_weights: i64 = called_functions.iter().fold(0, |acc, callee| { + let info = &inline_infos[callee]; + // If the callee is not going to be inlined then we can ignore its cost. + if info.should_inline { + acc.saturating_add(info.weight) + } else { + acc + } + }); + + let this_function_weight = inlined_function_weights + .saturating_add(compute_function_own_weight(&ssa.functions[&func]) as i64); + + let interface_cost = compute_function_interface_cost(&ssa.functions[&func]) as i64; + + let times_called = times_called[&func] as i64; + + let inline_cost = times_called.saturating_mul(this_function_weight); + let retain_cost = times_called.saturating_mul(interface_cost) + this_function_weight; + let net_cost = inline_cost.saturating_sub(retain_cost); + + let runtime = ssa.functions[&func].runtime(); + // We inline if the aggressiveness is higher than inline cost minus the retain cost + // If aggressiveness is infinite, we'll always inline + // If aggressiveness is 0, we'll inline when the inline cost is lower than the retain cost + // If aggressiveness is minus infinity, we'll never inline (other than in the mandatory cases) + let should_inline = (net_cost < aggressiveness) + || runtime.is_inline_always() + || (runtime.is_no_predicates() && inline_no_predicates_functions); + + let info = inline_infos.entry(func).or_default(); + info.should_inline = should_inline; + info.weight = this_function_weight; + info.cost = net_cost; +} + +/// Mark Brillig functions that should not be inlined because they are recursive or expensive. 
+fn mark_brillig_functions_to_retain( + ssa: &Ssa, + inline_no_predicates_functions: bool, + aggressiveness: i64, + times_called: &HashMap, + inline_infos: &mut InlineInfos, +) { + let brillig_entry_points = inline_infos + .iter() + .filter_map(|(id, info)| info.is_brillig_entry_point.then_some(*id)) + .collect::>(); + + for entry_point in brillig_entry_points { + mark_functions_to_retain_recursive( + ssa, + inline_no_predicates_functions, + aggressiveness, + times_called, + inline_infos, + im::HashSet::default(), + entry_point, + ); + } +} diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/preprocess_fns.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/preprocess_fns.rs index ae20c9b8b4a..764fb6dd65b 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/preprocess_fns.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/preprocess_fns.rs @@ -11,10 +11,11 @@ impl Ssa { /// Run pre-processing steps on functions in isolation. pub(crate) fn preprocess_functions(mut self, aggressiveness: i64) -> Ssa { // Bottom-up order, starting with the "leaf" functions, so we inline already optimized code into the ones that call them. - let bottom_up = inlining::compute_bottom_up_order(&self); + let bottom_up = inlining::inline_info::compute_bottom_up_order(&self); // Preliminary inlining decisions. - let inline_infos = inlining::compute_inline_infos(&self, false, aggressiveness); + let inline_infos = + inlining::inline_info::compute_inline_infos(&self, false, aggressiveness); let should_inline_call = |callee: &Function| -> bool { match callee.runtime() { diff --git a/noir/noir-repo/compiler/noirc_frontend/src/ast/expression.rs b/noir/noir-repo/compiler/noirc_frontend/src/ast/expression.rs index 9c9c0ded867..9c18cc0dd34 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/ast/expression.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/ast/expression.rs @@ -4,8 +4,8 @@ use std::fmt::Display; use thiserror::Error; use crate::ast::{ - Ident, ItemVisibility, Path, Pattern, Recoverable, Statement, StatementKind, - UnresolvedTraitConstraint, UnresolvedType, UnresolvedTypeData, Visibility, + Ident, ItemVisibility, Path, Pattern, Statement, StatementKind, UnresolvedTraitConstraint, + UnresolvedType, UnresolvedTypeData, Visibility, }; use crate::node_interner::{ ExprId, InternedExpressionKind, InternedStatementKind, QuotedTypeId, TypeId, @@ -26,6 +26,7 @@ pub enum ExpressionKind { Index(Box), Call(Box), MethodCall(Box), + Constrain(ConstrainExpression), Constructor(Box), MemberAccess(Box), Cast(Box), @@ -226,24 +227,6 @@ impl ExpressionKind { } } -impl Recoverable for ExpressionKind { - fn error(_: Span) -> Self { - ExpressionKind::Error - } -} - -impl Recoverable for Expression { - fn error(span: Span) -> Self { - Expression::new(ExpressionKind::Error, span) - } -} - -impl Recoverable for Option { - fn error(span: Span) -> Self { - Some(Expression::new(ExpressionKind::Error, span)) - } -} - #[derive(Debug, Eq, Clone)] pub struct Expression { pub kind: ExpressionKind, @@ -600,6 +583,55 @@ impl BlockExpression { } } +#[derive(Debug, PartialEq, Eq, Clone)] +pub struct ConstrainExpression { + pub kind: ConstrainKind, + pub arguments: Vec, + pub span: Span, +} + +impl Display for ConstrainExpression { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self.kind { + ConstrainKind::Assert | ConstrainKind::AssertEq => write!( + f, + "{}({})", + self.kind, + vecmap(&self.arguments, |arg| arg.to_string()).join(", ") + ), + ConstrainKind::Constrain => 
{ + write!(f, "constrain {}", &self.arguments[0]) + } + } + } +} + +#[derive(Debug, PartialEq, Eq, Clone, Copy)] +pub enum ConstrainKind { + Assert, + AssertEq, + Constrain, +} + +impl ConstrainKind { + pub fn required_arguments_count(&self) -> usize { + match self { + ConstrainKind::Assert | ConstrainKind::Constrain => 1, + ConstrainKind::AssertEq => 2, + } + } +} + +impl Display for ConstrainKind { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + ConstrainKind::Assert => write!(f, "assert"), + ConstrainKind::AssertEq => write!(f, "assert_eq"), + ConstrainKind::Constrain => write!(f, "constrain"), + } + } +} + impl Display for Expression { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { self.kind.fmt(f) @@ -616,6 +648,7 @@ impl Display for ExpressionKind { Index(index) => index.fmt(f), Call(call) => call.fmt(f), MethodCall(call) => call.fmt(f), + Constrain(constrain) => constrain.fmt(f), Cast(cast) => cast.fmt(f), Infix(infix) => infix.fmt(f), If(if_expr) => if_expr.fmt(f), diff --git a/noir/noir-repo/compiler/noirc_frontend/src/ast/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/ast/mod.rs index 33f504437c0..b6282da01d6 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/ast/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/ast/mod.rs @@ -253,12 +253,6 @@ pub enum UnresolvedTypeExpression { AsTraitPath(Box), } -impl Recoverable for UnresolvedType { - fn error(span: Span) -> Self { - UnresolvedType { typ: UnresolvedTypeData::Error, span } - } -} - impl std::fmt::Display for GenericTypeArg { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { diff --git a/noir/noir-repo/compiler/noirc_frontend/src/ast/statement.rs b/noir/noir-repo/compiler/noirc_frontend/src/ast/statement.rs index 02715e8c2d3..88d1e97a96f 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/ast/statement.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/ast/statement.rs @@ -42,7 +42,6 @@ pub struct Statement { #[derive(Debug, PartialEq, Eq, Clone)] pub enum StatementKind { Let(LetStatement), - Constrain(ConstrainStatement), Expression(Expression), Assign(AssignStatement), For(ForLoopStatement), @@ -88,7 +87,6 @@ impl StatementKind { match self { StatementKind::Let(_) - | StatementKind::Constrain(_) | StatementKind::Assign(_) | StatementKind::Semi(_) | StatementKind::Break @@ -140,12 +138,6 @@ impl StatementKind { } } -impl Recoverable for StatementKind { - fn error(_: Span) -> Self { - StatementKind::Error - } -} - impl StatementKind { pub fn new_let( pattern: Pattern, @@ -284,25 +276,6 @@ impl Ident { } } -impl Recoverable for Ident { - fn error(span: Span) -> Self { - Ident(Spanned::from(span, ERROR_IDENT.to_owned())) - } -} - -impl Recoverable for Vec { - fn error(_: Span) -> Self { - vec![] - } -} - -/// Trait for recoverable nodes during parsing. -/// This is similar to Default but is expected -/// to return an Error node of the appropriate type. -pub trait Recoverable { - fn error(span: Span) -> Self; -} - #[derive(Debug, PartialEq, Eq, Clone)] pub struct ModuleDeclaration { pub visibility: ItemVisibility, @@ -420,9 +393,6 @@ pub struct TypePath { pub turbofish: Option, } -// Note: Path deliberately doesn't implement Recoverable. 
-// No matter which default value we could give in Recoverable::error, -// it would most likely cause further errors during name resolution #[derive(Debug, PartialEq, Eq, Clone, Hash)] pub struct Path { pub segments: Vec, @@ -593,55 +563,6 @@ pub enum LValue { Interned(InternedExpressionKind, Span), } -#[derive(Debug, PartialEq, Eq, Clone)] -pub struct ConstrainStatement { - pub kind: ConstrainKind, - pub arguments: Vec, - pub span: Span, -} - -impl Display for ConstrainStatement { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self.kind { - ConstrainKind::Assert | ConstrainKind::AssertEq => write!( - f, - "{}({})", - self.kind, - vecmap(&self.arguments, |arg| arg.to_string()).join(", ") - ), - ConstrainKind::Constrain => { - write!(f, "constrain {}", &self.arguments[0]) - } - } - } -} - -#[derive(Debug, PartialEq, Eq, Clone, Copy)] -pub enum ConstrainKind { - Assert, - AssertEq, - Constrain, -} - -impl ConstrainKind { - pub fn required_arguments_count(&self) -> usize { - match self { - ConstrainKind::Assert | ConstrainKind::Constrain => 1, - ConstrainKind::AssertEq => 2, - } - } -} - -impl Display for ConstrainKind { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - ConstrainKind::Assert => write!(f, "assert"), - ConstrainKind::AssertEq => write!(f, "assert_eq"), - ConstrainKind::Constrain => write!(f, "constrain"), - } - } -} - #[derive(Debug, PartialEq, Eq, Clone)] pub enum Pattern { Identifier(Ident), @@ -707,12 +628,6 @@ impl Pattern { } } -impl Recoverable for Pattern { - fn error(span: Span) -> Self { - Pattern::Identifier(Ident::error(span)) - } -} - impl LValue { pub fn as_expression(&self) -> Expression { let kind = match self { @@ -969,7 +884,6 @@ impl Display for StatementKind { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { StatementKind::Let(let_statement) => let_statement.fmt(f), - StatementKind::Constrain(constrain) => constrain.fmt(f), StatementKind::Expression(expression) => expression.fmt(f), StatementKind::Assign(assign) => assign.fmt(f), StatementKind::For(for_loop) => for_loop.fmt(f), diff --git a/noir/noir-repo/compiler/noirc_frontend/src/ast/visitor.rs b/noir/noir-repo/compiler/noirc_frontend/src/ast/visitor.rs index a43bd0a5d3d..e40c534c3b9 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/ast/visitor.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/ast/visitor.rs @@ -4,7 +4,7 @@ use noirc_errors::Span; use crate::{ ast::{ ArrayLiteral, AsTraitPath, AssignStatement, BlockExpression, CallExpression, - CastExpression, ConstrainStatement, ConstructorExpression, Expression, ExpressionKind, + CastExpression, ConstrainExpression, ConstructorExpression, Expression, ExpressionKind, ForLoopStatement, ForRange, Ident, IfExpression, IndexExpression, InfixExpression, LValue, Lambda, LetStatement, Literal, MemberAccessExpression, MethodCallExpression, ModuleDeclaration, NoirFunction, NoirStruct, NoirTrait, NoirTraitImpl, NoirTypeAlias, Path, @@ -294,7 +294,7 @@ pub trait Visitor { true } - fn visit_constrain_statement(&mut self, _: &ConstrainStatement) -> bool { + fn visit_constrain_statement(&mut self, _: &ConstrainExpression) -> bool { true } @@ -855,6 +855,9 @@ impl Expression { ExpressionKind::MethodCall(method_call_expression) => { method_call_expression.accept(self.span, visitor); } + ExpressionKind::Constrain(constrain) => { + constrain.accept(visitor); + } ExpressionKind::Constructor(constructor_expression) => { constructor_expression.accept(self.span, 
visitor); } @@ -1148,9 +1151,6 @@ impl Statement { StatementKind::Let(let_statement) => { let_statement.accept(visitor); } - StatementKind::Constrain(constrain_statement) => { - constrain_statement.accept(visitor); - } StatementKind::Expression(expression) => { expression.accept(visitor); } @@ -1199,7 +1199,7 @@ impl LetStatement { } } -impl ConstrainStatement { +impl ConstrainExpression { pub fn accept(&self, visitor: &mut impl Visitor) { if visitor.visit_constrain_statement(self) { self.accept_children(visitor); diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/expressions.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/expressions.rs index 16278995104..8bee7241d43 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/expressions.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/expressions.rs @@ -1,15 +1,15 @@ use acvm::{AcirField, FieldElement}; use iter_extended::vecmap; -use noirc_errors::{Location, Span}; +use noirc_errors::{Location, Span, Spanned}; use rustc_hash::FxHashSet as HashSet; use crate::{ ast::{ - ArrayLiteral, BlockExpression, CallExpression, CastExpression, ConstructorExpression, - Expression, ExpressionKind, Ident, IfExpression, IndexExpression, InfixExpression, - ItemVisibility, Lambda, Literal, MatchExpression, MemberAccessExpression, - MethodCallExpression, Path, PathSegment, PrefixExpression, StatementKind, UnaryOp, - UnresolvedTypeData, UnresolvedTypeExpression, + ArrayLiteral, BinaryOpKind, BlockExpression, CallExpression, CastExpression, + ConstrainExpression, ConstrainKind, ConstructorExpression, Expression, ExpressionKind, + Ident, IfExpression, IndexExpression, InfixExpression, ItemVisibility, Lambda, Literal, + MatchExpression, MemberAccessExpression, MethodCallExpression, Path, PathSegment, + PrefixExpression, StatementKind, UnaryOp, UnresolvedTypeData, UnresolvedTypeExpression, }, hir::{ comptime::{self, InterpreterError}, @@ -21,9 +21,9 @@ use crate::{ hir_def::{ expr::{ HirArrayLiteral, HirBinaryOp, HirBlockExpression, HirCallExpression, HirCastExpression, - HirConstructorExpression, HirExpression, HirIdent, HirIfExpression, HirIndexExpression, - HirInfixExpression, HirLambda, HirLiteral, HirMemberAccess, HirMethodCallExpression, - HirPrefixExpression, + HirConstrainExpression, HirConstructorExpression, HirExpression, HirIdent, + HirIfExpression, HirIndexExpression, HirInfixExpression, HirLambda, HirLiteral, + HirMemberAccess, HirMethodCallExpression, HirPrefixExpression, }, stmt::HirStatement, traits::{ResolvedTraitBound, TraitConstraint}, @@ -37,31 +37,44 @@ use super::{Elaborator, LambdaContext, UnsafeBlockStatus}; impl<'context> Elaborator<'context> { pub(crate) fn elaborate_expression(&mut self, expr: Expression) -> (ExprId, Type) { + self.elaborate_expression_with_target_type(expr, None) + } + + pub(crate) fn elaborate_expression_with_target_type( + &mut self, + expr: Expression, + target_type: Option<&Type>, + ) -> (ExprId, Type) { let (hir_expr, typ) = match expr.kind { ExpressionKind::Literal(literal) => self.elaborate_literal(literal, expr.span), - ExpressionKind::Block(block) => self.elaborate_block(block), + ExpressionKind::Block(block) => self.elaborate_block(block, target_type), ExpressionKind::Prefix(prefix) => return self.elaborate_prefix(*prefix, expr.span), ExpressionKind::Index(index) => self.elaborate_index(*index), ExpressionKind::Call(call) => self.elaborate_call(*call, expr.span), ExpressionKind::MethodCall(call) => self.elaborate_method_call(*call, expr.span), + 
ExpressionKind::Constrain(constrain) => self.elaborate_constrain(constrain), ExpressionKind::Constructor(constructor) => self.elaborate_constructor(*constructor), ExpressionKind::MemberAccess(access) => { return self.elaborate_member_access(*access, expr.span) } ExpressionKind::Cast(cast) => self.elaborate_cast(*cast, expr.span), ExpressionKind::Infix(infix) => return self.elaborate_infix(*infix, expr.span), - ExpressionKind::If(if_) => self.elaborate_if(*if_), + ExpressionKind::If(if_) => self.elaborate_if(*if_, target_type), ExpressionKind::Match(match_) => self.elaborate_match(*match_), ExpressionKind::Variable(variable) => return self.elaborate_variable(variable), - ExpressionKind::Tuple(tuple) => self.elaborate_tuple(tuple), - ExpressionKind::Lambda(lambda) => self.elaborate_lambda(*lambda, None), - ExpressionKind::Parenthesized(expr) => return self.elaborate_expression(*expr), + ExpressionKind::Tuple(tuple) => self.elaborate_tuple(tuple, target_type), + ExpressionKind::Lambda(lambda) => { + self.elaborate_lambda_with_target_type(*lambda, target_type) + } + ExpressionKind::Parenthesized(expr) => { + return self.elaborate_expression_with_target_type(*expr, target_type) + } ExpressionKind::Quote(quote) => self.elaborate_quote(quote, expr.span), ExpressionKind::Comptime(comptime, _) => { - return self.elaborate_comptime_block(comptime, expr.span) + return self.elaborate_comptime_block(comptime, expr.span, target_type) } ExpressionKind::Unsafe(block_expression, span) => { - self.elaborate_unsafe_block(block_expression, span) + self.elaborate_unsafe_block(block_expression, span, target_type) } ExpressionKind::Resolved(id) => return (id, self.interner.id_type(id)), ExpressionKind::Interned(id) => { @@ -112,18 +125,29 @@ impl<'context> Elaborator<'context> { } } - pub(super) fn elaborate_block(&mut self, block: BlockExpression) -> (HirExpression, Type) { - let (block, typ) = self.elaborate_block_expression(block); + pub(super) fn elaborate_block( + &mut self, + block: BlockExpression, + target_type: Option<&Type>, + ) -> (HirExpression, Type) { + let (block, typ) = self.elaborate_block_expression(block, target_type); (HirExpression::Block(block), typ) } - fn elaborate_block_expression(&mut self, block: BlockExpression) -> (HirBlockExpression, Type) { + fn elaborate_block_expression( + &mut self, + block: BlockExpression, + target_type: Option<&Type>, + ) -> (HirBlockExpression, Type) { self.push_scope(); let mut block_type = Type::Unit; - let mut statements = Vec::with_capacity(block.statements.len()); + let statements_len = block.statements.len(); + let mut statements = Vec::with_capacity(statements_len); for (i, statement) in block.statements.into_iter().enumerate() { - let (id, stmt_type) = self.elaborate_statement(statement); + let statement_target_type = if i == statements_len - 1 { target_type } else { None }; + let (id, stmt_type) = + self.elaborate_statement_with_target_type(statement, statement_target_type); statements.push(id); if let HirStatement::Semi(expr) = self.interner.statement(&id) { @@ -149,6 +173,7 @@ impl<'context> Elaborator<'context> { &mut self, block: BlockExpression, span: Span, + target_type: Option<&Type>, ) -> (HirExpression, Type) { // Before entering the block we cache the old value of `in_unsafe_block` so it can be restored. 
let old_in_unsafe_block = self.unsafe_block_status; @@ -161,7 +186,7 @@ impl<'context> Elaborator<'context> { self.unsafe_block_status = UnsafeBlockStatus::InUnsafeBlockWithoutUnconstrainedCalls; - let (hir_block_expression, typ) = self.elaborate_block_expression(block); + let (hir_block_expression, typ) = self.elaborate_block_expression(block, target_type); if let UnsafeBlockStatus::InUnsafeBlockWithoutUnconstrainedCalls = self.unsafe_block_status { @@ -559,6 +584,61 @@ impl<'context> Elaborator<'context> { } } + pub(super) fn elaborate_constrain( + &mut self, + mut expr: ConstrainExpression, + ) -> (HirExpression, Type) { + let span = expr.span; + let min_args_count = expr.kind.required_arguments_count(); + let max_args_count = min_args_count + 1; + let actual_args_count = expr.arguments.len(); + + let (message, expr) = if !(min_args_count..=max_args_count).contains(&actual_args_count) { + self.push_err(TypeCheckError::AssertionParameterCountMismatch { + kind: expr.kind, + found: actual_args_count, + span, + }); + + // Given that we already produced an error, let's make this an `assert(true)` so + // we don't get further errors. + let message = None; + let kind = ExpressionKind::Literal(crate::ast::Literal::Bool(true)); + let expr = Expression { kind, span }; + (message, expr) + } else { + let message = + (actual_args_count != min_args_count).then(|| expr.arguments.pop().unwrap()); + let expr = match expr.kind { + ConstrainKind::Assert | ConstrainKind::Constrain => expr.arguments.pop().unwrap(), + ConstrainKind::AssertEq => { + let rhs = expr.arguments.pop().unwrap(); + let lhs = expr.arguments.pop().unwrap(); + let span = Span::from(lhs.span.start()..rhs.span.end()); + let operator = Spanned::from(span, BinaryOpKind::Equal); + let kind = + ExpressionKind::Infix(Box::new(InfixExpression { lhs, operator, rhs })); + Expression { kind, span } + } + }; + (message, expr) + }; + + let expr_span = expr.span; + let (expr_id, expr_type) = self.elaborate_expression(expr); + + // Must type check the assertion message expression so that we instantiate bindings + let msg = message.map(|assert_msg_expr| self.elaborate_expression(assert_msg_expr).0); + + self.unify(&expr_type, &Type::Bool, || TypeCheckError::TypeMismatch { + expr_typ: expr_type.to_string(), + expected_typ: Type::Bool.to_string(), + expr_span, + }); + + (HirExpression::Constrain(HirConstrainExpression(expr_id, self.file, msg)), Type::Unit) + } + /// Elaborates an expression knowing that it has to match a given type. 
fn elaborate_expression_with_type( &mut self, @@ -572,7 +652,7 @@ impl<'context> Elaborator<'context> { let span = arg.span; let type_hint = if let Some(Type::Function(func_args, _, _, _)) = typ { Some(func_args) } else { None }; - let (hir_expr, typ) = self.elaborate_lambda(*lambda, type_hint); + let (hir_expr, typ) = self.elaborate_lambda_with_parameter_type_hints(*lambda, type_hint); let id = self.interner.push_expr(hir_expr); self.interner.push_expr_location(id, span, self.file); self.interner.push_expr_type(id, typ.clone()); @@ -884,10 +964,16 @@ impl<'context> Elaborator<'context> { } } - fn elaborate_if(&mut self, if_expr: IfExpression) -> (HirExpression, Type) { + fn elaborate_if( + &mut self, + if_expr: IfExpression, + target_type: Option<&Type>, + ) -> (HirExpression, Type) { let expr_span = if_expr.condition.span; + let consequence_span = if_expr.consequence.span; let (condition, cond_type) = self.elaborate_expression(if_expr.condition); - let (consequence, mut ret_type) = self.elaborate_expression(if_expr.consequence); + let (consequence, mut ret_type) = + self.elaborate_expression_with_target_type(if_expr.consequence, target_type); self.unify(&cond_type, &Type::Bool, || TypeCheckError::TypeMismatch { expected_typ: Type::Bool.to_string(), @@ -895,28 +981,30 @@ impl<'context> Elaborator<'context> { expr_span, }); - let alternative = if_expr.alternative.map(|alternative| { - let expr_span = alternative.span; - let (else_, else_type) = self.elaborate_expression(alternative); + let (alternative, else_type, error_span) = if let Some(alternative) = if_expr.alternative { + let (else_, else_type) = + self.elaborate_expression_with_target_type(alternative, target_type); + (Some(else_), else_type, expr_span) + } else { + (None, Type::Unit, consequence_span) + }; - self.unify(&ret_type, &else_type, || { - let err = TypeCheckError::TypeMismatch { - expected_typ: ret_type.to_string(), - expr_typ: else_type.to_string(), - expr_span, - }; + self.unify(&ret_type, &else_type, || { + let err = TypeCheckError::TypeMismatch { + expected_typ: ret_type.to_string(), + expr_typ: else_type.to_string(), + expr_span: error_span, + }; - let context = if ret_type == Type::Unit { - "Are you missing a semicolon at the end of your 'else' branch?" - } else if else_type == Type::Unit { - "Are you missing a semicolon at the end of the first block of this 'if'?" - } else { - "Expected the types of both if branches to be equal" - }; + let context = if ret_type == Type::Unit { + "Are you missing a semicolon at the end of your 'else' branch?" + } else if else_type == Type::Unit { + "Are you missing a semicolon at the end of the first block of this 'if'?" 
+ } else { + "Expected the types of both if branches to be equal" + }; - err.add_context(context) - }); - else_ + err.add_context(context) }); if alternative.is_none() { @@ -931,12 +1019,19 @@ impl<'context> Elaborator<'context> { (HirExpression::Error, Type::Error) } - fn elaborate_tuple(&mut self, tuple: Vec) -> (HirExpression, Type) { + fn elaborate_tuple( + &mut self, + tuple: Vec, + target_type: Option<&Type>, + ) -> (HirExpression, Type) { let mut element_ids = Vec::with_capacity(tuple.len()); let mut element_types = Vec::with_capacity(tuple.len()); - for element in tuple { - let (id, typ) = self.elaborate_expression(element); + for (index, element) in tuple.into_iter().enumerate() { + let target_type = target_type.map(|typ| typ.follow_bindings()); + let expr_target_type = + if let Some(Type::Tuple(types)) = &target_type { types.get(index) } else { None }; + let (id, typ) = self.elaborate_expression_with_target_type(element, expr_target_type); element_ids.push(id); element_types.push(typ); } @@ -944,10 +1039,24 @@ impl<'context> Elaborator<'context> { (HirExpression::Tuple(element_ids), Type::Tuple(element_types)) } + fn elaborate_lambda_with_target_type( + &mut self, + lambda: Lambda, + target_type: Option<&Type>, + ) -> (HirExpression, Type) { + let target_type = target_type.map(|typ| typ.follow_bindings()); + + if let Some(Type::Function(args, _, _, _)) = target_type { + return self.elaborate_lambda_with_parameter_type_hints(lambda, Some(&args)); + } + + self.elaborate_lambda_with_parameter_type_hints(lambda, None) + } + /// For elaborating a lambda we might get `parameters_type_hints`. These come from a potential /// call that has this lambda as the argument. /// The parameter type hints will be the types of the function type corresponding to the lambda argument. - fn elaborate_lambda( + fn elaborate_lambda_with_parameter_type_hints( &mut self, lambda: Lambda, parameters_type_hints: Option<&Vec>, @@ -1013,9 +1122,15 @@ impl<'context> Elaborator<'context> { } } - fn elaborate_comptime_block(&mut self, block: BlockExpression, span: Span) -> (ExprId, Type) { - let (block, _typ) = - self.elaborate_in_comptime_context(|this| this.elaborate_block_expression(block)); + fn elaborate_comptime_block( + &mut self, + block: BlockExpression, + span: Span, + target_type: Option<&Type>, + ) -> (ExprId, Type) { + let (block, _typ) = self.elaborate_in_comptime_context(|this| { + this.elaborate_block_expression(block, target_type) + }); let mut interpreter = self.setup_interpreter(); let value = interpreter.evaluate_block(block); diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/lints.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/lints.rs index af80dfaa823..7910d8cebdb 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/lints.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/lints.rs @@ -283,8 +283,7 @@ fn can_return_without_recursing(interner: &NodeInterner, func_id: FuncId, expr_i // Rust doesn't seem to check the for loop body (it's bounds might mean it's never called). 
HirStatement::For(e) => check(e.start_range) && check(e.end_range), HirStatement::Loop(e) => check(e), - HirStatement::Constrain(_) - | HirStatement::Comptime(_) + HirStatement::Comptime(_) | HirStatement::Break | HirStatement::Continue | HirStatement::Error => true, @@ -310,6 +309,7 @@ fn can_return_without_recursing(interner: &NodeInterner, func_id: FuncId, expr_i HirExpression::MemberAccess(e) => check(e.lhs), HirExpression::Call(e) => check(e.func) && e.arguments.iter().cloned().all(check), HirExpression::MethodCall(e) => check(e.object) && e.arguments.iter().cloned().all(check), + HirExpression::Constrain(e) => check(e.0) && e.2.map(check).unwrap_or(true), HirExpression::Cast(e) => check(e.lhs), HirExpression::If(e) => { check(e.condition) && (check(e.consequence) || e.alternative.map(check).unwrap_or(true)) diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/mod.rs index c895f87ef88..a8e722a9205 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/mod.rs @@ -500,7 +500,8 @@ impl<'context> Elaborator<'context> { | FunctionKind::Oracle | FunctionKind::TraitFunctionWithoutBody => (HirFunction::empty(), Type::Error), FunctionKind::Normal => { - let (block, body_type) = self.elaborate_block(body); + let return_type = func_meta.return_type(); + let (block, body_type) = self.elaborate_block(body, Some(return_type)); let expr_id = self.intern_expr(block, body_span); self.interner.push_expr_type(expr_id, body_type.clone()); (HirFunction::unchecked_from_expr(expr_id), body_type) diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/statements.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/statements.rs index a95e260b6a5..c401646332f 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/statements.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/statements.rs @@ -1,9 +1,8 @@ -use noirc_errors::{Location, Span, Spanned}; +use noirc_errors::{Location, Span}; use crate::{ ast::{ - AssignStatement, BinaryOpKind, ConstrainKind, ConstrainStatement, Expression, - ExpressionKind, ForLoopStatement, ForRange, Ident, InfixExpression, ItemVisibility, LValue, + AssignStatement, Expression, ForLoopStatement, ForRange, Ident, ItemVisibility, LValue, LetStatement, Path, Statement, StatementKind, }, hir::{ @@ -15,10 +14,7 @@ use crate::{ }, hir_def::{ expr::HirIdent, - stmt::{ - HirAssignStatement, HirConstrainStatement, HirForStatement, HirLValue, HirLetStatement, - HirStatement, - }, + stmt::{HirAssignStatement, HirForStatement, HirLValue, HirLetStatement, HirStatement}, }, node_interner::{DefinitionId, DefinitionKind, GlobalId, StmtId}, DataType, Type, @@ -28,9 +24,16 @@ use super::{lints, Elaborator, Loop}; impl<'context> Elaborator<'context> { fn elaborate_statement_value(&mut self, statement: Statement) -> (HirStatement, Type) { + self.elaborate_statement_value_with_target_type(statement, None) + } + + fn elaborate_statement_value_with_target_type( + &mut self, + statement: Statement, + target_type: Option<&Type>, + ) -> (HirStatement, Type) { match statement.kind { StatementKind::Let(let_stmt) => self.elaborate_local_let(let_stmt), - StatementKind::Constrain(constrain) => self.elaborate_constrain(constrain), StatementKind::Assign(assign) => self.elaborate_assign(assign), StatementKind::For(for_stmt) => self.elaborate_for(for_stmt), StatementKind::Loop(block, span) => self.elaborate_loop(block, 
span), @@ -38,7 +41,7 @@ impl<'context> Elaborator<'context> { StatementKind::Continue => self.elaborate_jump(false, statement.span), StatementKind::Comptime(statement) => self.elaborate_comptime_statement(*statement), StatementKind::Expression(expr) => { - let (expr, typ) = self.elaborate_expression(expr); + let (expr, typ) = self.elaborate_expression_with_target_type(expr, target_type); (HirStatement::Expression(expr), typ) } StatementKind::Semi(expr) => { @@ -48,15 +51,24 @@ impl<'context> Elaborator<'context> { StatementKind::Interned(id) => { let kind = self.interner.get_statement_kind(id); let statement = Statement { kind: kind.clone(), span: statement.span }; - self.elaborate_statement_value(statement) + self.elaborate_statement_value_with_target_type(statement, target_type) } StatementKind::Error => (HirStatement::Error, Type::Error), } } pub(crate) fn elaborate_statement(&mut self, statement: Statement) -> (StmtId, Type) { + self.elaborate_statement_with_target_type(statement, None) + } + + pub(crate) fn elaborate_statement_with_target_type( + &mut self, + statement: Statement, + target_type: Option<&Type>, + ) -> (StmtId, Type) { let span = statement.span; - let (hir_statement, typ) = self.elaborate_statement_value(statement); + let (hir_statement, typ) = + self.elaborate_statement_value_with_target_type(statement, target_type); let id = self.interner.push_stmt(hir_statement); self.interner.push_stmt_location(id, span, self.file); (id, typ) @@ -75,12 +87,13 @@ impl<'context> Elaborator<'context> { let_stmt: LetStatement, global_id: Option, ) -> (HirStatement, Type) { - let expr_span = let_stmt.expression.span; - let (expression, expr_type) = self.elaborate_expression(let_stmt.expression); - let type_contains_unspecified = let_stmt.r#type.contains_unspecified(); let annotated_type = self.resolve_inferred_type(let_stmt.r#type); + let expr_span = let_stmt.expression.span; + let (expression, expr_type) = + self.elaborate_expression_with_target_type(let_stmt.expression, Some(&annotated_type)); + // Require the top-level of a global's type to be fully-specified if type_contains_unspecified && global_id.is_some() { let span = expr_span; @@ -131,61 +144,6 @@ impl<'context> Elaborator<'context> { (HirStatement::Let(let_), Type::Unit) } - pub(super) fn elaborate_constrain( - &mut self, - mut stmt: ConstrainStatement, - ) -> (HirStatement, Type) { - let span = stmt.span; - let min_args_count = stmt.kind.required_arguments_count(); - let max_args_count = min_args_count + 1; - let actual_args_count = stmt.arguments.len(); - - let (message, expr) = if !(min_args_count..=max_args_count).contains(&actual_args_count) { - self.push_err(TypeCheckError::AssertionParameterCountMismatch { - kind: stmt.kind, - found: actual_args_count, - span, - }); - - // Given that we already produced an error, let's make this an `assert(true)` so - // we don't get further errors. 
- let message = None; - let kind = ExpressionKind::Literal(crate::ast::Literal::Bool(true)); - let expr = Expression { kind, span }; - (message, expr) - } else { - let message = - (actual_args_count != min_args_count).then(|| stmt.arguments.pop().unwrap()); - let expr = match stmt.kind { - ConstrainKind::Assert | ConstrainKind::Constrain => stmt.arguments.pop().unwrap(), - ConstrainKind::AssertEq => { - let rhs = stmt.arguments.pop().unwrap(); - let lhs = stmt.arguments.pop().unwrap(); - let span = Span::from(lhs.span.start()..rhs.span.end()); - let operator = Spanned::from(span, BinaryOpKind::Equal); - let kind = - ExpressionKind::Infix(Box::new(InfixExpression { lhs, operator, rhs })); - Expression { kind, span } - } - }; - (message, expr) - }; - - let expr_span = expr.span; - let (expr_id, expr_type) = self.elaborate_expression(expr); - - // Must type check the assertion message expression so that we instantiate bindings - let msg = message.map(|assert_msg_expr| self.elaborate_expression(assert_msg_expr).0); - - self.unify(&expr_type, &Type::Bool, || TypeCheckError::TypeMismatch { - expr_typ: expr_type.to_string(), - expected_typ: Type::Bool.to_string(), - expr_span, - }); - - (HirStatement::Constrain(HirConstrainStatement(expr_id, self.file, msg)), Type::Unit) - } - pub(super) fn elaborate_assign(&mut self, assign: AssignStatement) -> (HirStatement, Type) { let expr_span = assign.expression.span; let (expression, expr_type) = self.elaborate_expression(assign.expression); diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/display.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/display.rs index 1be4bbe61ab..a6927ab3fe8 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/display.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/display.rs @@ -6,7 +6,7 @@ use noirc_errors::Span; use crate::{ ast::{ ArrayLiteral, AsTraitPath, AssignStatement, BlockExpression, CallExpression, - CastExpression, ConstrainStatement, ConstructorExpression, Expression, ExpressionKind, + CastExpression, ConstrainExpression, ConstructorExpression, Expression, ExpressionKind, ForBounds, ForLoopStatement, ForRange, GenericTypeArgs, IfExpression, IndexExpression, InfixExpression, LValue, Lambda, LetStatement, Literal, MatchExpression, MemberAccessExpression, MethodCallExpression, Pattern, PrefixExpression, Statement, @@ -573,6 +573,12 @@ fn remove_interned_in_expression_kind( ..*call })) } + ExpressionKind::Constrain(constrain) => ExpressionKind::Constrain(ConstrainExpression { + arguments: vecmap(constrain.arguments, |expr| { + remove_interned_in_expression(interner, expr) + }), + ..constrain + }), ExpressionKind::Constructor(constructor) => { ExpressionKind::Constructor(Box::new(ConstructorExpression { fields: vecmap(constructor.fields, |(name, expr)| { @@ -728,12 +734,6 @@ fn remove_interned_in_statement_kind( r#type: remove_interned_in_unresolved_type(interner, let_statement.r#type), ..let_statement }), - StatementKind::Constrain(constrain) => StatementKind::Constrain(ConstrainStatement { - arguments: vecmap(constrain.arguments, |expr| { - remove_interned_in_expression(interner, expr) - }), - ..constrain - }), StatementKind::Expression(expr) => { StatementKind::Expression(remove_interned_in_expression(interner, expr)) } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/hir_to_display_ast.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/hir_to_display_ast.rs index d46484d05fa..3ba7ae42950 100644 --- 
a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/hir_to_display_ast.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/hir_to_display_ast.rs @@ -8,7 +8,7 @@ use crate::ast::{ MemberAccessExpression, MethodCallExpression, Path, PathKind, PathSegment, Pattern, PrefixExpression, UnresolvedType, UnresolvedTypeData, UnresolvedTypeExpression, }; -use crate::ast::{ConstrainStatement, Expression, Statement, StatementKind}; +use crate::ast::{ConstrainExpression, Expression, Statement, StatementKind}; use crate::hir_def::expr::{ HirArrayLiteral, HirBlockExpression, HirExpression, HirIdent, HirLiteral, }; @@ -32,20 +32,6 @@ impl HirStatement { let expression = let_stmt.expression.to_display_ast(interner); StatementKind::new_let(pattern, r#type, expression, let_stmt.attributes.clone()) } - HirStatement::Constrain(constrain) => { - let expr = constrain.0.to_display_ast(interner); - let mut arguments = vec![expr]; - if let Some(message) = constrain.2 { - arguments.push(message.to_display_ast(interner)); - } - - // TODO: Find difference in usage between Assert & AssertEq - StatementKind::Constrain(ConstrainStatement { - kind: ConstrainKind::Assert, - arguments, - span, - }) - } HirStatement::Assign(assign) => StatementKind::Assign(AssignStatement { lvalue: assign.lvalue.to_display_ast(interner), expression: assign.expression.to_display_ast(interner), @@ -180,6 +166,20 @@ impl HirExpression { is_macro_call: false, })) } + HirExpression::Constrain(constrain) => { + let expr = constrain.0.to_display_ast(interner); + let mut arguments = vec![expr]; + if let Some(message) = constrain.2 { + arguments.push(message.to_display_ast(interner)); + } + + // TODO: Find difference in usage between Assert & AssertEq + ExpressionKind::Constrain(ConstrainExpression { + kind: ConstrainKind::Assert, + arguments, + span, + }) + } HirExpression::Cast(cast) => { let lhs = cast.lhs.to_display_ast(interner); let r#type = cast.r#type.to_display_ast(); diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs index 33f8e43863e..5f001192dac 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs @@ -14,7 +14,7 @@ use crate::elaborator::Elaborator; use crate::graph::CrateId; use crate::hir::def_map::ModuleId; use crate::hir::type_check::TypeCheckError; -use crate::hir_def::expr::{HirEnumConstructorExpression, ImplKind}; +use crate::hir_def::expr::{HirConstrainExpression, HirEnumConstructorExpression, ImplKind}; use crate::hir_def::function::FunctionBody; use crate::monomorphization::{ perform_impl_bindings, perform_instantiation_bindings, resolve_trait_method, @@ -32,8 +32,8 @@ use crate::{ HirPrefixExpression, }, stmt::{ - HirAssignStatement, HirConstrainStatement, HirForStatement, HirLValue, HirLetStatement, - HirPattern, HirStatement, + HirAssignStatement, HirForStatement, HirLValue, HirLetStatement, HirPattern, + HirStatement, }, types::Kind, }, @@ -532,6 +532,7 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { HirExpression::MemberAccess(access) => self.evaluate_access(access, id), HirExpression::Call(call) => self.evaluate_call(call, id), HirExpression::MethodCall(call) => self.evaluate_method_call(call, id), + HirExpression::Constrain(constrain) => self.evaluate_constrain(constrain), HirExpression::Cast(cast) => self.evaluate_cast(&cast, id), HirExpression::If(if_) => self.evaluate_if(if_, 
id), HirExpression::Tuple(tuple) => self.evaluate_tuple(tuple), @@ -1560,7 +1561,6 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { pub fn evaluate_statement(&mut self, statement: StmtId) -> IResult { match self.elaborator.interner.statement(&statement) { HirStatement::Let(let_) => self.evaluate_let(let_), - HirStatement::Constrain(constrain) => self.evaluate_constrain(constrain), HirStatement::Assign(assign) => self.evaluate_assign(assign), HirStatement::For(for_) => self.evaluate_for(for_), HirStatement::Loop(expression) => self.evaluate_loop(expression), @@ -1586,7 +1586,7 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { Ok(Value::Unit) } - fn evaluate_constrain(&mut self, constrain: HirConstrainStatement) -> IResult { + fn evaluate_constrain(&mut self, constrain: HirConstrainExpression) -> IResult { match self.evaluate(constrain.0)? { Value::Bool(true) => Ok(Value::Unit), Value::Bool(false) => { diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs index 9abb1b190d5..6655c8977e2 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs @@ -1540,7 +1540,7 @@ fn expr_as_assert( location: Location, ) -> IResult { expr_as(interner, arguments, return_type.clone(), location, |expr| { - if let ExprValue::Statement(StatementKind::Constrain(mut constrain)) = expr { + if let ExprValue::Expression(ExpressionKind::Constrain(mut constrain)) = expr { if constrain.kind == ConstrainKind::Assert && !constrain.arguments.is_empty() && constrain.arguments.len() <= 2 @@ -1580,7 +1580,7 @@ fn expr_as_assert_eq( location: Location, ) -> IResult { expr_as(interner, arguments, return_type.clone(), location, |expr| { - if let ExprValue::Statement(StatementKind::Constrain(mut constrain)) = expr { + if let ExprValue::Expression(ExpressionKind::Constrain(mut constrain)) = expr { if constrain.kind == ConstrainKind::AssertEq && constrain.arguments.len() >= 2 && constrain.arguments.len() <= 3 diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir_def/expr.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir_def/expr.rs index 00b94411fcd..543c13fac9c 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir_def/expr.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir_def/expr.rs @@ -34,6 +34,7 @@ pub enum HirExpression { MemberAccess(HirMemberAccess), Call(HirCallExpression), MethodCall(HirMethodCallExpression), + Constrain(HirConstrainExpression), Cast(HirCastExpression), If(HirIfExpression), Tuple(Vec), @@ -200,6 +201,13 @@ pub struct HirMethodCallExpression { pub location: Location, } +/// Corresponds to `assert` and `assert_eq` in the source code. +/// This node also contains the FileId of the file the constrain +/// originates from. This is used later in the SSA pass to issue +/// an error if a constrain is found to be always false. 
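+///
+/// The first field is the elaborated boolean condition; for `assert_eq` the two
+/// arguments are desugared into an `==` infix expression. The last field holds the
+/// optional assertion message, so e.g. `assert_eq(a, b, "mismatch")` becomes the
+/// condition `a == b` with the message `"mismatch"`.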
+#[derive(Debug, Clone)] +pub struct HirConstrainExpression(pub ExprId, pub FileId, pub Option); + #[derive(Debug, Clone)] pub enum HirMethodReference { /// A method can be defined in a regular `impl` block, in which case diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir_def/stmt.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir_def/stmt.rs index 8a580e735b1..96ef7161341 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir_def/stmt.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir_def/stmt.rs @@ -3,7 +3,6 @@ use crate::ast::Ident; use crate::node_interner::{ExprId, StmtId}; use crate::token::SecondaryAttribute; use crate::Type; -use fm::FileId; use noirc_errors::{Location, Span}; /// A HirStatement is the result of performing name resolution on @@ -13,7 +12,6 @@ use noirc_errors::{Location, Span}; #[derive(Debug, Clone)] pub enum HirStatement { Let(HirLetStatement), - Constrain(HirConstrainStatement), Assign(HirAssignStatement), For(HirForStatement), Loop(ExprId), @@ -74,13 +72,6 @@ pub struct HirAssignStatement { pub expression: ExprId, } -/// Corresponds to `constrain expr;` in the source code. -/// This node also contains the FileId of the file the constrain -/// originates from. This is used later in the SSA pass to issue -/// an error if a constrain is found to be always false. -#[derive(Debug, Clone)] -pub struct HirConstrainStatement(pub ExprId, pub FileId, pub Option); - #[derive(Debug, Clone, Hash, PartialEq, Eq)] pub enum HirPattern { Identifier(HirIdent), diff --git a/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/mod.rs index 7ad703523d4..5d81913f4ec 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/mod.rs @@ -557,6 +557,22 @@ impl<'interner> Monomorphizer<'interner> { HirExpression::Call(call) => self.function_call(call, expr)?, + HirExpression::Constrain(constrain) => { + let expr = self.expr(constrain.0)?; + let location = self.interner.expr_location(&constrain.0); + let assert_message = constrain + .2 + .map(|assert_msg_expr| { + self.expr(assert_msg_expr).map(|expr| { + (expr, self.interner.id_type(assert_msg_expr).follow_bindings()) + }) + }) + .transpose()? + .map(Box::new); + + ast::Expression::Constrain(Box::new(expr), location, assert_message) + } + HirExpression::Cast(cast) => { let location = self.interner.expr_location(&expr); let typ = Self::convert_type(&cast.r#type, location)?; @@ -658,21 +674,6 @@ impl<'interner> Monomorphizer<'interner> { fn statement(&mut self, id: StmtId) -> Result { match self.interner.statement(&id) { HirStatement::Let(let_statement) => self.let_statement(let_statement), - HirStatement::Constrain(constrain) => { - let expr = self.expr(constrain.0)?; - let location = self.interner.expr_location(&constrain.0); - let assert_message = constrain - .2 - .map(|assert_msg_expr| { - self.expr(assert_msg_expr).map(|expr| { - (expr, self.interner.id_type(assert_msg_expr).follow_bindings()) - }) - }) - .transpose()? 
- .map(Box::new); - - Ok(ast::Expression::Constrain(Box::new(expr), location, assert_message)) - } HirStatement::Assign(assign) => self.assign(assign), HirStatement::For(for_loop) => { self.is_range_loop = true; diff --git a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/expression.rs b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/expression.rs index eff309154e3..319eefc190a 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/expression.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/expression.rs @@ -3,9 +3,10 @@ use noirc_errors::Span; use crate::{ ast::{ - ArrayLiteral, BlockExpression, CallExpression, CastExpression, ConstructorExpression, - Expression, ExpressionKind, Ident, IfExpression, IndexExpression, Literal, MatchExpression, - MemberAccessExpression, MethodCallExpression, Statement, TypePath, UnaryOp, UnresolvedType, + ArrayLiteral, BlockExpression, CallExpression, CastExpression, ConstrainExpression, + ConstrainKind, ConstructorExpression, Expression, ExpressionKind, Ident, IfExpression, + IndexExpression, Literal, MatchExpression, MemberAccessExpression, MethodCallExpression, + Statement, TypePath, UnaryOp, UnresolvedType, }, parser::{labels::ParsingRuleLabel, parser::parse_many::separated_by_comma, ParserErrorReason}, token::{Keyword, Token, TokenKind}, @@ -653,6 +654,7 @@ impl<'a> Parser<'a> { /// | ArrayExpression /// | SliceExpression /// | BlockExpression + /// | ConstrainExpression /// /// QuoteExpression = 'quote' '{' token* '}' /// @@ -696,6 +698,10 @@ impl<'a> Parser<'a> { return Some(ExpressionKind::Block(kind)); } + if let Some(constrain) = self.parse_constrain_expression() { + return Some(ExpressionKind::Constrain(constrain)); + } + None } @@ -800,6 +806,49 @@ impl<'a> Parser<'a> { } } + /// ConstrainExpression + /// = 'constrain' Expression + /// | 'assert' Arguments + /// | 'assert_eq' Arguments + pub(super) fn parse_constrain_expression(&mut self) -> Option { + let start_span = self.current_token_span; + let kind = self.parse_constrain_kind()?; + + Some(match kind { + ConstrainKind::Assert | ConstrainKind::AssertEq => { + let arguments = self.parse_arguments(); + if arguments.is_none() { + self.expected_token(Token::LeftParen); + } + let arguments = arguments.unwrap_or_default(); + + ConstrainExpression { kind, arguments, span: self.span_since(start_span) } + } + ConstrainKind::Constrain => { + self.push_error(ParserErrorReason::ConstrainDeprecated, self.previous_token_span); + + let expression = self.parse_expression_or_error(); + ConstrainExpression { + kind, + arguments: vec![expression], + span: self.span_since(start_span), + } + } + }) + } + + fn parse_constrain_kind(&mut self) -> Option { + if self.eat_keyword(Keyword::Assert) { + Some(ConstrainKind::Assert) + } else if self.eat_keyword(Keyword::AssertEq) { + Some(ConstrainKind::AssertEq) + } else if self.eat_keyword(Keyword::Constrain) { + Some(ConstrainKind::Constrain) + } else { + None + } + } + /// Block = '{' Statement* '}' pub(super) fn parse_block(&mut self) -> Option { if !self.eat_left_brace() { @@ -849,8 +898,8 @@ mod tests { use crate::{ ast::{ - ArrayLiteral, BinaryOpKind, Expression, ExpressionKind, Literal, StatementKind, - UnaryOp, UnresolvedTypeData, + ArrayLiteral, BinaryOpKind, ConstrainKind, Expression, ExpressionKind, Literal, + StatementKind, UnaryOp, UnresolvedTypeData, }, parser::{ parser::tests::{ @@ -1749,4 +1798,45 @@ mod tests { }; assert_eq!(expr.kind.to_string(), "((1 + 2))"); } + + #[test] + fn parses_assert() { + let 
src = "assert(true, \"good\")"; + let expression = parse_expression_no_errors(src); + let ExpressionKind::Constrain(constrain) = expression.kind else { + panic!("Expected constrain expression"); + }; + assert_eq!(constrain.kind, ConstrainKind::Assert); + assert_eq!(constrain.arguments.len(), 2); + } + + #[test] + fn parses_assert_eq() { + let src = "assert_eq(1, 2, \"bad\")"; + let expression = parse_expression_no_errors(src); + let ExpressionKind::Constrain(constrain) = expression.kind else { + panic!("Expected constrain expression"); + }; + assert_eq!(constrain.kind, ConstrainKind::AssertEq); + assert_eq!(constrain.arguments.len(), 3); + } + + #[test] + fn parses_constrain() { + let src = " + constrain 1 + ^^^^^^^^^ + "; + let (src, span) = get_source_with_error_span(src); + let mut parser = Parser::for_str(&src); + let expression = parser.parse_expression_or_error(); + let ExpressionKind::Constrain(constrain) = expression.kind else { + panic!("Expected constrain expression"); + }; + assert_eq!(constrain.kind, ConstrainKind::Constrain); + assert_eq!(constrain.arguments.len(), 1); + + let reason = get_single_error_reason(&parser.errors, span); + assert!(matches!(reason, ParserErrorReason::ConstrainDeprecated)); + } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/item.rs b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/item.rs index d928d8e82d3..43641935565 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/item.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/item.rs @@ -1,7 +1,7 @@ use iter_extended::vecmap; use crate::{ - parser::{labels::ParsingRuleLabel, Item, ItemKind}, + parser::{labels::ParsingRuleLabel, Item, ItemKind, ParserErrorReason}, token::{Keyword, Token}, }; @@ -94,6 +94,10 @@ impl<'a> Parser<'a> { let kinds = self.parse_item_kind(); let span = self.span_since(start_span); + if kinds.is_empty() && !doc_comments.is_empty() { + self.push_error(ParserErrorReason::DocCommentDoesNotDocumentAnything, start_span); + } + vecmap(kinds, |kind| Item { kind, span, doc_comments: doc_comments.clone() }) } @@ -260,4 +264,18 @@ mod tests { let error = get_single_error(&errors, span); assert_eq!(error.to_string(), "Expected a '}' but found end of input"); } + + #[test] + fn errors_on_trailing_doc_comment() { + let src = " + fn foo() {} + /// doc comment + ^^^^^^^^^^^^^^^ + "; + let (src, span) = get_source_with_error_span(src); + let (module, errors) = parse_program(&src); + assert_eq!(module.items.len(), 1); + let error = get_single_error(&errors, span); + assert!(error.to_string().contains("Documentation comment does not document anything")); + } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/statement.rs b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/statement.rs index 37013e91528..f9cc63a364e 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/statement.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/statement.rs @@ -2,9 +2,9 @@ use noirc_errors::{Span, Spanned}; use crate::{ ast::{ - AssignStatement, BinaryOp, BinaryOpKind, ConstrainKind, ConstrainStatement, Expression, - ExpressionKind, ForBounds, ForLoopStatement, ForRange, Ident, InfixExpression, LValue, - LetStatement, Statement, StatementKind, + AssignStatement, BinaryOp, BinaryOpKind, Expression, ExpressionKind, ForBounds, + ForLoopStatement, ForRange, Ident, InfixExpression, LValue, LetStatement, Statement, + StatementKind, }, parser::{labels::ParsingRuleLabel, ParserErrorReason}, 
token::{Attribute, Keyword, Token, TokenKind}, @@ -89,7 +89,6 @@ impl<'a> Parser<'a> { /// | ContinueStatement /// | ReturnStatement /// | LetStatement - /// | ConstrainStatement /// | ComptimeStatement /// | ForStatement /// | LoopStatement @@ -145,10 +144,6 @@ impl<'a> Parser<'a> { return Some(StatementKind::Let(let_statement)); } - if let Some(constrain) = self.parse_constrain_statement() { - return Some(StatementKind::Constrain(constrain)); - } - if self.at_keyword(Keyword::Comptime) { return self.parse_comptime_statement(attributes); } @@ -432,58 +427,12 @@ impl<'a> Parser<'a> { is_global_let: false, }) } - - /// ConstrainStatement - /// = 'constrain' Expression - /// | 'assert' Arguments - /// | 'assert_eq' Arguments - fn parse_constrain_statement(&mut self) -> Option { - let start_span = self.current_token_span; - let kind = self.parse_constrain_kind()?; - - Some(match kind { - ConstrainKind::Assert | ConstrainKind::AssertEq => { - let arguments = self.parse_arguments(); - if arguments.is_none() { - self.expected_token(Token::LeftParen); - } - let arguments = arguments.unwrap_or_default(); - - ConstrainStatement { kind, arguments, span: self.span_since(start_span) } - } - ConstrainKind::Constrain => { - self.push_error(ParserErrorReason::ConstrainDeprecated, self.previous_token_span); - - let expression = self.parse_expression_or_error(); - ConstrainStatement { - kind, - arguments: vec![expression], - span: self.span_since(start_span), - } - } - }) - } - - fn parse_constrain_kind(&mut self) -> Option { - if self.eat_keyword(Keyword::Assert) { - Some(ConstrainKind::Assert) - } else if self.eat_keyword(Keyword::AssertEq) { - Some(ConstrainKind::AssertEq) - } else if self.eat_keyword(Keyword::Constrain) { - Some(ConstrainKind::Constrain) - } else { - None - } - } } #[cfg(test)] mod tests { use crate::{ - ast::{ - ConstrainKind, ExpressionKind, ForRange, LValue, Statement, StatementKind, - UnresolvedTypeData, - }, + ast::{ExpressionKind, ForRange, LValue, Statement, StatementKind, UnresolvedTypeData}, parser::{ parser::tests::{ expect_no_errors, get_single_error, get_single_error_reason, @@ -551,47 +500,6 @@ mod tests { assert_eq!(let_statement.pattern.to_string(), "x"); } - #[test] - fn parses_assert() { - let src = "assert(true, \"good\")"; - let statement = parse_statement_no_errors(src); - let StatementKind::Constrain(constrain) = statement.kind else { - panic!("Expected constrain statement"); - }; - assert_eq!(constrain.kind, ConstrainKind::Assert); - assert_eq!(constrain.arguments.len(), 2); - } - - #[test] - fn parses_assert_eq() { - let src = "assert_eq(1, 2, \"bad\")"; - let statement = parse_statement_no_errors(src); - let StatementKind::Constrain(constrain) = statement.kind else { - panic!("Expected constrain statement"); - }; - assert_eq!(constrain.kind, ConstrainKind::AssertEq); - assert_eq!(constrain.arguments.len(), 3); - } - - #[test] - fn parses_constrain() { - let src = " - constrain 1 - ^^^^^^^^^ - "; - let (src, span) = get_source_with_error_span(src); - let mut parser = Parser::for_str(&src); - let statement = parser.parse_statement_or_error(); - let StatementKind::Constrain(constrain) = statement.kind else { - panic!("Expected constrain statement"); - }; - assert_eq!(constrain.kind, ConstrainKind::Constrain); - assert_eq!(constrain.arguments.len(), 1); - - let reason = get_single_error_reason(&parser.errors, span); - assert!(matches!(reason, ParserErrorReason::ConstrainDeprecated)); - } - #[test] fn parses_comptime_block() { let src = "comptime { 1 }"; @@ -851,4 
+759,15 @@ mod tests { }; assert_eq!(block.statements.len(), 2); } + + #[test] + fn parses_let_with_assert() { + let src = "let _ = assert(true);"; + let mut parser = Parser::for_str(src); + let statement = parser.parse_statement_or_error(); + let StatementKind::Let(let_statement) = statement.kind else { + panic!("Expected let"); + }; + assert!(matches!(let_statement.expression.kind, ExpressionKind::Constrain(..))); + } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/tests.rs b/noir/noir-repo/compiler/noirc_frontend/src/tests.rs index b7723ce4242..cda6c267ec7 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/tests.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/tests.rs @@ -900,7 +900,6 @@ fn find_lambda_captures(stmts: &[StmtId], interner: &NodeInterner, result: &mut HirStatement::Expression(expr_id) => expr_id, HirStatement::Let(let_stmt) => let_stmt.expression, HirStatement::Assign(assign_stmt) => assign_stmt.expression, - HirStatement::Constrain(constr_stmt) => constr_stmt.0, HirStatement::Semi(semi_expr) => semi_expr, HirStatement::For(for_loop) => for_loop.block, HirStatement::Loop(block) => block, @@ -4053,6 +4052,159 @@ fn infers_lambda_argument_from_call_function_type_in_generic_call() { assert_no_errors(src); } +#[test] +fn infers_lambda_argument_from_call_function_type_as_alias() { + let src = r#" + struct Foo { + value: Field, + } + + type MyFn = fn(Foo) -> Field; + + fn call(f: MyFn) -> Field { + f(Foo { value: 1 }) + } + + fn main() { + let _ = call(|foo| foo.value); + } + "#; + assert_no_errors(src); +} + +#[test] +fn infers_lambda_argument_from_function_return_type() { + let src = r#" + pub struct Foo { + value: Field, + } + + pub fn func() -> fn(Foo) -> Field { + |foo| foo.value + } + + fn main() { + } + "#; + assert_no_errors(src); +} + +#[test] +fn infers_lambda_argument_from_function_return_type_multiple_statements() { + let src = r#" + pub struct Foo { + value: Field, + } + + pub fn func() -> fn(Foo) -> Field { + let _ = 1; + |foo| foo.value + } + + fn main() { + } + "#; + assert_no_errors(src); +} + +#[test] +fn infers_lambda_argument_from_function_return_type_when_inside_if() { + let src = r#" + pub struct Foo { + value: Field, + } + + pub fn func() -> fn(Foo) -> Field { + if true { + |foo| foo.value + } else { + |foo| foo.value + } + } + + fn main() { + } + "#; + assert_no_errors(src); +} + +#[test] +fn infers_lambda_argument_from_variable_type() { + let src = r#" + pub struct Foo { + value: Field, + } + + fn main() { + let _: fn(Foo) -> Field = |foo| foo.value; + } + "#; + assert_no_errors(src); +} + +#[test] +fn infers_lambda_argument_from_variable_alias_type() { + let src = r#" + pub struct Foo { + value: Field, + } + + type FooFn = fn(Foo) -> Field; + + fn main() { + let _: FooFn = |foo| foo.value; + } + "#; + assert_no_errors(src); +} + +#[test] +fn infers_lambda_argument_from_variable_double_alias_type() { + let src = r#" + pub struct Foo { + value: Field, + } + + type FooFn = fn(Foo) -> Field; + type FooFn2 = FooFn; + + fn main() { + let _: FooFn2 = |foo| foo.value; + } + "#; + assert_no_errors(src); +} + +#[test] +fn infers_lambda_argument_from_variable_tuple_type() { + let src = r#" + pub struct Foo { + value: Field, + } + + fn main() { + let _: (fn(Foo) -> Field, _) = (|foo| foo.value, 1); + } + "#; + assert_no_errors(src); +} + +#[test] +fn infers_lambda_argument_from_variable_tuple_type_aliased() { + let src = r#" + pub struct Foo { + value: Field, + } + + type Alias = (fn(Foo) -> Field, Field); + + fn main() { + let _: Alias = (|foo| 
foo.value, 1); + } + "#; + assert_no_errors(src); +} + #[test] fn regression_7088() { // A test for code that initially broke when implementing inferring @@ -4194,3 +4346,21 @@ fn call_function_alias_type() { "#; assert_no_errors(src); } + +#[test] +fn errors_on_if_without_else_type_mismatch() { + let src = r#" + fn main() { + if true { + 1 + } + } + "#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + + let CompilationError::TypeError(TypeCheckError::Context { err, .. }) = &errors[0].0 else { + panic!("Expected a Context error"); + }; + assert!(matches!(**err, TypeCheckError::TypeMismatch { .. })); +} diff --git a/noir/noir-repo/noir_stdlib/src/cmp.nr b/noir/noir-repo/noir_stdlib/src/cmp.nr index 7f19594c30e..16bcd4390f7 100644 --- a/noir/noir-repo/noir_stdlib/src/cmp.nr +++ b/noir/noir-repo/noir_stdlib/src/cmp.nr @@ -360,13 +360,7 @@ where let mut result = Ordering::equal(); for i in 0..self.len() { if result == Ordering::equal() { - let result_i = self[i].cmp(other[i]); - - if result_i == Ordering::less() { - result = result_i; - } else if result_i == Ordering::greater() { - result = result_i; - } + result = self[i].cmp(other[i]); } } result @@ -383,13 +377,7 @@ where let mut result = self.len().cmp(other.len()); for i in 0..self.len() { if result == Ordering::equal() { - let result_i = self[i].cmp(other[i]); - - if result_i == Ordering::less() { - result = result_i; - } else if result_i == Ordering::greater() { - result = result_i; - } + result = self[i].cmp(other[i]); } } result diff --git a/noir/noir-repo/test_programs/compilation_report.sh b/noir/noir-repo/test_programs/compilation_report.sh index 66f1a53626e..6f7ef254477 100755 --- a/noir/noir-repo/test_programs/compilation_report.sh +++ b/noir/noir-repo/test_programs/compilation_report.sh @@ -8,7 +8,7 @@ base_path="$current_dir/execution_success" # Tests to be profiled for compilation report tests_to_profile=("sha256_regression" "regression_4709" "ram_blowup_regression" "global_var_regression_entry_points") -echo "{\"compilation_reports\": [ " > $current_dir/compilation_report.json +echo "[ " > $current_dir/compilation_report.json # If there is an argument that means we want to generate a report for only the current directory if [ "$1" == "1" ]; then @@ -62,7 +62,7 @@ for dir in ${tests_to_profile[@]}; do printf "%.3f\n", 0 }' <<<"${TIMES[@]}") - jq -rc "{artifact_name: \"$PACKAGE_NAME\", time: \""$AVG_TIME"s\"}" --null-input >> $current_dir/compilation_report.json + jq -rc "{name: \"$PACKAGE_NAME\", value: \""$AVG_TIME"\" | tonumber, unit: \"s\"}" --null-input >> $current_dir/compilation_report.json if (($ITER != $NUM_ARTIFACTS)); then echo "," >> $current_dir/compilation_report.json @@ -73,4 +73,4 @@ for dir in ${tests_to_profile[@]}; do ITER=$(( $ITER + 1 )) done -echo "]}" >> $current_dir/compilation_report.json +echo "]" >> $current_dir/compilation_report.json diff --git a/noir/noir-repo/test_programs/execution_report.sh b/noir/noir-repo/test_programs/execution_report.sh index dcdc1bb8879..5c916ef6bd7 100755 --- a/noir/noir-repo/test_programs/execution_report.sh +++ b/noir/noir-repo/test_programs/execution_report.sh @@ -8,7 +8,7 @@ base_path="$current_dir/execution_success" # Tests to be profiled for execution report tests_to_profile=("sha256_regression" "regression_4709" "ram_blowup_regression" "global_var_regression_entry_points") -echo "{\"execution_reports\": [ " > $current_dir/execution_report.json +echo "[" > $current_dir/execution_report.json # If there is an argument that means we want to 
generate a report for only the current directory if [ "$1" == "1" ]; then @@ -70,7 +70,7 @@ for dir in ${tests_to_profile[@]}; do printf "%.3f\n", 0 }' <<<"${TIMES[@]}") - jq -rc "{artifact_name: \"$PACKAGE_NAME\", time: \""$AVG_TIME"s\"}" --null-input >> $current_dir/execution_report.json + jq -rc "{name: \"$PACKAGE_NAME\", value: \""$AVG_TIME"\" | tonumber, unit: \"s\"}" --null-input >> $current_dir/execution_report.json if (($ITER != $NUM_ARTIFACTS)); then echo "," >> $current_dir/execution_report.json @@ -81,4 +81,4 @@ for dir in ${tests_to_profile[@]}; do ITER=$(( $ITER + 1 )) done -echo "]}" >> $current_dir/execution_report.json +echo "]" >> $current_dir/execution_report.json diff --git a/noir/noir-repo/test_programs/execution_success/signed_inactive_division_by_zero/Nargo.toml b/noir/noir-repo/test_programs/execution_success/signed_inactive_division_by_zero/Nargo.toml new file mode 100644 index 00000000000..26ed465da76 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/signed_inactive_division_by_zero/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "signed_inactive_division_by_zero" +type = "bin" +authors = [""] + +[dependencies] \ No newline at end of file diff --git a/noir/noir-repo/test_programs/execution_success/signed_inactive_division_by_zero/Prover.toml b/noir/noir-repo/test_programs/execution_success/signed_inactive_division_by_zero/Prover.toml new file mode 100644 index 00000000000..d2ca117fc8c --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/signed_inactive_division_by_zero/Prover.toml @@ -0,0 +1,3 @@ +a = "1" +b = "0" +condition = false \ No newline at end of file diff --git a/noir/noir-repo/test_programs/execution_success/signed_inactive_division_by_zero/src/main.nr b/noir/noir-repo/test_programs/execution_success/signed_inactive_division_by_zero/src/main.nr new file mode 100644 index 00000000000..115f8a60e43 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/signed_inactive_division_by_zero/src/main.nr @@ -0,0 +1,8 @@ +fn main(a: i32, b: i32, condition: bool) -> pub i32 { + if condition { + // If `condition` is not set then we should not trigger an assertion failure here. 
+ a / b + } else { + 0 + } +} diff --git a/noir/noir-repo/test_programs/memory_report.sh b/noir/noir-repo/test_programs/memory_report.sh index 00065fbfb36..eb83004affd 100755 --- a/noir/noir-repo/test_programs/memory_report.sh +++ b/noir/noir-repo/test_programs/memory_report.sh @@ -22,7 +22,7 @@ fi FIRST="1" FLAGS=${FLAGS:- ""} -echo "{\"memory_reports\": [ " > memory_report.json +echo "[" > memory_report.json for test_name in ${tests_to_profile[@]}; do cd $base_path/$test_name @@ -57,8 +57,9 @@ for test_name in ${tests_to_profile[@]}; do peak=${consumption:30:len} rm $current_dir/$test_name"_heap_analysis.txt" peak_memory=$($PARSE_MEMORY $peak) - echo -e " {\n \"artifact_name\":\"$test_name\",\n \"peak_memory\":\"$peak_memory\"\n }" >> $current_dir"/memory_report.json" + jq -rc "{name: \"$test_name\", value: \"$peak_memory\" | tonumber, unit: \"MB\"}" --null-input >> $current_dir/memory_report.json + done -echo "]}" >> $current_dir"/memory_report.json" +echo "]" >> $current_dir"/memory_report.json" diff --git a/noir/noir-repo/tooling/lsp/src/requests/inlay_hint.rs b/noir/noir-repo/tooling/lsp/src/requests/inlay_hint.rs index 8e091d1eb04..b9673755da6 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/inlay_hint.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/inlay_hint.rs @@ -575,6 +575,7 @@ fn get_expression_name(expression: &Expression) -> Option { ExpressionKind::Parenthesized(expr) => get_expression_name(expr), ExpressionKind::AsTraitPath(path) => Some(path.impl_item.to_string()), ExpressionKind::TypePath(path) => Some(path.item.to_string()), + ExpressionKind::Constrain(constrain) => Some(constrain.kind.to_string()), ExpressionKind::Constructor(..) | ExpressionKind::Infix(..) | ExpressionKind::Index(..) diff --git a/noir/noir-repo/tooling/lsp/src/requests/signature_help.rs b/noir/noir-repo/tooling/lsp/src/requests/signature_help.rs index 99bd463f44a..4a2609d7ae3 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/signature_help.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/signature_help.rs @@ -8,7 +8,7 @@ use lsp_types::{ use noirc_errors::{Location, Span}; use noirc_frontend::{ ast::{ - CallExpression, ConstrainKind, ConstrainStatement, Expression, FunctionReturnType, + CallExpression, ConstrainExpression, ConstrainKind, Expression, FunctionReturnType, MethodCallExpression, Statement, Visitor, }, hir_def::{function::FuncMeta, stmt::HirPattern}, @@ -383,7 +383,7 @@ impl<'a> Visitor for SignatureFinder<'a> { false } - fn visit_constrain_statement(&mut self, constrain_statement: &ConstrainStatement) -> bool { + fn visit_constrain_statement(&mut self, constrain_statement: &ConstrainExpression) -> bool { constrain_statement.accept_children(self); if self.signature_help.is_some() { diff --git a/noir/noir-repo/tooling/nargo_fmt/src/formatter/expression.rs b/noir/noir-repo/tooling/nargo_fmt/src/formatter/expression.rs index 98eabe10e7e..54d9d2e41f5 100644 --- a/noir/noir-repo/tooling/nargo_fmt/src/formatter/expression.rs +++ b/noir/noir-repo/tooling/nargo_fmt/src/formatter/expression.rs @@ -1,9 +1,10 @@ use noirc_frontend::{ ast::{ ArrayLiteral, BinaryOpKind, BlockExpression, CallExpression, CastExpression, - ConstructorExpression, Expression, ExpressionKind, IfExpression, IndexExpression, - InfixExpression, Lambda, Literal, MatchExpression, MemberAccessExpression, - MethodCallExpression, PrefixExpression, TypePath, UnaryOp, UnresolvedTypeData, + ConstrainExpression, ConstrainKind, ConstructorExpression, Expression, ExpressionKind, + IfExpression, IndexExpression, InfixExpression, 
Lambda, Literal, MatchExpression, + MemberAccessExpression, MethodCallExpression, PrefixExpression, TypePath, UnaryOp, + UnresolvedTypeData, }, token::{Keyword, Token}, }; @@ -39,6 +40,9 @@ impl<'a, 'b> ChunkFormatter<'a, 'b> { ExpressionKind::MethodCall(method_call) => { group.group(self.format_method_call(*method_call)); } + ExpressionKind::Constrain(constrain) => { + group.group(self.format_constrain(constrain)); + } ExpressionKind::Constructor(constructor) => { group.group(self.format_constructor(*constructor)); } @@ -1145,6 +1149,40 @@ impl<'a, 'b> ChunkFormatter<'a, 'b> { group } + fn format_constrain(&mut self, constrain_statement: ConstrainExpression) -> ChunkGroup { + let mut group = ChunkGroup::new(); + + let keyword = match constrain_statement.kind { + ConstrainKind::Assert => Keyword::Assert, + ConstrainKind::AssertEq => Keyword::AssertEq, + ConstrainKind::Constrain => { + unreachable!("constrain always produces an error, and the formatter doesn't run when there are errors") + } + }; + + group.text(self.chunk(|formatter| { + formatter.write_keyword(keyword); + formatter.write_left_paren(); + })); + + group.kind = GroupKind::ExpressionList { + prefix_width: group.width(), + expressions_count: constrain_statement.arguments.len(), + }; + + self.format_expressions_separated_by_comma( + constrain_statement.arguments, + false, // force trailing comma + &mut group, + ); + + group.text(self.chunk(|formatter| { + formatter.write_right_paren(); + })); + + group + } + pub(super) fn format_block_expression( &mut self, block: BlockExpression, diff --git a/noir/noir-repo/tooling/nargo_fmt/src/formatter/statement.rs b/noir/noir-repo/tooling/nargo_fmt/src/formatter/statement.rs index 751bc419d4a..ae4177c224b 100644 --- a/noir/noir-repo/tooling/nargo_fmt/src/formatter/statement.rs +++ b/noir/noir-repo/tooling/nargo_fmt/src/formatter/statement.rs @@ -1,8 +1,7 @@ use noirc_frontend::{ ast::{ - AssignStatement, ConstrainKind, ConstrainStatement, Expression, ExpressionKind, - ForLoopStatement, ForRange, LetStatement, Pattern, Statement, StatementKind, - UnresolvedType, UnresolvedTypeData, + AssignStatement, Expression, ExpressionKind, ForLoopStatement, ForRange, LetStatement, + Pattern, Statement, StatementKind, UnresolvedType, UnresolvedTypeData, }, token::{Keyword, SecondaryAttribute, Token, TokenKind}, }; @@ -49,9 +48,6 @@ impl<'a, 'b> ChunkFormatter<'a, 'b> { StatementKind::Let(let_statement) => { group.group(self.format_let_statement(let_statement)); } - StatementKind::Constrain(constrain_statement) => { - group.group(self.format_constrain_statement(constrain_statement)); - } StatementKind::Expression(expression) => match expression.kind { ExpressionKind::Block(block) => group.group(self.format_block_expression( block, true, // force multiple lines @@ -153,44 +149,6 @@ impl<'a, 'b> ChunkFormatter<'a, 'b> { group } - fn format_constrain_statement( - &mut self, - constrain_statement: ConstrainStatement, - ) -> ChunkGroup { - let mut group = ChunkGroup::new(); - - let keyword = match constrain_statement.kind { - ConstrainKind::Assert => Keyword::Assert, - ConstrainKind::AssertEq => Keyword::AssertEq, - ConstrainKind::Constrain => { - unreachable!("constrain always produces an error, and the formatter doesn't run when there are errors") - } - }; - - group.text(self.chunk(|formatter| { - formatter.write_keyword(keyword); - formatter.write_left_paren(); - })); - - group.kind = GroupKind::ExpressionList { - prefix_width: group.width(), - expressions_count: constrain_statement.arguments.len(), - }; - 
- self.format_expressions_separated_by_comma( - constrain_statement.arguments, - false, // force trailing comma - &mut group, - ); - - group.text(self.chunk(|formatter| { - formatter.write_right_paren(); - formatter.write_semicolon(); - })); - - group - } - fn format_assign(&mut self, assign_statement: AssignStatement) -> ChunkGroup { let mut group = ChunkGroup::new(); let mut is_op_assign = false; diff --git a/noir/noir-repo/tooling/noirc_abi_wasm/build.sh b/noir/noir-repo/tooling/noirc_abi_wasm/build.sh index c07d2d8a4c1..16fb26e55db 100755 --- a/noir/noir-repo/tooling/noirc_abi_wasm/build.sh +++ b/noir/noir-repo/tooling/noirc_abi_wasm/build.sh @@ -25,7 +25,7 @@ function run_if_available { require_command jq require_command cargo require_command wasm-bindgen -#require_command wasm-opt +require_command wasm-opt self_path=$(dirname "$(readlink -f "$0")") pname=$(cargo read-manifest | jq -r '.name') diff --git a/noir/noir-repo/yarn.lock b/noir/noir-repo/yarn.lock index e831b9c476a..26298a6e6b4 100644 --- a/noir/noir-repo/yarn.lock +++ b/noir/noir-repo/yarn.lock @@ -221,9 +221,9 @@ __metadata: languageName: node linkType: hard -"@aztec/bb.js@portal:../../../../barretenberg/ts::locator=integration-tests%40workspace%3Acompiler%2Fintegration-tests": - version: 0.0.0-use.local - resolution: "@aztec/bb.js@portal:../../../../barretenberg/ts::locator=integration-tests%40workspace%3Acompiler%2Fintegration-tests" +"@aztec/bb.js@npm:0.72.1": + version: 0.72.1 + resolution: "@aztec/bb.js@npm:0.72.1" dependencies: comlink: ^4.4.1 commander: ^12.1.0 @@ -232,9 +232,10 @@ __metadata: pako: ^2.1.0 tslib: ^2.4.0 bin: - bb.js: ./dest/node/main.js + bb.js: dest/node/main.js + checksum: 143f0062a31e262ceff6e454d31e2a2672afd8dcbff0dafb17d5308f8c5312048e5d3503d80e7ad9ff4b17b6c3d36e8ffbff8d2deda2cdb684f19fa64d5a04ab languageName: node - linkType: soft + linkType: hard "@babel/code-frame@npm:^7.0.0, @babel/code-frame@npm:^7.10.4, @babel/code-frame@npm:^7.12.11, @babel/code-frame@npm:^7.16.0, @babel/code-frame@npm:^7.22.13, @babel/code-frame@npm:^7.23.5, @babel/code-frame@npm:^7.8.3": version: 7.23.5 @@ -16397,7 +16398,7 @@ __metadata: version: 0.0.0-use.local resolution: "integration-tests@workspace:compiler/integration-tests" dependencies: - "@aztec/bb.js": "portal:../../../../barretenberg/ts" + "@aztec/bb.js": 0.72.1 "@noir-lang/noir_js": "workspace:*" "@noir-lang/noir_wasm": "workspace:*" "@nomicfoundation/hardhat-chai-matchers": ^2.0.0
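The parser, formatter, and LSP hunks above all follow from `assert`/`assert_eq` moving from `StatementKind::Constrain` to `ExpressionKind::Constrain` (see the new `parses_let_with_assert` parser test and the `format_constrain` group added to the expression formatter). A minimal Noir sketch of the user-visible effect; the bindings below are illustrative and not part of the patch:

    fn main(x: Field, y: Field) {
        // `assert` now parses as an expression, so it may appear on the
        // right-hand side of a `let`, as exercised by `parses_let_with_assert`.
        let _ = assert(x != y);
        // `assert_eq` follows the same rule.
        let _ = assert_eq(x, x);
    }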
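The new `infers_lambda_argument_from_*` frontend tests all exercise one feature: a lambda's parameter types can now be inferred from the expected `fn` type, including through type aliases, return types, and tuples. A condensed Noir sketch combining two of the test cases shown in the hunk:

    pub struct Foo {
        value: Field,
    }

    type FooFn = fn(Foo) -> Field;

    fn call(f: FooFn) -> Field {
        f(Foo { value: 1 })
    }

    fn main() {
        // The type of `foo` is inferred from the aliased fn type annotation...
        let _: FooFn = |foo| foo.value;
        // ...and from the parameter type of `call`.
        let _ = call(|foo| foo.value);
    }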
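The `cmp.nr` change collapses the `less`/`greater` branches into a single assignment: the removed code only skipped the write when the element comparison was `equal`, and writing `Ordering::equal()` into a result that is already `equal` is a no-op, so the lexicographic behaviour is unchanged. A small usage sketch, assuming the array `Ord` impl shown in the hunk, that `u32` implements `Ord`, and that `Ordering` is importable as `std::cmp::Ordering` (the concrete values are illustrative):

    use std::cmp::Ordering;

    fn main() {
        let a: [u32; 3] = [1, 2, 3];
        let b: [u32; 3] = [1, 2, 4];
        // The first differing element decides the result, exactly as before the refactor.
        assert(a.cmp(b) == Ordering::less());
        assert(b.cmp(a) == Ordering::greater());
        assert(a.cmp(a) == Ordering::equal());
    }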