diff --git a/.noir-sync-commit b/.noir-sync-commit index 720899a3dd9..ebbb67c979e 100644 --- a/.noir-sync-commit +++ b/.noir-sync-commit @@ -1 +1 @@ -ed9977a57e0015ed653f54ce9377225434a947df +a0704aa53250aed9c5460a60f5aaffa87772732f diff --git a/noir/noir-repo/.github/workflows/reports.yml b/noir/noir-repo/.github/workflows/reports.yml index c3f04146476..4d8f036a64a 100644 --- a/noir/noir-repo/.github/workflows/reports.yml +++ b/noir/noir-repo/.github/workflows/reports.yml @@ -421,26 +421,6 @@ jobs: retention-days: 3 overwrite: true - - name: Convert to `benchmark-action` format - run: | - jq ".compilation_reports | map({name: .artifact_name, value: (.time[:-1] | tonumber), unit: \"s\"}) " ./compilation_report.json > time_bench.json - - - name: Store benchmark result - continue-on-error: true - uses: benchmark-action/github-action-benchmark@4de1bed97a47495fc4c5404952da0499e31f5c29 - with: - name: "Compilation Time" - tool: "customSmallerIsBetter" - output-file-path: ./time_bench.json - github-token: ${{ secrets.GITHUB_TOKEN }} - # We want this to only run on master to avoid garbage data from PRs being added. - auto-push: ${{ github.ref == 'refs/heads/master' }} - alert-threshold: "110%" - comment-on-alert: true - fail-on-alert: false - alert-comment-cc-users: "@TomAFrench" - max-items-in-chart: 50 - external_repo_memory_report: needs: [build-nargo] runs-on: ubuntu-22.04 @@ -589,7 +569,7 @@ jobs: github-token: ${{ secrets.GITHUB_TOKEN }} # We want this to only run on master to avoid garbage data from PRs being added. auto-push: ${{ github.ref == 'refs/heads/master' }} - alert-threshold: "110%" + alert-threshold: "120%" comment-on-alert: true fail-on-alert: false alert-comment-cc-users: "@TomAFrench" @@ -638,7 +618,7 @@ jobs: github-token: ${{ secrets.GITHUB_TOKEN }} # We want this to only run on master to avoid garbage data from PRs being added. 
auto-push: ${{ github.ref == 'refs/heads/master' }} - alert-threshold: "110%" + alert-threshold: "120%" comment-on-alert: true fail-on-alert: false alert-comment-cc-users: "@TomAFrench" @@ -689,7 +669,7 @@ jobs: github-token: ${{ secrets.GITHUB_TOKEN }} # We want this to only run on master to avoid garbage data from PRs being added. auto-push: ${{ github.ref == 'refs/heads/master' }} - alert-threshold: "110%" + alert-threshold: "120%" comment-on-alert: true fail-on-alert: false alert-comment-cc-users: "@TomAFrench" @@ -739,7 +719,7 @@ jobs: github-token: ${{ secrets.GITHUB_TOKEN }} # We want this to only run on master to avoid garbage data from PRs being added. auto-push: ${{ github.ref == 'refs/heads/master' }} - alert-threshold: "110%" + alert-threshold: "120%" comment-on-alert: true fail-on-alert: false alert-comment-cc-users: "@TomAFrench" diff --git a/noir/noir-repo/.github/workflows/test-rust-workspace-msrv.yml b/noir/noir-repo/.github/workflows/test-rust-workspace-msrv.yml index f4fbbf79d89..38bc3cba153 100644 --- a/noir/noir-repo/.github/workflows/test-rust-workspace-msrv.yml +++ b/noir/noir-repo/.github/workflows/test-rust-workspace-msrv.yml @@ -52,7 +52,7 @@ jobs: tool: nextest@0.9.67 - name: Build and archive tests - run: cargo nextest archive --workspace --release --archive-file nextest-archive.tar.zst + run: cargo nextest archive --workspace --archive-file nextest-archive.tar.zst - name: Upload archive to workflow uses: actions/upload-artifact@v4 diff --git a/noir/noir-repo/.github/workflows/test-rust-workspace.yml b/noir/noir-repo/.github/workflows/test-rust-workspace.yml index 5d8abbc3e55..fe421361072 100644 --- a/noir/noir-repo/.github/workflows/test-rust-workspace.yml +++ b/noir/noir-repo/.github/workflows/test-rust-workspace.yml @@ -29,7 +29,7 @@ jobs: - uses: Swatinem/rust-cache@v2 with: - key: x86_64-unknown-linux-gnu + key: x86_64-unknown-linux-gnu-debug cache-on-failure: true save-if: ${{ github.event_name != 'merge_group' }} @@ -39,7 +39,7 @@ 
jobs: tool: nextest@0.9.67 - name: Build and archive tests - run: cargo nextest archive --workspace --release --archive-file nextest-archive.tar.zst + run: cargo nextest archive --workspace --archive-file nextest-archive.tar.zst - name: Upload archive to workflow uses: actions/upload-artifact@v4 diff --git a/noir/noir-repo/Cargo.lock b/noir/noir-repo/Cargo.lock index e82d47d690a..18b357824d2 100644 --- a/noir/noir-repo/Cargo.lock +++ b/noir/noir-repo/Cargo.lock @@ -240,19 +240,20 @@ dependencies = [ [[package]] name = "anstyle-wincon" -version = "3.0.6" +version = "3.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2109dbce0e72be3ec00bed26e6a7479ca384ad226efdd66db8fa2e3a38c83125" +checksum = "ca3534e77181a9cc07539ad51f2141fe32f6c3ffd4df76db8ad92346b003ae4e" dependencies = [ "anstyle", + "once_cell", "windows-sys 0.59.0", ] [[package]] name = "anyhow" -version = "1.0.93" +version = "1.0.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c95c10ba0b00a02636238b814946408b1322d5ac4760326e6fb8ec956d85775" +checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04" [[package]] name = "ark-bls12-381" @@ -318,7 +319,7 @@ dependencies = [ "ark-std 0.5.0", "educe", "fnv", - "hashbrown 0.15.1", + "hashbrown 0.15.2", "itertools 0.13.0", "num-bigint", "num-integer", @@ -383,7 +384,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "62945a2f7e6de02a31fe400aa489f0e0f5b2502e69f95f853adb82a96c7a6b60" dependencies = [ "quote", - "syn 2.0.87", + "syn 2.0.96", ] [[package]] @@ -409,7 +410,7 @@ dependencies = [ "num-traits", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.96", ] [[package]] @@ -449,7 +450,7 @@ dependencies = [ "ark-std 0.5.0", "educe", "fnv", - "hashbrown 0.15.1", + "hashbrown 0.15.2", ] [[package]] @@ -496,7 +497,7 @@ checksum = "213888f660fddcca0d257e88e54ac05bca01885f258ccdf695bafd77031bb69d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + 
"syn 2.0.96", ] [[package]] @@ -541,7 +542,7 @@ dependencies = [ "bstr", "doc-comment", "libc", - "predicates 3.1.2", + "predicates 3.1.3", "predicates-core", "predicates-tree", "wait-timeout", @@ -556,7 +557,7 @@ dependencies = [ "anstyle", "doc-comment", "globwalk", - "predicates 3.1.2", + "predicates 3.1.3", "predicates-core", "predicates-tree", "tempfile", @@ -584,13 +585,13 @@ dependencies = [ [[package]] name = "async-trait" -version = "0.1.83" +version = "0.1.85" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "721cae7de5c34fbb2acd27e21e6d2cf7b886dce0c27388d46c4e6c47ea4318dd" +checksum = "3f934833b4b7233644e5848f235df3f57ed8c80f1528a26c3dfa13d2147fa056" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.96", ] [[package]] @@ -682,9 +683,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.6.0" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" +checksum = "1be3f42a67d6d345ecd59f675f3f012d6974981560836e938c22b424b85ce1be" [[package]] name = "bitmaps" @@ -729,9 +730,9 @@ dependencies = [ [[package]] name = "blake3" -version = "1.5.4" +version = "1.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d82033247fd8e890df8f740e407ad4d038debb9eb1f40533fffb32e7d17dc6f7" +checksum = "b8ee0c1824c4dea5b5f81736aff91bae041d2c07ee1192bec91054e10e3e601e" dependencies = [ "arrayref", "arrayvec", @@ -801,12 +802,12 @@ dependencies = [ [[package]] name = "bstr" -version = "1.10.0" +version = "1.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40723b8fb387abc38f4f4a37c09073622e41dd12327033091ef8950659e6dc0c" +checksum = "531a9155a481e2ee699d4f98f43c0ca4ff8ee1bfd55c31e9e98fb29d2b176fe0" dependencies = [ "memchr", - "regex-automata 0.4.8", + "regex-automata 0.4.9", "serde", ] @@ -829,9 
+830,9 @@ checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" [[package]] name = "bytemuck" -version = "1.19.0" +version = "1.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8334215b81e418a0a7bdb8ef0849474f40bb10c8b71f1c4ed315cff49f32494d" +checksum = "ef657dfab802224e671f5818e9a4935f9b1957ed18e58292690cc39e7a4092a3" [[package]] name = "byteorder" @@ -841,9 +842,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.8.0" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ac0150caa2ae65ca5bd83f25c7de183dea78d4d366469f148435e2acfbad0da" +checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b" [[package]] name = "cast" @@ -853,9 +854,9 @@ checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" [[package]] name = "cc" -version = "1.1.36" +version = "1.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baee610e9452a8f6f0a1b6194ec09ff9e2d85dea54432acdae41aa0761c95d70" +checksum = "c8293772165d9345bdaaa39b45b2109591e63fe5e6fbc23c6ff930a048aa310b" dependencies = [ "shlex", ] @@ -874,9 +875,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chrono" -version = "0.4.38" +version = "0.4.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" +checksum = "7e36cc9d416881d2e24f9a963be5fb1cd90966419ac844274161d10488b3e825" dependencies = [ "android-tzdata", "iana-time-zone", @@ -916,9 +917,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.20" +version = "4.5.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b97f376d85a664d5837dbae44bf546e6477a679ff6610010f17276f686d867e8" +checksum = 
"a8eb5e908ef3a6efbe1ed62520fb7287959888c88485abe072543190ecc66783" dependencies = [ "clap_builder", "clap_derive", @@ -934,9 +935,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.20" +version = "4.5.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19bc80abd44e4bed93ca373a0704ccbd1b710dc5749406201bb018272808dc54" +checksum = "96b01801b5fc6a0a232407abc821660c9c6d25a1cafc0d4f85f29fb8d9afc121" dependencies = [ "anstream", "anstyle", @@ -946,30 +947,30 @@ dependencies = [ [[package]] name = "clap_complete" -version = "4.5.37" +version = "4.5.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11611dca53440593f38e6b25ec629de50b14cdfa63adc0fb856115a2c6d97595" +checksum = "33a7e468e750fa4b6be660e8b5651ad47372e8fb114030b594c2d75d48c5ffd0" dependencies = [ "clap", ] [[package]] name = "clap_derive" -version = "4.5.18" +version = "4.5.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ac6a0c7b1a9e9a5186361f67dfa1b88213572f427fb9ab038efb2bd8c582dab" +checksum = "54b755194d6389280185988721fffba69495eed5ee9feeee9a599b53db80318c" dependencies = [ "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.96", ] [[package]] name = "clap_lex" -version = "0.7.2" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1462739cb27611015575c0c11df5df7601141071f07518d56fcc1be504cbec97" +checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" [[package]] name = "clipboard-win" @@ -1065,14 +1066,14 @@ checksum = "55b672471b4e9f9e95499ea597ff64941a309b2cdbffcc46f2cc5e2d971fd335" [[package]] name = "console" -version = "0.15.8" +version = "0.15.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e1f83fc076bd6dd27517eacdf25fef6c4dfe5f1d7448bafaaf3a26f13b5e4eb" +checksum = "ea3c6ecd8059b57859df5c69830340ed3c41d30e3da0c1cbed90a96ac853041b" dependencies = [ - "encode_unicode 
0.3.6", - "lazy_static", + "encode_unicode", "libc", - "windows-sys 0.52.0", + "once_cell", + "windows-sys 0.59.0", ] [[package]] @@ -1099,18 +1100,18 @@ checksum = "3618cccc083bb987a415d85c02ca6c9994ea5b44731ec28b9ecf09658655fba9" [[package]] name = "const_format" -version = "0.2.33" +version = "0.2.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50c655d81ff1114fb0dcdea9225ea9f0cc712a6f8d189378e82bdf62a473a64b" +checksum = "126f97965c8ad46d6d9163268ff28432e8f6a1196a55578867832e3049df63dd" dependencies = [ "const_format_proc_macros", ] [[package]] name = "const_format_proc_macros" -version = "0.2.33" +version = "0.2.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eff1a44b93f47b1bac19a27932f5c591e43d1ba357ee4f61526c8a25603f0eb1" +checksum = "1d57c2eccfb16dbac1f4e61e206105db5820c9d26c3c472bc17c774259ef7744" dependencies = [ "proc-macro2", "quote", @@ -1159,9 +1160,9 @@ dependencies = [ [[package]] name = "cpufeatures" -version = "0.2.14" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "608697df725056feaccfa42cffdaeeec3fccc4ffc38358ecd19b243e716a78e0" +checksum = "16b80225097f2e5ae4e7179dd2266824648f3e2f49d9134d584b76389d31c4c3" dependencies = [ "libc", ] @@ -1213,18 +1214,18 @@ dependencies = [ [[package]] name = "crossbeam-channel" -version = "0.5.13" +version = "0.5.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33480d6946193aa8033910124896ca395333cae7e2d1113d1fef6c3272217df2" +checksum = "06ba6d68e24814cb8de6bb986db8222d3a027d15872cabc0d18817bc3c0e4471" dependencies = [ "crossbeam-utils", ] [[package]] name = "crossbeam-deque" -version = "0.8.5" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "613f8cc01fe9cf1a3eb3d7f488fd2fa8388403e97039e2f73692932e291a770d" +checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51" dependencies = [ 
"crossbeam-epoch", "crossbeam-utils", @@ -1241,9 +1242,9 @@ dependencies = [ [[package]] name = "crossbeam-utils" -version = "0.8.20" +version = "0.8.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80" +checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" [[package]] name = "crunchy" @@ -1275,9 +1276,9 @@ dependencies = [ [[package]] name = "csv" -version = "1.3.0" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac574ff4d437a7b5ad237ef331c17ccca63c46479e5b5453eb8e10bb99a759fe" +checksum = "acdc4883a9c96732e4733212c01447ebd805833b7275a73ca3ee080fd77afdaf" dependencies = [ "csv-core", "itoa", @@ -1326,7 +1327,7 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn 2.0.87", + "syn 2.0.96", ] [[package]] @@ -1337,7 +1338,7 @@ checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" dependencies = [ "darling_core", "quote", - "syn 2.0.87", + "syn 2.0.96", ] [[package]] @@ -1460,7 +1461,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.96", ] [[package]] @@ -1505,7 +1506,7 @@ dependencies = [ "enum-ordinalize", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.96", ] [[package]] @@ -1534,12 +1535,6 @@ dependencies = [ "zeroize", ] -[[package]] -name = "encode_unicode" -version = "0.3.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f" - [[package]] name = "encode_unicode" version = "1.0.0" @@ -1569,23 +1564,23 @@ checksum = "0d28318a75d4aead5c4db25382e8ef717932d0346600cacae6357eb5941bc5ff" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.96", ] [[package]] name = "env_filter" -version = "0.1.2" +version = "0.1.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f2c92ceda6ceec50f43169f9ee8424fe2db276791afde7b2cd8bc084cb376ab" +checksum = "186e05a59d4c50738528153b83b0b0194d3a29507dfec16eccd4b342903397d0" dependencies = [ "log", ] [[package]] name = "env_logger" -version = "0.11.5" +version = "0.11.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e13fa619b91fb2381732789fc5de83b45675e882f66623b7d8cb4f643017018d" +checksum = "dcaee3d8e3cfc3fd92428d477bc97fc29ec8716d180c0d74c643bb26166660e0" dependencies = [ "env_filter", "log", @@ -1608,7 +1603,7 @@ checksum = "3bf679796c0322556351f287a51b49e48f7c4986e727b5dd78c972d30e2e16cc" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.96", ] [[package]] @@ -1619,12 +1614,12 @@ checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "errno" -version = "0.3.9" +version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" +checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" dependencies = [ "libc", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -1649,9 +1644,9 @@ dependencies = [ [[package]] name = "fastrand" -version = "2.2.0" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "486f806e73c5707928240ddc295403b1b93c96a02038563881c4a2fd84b81ac4" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" [[package]] name = "fd-lock" @@ -1736,12 +1731,12 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" [[package]] name = "flate2" -version = "1.0.34" +version = "1.0.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1b589b4dc103969ad3cf85c950899926ec64300a1a46d76c03a6072957036f0" +checksum = "c936bfdafb507ebbf50b8074c54fa31c5be9a1e7e5f467dd659697041407d07c" 
dependencies = [ "crc32fast", - "miniz_oxide 0.8.0", + "miniz_oxide 0.8.3", ] [[package]] @@ -1852,7 +1847,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.96", ] [[package]] @@ -1925,9 +1920,9 @@ checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" [[package]] name = "glob" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" +checksum = "a8d1add55171497b4705a648c6b583acafb01d58050a51727785f0b2c8e0a2b2" [[package]] name = "globset" @@ -1938,7 +1933,7 @@ dependencies = [ "aho-corasick", "bstr", "log", - "regex-automata 0.4.8", + "regex-automata 0.4.9", "regex-syntax 0.8.5", ] @@ -1948,7 +1943,7 @@ version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0bf760ebf69878d9fd8f110c89703d90ce35095324d1f1edcb595c63945ee757" dependencies = [ - "bitflags 2.6.0", + "bitflags 2.7.0", "ignore", "walkdir", ] @@ -2001,7 +1996,7 @@ dependencies = [ "futures-core", "futures-sink", "http", - "indexmap 2.6.0", + "indexmap 2.7.0", "slab", "tokio", "tokio-util", @@ -2064,9 +2059,9 @@ checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" [[package]] name = "hashbrown" -version = "0.15.1" +version = "0.15.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a9bfc1af68b1726ea47d3d5109de126281def866b33970e10fbab11b5dafab3" +checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" dependencies = [ "allocator-api2", "equivalent", @@ -2094,12 +2089,6 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" -[[package]] -name = "hermit-abi" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" - [[package]] name = "hermit-abi" version = "0.4.0" @@ -2190,9 +2179,9 @@ dependencies = [ [[package]] name = "hyper-rustls" -version = "0.27.3" +version = "0.27.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08afdbb5c31130e3034af566421053ab03787c640246a446327f550d11bcb333" +checksum = "2d191583f3da1305256f22463b9bb0471acad48a4e534a5218b9963e9c1f59b2" dependencies = [ "futures-util", "http", @@ -2369,7 +2358,7 @@ checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.96", ] [[package]] @@ -2409,7 +2398,7 @@ dependencies = [ "globset", "log", "memchr", - "regex-automata 0.4.8", + "regex-automata 0.4.9", "same-file", "walkdir", "winapi-util", @@ -2473,12 +2462,12 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.6.0" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "707907fe3c25f5424cce2cb7e1cbcafee6bdbe735ca90ef77c29e84591e5b9da" +checksum = "62f822373a4fe84d4bb149bf54e584a7f4abec90e072ed49cda0edea5b95471f" dependencies = [ "equivalent", - "hashbrown 0.15.1", + "hashbrown 0.15.2", "serde", ] @@ -2494,7 +2483,7 @@ dependencies = [ "crossbeam-utils", "dashmap", "env_logger", - "indexmap 2.6.0", + "indexmap 2.7.0", "is-terminal", "itoa", "log", @@ -2540,7 +2529,7 @@ version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "261f68e344040fbd0edea105bef17c66edf46f984ddb1115b775ce31be948f4b" dependencies = [ - "hermit-abi 0.4.0", + "hermit-abi", "libc", "windows-sys 0.52.0", ] @@ -2575,9 +2564,29 @@ dependencies = [ [[package]] name = "itoa" -version = "1.0.11" +version = "1.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674" + +[[package]] +name = "jni" +version = "0.19.0" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c6df18c2e3db7e453d3c6ac5b3e9d5182664d28788126d39b91f2d1e22b017ec" +dependencies = [ + "cesu8", + "combine", + "jni-sys", + "log", + "thiserror", + "walkdir", +] + +[[package]] +name = "jni-sys" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" +checksum = "8eaf4bc02d17cbdd7ff4c7438cafcdf7fb9a4613313ad11b4f8fefe7d3fa0130" [[package]] name = "jni" @@ -2681,7 +2690,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.96", ] [[package]] @@ -2795,9 +2804,9 @@ checksum = "82903360c009b816f5ab72a9b68158c27c301ee2c3f20655b55c5e589e7d3bb7" [[package]] name = "libc" -version = "0.2.162" +version = "0.2.169" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18d287de67fe55fd7e1581fe933d965a5a9477b38e949cfa9f8574ef01506398" +checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a" [[package]] name = "libredox" @@ -2805,7 +2814,7 @@ version = "0.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3af92c55d7d839293953fcd0fda5ecfe93297cfde6ffbdec13b41d99c0ba6607" dependencies = [ - "bitflags 2.6.0", + "bitflags 2.7.0", "libc", "redox_syscall 0.4.1", ] @@ -2816,9 +2825,9 @@ version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" dependencies = [ - "bitflags 2.6.0", + "bitflags 2.7.0", "libc", - "redox_syscall 0.5.7", + "redox_syscall 0.5.8", ] [[package]] @@ -2835,15 +2844,15 @@ dependencies = [ [[package]] name = "linux-raw-sys" -version = "0.4.14" +version = "0.4.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" +checksum = 
"d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" [[package]] name = "litemap" -version = "0.7.3" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "643cb0b8d4fcc284004d5fd0d67ccf61dfffadb7f75e1e71bc420f4688a3a704" +checksum = "4ee93343901ab17bd981295f2cf0026d4ad018c7c31ba84549a4ddbb47a45104" [[package]] name = "lock_api" @@ -2857,9 +2866,9 @@ dependencies = [ [[package]] name = "log" -version = "0.4.22" +version = "0.4.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" +checksum = "04cbf5b083de1c7e0222a7a51dbfdba1cbe1c6ab0b15e29fff3f6c077fd9cd9f" [[package]] name = "louds-rs" @@ -2876,7 +2885,7 @@ version = "0.12.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "234cf4f4a04dc1f57e24b96cc0cd600cf2af460d4161ac5ecdd0af8e1f3b2a38" dependencies = [ - "hashbrown 0.15.1", + "hashbrown 0.15.2", ] [[package]] @@ -2955,9 +2964,9 @@ dependencies = [ [[package]] name = "miniz_oxide" -version = "0.8.0" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2d80299ef12ff69b16a84bb182e3b9df68b5a91574d3d4fa6e41b65deec4df1" +checksum = "b8402cab7aefae129c6977bb0ff1b8fd9a04eb5b51efc50a70bea51cda0c7924" dependencies = [ "adler2", ] @@ -2976,11 +2985,10 @@ dependencies = [ [[package]] name = "mio" -version = "1.0.2" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "80e04d1dcff3aae0704555fe5fee3bcfaf3d1fdf8a7e521d5b9d2b42acb52cec" +checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd" dependencies = [ - "hermit-abi 0.3.9", "libc", "wasi", "windows-sys 0.52.0", @@ -3046,6 +3054,7 @@ dependencies = [ "noir_lsp", "noirc_abi", "noirc_artifacts", + "noirc_artifacts_info", "noirc_driver", "noirc_errors", "noirc_frontend", @@ -3187,6 +3196,20 @@ dependencies = [ "rand", ] +[[package]] +name = 
"noir_inspector" +version = "1.0.0-beta.1" +dependencies = [ + "acir", + "clap", + "color-eyre", + "const_format", + "noirc_artifacts", + "noirc_artifacts_info", + "serde", + "serde_json", +] + [[package]] name = "noir_lsp" version = "1.0.0-beta.1" @@ -3319,6 +3342,21 @@ dependencies = [ "tempfile", ] +[[package]] +name = "noirc_artifacts_info" +version = "1.0.0-beta.1" +dependencies = [ + "acir", + "acvm", + "clap", + "iter-extended", + "noirc_artifacts", + "prettytable-rs", + "rayon", + "serde", + "serde_json", +] + [[package]] name = "noirc_driver" version = "1.0.0-beta.1" @@ -3436,7 +3474,7 @@ version = "6.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6205bd8bb1e454ad2e27422015fb5e4f2bcc7e08fa8f27058670d208324a4d2d" dependencies = [ - "bitflags 2.6.0", + "bitflags 2.7.0", "crossbeam-channel", "filetime", "fsevent-sys", @@ -3628,7 +3666,7 @@ checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.5.7", + "redox_syscall 0.5.8", "smallvec", "windows-targets 0.52.6", ] @@ -3682,7 +3720,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" dependencies = [ "fixedbitset", - "indexmap 2.6.0", + "indexmap 2.7.0", ] [[package]] @@ -3731,29 +3769,29 @@ dependencies = [ [[package]] name = "pin-project" -version = "1.1.7" +version = "1.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be57f64e946e500c8ee36ef6331845d40a93055567ec57e8fae13efd33759b95" +checksum = "1e2ec53ad785f4d35dac0adea7f7dc6f1bb277ad84a680c7afefeae05d1f5916" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.1.7" +version = "1.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c0f5fad0874fc7abcd4d750e76917eaebbecaa2c20bde22e1dbeeba8beb758c" +checksum = 
"d56a66c0c55993aa927429d0f8a0abfd74f084e4d9c192cffed01e418d83eefb" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.96", ] [[package]] name = "pin-project-lite" -version = "0.2.15" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "915a1e146535de9163f3987b8944ed8cf49a18bb0056bcebcdcece385cece4ff" +checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" [[package]] name = "pin-utils" @@ -3859,9 +3897,9 @@ dependencies = [ [[package]] name = "predicates" -version = "3.1.2" +version = "3.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e9086cc7640c29a356d1a29fd134380bee9d8f79a17410aa76e7ad295f42c97" +checksum = "a5d19ee57562043d37e82899fade9a22ebab7be9cef5026b07fda9cdd4293573" dependencies = [ "anstyle", "difflib", @@ -3870,15 +3908,15 @@ dependencies = [ [[package]] name = "predicates-core" -version = "1.0.8" +version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae8177bee8e75d6846599c6b9ff679ed51e882816914eec639944d7c9aa11931" +checksum = "727e462b119fe9c93fd0eb1429a5f7647394014cf3c04ab2c0350eeb09095ffa" [[package]] name = "predicates-tree" -version = "1.0.11" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41b740d195ed3166cd147c8047ec98db0e22ec019eb8eeb76d343b795304fb13" +checksum = "72dd2d6d381dfb73a193c7fca536518d7caee39fc8503f74e7dc0be0531b425c" dependencies = [ "predicates-core", "termtree", @@ -3891,7 +3929,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eea25e07510aa6ab6547308ebe3c036016d162b8da920dbb079e3ba8acf3d95a" dependencies = [ "csv", - "encode_unicode 1.0.0", + "encode_unicode", "is-terminal", "lazy_static", "term", @@ -3915,9 +3953,9 @@ checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068" [[package]] name = "proc-macro2" -version = "1.0.89" +version = "1.0.93" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "f139b0662de085916d1fb67d2b4169d1addddda1919e696f3252b740b629986e" +checksum = "60946a68e5f9d28b0dc1c21bb8a97ee7d018a8b322fa57838ba31cc878e22d99" dependencies = [ "unicode-ident", ] @@ -3930,7 +3968,7 @@ checksum = "14cae93065090804185d3b75f0bf93b8eeda30c7a9b4a33d3bdb3988d6229e50" dependencies = [ "bit-set", "bit-vec", - "bitflags 2.6.0", + "bitflags 2.7.0", "lazy_static", "num-traits", "rand", @@ -3944,13 +3982,13 @@ dependencies = [ [[package]] name = "proptest-derive" -version = "0.5.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ff7ff745a347b87471d859a377a9a404361e7efc2a971d73424a6d183c0fc77" +checksum = "4ee1c9ac207483d5e7db4940700de86a9aae46ef90c48b57f99fe7edb8345e49" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.96", ] [[package]] @@ -3970,9 +4008,9 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.37" +version = "1.0.38" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af" +checksum = "0e4dccaaaf89514f546c693ddc140f729f958c247918a13380cccc6078391acc" dependencies = [ "proc-macro2", ] @@ -4078,11 +4116,11 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.5.7" +version = "0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b6dfecf2c74bce2466cabf93f6664d6998a69eb21e39f4207930065b27b771f" +checksum = "03a862b389f93e68874fbf580b9de08dd02facb9a788ebadaf4a3fd33cf58834" dependencies = [ - "bitflags 2.6.0", + "bitflags 2.7.0", ] [[package]] @@ -4110,7 +4148,7 @@ checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" dependencies = [ "aho-corasick", "memchr", - "regex-automata 0.4.8", + "regex-automata 0.4.9", "regex-syntax 0.8.5", ] @@ -4125,9 +4163,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.8" +version = "0.4.9" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "368758f23274712b504848e9d5a6f010445cc8b87a7cdb4d7cbee666c1288da3" +checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" dependencies = [ "aho-corasick", "memchr", @@ -4220,7 +4258,7 @@ dependencies = [ "proc-macro2", "quote", "rust-embed-utils", - "syn 2.0.87", + "syn 2.0.96", "walkdir", ] @@ -4263,15 +4301,96 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.39" +version = "0.38.43" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "375116bee2be9ed569afe2154ea6a99dfdffd257f533f187498c2a8f5feaf4ee" +checksum = "a78891ee6bf2340288408954ac787aa063d8e8817e9f53abb37c695c6d834ef6" dependencies = [ - "bitflags 2.6.0", + "bitflags 2.7.0", "errno", "libc", "linux-raw-sys", - "windows-sys 0.52.0", + "windows-sys 0.59.0", +] + +[[package]] +name = "rustls" +version = "0.23.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f287924602bf649d949c63dc8ac8b235fa5387d394020705b80c4eb597ce5b8" +dependencies = [ + "log", + "once_cell", + "ring", + "rustls-pki-types", + "rustls-webpki", + "subtle", + "zeroize", +] + +[[package]] +name = "rustls-native-certs" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5bfb394eeed242e909609f56089eecfe5fda225042e8b171791b9c95f5931e5" +dependencies = [ + "openssl-probe", + "rustls-pemfile", + "rustls-pki-types", + "schannel", + "security-framework", +] + +[[package]] +name = "rustls-pemfile" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dce314e5fee3f39953d46bb63bb8a46d40c2f8fb7cc5a3b6cab2bde9721d6e50" +dependencies = [ + "rustls-pki-types", +] + +[[package]] +name = "rustls-pki-types" +version = "1.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2bf47e6ff922db3825eb750c4e2ff784c6ff8fb9e13046ef6a1d1c5401b0b37" + +[[package]] +name = 
"rustls-platform-verifier" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "afbb878bdfdf63a336a5e63561b1835e7a8c91524f51621db870169eac84b490" +dependencies = [ + "core-foundation", + "core-foundation-sys", + "jni", + "log", + "once_cell", + "rustls", + "rustls-native-certs", + "rustls-platform-verifier-android", + "rustls-webpki", + "security-framework", + "security-framework-sys", + "webpki-roots", + "winapi", +] + +[[package]] +name = "rustls-platform-verifier-android" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f87165f0995f63a9fbeea62b64d10b4d9d8e78ec6d7d51fb2125fda7bb36788f" + +[[package]] +name = "rustls-webpki" +version = "0.102.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64ca1bc8749bd4cf37b5ce386cc146580777b4e8572c7b97baf22c83f444bee9" +dependencies = [ + "ring", + "rustls-pki-types", + "untrusted", ] [[package]] @@ -4357,9 +4476,9 @@ dependencies = [ [[package]] name = "rustversion" -version = "1.0.18" +version = "1.0.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e819f2bc632f285be6d7cd36e25940d45b2391dd6d9b939e79de557f7014248" +checksum = "f7c45b9784283f1b2e7fb61b42047c2fd678ef0960d4f6f1eba131594cc369d4" [[package]] name = "rusty-fork" @@ -4516,7 +4635,7 @@ version = "2.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" dependencies = [ - "bitflags 2.6.0", + "bitflags 2.7.0", "core-foundation", "core-foundation-sys", "libc", @@ -4526,9 +4645,9 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.12.1" +version = "2.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa39c7303dc58b5543c94d22c1766b0d31f2ee58306363ea622b10bbc075eaa2" +checksum = "49db231d56a190491cb4aeda9527f1ad45345af50b0851622a7adb8c03b01c32" dependencies = [ 
"core-foundation-sys", "libc", @@ -4536,15 +4655,15 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.23" +version = "1.0.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b" +checksum = "3cb6eb87a131f756572d7fb904f6e7b68633f09cca868c5df1c4b8d1a694bbba" [[package]] name = "serde" -version = "1.0.214" +version = "1.0.217" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f55c3193aca71c12ad7890f1785d2b73e1b9f63a0bbc353c08ef26fe03fc56b5" +checksum = "02fc4265df13d6fa1d00ecff087228cc0a2b5f3c0e87e258d8b94a156e984c70" dependencies = [ "serde_derive", ] @@ -4585,20 +4704,20 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.214" +version = "1.0.217" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "de523f781f095e28fa605cdce0f8307e451cc0fd14e2eb4cd2e98a355b147766" +checksum = "5a9bf7cf98d04a2b28aead066b7496853d4779c9cc183c440dbac457641e19a0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.96", ] [[package]] name = "serde_json" -version = "1.0.132" +version = "1.0.135" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d726bfaff4b320266d395898905d0eba0345aae23b54aee3a737e260fd46db03" +checksum = "2b0d7ba2887406110130a978386c4e1befb98c674b4fba677954e4db976630d9" dependencies = [ "itoa", "memchr", @@ -4614,7 +4733,7 @@ checksum = "6c64451ba24fc7a6a2d60fc75dd9c83c90903b19028d4eff35e88fc1e86564e9" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.96", ] [[package]] @@ -4628,15 +4747,15 @@ dependencies = [ [[package]] name = "serde_with" -version = "3.11.0" +version = "3.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e28bdad6db2b8340e449f7108f020b3b092e8583a9e3fb82713e1d4e71fe817" +checksum = "d6b6f7f2fcb69f747921f79f3926bd1e203fce4fef62c268dd3abfb6d86029aa" dependencies = [ "base64 0.22.1", 
"chrono", "hex", "indexmap 1.9.3", - "indexmap 2.6.0", + "indexmap 2.7.0", "serde", "serde_derive", "serde_json", @@ -4646,14 +4765,25 @@ dependencies = [ [[package]] name = "serde_with_macros" -version = "3.11.0" +version = "3.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d846214a9854ef724f3da161b426242d8de7c1fc7de2f89bb1efcb154dca79d" +checksum = "8d00caa5193a3c8362ac2b73be6b9e768aa5a4b2f721d8f4b339600c3cb51f8e" dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.96", +] + +[[package]] +name = "sha1" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", ] [[package]] @@ -4799,9 +4929,9 @@ dependencies = [ [[package]] name = "socket2" -version = "0.5.7" +version = "0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c" +checksum = "c970269d99b64e60ec3bd6ad27270092a5394c4e309314b18ae3fe575695fbe8" dependencies = [ "libc", "windows-sys 0.52.0", @@ -4902,9 +5032,9 @@ checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" [[package]] name = "symbolic-common" -version = "12.12.1" +version = "12.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d4d73159efebfb389d819fd479afb2dbd57dcb3e3f4b7fcfa0e675f5a46c1cb" +checksum = "8150eae9699e3c73a3e6431dc1f80d87748797c0457336af23e94c1de619ed24" dependencies = [ "debugid", "memmap2", @@ -4914,9 +5044,9 @@ dependencies = [ [[package]] name = "symbolic-demangle" -version = "12.12.1" +version = "12.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a767859f6549c665011970874c3f541838b4835d5aaaa493d3ee383918be9f10" +checksum = "95f4a9846f7a8933b6d198c022faa2c9bd89e1a970bed9d9a98d25708bf8de17" dependencies = [ 
"cpp_demangle", "rustc-demangle", @@ -4936,9 +5066,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.87" +version = "2.0.96" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25aa4ce346d03a6dcd68dd8b4010bcb74e54e62c90c573f394c46eae99aba32d" +checksum = "d5d0adab1ae378d7f53bdebc67a39f1f151407ef230f0ce2883572f5d8985c80" dependencies = [ "proc-macro2", "quote", @@ -4953,7 +5083,7 @@ checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.96", ] [[package]] @@ -4964,12 +5094,13 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" [[package]] name = "tempfile" -version = "3.14.0" +version = "3.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28cce251fcbc87fac86a866eeb0d6c2d536fc16d06f184bb61aeae11aa4cee0c" +checksum = "9a8a559c81686f576e8cd0290cd2a24a2a9ad80c98b3478856500fcbd7acd704" dependencies = [ "cfg-if", "fastrand", + "getrandom", "once_cell", "rustix", "windows-sys 0.59.0", @@ -5009,9 +5140,9 @@ dependencies = [ [[package]] name = "termtree" -version = "0.4.1" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3369f5ac52d5eb6ab48c6b4ffdc8efbcad6b89c765749064ba298f2c68a16a76" +checksum = "8f50febec83f5ee1df3015341d8bd429f2d1cc62bcba7ea2076759d315084683" [[package]] name = "test-case" @@ -5031,7 +5162,7 @@ dependencies = [ "cfg-if", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.96", ] [[package]] @@ -5042,7 +5173,7 @@ checksum = "5c89e72a01ed4c579669add59014b9a524d609c0c88c6a585ce37485879f6ffb" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.96", "test-case-core", ] @@ -5069,22 +5200,22 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.68" +version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"02dd99dc800bbb97186339685293e1cc5d9df1f8fae2d0aecd9ff1c77efea892" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.68" +version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7c61ec9a6f64d2793d8a45faba21efbe3ced62a886d44c36a009b2b519b4c7e" +checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.96", ] [[package]] @@ -5099,9 +5230,9 @@ dependencies = [ [[package]] name = "time" -version = "0.3.36" +version = "0.3.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885" +checksum = "35e7868883861bd0e56d9ac6efcaaca0d6d5d82a2a7ec8209ff492c07cf37b21" dependencies = [ "deranged", "itoa", @@ -5120,9 +5251,9 @@ checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" [[package]] name = "time-macros" -version = "0.2.18" +version = "0.2.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf" +checksum = "2834e6017e3e5e4b9834939793b282bc03b37a3336245fa820e35e233e2a85de" dependencies = [ "num-conv", "time-core", @@ -5150,14 +5281,14 @@ dependencies = [ [[package]] name = "tokio" -version = "1.42.0" +version = "1.43.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cec9b21b0450273377fc97bd4c33a8acffc8c996c987a7c5b319a0083707551" +checksum = "3d61fa4ffa3de412bfea335c6ecff681de2b609ba3c77ef3e00e521813a9ed9e" dependencies = [ "backtrace", "bytes", "libc", - "mio 1.0.2", + "mio 1.0.3", "pin-project-lite", "socket2", "tokio-macros", @@ -5166,13 +5297,13 @@ dependencies = [ [[package]] name = "tokio-macros" -version = "2.4.0" +version = "2.5.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" +checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.96", ] [[package]] @@ -5191,17 +5322,16 @@ version = "0.1.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eca58d7bba4a75707817a2c44174253f9236b2d5fbd055602e9d5c07c139a047" dependencies = [ - "futures-core", - "pin-project-lite", + "rustls", "tokio", "tokio-util", ] [[package]] name = "tokio-util" -version = "0.7.12" +version = "0.7.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61e7c3654c13bcd040d4a03abee2c75b1d14a37b423cf5a813ceae1cc903ec6a" +checksum = "d7fcaa8d55a2bdd6b83ace262b016eca0d79ee02818c5c1bcdf0305114081078" dependencies = [ "bytes", "futures-core", @@ -5238,7 +5368,7 @@ version = "0.19.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" dependencies = [ - "indexmap 2.6.0", + "indexmap 2.7.0", "serde", "serde_spanned", "toml_datetime", @@ -5251,9 +5381,9 @@ version = "0.22.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ae48d6208a266e853d946088ed816055e556cc6028c5e8e2b84d9fa5dd7c7f5" dependencies = [ - "indexmap 2.6.0", + "indexmap 2.7.0", "toml_datetime", - "winnow 0.6.20", + "winnow 0.6.24", ] [[package]] @@ -5285,9 +5415,9 @@ checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" [[package]] name = "tracing" -version = "0.1.40" +version = "0.1.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" +checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" dependencies = [ "log", "pin-project-lite", @@ -5309,20 +5439,20 @@ dependencies = [ 
[[package]] name = "tracing-attributes" -version = "0.1.27" +version = "0.1.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" +checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.96", ] [[package]] name = "tracing-core" -version = "0.1.32" +version = "0.1.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" +checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c" dependencies = [ "once_cell", "valuable", @@ -5330,9 +5460,9 @@ dependencies = [ [[package]] name = "tracing-error" -version = "0.2.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d686ec1c0f384b1277f097b2f279a2ecc11afe8c133c1aabf036a27cb4cd206e" +checksum = "8b1581020d7a273442f5b45074a6a57d5757ad0a47dac0e9f0bd57b81936f3db" dependencies = [ "tracing", "tracing-subscriber", @@ -5351,9 +5481,9 @@ dependencies = [ [[package]] name = "tracing-serde" -version = "0.1.3" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc6b213177105856957181934e4920de57730fc69bf42c37ee5bb664d406d9e1" +checksum = "704b1aeb7be0d0a84fc9828cae51dab5970fee5088f83d1dd7ee6f6246fc6ff1" dependencies = [ "serde", "tracing-core", @@ -5361,9 +5491,9 @@ dependencies = [ [[package]] name = "tracing-subscriber" -version = "0.3.18" +version = "0.3.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" +checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008" dependencies = [ "matchers", "nu-ansi-term", @@ -5398,7 +5528,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"04659ddb06c87d233c566112c1c9c5b9e98256d9af50ec3bc9c8327f873a7568" dependencies = [ "quote", - "syn 2.0.87", + "syn 2.0.96", ] [[package]] @@ -5443,9 +5573,9 @@ checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" [[package]] name = "unicode-ident" -version = "1.0.13" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e91b56cd4cadaeb79bbf1a5645f6b4f8dc5bde8834ad5894a8db35fda9efa1fe" +checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83" [[package]] name = "unicode-linebreak" @@ -5509,9 +5639,9 @@ checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" [[package]] name = "uuid" -version = "1.11.0" +version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8c5f0a0af699448548ad1a2fbf920fb4bee257eae39953ba95cb84891a0446a" +checksum = "744018581f9a3454a9e15beb8a33b017183f1e7c0cd170232a2d1453b23a51c4" [[package]] name = "valuable" @@ -5607,7 +5737,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.96", "wasm-bindgen-shared", ] @@ -5641,7 +5771,7 @@ checksum = "e128beba882dd1eb6200e1dc92ae6c5dbaa4311aa7bb211ca035779e5efc39f8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.96", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -5894,9 +6024,9 @@ dependencies = [ [[package]] name = "winnow" -version = "0.6.20" +version = "0.6.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36c1fec1a2bb5866f07c25f68c26e565c4c200aebb96d7e55710c19d3e8ac49b" +checksum = "c8d71a593cc5c42ad7876e2c1fda56f314f3754c084128833e64f1345ff8a03a" dependencies = [ "memchr", ] @@ -5924,9 +6054,9 @@ dependencies = [ [[package]] name = "yoke" -version = "0.7.4" +version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c5b1314b079b0930c31e3af543d8ee1757b1951ae1e1565ec704403a7240ca5" +checksum = 
"120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40" dependencies = [ "serde", "stable_deref_trait", @@ -5936,13 +6066,13 @@ dependencies = [ [[package]] name = "yoke-derive" -version = "0.7.4" +version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28cc31741b18cb6f1d5ff12f5b7523e3d6eb0852bbbad19d73905511d9849b95" +checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.96", "synstructure", ] @@ -5964,27 +6094,27 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.96", ] [[package]] name = "zerofrom" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91ec111ce797d0e0784a1116d0ddcdbea84322cd79e5d5ad173daeba4f93ab55" +checksum = "cff3ee08c995dee1859d998dea82f7374f2826091dd9cd47def953cae446cd2e" dependencies = [ "zerofrom-derive", ] [[package]] name = "zerofrom-derive" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ea7b4a3637ea8669cedf0f1fd5c286a17f3de97b8dd5a70a6c167a1730e63a5" +checksum = "595eed982f7d355beb85837f651fa22e90b3c044842dc7f2c2842c086f295808" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.96", "synstructure", ] @@ -6005,7 +6135,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.96", ] [[package]] @@ -6027,7 +6157,7 @@ checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.96", ] [[package]] diff --git a/noir/noir-repo/Cargo.toml b/noir/noir-repo/Cargo.toml index 567f1db085b..58ca7665c0c 100644 --- a/noir/noir-repo/Cargo.toml +++ b/noir/noir-repo/Cargo.toml @@ -19,10 +19,12 @@ 
members = [ "tooling/nargo_cli", "tooling/nargo_toml", "tooling/noirc_artifacts", + "tooling/noirc_artifacts_info", "tooling/noirc_abi", "tooling/noirc_abi_wasm", "tooling/acvm_cli", "tooling/profiler", + "tooling/inspector", # ACVM "acvm-repo/acir_field", "acvm-repo/acir", @@ -35,7 +37,12 @@ members = [ # Utility crates "utils/iter-extended", ] -default-members = ["tooling/nargo_cli", "tooling/acvm_cli", "tooling/profiler"] +default-members = [ + "tooling/nargo_cli", + "tooling/acvm_cli", + "tooling/profiler", + "tooling/inspector", +] resolver = "2" [workspace.package] @@ -83,6 +90,7 @@ noir_lsp = { path = "tooling/lsp" } noir_debugger = { path = "tooling/debugger" } noirc_abi = { path = "tooling/noirc_abi" } noirc_artifacts = { path = "tooling/noirc_artifacts" } +noirc_artifacts_info = { path = "tooling/noirc_artifacts_info" } # Arkworks ark-bn254 = { version = "^0.5.0", default-features = false, features = [ diff --git a/noir/noir-repo/compiler/fm/src/file_map.rs b/noir/noir-repo/compiler/fm/src/file_map.rs index 857c7460fb9..f078ecb8545 100644 --- a/noir/noir-repo/compiler/fm/src/file_map.rs +++ b/noir/noir-repo/compiler/fm/src/file_map.rs @@ -19,6 +19,10 @@ impl PathString { pub fn from_path(p: PathBuf) -> Self { PathString(p) } + + pub fn into_path_buf(self) -> PathBuf { + self.0 + } } impl From for PathString { fn from(pb: PathBuf) -> PathString { @@ -82,7 +86,7 @@ impl FileMap { } pub fn get_name(&self, file_id: FileId) -> Result { - let name = self.files.get(file_id.as_usize())?.name().clone(); + let name = self.get_absolute_name(file_id)?; // See if we can make the file name a bit shorter/easier to read if it starts with the current directory if let Some(current_dir) = &self.current_dir { @@ -93,6 +97,11 @@ impl FileMap { Ok(name) } + + pub fn get_absolute_name(&self, file_id: FileId) -> Result { + let name = self.files.get(file_id.as_usize())?.name().clone(); + Ok(name) + } } impl Default for FileMap { fn default() -> Self { diff --git 
a/noir/noir-repo/compiler/noirc_driver/src/abi_gen.rs b/noir/noir-repo/compiler/noirc_driver/src/abi_gen.rs index 625a35c8d15..59b3faf1a4e 100644 --- a/noir/noir-repo/compiler/noirc_driver/src/abi_gen.rs +++ b/noir/noir-repo/compiler/noirc_driver/src/abi_gen.rs @@ -110,7 +110,7 @@ pub(super) fn abi_type_from_hir_type(context: &Context, typ: &Type) -> AbiType { AbiType::String { length: size } } - Type::Struct(def, args) => { + Type::DataType(def, args) => { let struct_type = def.borrow(); let fields = struct_type.get_fields(args); let fields = diff --git a/noir/noir-repo/compiler/noirc_driver/src/lib.rs b/noir/noir-repo/compiler/noirc_driver/src/lib.rs index a7e7e2d4e2f..be5cde1e0ea 100644 --- a/noir/noir-repo/compiler/noirc_driver/src/lib.rs +++ b/noir/noir-repo/compiler/noirc_driver/src/lib.rs @@ -549,7 +549,7 @@ fn compile_contract_inner( let structs = structs .into_iter() .map(|struct_id| { - let typ = context.def_interner.get_struct(struct_id); + let typ = context.def_interner.get_type(struct_id); let typ = typ.borrow(); let fields = vecmap(typ.get_fields(&[]), |(name, typ)| { (name, abi_type_from_hir_type(context, &typ)) diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/acir/generated_acir.rs b/noir/noir-repo/compiler/noirc_evaluator/src/acir/generated_acir.rs index 14ceac62461..8b29888a7ad 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/acir/generated_acir.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/acir/generated_acir.rs @@ -362,12 +362,17 @@ impl GeneratedAcir { bit_size: u32, ) -> Result, RuntimeError> { let radix_big = BigUint::from(radix); + let radix_range = BigUint::from(2u128)..=BigUint::from(256u128); + assert!( + radix_range.contains(&radix_big), + "ICE: Radix must be in the range 2..=256, but found: {:?}", + radix + ); assert_eq!( BigUint::from(2u128).pow(bit_size), radix_big, "ICE: Radix must be a power of 2" ); - let limb_witnesses = self.brillig_to_radix(input_expr, radix, limb_count); let mut composed_limbs = 
Expression::default(); diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/acir/mod.rs b/noir/noir-repo/compiler/noirc_evaluator/src/acir/mod.rs index e9e887e6bac..6789cbafb76 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/acir/mod.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/acir/mod.rs @@ -1069,8 +1069,7 @@ impl<'a> Context<'a> { // Ensure that array id is fully resolved. let array = dfg.resolve(array); - let array_id = dfg.resolve(array); - let array_typ = dfg.type_of_value(array_id); + let array_typ = dfg.type_of_value(array); // Compiler sanity checks assert!(!array_typ.is_nested_slice(), "ICE: Nested slice type has reached ACIR generation"); let (Type::Array(_, _) | Type::Slice(_)) = &array_typ else { @@ -1125,15 +1124,7 @@ impl<'a> Context<'a> { index: ValueId, store_value: Option, ) -> Result { - let array_id = dfg.resolve(array); - let array_typ = dfg.type_of_value(array_id); - // Compiler sanity checks - assert!(!array_typ.is_nested_slice(), "ICE: Nested slice type has reached ACIR generation"); - let (Type::Array(_, _) | Type::Slice(_)) = &array_typ else { - unreachable!("ICE: expected array or slice type"); - }; - - match self.convert_value(array_id, dfg) { + match self.convert_value(array, dfg) { AcirValue::Var(acir_var, _) => { Err(RuntimeError::InternalError(InternalError::Unexpected { expected: "an array value".to_string(), @@ -2231,45 +2222,41 @@ impl<'a> Context<'a> { Intrinsic::AsSlice => { let slice_contents = arguments[0]; let slice_typ = dfg.type_of_value(slice_contents); - let block_id = self.ensure_array_is_initialized(slice_contents, dfg)?; assert!(!slice_typ.is_nested_slice(), "ICE: Nested slice used in ACIR generation"); - let result_block_id = self.block_id(&result_ids[1]); let acir_value = self.convert_value(slice_contents, dfg); + let (slice_length, result) = match acir_value { + AcirValue::Var(_, _) => { + unreachable!("ICE: cannot call `as_slice` on non-array type") + } + array @ AcirValue::Array(_) => { + let 
array_len = if !slice_typ.contains_slice_element() { + slice_typ.flattened_size() as usize + } else { + self.flattened_slice_size(slice_contents, dfg) + }; + (array_len, array) + } + AcirValue::DynamicArray(source_array) => { + let result_block_id = self.block_id(&result_ids[1]); + self.copy_dynamic_array( + source_array.block_id, + result_block_id, + source_array.len, + )?; - let array_len = if !slice_typ.contains_slice_element() { - slice_typ.flattened_size() as usize - } else { - self.flattened_slice_size(slice_contents, dfg) - }; - let slice_length = self.acir_context.add_constant(array_len); - self.copy_dynamic_array(block_id, result_block_id, array_len)?; + let array = AcirValue::DynamicArray(AcirDynamicArray { + block_id: result_block_id, + len: source_array.len, + value_types: source_array.value_types, + element_type_sizes: source_array.element_type_sizes, + }); - let element_type_sizes = if !can_omit_element_sizes_array(&slice_typ) { - Some(self.init_element_type_sizes_array( - &slice_typ, - slice_contents, - Some(&acir_value), - dfg, - )?) 
- } else { - None + (source_array.len, array) + } }; - let value_types = self.convert_value(slice_contents, dfg).flat_numeric_types(); - assert!( - array_len == value_types.len(), - "AsSlice: unexpected length difference: {:?} != {:?}", - array_len, - value_types.len() - ); - - let result = AcirValue::DynamicArray(AcirDynamicArray { - block_id: result_block_id, - len: value_types.len(), - value_types, - element_type_sizes, - }); + let slice_length = self.acir_context.add_constant(slice_length); Ok(vec![AcirValue::Var(slice_length, AcirType::field()), result]) } Intrinsic::SlicePushBack => { @@ -3709,4 +3696,35 @@ mod test { } } } + + #[test] + fn does_not_generate_memory_blocks_without_dynamic_accesses() { + let src = " + acir(inline) fn main f0 { + b0(v0: [Field; 2]): + v2, v3 = call as_slice(v0) -> (u32, [Field]) + call f1(u32 2, v3) + v7 = array_get v0, index u32 0 -> Field + constrain v7 == Field 0 + return + } + + brillig(inline) fn foo f1 { + b0(v0: u32, v1: [Field]): + return + } + "; + let ssa = Ssa::from_str(src).unwrap(); + let brillig = ssa.to_brillig(false); + + let (acir_functions, _brillig_functions, _, _) = ssa + .into_acir(&brillig, ExpressionWidth::default()) + .expect("Should compile manually written SSA into ACIR"); + + assert_eq!(acir_functions.len(), 1); + + // Check that no memory opcodes were emitted. + let main = &acir_functions[0]; + assert!(!main.opcodes().iter().any(|opcode| matches!(opcode, Opcode::MemoryOp { .. 
}))); + } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen.rs index b51a3445a1b..a6117a8f2da 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen.rs @@ -63,6 +63,7 @@ pub(crate) fn gen_brillig_for( FunctionContext::return_values(func), func.id(), true, + brillig.globals_memory_size, ); entry_point.name = func.name().to_string(); diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs index 698d4cd05be..97de1aea8c7 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs @@ -142,7 +142,7 @@ impl<'block, Registers: RegisterAllocator> BrilligBlock<'block, Registers> { /// Making the assumption that the block ID passed in belongs to this /// function. fn create_block_label_for_current_function(&self, block_id: BasicBlockId) -> Label { - Self::create_block_label(self.function_context.function_id, block_id) + Self::create_block_label(self.function_context.function_id(), block_id) } /// Creates a unique label for a block using the function Id and the block ID. 
/// @@ -1344,8 +1344,8 @@ impl<'block, Registers: RegisterAllocator> BrilligBlock<'block, Registers> { result_variable: SingleAddrVariable, ) { let binary_type = type_of_binary_operation( - dfg[binary.lhs].get_type().as_ref(), - dfg[binary.rhs].get_type().as_ref(), + dfg[dfg.resolve(binary.lhs)].get_type().as_ref(), + dfg[dfg.resolve(binary.rhs)].get_type().as_ref(), binary.operator, ); diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs index 3dea7b3e7f5..6e406e2b3cb 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs @@ -17,8 +17,11 @@ use fxhash::FxHashMap as HashMap; use super::{constant_allocation::ConstantAllocation, variable_liveness::VariableLiveness}; +#[derive(Default)] pub(crate) struct FunctionContext { - pub(crate) function_id: FunctionId, + /// A `FunctionContext` is necessary for using a Brillig block's code gen, but sometimes + /// such as with globals, we are not within a function and do not have a function id. + function_id: Option, /// Map from SSA values its allocation. Since values can be only defined once in SSA form, we insert them here on when we allocate them at their definition. pub(crate) ssa_value_allocations: HashMap, /// The block ids of the function in reverse post order. 
@@ -42,7 +45,7 @@ impl FunctionContext { let liveness = VariableLiveness::from_function(function, &constants); Self { - function_id: id, + function_id: Some(id), ssa_value_allocations: HashMap::default(), blocks: reverse_post_order, liveness, @@ -50,6 +53,10 @@ impl FunctionContext { } } + pub(crate) fn function_id(&self) -> FunctionId { + self.function_id.expect("ICE: function_id should already be set") + } + pub(crate) fn ssa_type_to_parameter(typ: &Type) -> BrilligParameter { match typ { Type::Numeric(_) | Type::Reference(_) => { diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_globals.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_globals.rs index 99c8ee0fded..9f9d271283d 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_globals.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_globals.rs @@ -1,22 +1,23 @@ use acvm::FieldElement; use fxhash::{FxHashMap as HashMap, FxHashSet as HashSet}; -use super::{ - BrilligArtifact, BrilligBlock, BrilligVariable, Function, FunctionContext, Label, ValueId, +use super::{BrilligArtifact, BrilligBlock, BrilligVariable, FunctionContext, Label, ValueId}; +use crate::{ + brillig::{brillig_ir::BrilligContext, DataFlowGraph}, + ssa::ir::dfg::GlobalsGraph, }; -use crate::brillig::{brillig_ir::BrilligContext, DataFlowGraph}; pub(crate) fn convert_ssa_globals( enable_debug_trace: bool, - globals: &Function, + globals: GlobalsGraph, used_globals: &HashSet, -) -> (BrilligArtifact, HashMap) { +) -> (BrilligArtifact, HashMap, usize) { let mut brillig_context = BrilligContext::new_for_global_init(enable_debug_trace); // The global space does not have globals itself let empty_globals = HashMap::default(); // We can use any ID here as this context is only going to be used for globals which does not differentiate // by functions and blocks. 
The only Label that should be used in the globals context is `Label::globals_init()` - let mut function_context = FunctionContext::new(globals); + let mut function_context = FunctionContext::default(); brillig_context.enter_context(Label::globals_init()); let block_id = DataFlowGraph::default().make_block(); @@ -30,10 +31,13 @@ pub(crate) fn convert_ssa_globals( building_globals: true, }; - brillig_block.compile_globals(&globals.dfg, used_globals); + let globals_dfg = DataFlowGraph::from(globals); + brillig_block.compile_globals(&globals_dfg, used_globals); + + let globals_size = brillig_block.brillig_context.global_space_size(); brillig_context.return_instruction(); let artifact = brillig_context.artifact(); - (artifact, function_context.ssa_value_allocations) + (artifact, function_context.ssa_value_allocations, globals_size) } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir.rs index 06f61948337..ad09f73e90f 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir.rs @@ -95,6 +95,8 @@ pub(crate) struct BrilligContext { /// Whether this context can call procedures or not. /// This is used to prevent a procedure from calling another procedure. 
can_call_procedures: bool, + + globals_memory_size: Option, } /// Regular brillig context to codegen user defined functions @@ -108,6 +110,7 @@ impl BrilligContext { next_section: 1, debug_show: DebugShow::new(enable_debug_trace), can_call_procedures: true, + globals_memory_size: None, } } } @@ -211,6 +214,7 @@ impl BrilligContext { next_section: 1, debug_show: DebugShow::new(enable_debug_trace), can_call_procedures: false, + globals_memory_size: None, } } } @@ -226,8 +230,14 @@ impl BrilligContext { next_section: 1, debug_show: DebugShow::new(enable_debug_trace), can_call_procedures: false, + globals_memory_size: None, } } + + pub(crate) fn global_space_size(&self) -> usize { + // `GlobalSpace::start()` is inclusive so we must add one to get the accurate total global memory size + (self.registers.max_memory_address() + 1) - GlobalSpace::start() + } } impl BrilligContext { @@ -321,6 +331,7 @@ pub(crate) mod tests { returns, FunctionId::test_new(0), false, + 0, ); entry_point_artifact.link_with(&artifact); while let Some(unresolved_fn_label) = entry_point_artifact.first_unresolved_function_call() diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/entry_point.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/entry_point.rs index b84a15db4ad..ac2c2fbab7a 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/entry_point.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/entry_point.rs @@ -24,9 +24,12 @@ impl BrilligContext { return_parameters: Vec, target_function: FunctionId, globals_init: bool, + globals_memory_size: usize, ) -> BrilligArtifact { let mut context = BrilligContext::new(false); + context.globals_memory_size = Some(globals_memory_size); + context.codegen_entry_point(&arguments, &return_parameters); if globals_init { @@ -39,12 +42,15 @@ impl BrilligContext { context.artifact() } - fn calldata_start_offset() -> usize { - ReservedRegisters::len() + MAX_STACK_SIZE + 
MAX_SCRATCH_SPACE + MAX_GLOBAL_SPACE + fn calldata_start_offset(&self) -> usize { + ReservedRegisters::len() + + MAX_STACK_SIZE + + MAX_SCRATCH_SPACE + + self.globals_memory_size.expect("The memory size of globals should be set") } - fn return_data_start_offset(calldata_size: usize) -> usize { - Self::calldata_start_offset() + calldata_size + fn return_data_start_offset(&self, calldata_size: usize) -> usize { + self.calldata_start_offset() + calldata_size } /// Adds the instructions needed to handle entry point parameters @@ -70,7 +76,7 @@ impl BrilligContext { // Set initial value of free memory pointer: calldata_start_offset + calldata_size + return_data_size self.const_instruction( SingleAddrVariable::new_usize(ReservedRegisters::free_memory_pointer()), - (Self::calldata_start_offset() + calldata_size + return_data_size).into(), + (self.calldata_start_offset() + calldata_size + return_data_size).into(), ); // Set initial value of stack pointer: ReservedRegisters.len() @@ -82,7 +88,7 @@ impl BrilligContext { // Copy calldata self.copy_and_cast_calldata(arguments); - let mut current_calldata_pointer = Self::calldata_start_offset(); + let mut current_calldata_pointer = self.calldata_start_offset(); // Initialize the variables with the calldata for (argument_variable, argument) in argument_variables.iter_mut().zip(arguments) { @@ -158,7 +164,7 @@ impl BrilligContext { fn copy_and_cast_calldata(&mut self, arguments: &[BrilligParameter]) { let calldata_size = Self::flattened_tuple_size(arguments); self.calldata_copy_instruction( - MemoryAddress::direct(Self::calldata_start_offset()), + MemoryAddress::direct(self.calldata_start_offset()), calldata_size, 0, ); @@ -178,11 +184,11 @@ impl BrilligContext { if bit_size < F::max_num_bits() { self.cast_instruction( SingleAddrVariable::new( - MemoryAddress::direct(Self::calldata_start_offset() + i), + MemoryAddress::direct(self.calldata_start_offset() + i), bit_size, ), SingleAddrVariable::new_field(MemoryAddress::direct( - 
Self::calldata_start_offset() + i, + self.calldata_start_offset() + i, )), ); } @@ -336,7 +342,7 @@ impl BrilligContext { let return_data_size = Self::flattened_tuple_size(return_parameters); // Return data has a reserved space after calldata - let return_data_offset = Self::return_data_start_offset(calldata_size); + let return_data_offset = self.return_data_start_offset(calldata_size); let mut return_data_index = return_data_offset; for (return_param, returned_variable) in return_parameters.iter().zip(&returned_variables) { diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/registers.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/registers.rs index b83c03b297a..0113d6e7866 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/registers.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/registers.rs @@ -152,16 +152,32 @@ impl RegisterAllocator for ScratchSpace { /// and is read-only. pub(crate) struct GlobalSpace { storage: DeallocationListAllocator, + max_memory_address: usize, } impl GlobalSpace { - pub(crate) fn new() -> Self { - Self { storage: DeallocationListAllocator::new(Self::start()) } + pub(super) fn new() -> Self { + Self { + storage: DeallocationListAllocator::new(Self::start()), + max_memory_address: Self::start(), + } } fn is_within_bounds(register: MemoryAddress) -> bool { let index = register.unwrap_direct(); - index >= Self::start() && index < Self::end() + index >= Self::start() + } + + fn update_max_address(&mut self, register: MemoryAddress) { + let index = register.unwrap_direct(); + assert!(index >= Self::start(), "Global space malformed"); + if index > self.max_memory_address { + self.max_memory_address = index; + } + } + + pub(super) fn max_memory_address(&self) -> usize { + self.max_memory_address } } @@ -171,12 +187,12 @@ impl RegisterAllocator for GlobalSpace { } fn end() -> usize { - Self::start() + MAX_GLOBAL_SPACE + unreachable!("The global space 
is set by the program"); } fn allocate_register(&mut self) -> MemoryAddress { let allocated = MemoryAddress::direct(self.storage.allocate_register()); - assert!(Self::is_within_bounds(allocated), "Global space too deep"); + self.update_max_address(allocated); allocated } @@ -185,7 +201,7 @@ impl RegisterAllocator for GlobalSpace { } fn ensure_register_is_allocated(&mut self, register: MemoryAddress) { - assert!(Self::is_within_bounds(register), "Register out of global space bounds"); + self.update_max_address(register); self.storage.ensure_register_is_allocated(register.unwrap_direct()); } @@ -199,6 +215,7 @@ impl RegisterAllocator for GlobalSpace { Self::start(), vecmap(preallocated_registers, |r| r.unwrap_direct()), ), + max_memory_address: Self::start(), } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/mod.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/mod.rs index 3d96a855aa0..b74c519f61a 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/mod.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/mod.rs @@ -32,6 +32,7 @@ pub struct Brillig { /// Maps SSA function labels to their brillig artifact ssa_function_to_brillig: HashMap>, globals: BrilligArtifact, + globals_memory_size: usize, } impl Brillig { @@ -84,9 +85,17 @@ impl Ssa { let mut brillig = Brillig::default(); - let (artifact, brillig_globals) = - convert_ssa_globals(enable_debug_trace, &self.globals, &self.used_global_values); + if brillig_reachable_function_ids.is_empty() { + return brillig; + } + + // Globals are computed once at compile time and shared across all functions, + // thus we can just fetch globals from the main function. 
+ let globals = (*self.functions[&self.main_id].dfg.globals).clone(); + let (artifact, brillig_globals, globals_size) = + convert_ssa_globals(enable_debug_trace, globals, &self.used_global_values); brillig.globals = artifact; + brillig.globals_memory_size = globals_size; for brillig_function_id in brillig_reachable_function_ids { let func = &self.functions[&brillig_function_id]; diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa.rs index 12ea04daebd..4cefce1d647 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa.rs @@ -152,6 +152,8 @@ fn optimize_all(builder: SsaBuilder, options: &SsaEvaluatorOptions) -> Result Result Result, // Map keeping track of values stored at memory locations memory_slots: HashMap, - // Map of values resulting from array get instructions - // to the actual array values - array_elements: HashMap, - // Map of brillig call ids to sets of the value ids descending + // Value currently affecting every instruction (i.e. 
being + // considered a parent of every value id met) because + // of its involvement in an EnableSideEffectsIf condition + side_effects_condition: Option, + // Map of Brillig call ids to sets of the value ids descending // from their arguments and results tainted: BTreeMap, + // Map of argument value ids to the Brillig call ids employing them + call_arguments: HashMap>, + // Maintains count of calls being tracked + tracking_count: usize, + // Map of block indices to Brillig call ids that should not be + // followed after meeting them + search_limits: HashMap, } /// Structure keeping track of value ids descending from Brillig calls' @@ -116,8 +128,12 @@ struct BrilligTaintedIds { arguments: HashSet, // Results status results: Vec, - // Initial result value ids + // Indices of the array elements in the results vector + array_elements: HashMap>, + // Initial result value ids, along with element ids for arrays root_results: HashSet, + // The flag signaling that the call should be now tracked + tracking: bool, } #[derive(Clone, Debug)] @@ -128,17 +144,60 @@ enum ResultStatus { } impl BrilligTaintedIds { - fn new(arguments: &[ValueId], results: &[ValueId]) -> Self { + fn new(function: &Function, arguments: &[ValueId], results: &[ValueId]) -> Self { + // Exclude numeric constants + let arguments: Vec = arguments + .iter() + .filter(|value| function.dfg.get_numeric_constant(**value).is_none()) + .copied() + .map(|value| function.dfg.resolve(value)) + .collect(); + let results: Vec = results + .iter() + .filter(|value| function.dfg.get_numeric_constant(**value).is_none()) + .copied() + .map(|value| function.dfg.resolve(value)) + .collect(); + + let mut results_status: Vec = vec![]; + let mut array_elements: HashMap> = HashMap::new(); + + for result in &results { + match function.dfg.try_get_array_length(*result) { + // If the result value is an array, create an empty descendant set for + // every element to be accessed further on and record the indices + // of the 
resulting sets for future reference + Some(length) => { + array_elements.insert(*result, vec![]); + for _ in 0..length { + array_elements[result].push(results_status.len()); + results_status + .push(ResultStatus::Unconstrained { descendants: HashSet::new() }); + } + } + // Otherwise initialize a descendant set with the current value + None => { + results_status.push(ResultStatus::Unconstrained { + descendants: HashSet::from([*result]), + }); + } + } + } + BrilligTaintedIds { arguments: HashSet::from_iter(arguments.iter().copied()), - results: results - .iter() - .map(|result| ResultStatus::Unconstrained { descendants: HashSet::from([*result]) }) - .collect(), + results: results_status, + array_elements, root_results: HashSet::from_iter(results.iter().copied()), + tracking: false, } } + /// Check if the call being tracked is a simple wrapper of another call + fn is_wrapper(&self, other: &BrilligTaintedIds) -> bool { + other.root_results == self.arguments + } + /// Add children of a given parent to the tainted value set /// (for arguments one set is enough, for results we keep them /// separate as the forthcoming check considers the call covered @@ -147,12 +206,11 @@ impl BrilligTaintedIds { if self.arguments.intersection(parents).next().is_some() { self.arguments.extend(children); } - for result_status in &mut self.results.iter_mut() { - match result_status { + + for result in &mut self.results.iter_mut() { + match result { // Skip updating results already found covered - ResultStatus::Constrained => { - continue; - } + ResultStatus::Constrained => {} ResultStatus::Unconstrained { descendants } => { if descendants.intersection(parents).next().is_some() { descendants.extend(children); @@ -162,6 +220,20 @@ impl BrilligTaintedIds { } } + /// Update children of all the results (helper function for + /// chained Brillig call handling) + fn update_results_children(&mut self, children: &[ValueId]) { + for result in &mut self.results.iter_mut() { + match result { + // Skip 
updating results already found covered + ResultStatus::Constrained => {} + ResultStatus::Unconstrained { descendants } => { + descendants.extend(children); + } + } + } + } + /// If Brillig call is properly constrained by the given ids, return true fn check_constrained(&self) -> bool { // If every result has now been constrained, @@ -181,9 +253,7 @@ impl BrilligTaintedIds { for (i, result_status) in self.results.iter().enumerate() { match result_status { // Skip checking already covered results - ResultStatus::Constrained => { - continue; - } + ResultStatus::Constrained => {} ResultStatus::Unconstrained { descendants } => { if descendants.intersection(constrained_values).next().is_some() { results_involved.push(i); @@ -205,6 +275,21 @@ impl BrilligTaintedIds { results_involved.iter().for_each(|i| self.results[*i] = ResultStatus::Constrained); } } + + /// When an ArrayGet instruction occurs, place the resulting ValueId into + /// the corresponding sets of the call's array element result values + fn process_array_get(&mut self, array: ValueId, index: usize, element_results: &[ValueId]) { + if let Some(element_indices) = self.array_elements.get(&array) { + if let Some(result_index) = element_indices.get(index) { + if let Some(ResultStatus::Unconstrained { descendants }) = + self.results.get_mut(*result_index) + { + descendants.extend(element_results); + self.root_results.extend(element_results); + } + } + } + } } impl DependencyContext { @@ -231,9 +316,57 @@ impl DependencyContext { ) { trace!("processing instructions of block {} of function {}", block, function.id()); - for instruction in function.dfg[block].instructions() { + // First, gather information on all Brillig calls in the block + // to be able to follow their arguments first appearing in the + // flow graph before the calls themselves + function.dfg[block].instructions().iter().enumerate().for_each( + |(block_index, instruction)| { + if let Instruction::Call { func, arguments } = &function.dfg[*instruction] 
{ + if let Value::Function(callee) = &function.dfg[*func] { + if all_functions[&callee].runtime().is_brillig() { + let results = function.dfg.instruction_results(*instruction); + let current_tainted = + BrilligTaintedIds::new(function, arguments, results); + + // Record arguments/results for each Brillig call for the check. + // + // Do not track Brillig calls acting as simple wrappers over + // another registered Brillig call, update the tainted sets of + // the wrapped call instead + let mut wrapped_call_found = false; + for (_, tainted_call) in self.tainted.iter_mut() { + if current_tainted.is_wrapper(tainted_call) { + tainted_call.update_results_children(results); + wrapped_call_found = true; + break; + } + } + + if !wrapped_call_found { + // Record the current call, remember the argument values involved + self.tainted.insert(*instruction, current_tainted); + arguments.iter().for_each(|value| { + self.call_arguments + .entry(*value) + .or_default() + .push(*instruction); + }); + + // Set the constraint search limit for the call + self.search_limits.insert( + block_index + BRILLIG_CONSTRAINT_SEARCH_DEPTH, + *instruction, + ); + } + } + } + } + }, + ); + + //Then, go over the instructions + for (block_index, instruction) in function.dfg[block].instructions().iter().enumerate() { let mut arguments = Vec::new(); - let mut results = Vec::new(); // Collect non-constant instruction arguments function.dfg[*instruction].for_each_value(|value_id| { @@ -242,137 +375,172 @@ impl DependencyContext { } }); - // Collect non-constant instruction results - for value_id in function.dfg.instruction_results(*instruction).iter() { - if function.dfg.get_numeric_constant(*value_id).is_none() { - results.push(function.dfg.resolve(*value_id)); + // Start tracking calls when their argument value ids first appear, + // or when their instruction id comes up (in case there were + // no non-constant arguments) + for argument in &arguments { + if let Some(calls) = 
self.call_arguments.get(argument) { + for call in calls { + if let Some(tainted_ids) = self.tainted.get_mut(call) { + tainted_ids.tracking = true; + self.tracking_count += 1; + } + } } } + if let Some(tainted_ids) = self.tainted.get_mut(instruction) { + tainted_ids.tracking = true; + self.tracking_count += 1; + } - // Process instructions - match &function.dfg[*instruction] { - // For memory operations, we have to link up the stored value as a parent - // of one loaded from the same memory slot - Instruction::Store { address, value } => { - self.memory_slots.insert(*address, function.dfg.resolve(*value)); + // Stop tracking calls when their search limit is hit + if let Some(call) = self.search_limits.get(&block_index) { + if let Some(tainted_ids) = self.tainted.get_mut(call) { + tainted_ids.tracking = false; + self.tracking_count -= 1; } - Instruction::Load { address } => { - // Recall the value stored at address as parent for the results - if let Some(value_id) = self.memory_slots.get(address) { - self.update_children(&[*value_id], &results); - } else { - panic!("load instruction {} has attempted to access previously unused memory location", - instruction); + } + + // We can skip over instructions while nothing is being tracked + if self.tracking_count > 0 { + let mut results = Vec::new(); + + // Collect non-constant instruction results + for value_id in function.dfg.instruction_results(*instruction).iter() { + if function.dfg.get_numeric_constant(*value_id).is_none() { + results.push(function.dfg.resolve(*value_id)); } } - // Check the constrain instruction arguments against those - // involved in Brillig calls, remove covered calls - Instruction::Constrain(value_id1, value_id2, _) - | Instruction::ConstrainNotEqual(value_id1, value_id2, _) => { - self.clear_constrained( - &[function.dfg.resolve(*value_id1), function.dfg.resolve(*value_id2)], - function, - ); - } - // Consider range check to also be constraining - Instruction::RangeCheck { value, .. 
} => { - self.clear_constrained(&[function.dfg.resolve(*value)], function); - } - Instruction::Call { func: func_id, .. } => { - // For functions, we remove the first element of arguments, - // as .for_each_value() used previously also includes func_id - arguments.remove(0); - match &function.dfg[*func_id] { - Value::Intrinsic(intrinsic) => match intrinsic { - Intrinsic::ApplyRangeConstraint | Intrinsic::AssertConstant => { - // Consider these intrinsic arguments constrained - self.clear_constrained(&arguments, function); - } - Intrinsic::AsWitness | Intrinsic::IsUnconstrained => { - // These intrinsics won't affect the dependency graph + match &function.dfg[*instruction] { + // For memory operations, we have to link up the stored value as a parent + // of one loaded from the same memory slot + Instruction::Store { address, value } => { + self.memory_slots.insert(*address, function.dfg.resolve(*value)); + } + Instruction::Load { address } => { + // Recall the value stored at address as parent for the results + if let Some(value_id) = self.memory_slots.get(address) { + self.update_children(&[*value_id], &results); + } else { + panic!("load instruction {} has attempted to access previously unused memory location", + instruction); + } + } + // Record the condition to set as future parent for the following values + Instruction::EnableSideEffectsIf { condition: value } => { + self.side_effects_condition = + match function.dfg.get_numeric_constant(*value) { + None => Some(function.dfg.resolve(*value)), + Some(_) => None, } - Intrinsic::ArrayLen - | Intrinsic::ArrayRefCount - | Intrinsic::ArrayAsStrUnchecked - | Intrinsic::AsSlice - | Intrinsic::BlackBox(..) - | Intrinsic::DerivePedersenGenerators - | Intrinsic::Hint(..) 
- | Intrinsic::SlicePushBack - | Intrinsic::SlicePushFront - | Intrinsic::SlicePopBack - | Intrinsic::SlicePopFront - | Intrinsic::SliceRefCount - | Intrinsic::SliceInsert - | Intrinsic::SliceRemove - | Intrinsic::StaticAssert - | Intrinsic::StrAsBytes - | Intrinsic::ToBits(..) - | Intrinsic::ToRadix(..) - | Intrinsic::FieldLessThan => { - // Record all the function arguments as parents of the results - self.update_children(&arguments, &results); + } + // Check the constrain instruction arguments against those + // involved in Brillig calls, remove covered calls + Instruction::Constrain(value_id1, value_id2, _) + | Instruction::ConstrainNotEqual(value_id1, value_id2, _) => { + self.clear_constrained( + &[function.dfg.resolve(*value_id1), function.dfg.resolve(*value_id2)], + function, + ); + } + // Consider range check to also be constraining + Instruction::RangeCheck { value, .. } => { + self.clear_constrained(&[function.dfg.resolve(*value)], function); + } + Instruction::Call { func: func_id, .. } => { + // For functions, we remove the first element of arguments, + // as .for_each_value() used previously also includes func_id + arguments.remove(0); + + match &function.dfg[*func_id] { + Value::Intrinsic(intrinsic) => match intrinsic { + Intrinsic::ApplyRangeConstraint | Intrinsic::AssertConstant => { + // Consider these intrinsic arguments constrained + self.clear_constrained(&arguments, function); + } + Intrinsic::AsWitness | Intrinsic::IsUnconstrained => { + // These intrinsics won't affect the dependency graph + } + Intrinsic::ArrayLen + | Intrinsic::ArrayRefCount + | Intrinsic::ArrayAsStrUnchecked + | Intrinsic::AsSlice + | Intrinsic::BlackBox(..) + | Intrinsic::DerivePedersenGenerators + | Intrinsic::Hint(..) 
+ | Intrinsic::SlicePushBack + | Intrinsic::SlicePushFront + | Intrinsic::SlicePopBack + | Intrinsic::SlicePopFront + | Intrinsic::SliceRefCount + | Intrinsic::SliceInsert + | Intrinsic::SliceRemove + | Intrinsic::StaticAssert + | Intrinsic::StrAsBytes + | Intrinsic::ToBits(..) + | Intrinsic::ToRadix(..) + | Intrinsic::FieldLessThan => { + // Record all the function arguments as parents of the results + self.update_children(&arguments, &results); + } + }, + Value::Function(callee) => match all_functions[&callee].runtime() { + // Only update tainted sets for non-Brillig calls, as + // the chained Brillig case should already be covered + RuntimeType::Acir(..) => { + self.update_children(&arguments, &results); + } + RuntimeType::Brillig(..) => {} + }, + Value::ForeignFunction(..) => { + panic!("should not be able to reach foreign function from non-Brillig functions, {func_id} in function {}", function.name()); } - }, - Value::Function(callee) => match all_functions[callee].runtime() { - RuntimeType::Brillig(_) => { - // Record arguments/results for each Brillig call for the check - self.tainted.insert( - *instruction, - BrilligTaintedIds::new(&arguments, &results), + Value::Instruction { .. } + | Value::NumericConstant { .. } + | Value::Param { .. } + | Value::Global(_) => { + panic!( + "calling non-function value with ID {func_id} in function {}", + function.name() ); } - RuntimeType::Acir(..) => { - // Record all the function arguments as parents of the results - self.update_children(&arguments, &results); - } - }, - Value::ForeignFunction(..) => { - panic!("should not be able to reach foreign function from non-Brillig functions, {func_id} in function {}", function.name()); - } - Value::Instruction { .. } - | Value::NumericConstant { .. } - | Value::Param { .. 
} - | Value::Global(_) => { - panic!( - "calling non-function value with ID {func_id} in function {}", - function.name() - ); } } - } - // For array get operations, we link the resulting values to - // the corresponding array value ids - // (this is required later because for now we consider array elements - // being constrained as valid as the whole arrays being constrained) - Instruction::ArrayGet { array, .. } => { - for result in &results { - self.array_elements.insert(*result, function.dfg.resolve(*array)); + // For array get operations, we check the Brillig calls for + // results involving the array in question, to properly + // populate the array element tainted sets + Instruction::ArrayGet { array, index } => { + self.process_array_get(function, *array, *index, &results); + // Record all the used arguments as parents of the results + self.update_children(&arguments, &results); } - // Record all the used arguments as parents of the results - self.update_children(&arguments, &results); - } - Instruction::ArraySet { .. } - | Instruction::Binary(..) - | Instruction::Cast(..) - | Instruction::IfElse { .. } - | Instruction::Not(..) - | Instruction::Truncate { .. } => { - // Record all the used arguments as parents of the results - self.update_children(&arguments, &results); + Instruction::ArraySet { .. } + | Instruction::Binary(..) + | Instruction::Cast(..) + | Instruction::IfElse { .. } + | Instruction::Not(..) + | Instruction::Truncate { .. } => { + // Record all the used arguments as parents of the results + self.update_children(&arguments, &results); + } + // These instructions won't affect the dependency graph + Instruction::Allocate { .. } + | Instruction::DecrementRc { .. } + | Instruction::IncrementRc { .. } + | Instruction::MakeArray { .. } + | Instruction::Noop => {} } - // These instructions won't affect the dependency graph - Instruction::Allocate { .. } - | Instruction::DecrementRc { .. } - | Instruction::EnableSideEffectsIf { .. 
} - | Instruction::IncrementRc { .. } - | Instruction::Noop - | Instruction::MakeArray { .. } => {} } } - trace!("Number tainted Brillig calls: {}", self.tainted.len()); + if !self.tainted.is_empty() { + trace!( + "number of Brillig calls in function {} left unchecked: {}", + function, + self.tainted.len() + ); + } } /// Every Brillig call not properly constrained should remain in the tainted set @@ -382,6 +550,7 @@ impl DependencyContext { .tainted .keys() .map(|brillig_call| { + trace!("tainted structure for {}: {:?}", brillig_call, self.tainted[brillig_call]); SsaReport::Bug(InternalBug::UncheckedBrilligCall { call_stack: function.dfg.get_instruction_call_stack(*brillig_call), }) @@ -389,7 +558,7 @@ impl DependencyContext { .collect(); trace!( - "making {} under constrained reports for function {}", + "making {} reports on underconstrained Brillig calls for function {}", warnings.len(), function.name() ); @@ -398,9 +567,17 @@ impl DependencyContext { /// Update sets of value ids that can be traced back to the Brillig calls being tracked fn update_children(&mut self, parents: &[ValueId], children: &[ValueId]) { - let parents: HashSet<_> = HashSet::from_iter(parents.iter().copied()); + let mut parents: HashSet<_> = HashSet::from_iter(parents.iter().copied()); + + // Also include the current EnableSideEffectsIf condition in parents + // (as it would affect every following statement) + self.side_effects_condition.map(|v| parents.insert(v)); + + // Don't update sets for the calls not yet being tracked for (_, tainted_ids) in self.tainted.iter_mut() { - tainted_ids.update_children(&parents, children); + if tainted_ids.tracking { + tainted_ids.update_children(&parents, children); + } } } @@ -408,28 +585,44 @@ impl DependencyContext { /// by given values after recording partial constraints, if so stop tracking them fn clear_constrained(&mut self, constrained_values: &[ValueId], function: &Function) { // Remove numeric constants - let constrained_values = - 
constrained_values.iter().filter(|v| function.dfg.get_numeric_constant(**v).is_none()); - - // For now, consider array element constraints to be array constraints - // TODO(https://github.com/noir-lang/noir/issues/6698): - // This probably has to be further looked into, to ensure _every_ element - // of an array result of a Brillig call has been constrained let constrained_values: HashSet<_> = constrained_values - .map(|v| { - if let Some(parent_array) = self.array_elements.get(v) { - *parent_array - } else { - *v - } - }) + .iter() + .filter(|v| function.dfg.get_numeric_constant(**v).is_none()) + .copied() .collect(); - self.tainted.iter_mut().for_each(|(_, tainted_ids)| { - tainted_ids.store_partial_constraints(&constrained_values); - }); + // Skip untracked calls + for (_, tainted_ids) in self.tainted.iter_mut() { + if tainted_ids.tracking { + tainted_ids.store_partial_constraints(&constrained_values); + } + } + self.tainted.retain(|_, tainted_ids| !tainted_ids.check_constrained()); } + + /// Process ArrayGet instruction for tracked Brillig calls + fn process_array_get( + &mut self, + function: &Function, + array: ValueId, + index: ValueId, + element_results: &[ValueId], + ) { + use acvm::acir::AcirField; + + // Only allow numeric constant indices + if let Some(value) = function.dfg.get_numeric_constant(index) { + if let Some(index) = value.try_to_u32() { + // Skip untracked calls + for (_, tainted_ids) in self.tainted.iter_mut() { + if tainted_ids.tracking { + tainted_ids.process_array_get(array, index as usize, element_results); + } + } + } + } + } } #[derive(Default)] @@ -499,7 +692,7 @@ impl Context { function: &Function, ) -> Vec { let mut warnings = Vec::new(); - // Find brillig-generated values in the set + // Find Brillig-generated values in the set let intersection = all_brillig_generated_values.intersection(current_set).copied(); // Go through all Brillig outputs in the set @@ -1018,4 +1211,189 @@ mod test { let ssa_level_warnings = 
ssa.check_for_missing_brillig_constraints(); assert_eq!(ssa_level_warnings.len(), 2); } + + #[test] + #[traced_test] + /// Test EnableSideEffectsIf conditions affecting the dependency graph + /// (SSA a bit convoluted to work around simplification breaking the flow + /// of the parsed test code) + fn test_enable_side_effects_if_affecting_following_statements() { + let program = r#" + acir(inline) fn main f0 { + b0(v0: Field, v1: Field): + v3 = call f1(v0, v1) -> Field + v5 = add v0, v1 + v6 = eq v3, v5 + v7 = add u1 1, u1 0 + enable_side_effects v6 + v8 = add v7, u1 1 + enable_side_effects u1 1 + constrain v8 == u1 2 + return v3 + } + + brillig(inline) fn foo f1 { + b0(v0: Field, v1: Field): + v2 = add v0, v1 + return v2 + } + "#; + + let mut ssa = Ssa::from_str(program).unwrap(); + let ssa_level_warnings = ssa.check_for_missing_brillig_constraints(); + assert_eq!(ssa_level_warnings.len(), 0); + } + + #[test] + #[traced_test] + /// Test call result array elements being underconstrained + fn test_brillig_result_array_missing_element_constraint() { + let program = r#" + acir(inline) fn main f0 { + b0(v0: u32): + v16 = call f1(v0) -> [u32; 3] + v17 = array_get v16, index u32 0 -> u32 + constrain v17 == v0 + v19 = array_get v16, index u32 2 -> u32 + constrain v19 == v0 + return v17 + } + + brillig(inline) fn into_array f1 { + b0(v0: u32): + v4 = make_array [v0, v0, v0] : [u32; 3] + return v4 + } + "#; + + let mut ssa = Ssa::from_str(program).unwrap(); + let ssa_level_warnings = ssa.check_for_missing_brillig_constraints(); + assert_eq!(ssa_level_warnings.len(), 1); + } + + #[test] + #[traced_test] + /// Test call result array elements being constrained properly + fn test_brillig_result_array_all_elements_constrained() { + let program = r#" + acir(inline) fn main f0 { + b0(v0: u32): + v16 = call f1(v0) -> [u32; 3] + v17 = array_get v16, index u32 0 -> u32 + constrain v17 == v0 + v20 = array_get v16, index u32 1 -> u32 + constrain v20 == v0 + v19 = array_get v16, index 
u32 2 -> u32 + constrain v19 == v0 + return v17 + } + + brillig(inline) fn into_array f1 { + b0(v0: u32): + v4 = make_array [v0, v0, v0] : [u32; 3] + return v4 + } + "#; + + let mut ssa = Ssa::from_str(program).unwrap(); + let ssa_level_warnings = ssa.check_for_missing_brillig_constraints(); + assert_eq!(ssa_level_warnings.len(), 0); + } + + #[test] + #[traced_test] + /// Test chained (wrapper) Brillig calls not producing a false positive + fn test_chained_brillig_calls_constrained() { + /* + struct Animal { + legs: Field, + eyes: u8, + tag: Tag, + } + + struct Tag { + no: Field, + } + + unconstrained fn foo(x: Field) -> Animal { + Animal { + legs: 4, + eyes: 2, + tag: Tag { no: x } + } + } + + unconstrained fn bar(x: Animal) -> Animal { + Animal { + legs: x.legs, + eyes: x.eyes, + tag: Tag { no: x.tag.no + 1 } + } + } + + fn main(x: Field) -> pub Animal { + let dog = bar(foo(x)); + assert(dog.legs == 4); + assert(dog.eyes == 2); + assert(dog.tag.no == x + 1); + + dog + } + */ + + let program = r#" + acir(inline) fn main f0 { + b0(v0: Field): + v27, v28, v29 = call f2(v0) -> (Field, u8, Field) + v30, v31, v32 = call f1(v27, v28, v29) -> (Field, u8, Field) + constrain v30 == Field 4 + constrain v31 == u8 2 + v35 = add v0, Field 1 + constrain v32 == v35 + return v30, v31, v32 + } + + brillig(inline) fn foo f2 { + b0(v0: Field): + return Field 4, u8 2, v0 + } + + brillig(inline) fn bar f1 { + b0(v0: Field, v1: u8, v2: Field): + v7 = add v2, Field 1 + return v0, v1, v7 + } + + "#; + + let mut ssa = Ssa::from_str(program).unwrap(); + let ssa_level_warnings = ssa.check_for_missing_brillig_constraints(); + assert_eq!(ssa_level_warnings.len(), 0); + } + + #[test] + #[traced_test] + /// Test for the argument descendants coming before Brillig calls themselves being + /// registered as such + fn test_brillig_argument_descendants_preceding_call() { + let program = r#" + acir(inline) fn main f0 { + b0(v0: Field, v1: Field): + v3 = add v0, v1 + v5 = call f1(v0, v1) -> Field + 
constrain v3 == v5 + return v3 + } + + brillig(inline) fn foo f1 { + b0(v0: Field, v1: Field): + v2 = add v0, v1 + return v2 + } + "#; + + let mut ssa = Ssa::from_str(program).unwrap(); + let ssa_level_warnings = ssa.check_for_missing_brillig_constraints(); + assert_eq!(ssa_level_warnings.len(), 0); + } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/dfg.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/dfg.rs index 83b8f2a57ff..8e87db15caf 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/dfg.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/dfg.rs @@ -109,6 +109,7 @@ pub(crate) struct DataFlowGraph { /// The GlobalsGraph contains the actual global data. /// Global data is expected to only be numeric constants or array constants (which are represented by Instruction::MakeArray). /// The global's data will shared across functions and should be accessible inside of a function's DataFlowGraph. +#[serde_as] #[derive(Debug, Clone, Serialize, Deserialize, Default)] pub(crate) struct GlobalsGraph { /// Storage for all of the global values @@ -116,11 +117,20 @@ pub(crate) struct GlobalsGraph { /// All of the instructions in the global value space. 
/// These are expected to all be Instruction::MakeArray instructions: DenseMap, + #[serde_as(as = "HashMap")] + results: HashMap>, + #[serde(skip)] + constants: HashMap<(FieldElement, NumericType), ValueId>, } impl GlobalsGraph { pub(crate) fn from_dfg(dfg: DataFlowGraph) -> Self { - Self { values: dfg.values, instructions: dfg.instructions } + Self { + values: dfg.values, + instructions: dfg.instructions, + results: dfg.results, + constants: dfg.constants, + } } /// Iterate over every Value in this DFG in no particular order, including unused Values @@ -129,6 +139,17 @@ impl GlobalsGraph { } } +impl From for DataFlowGraph { + fn from(value: GlobalsGraph) -> Self { + DataFlowGraph { + values: value.values, + instructions: value.instructions, + results: value.results, + ..Default::default() + } + } +} + impl DataFlowGraph { /// Runtime type of the function. pub(crate) fn runtime(&self) -> RuntimeType { @@ -386,6 +407,9 @@ impl DataFlowGraph { if let Some(id) = self.constants.get(&(constant, typ)) { return *id; } + if let Some(id) = self.globals.constants.get(&(constant, typ)) { + return *id; + } let id = self.values.insert(Value::NumericConstant { constant, typ }); self.constants.insert((constant, typ), id); id @@ -484,7 +508,7 @@ impl DataFlowGraph { /// Should `value` be a numeric constant then this function will return the exact number of bits required, /// otherwise it will return the minimum number of bits based on type information. pub(crate) fn get_value_max_num_bits(&self, value: ValueId) -> u32 { - match self[value] { + match self[self.resolve(value)] { Value::Instruction { instruction, .. 
} => { let value_bit_size = self.type_of_value(value).bit_size(); if let Instruction::Cast(original_value, _) = self[instruction] { diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/function.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/function.rs index b21a84d16dc..516cd8e318e 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/function.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/function.rs @@ -143,10 +143,7 @@ impl Function { } pub(crate) fn is_no_predicates(&self) -> bool { - match self.runtime() { - RuntimeType::Acir(inline_type) => matches!(inline_type, InlineType::NoPredicates), - RuntimeType::Brillig(_) => false, - } + self.runtime().is_no_predicates() } /// Retrieves the entry block of a function. diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs index 992c633ffcd..6ee7aa0192c 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs @@ -650,7 +650,12 @@ fn constant_to_radix( ) -> SimplifyResult { let bit_size = u32::BITS - (radix - 1).leading_zeros(); let radix_big = BigUint::from(radix); - assert_eq!(BigUint::from(2u128).pow(bit_size), radix_big, "ICE: Radix must be a power of 2"); + let radix_range = BigUint::from(2u128)..=BigUint::from(256u128); + if !radix_range.contains(&radix_big) || BigUint::from(2u128).pow(bit_size) != radix_big { + // NOTE: expect an error to be thrown later in + // acir::generated_acir::radix_le_decompose + return SimplifyResult::None; + } let big_integer = BigUint::from_bytes_be(&field.to_be_bytes()); // Decompose the integer into its radix digits in little endian form. 
diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/printer.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/printer.rs index 88bee0799a3..e9c465d264f 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/printer.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/printer.rs @@ -20,13 +20,16 @@ use super::{ impl Display for Ssa { fn fmt(&self, f: &mut Formatter<'_>) -> Result { - for (id, global_value) in self.globals.dfg.values_iter() { + let globals = (*self.functions[&self.main_id].dfg.globals).clone(); + let globals_dfg = DataFlowGraph::from(globals); + + for (id, global_value) in globals_dfg.values_iter() { match global_value { Value::NumericConstant { constant, typ } => { writeln!(f, "g{} = {typ} {constant}", id.to_u32())?; } Value::Instruction { instruction, .. } => { - display_instruction(&self.globals.dfg, *instruction, true, f)?; + display_instruction(&globals_dfg, *instruction, true, f)?; } Value::Global(_) => { panic!("Value::Global should only be in the function dfg"); @@ -35,7 +38,7 @@ impl Display for Ssa { }; } - if self.globals.dfg.values_iter().next().is_some() { + if globals_dfg.values_iter().next().is_some() { writeln!(f)?; } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/die.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/die.rs index 3b5537aceb4..a8f0659f8db 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/die.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/die.rs @@ -36,11 +36,12 @@ impl Ssa { .flat_map(|(_, func)| func.dead_instruction_elimination(true, flattened)) .collect(); + let globals = &self.functions[&self.main_id].dfg.globals; // Check which globals are used across all functions - for (id, value) in self.globals.dfg.values_iter().rev() { + for (id, value) in globals.values_iter().rev() { if used_global_values.contains(&id) { if let Value::Instruction { instruction, .. 
} = &value { - let instruction = &self.globals.dfg[*instruction]; + let instruction = &globals[*instruction]; instruction.for_each_value(|value_id| { used_global_values.insert(value_id); }); @@ -143,14 +144,16 @@ impl Context { let block = &function.dfg[block_id]; self.mark_terminator_values_as_used(function, block); - let instructions_len = block.instructions().len(); - let mut rc_tracker = RcTracker::default(); + rc_tracker.mark_terminator_arrays_as_used(function, block); + + let instructions_len = block.instructions().len(); // Indexes of instructions that might be out of bounds. // We'll remove those, but before that we'll insert bounds checks for them. let mut possible_index_out_of_bounds_indexes = Vec::new(); + // Going in reverse so we know if a result of an instruction was used. for (instruction_index, instruction_id) in block.instructions().iter().rev().enumerate() { let instruction = &function.dfg[*instruction_id]; @@ -240,6 +243,8 @@ impl Context { } } + /// Go through the RC instructions collected when we figured out which values were unused; + /// for each RC that refers to an unused value, remove the RC as well. fn remove_rc_instructions(&self, dfg: &mut DataFlowGraph) { let unused_rc_values_by_block: HashMap> = self.rc_instructions.iter().fold(HashMap::default(), |mut acc, (rc, block)| { @@ -579,10 +584,12 @@ struct RcTracker { // with the same value but no array set in between. // If we see an inc/dec RC pair within a block we can safely remove both instructions. rcs_with_possible_pairs: HashMap>, + // Tracks repeated RC instructions: if there are two `inc_rc` for the same value in a row, the 2nd one is redundant. rc_pairs_to_remove: HashSet, // We also separately track all IncrementRc instructions and all array types which have been mutably borrowed. // If an array is the same type as one of those non-mutated array types, we can safely remove all IncrementRc instructions on that array. 
inc_rcs: HashMap>, + // When tracking mutations we consider arrays with the same type as all being possibly mutated. mutated_array_types: HashSet, // The SSA often creates patterns where after simplifications we end up with repeat // IncrementRc instructions on the same value. We track whether the previous instruction was an IncrementRc, @@ -592,9 +599,19 @@ struct RcTracker { } impl RcTracker { + fn mark_terminator_arrays_as_used(&mut self, function: &Function, block: &BasicBlock) { + block.unwrap_terminator().for_each_value(|value| { + let typ = function.dfg.type_of_value(value); + if matches!(&typ, Type::Array(_, _) | Type::Slice(_)) { + self.mutated_array_types.insert(typ); + } + }); + } + fn track_inc_rcs_to_remove(&mut self, instruction_id: InstructionId, function: &Function) { let instruction = &function.dfg[instruction_id]; + // Deduplicate IncRC instructions. if let Instruction::IncrementRc { value } = instruction { if let Some(previous_value) = self.previous_inc_rc { if previous_value == *value { @@ -603,6 +620,7 @@ impl RcTracker { } self.previous_inc_rc = Some(*value); } else { + // Reset the deduplication. self.previous_inc_rc = None; } @@ -610,6 +628,8 @@ impl RcTracker { // when we see a DecrementRc and check whether it was possibly mutated when we see an IncrementRc. match instruction { Instruction::IncrementRc { value } => { + // Get any RC instruction recorded further down the block for this array; + // if it exists and not marked as mutated, then both RCs can be removed. if let Some(inc_rc) = pop_rc_for(*value, function, &mut self.rcs_with_possible_pairs) { @@ -618,7 +638,7 @@ impl RcTracker { self.rc_pairs_to_remove.insert(instruction_id); } } - + // Remember that this array was RC'd by this instruction. self.inc_rcs.entry(*value).or_default().insert(instruction_id); } Instruction::DecrementRc { value } => { @@ -631,12 +651,12 @@ impl RcTracker { } Instruction::ArraySet { array, .. 
} => { let typ = function.dfg.type_of_value(*array); + // We mark all RCs that refer to arrays with a matching type as the one being set, as possibly mutated. if let Some(dec_rcs) = self.rcs_with_possible_pairs.get_mut(&typ) { for dec_rc in dec_rcs { dec_rc.possibly_mutated = true; } } - self.mutated_array_types.insert(typ); } Instruction::Store { value, .. } => { @@ -647,6 +667,9 @@ impl RcTracker { } } Instruction::Call { arguments, .. } => { + // Treat any array-type arguments to calls as possible sources of mutation. + // During the preprocessing of functions in isolation we don't want to + // get rid of IncRCs arrays that can potentially be mutated outside. for arg in arguments { let typ = function.dfg.type_of_value(*arg); if matches!(&typ, Type::Array(..) | Type::Slice(..)) { @@ -658,6 +681,7 @@ impl RcTracker { } } + /// Get all RC instructions which work on arrays whose type has not been marked as mutated. fn get_non_mutated_arrays(&self, dfg: &DataFlowGraph) -> HashSet { self.inc_rcs .keys() @@ -856,16 +880,6 @@ mod test { #[test] fn keep_inc_rc_on_borrowed_array_set() { - // brillig(inline) fn main f0 { - // b0(v0: [u32; 2]): - // inc_rc v0 - // v3 = array_set v0, index u32 0, value u32 1 - // inc_rc v0 - // inc_rc v0 - // inc_rc v0 - // v4 = array_get v3, index u32 1 - // return v4 - // } let src = " brillig(inline) fn main f0 { b0(v0: [u32; 2]): @@ -950,6 +964,36 @@ mod test { assert_normalized_ssa_equals(ssa, expected); } + #[test] + fn do_not_remove_inc_rcs_for_arrays_in_terminator() { + let src = " + brillig(inline) fn main f0 { + b0(v0: [Field; 2]): + inc_rc v0 + inc_rc v0 + inc_rc v0 + v2 = array_get v0, index u32 0 -> Field + inc_rc v0 + return v0, v2 + } + "; + + let ssa = Ssa::from_str(src).unwrap(); + + let expected = " + brillig(inline) fn main f0 { + b0(v0: [Field; 2]): + inc_rc v0 + v2 = array_get v0, index u32 0 -> Field + inc_rc v0 + return v0, v2 + } + "; + + let ssa = ssa.dead_instruction_elimination(); + 
assert_normalized_ssa_equals(ssa, expected); + } + #[test] fn do_not_remove_inc_rc_if_used_as_call_arg() { // We do not want to remove inc_rc instructions on values diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/inlining.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/inlining.rs index c3b771d9102..f9d8682a59b 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/inlining.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/inlining.rs @@ -13,7 +13,7 @@ use crate::ssa::{ ir::{ basic_block::BasicBlockId, call_stack::CallStackId, - dfg::InsertInstructionResult, + dfg::{GlobalsGraph, InsertInstructionResult}, function::{Function, FunctionId, RuntimeType}, instruction::{Instruction, InstructionId, TerminatorInstruction}, value::{Value, ValueId}, @@ -64,16 +64,70 @@ impl Ssa { let inline_targets = inline_infos.iter().filter_map(|(id, info)| info.is_inline_target().then_some(*id)); + let should_inline_call = |callee: &Function| -> bool { + match callee.runtime() { + RuntimeType::Acir(_) => { + // If we have not already finished the flattening pass, functions marked + // to not have predicates should be preserved. + let preserve_function = + !inline_no_predicates_functions && callee.is_no_predicates(); + !preserve_function + } + RuntimeType::Brillig(_) => { + // We inline inline if the function called wasn't ruled out as too costly or recursive. + InlineInfo::should_inline(inline_infos, callee.id()) + } + } + }; + // NOTE: Functions are processed independently of each other, with the final mapping replacing the original, // instead of inlining the "leaf" functions, moving up towards the entry point. 
self.functions = btree_map(inline_targets, |entry_point| { let function = &self.functions[&entry_point]; - let new_function = - function.inlined(&self, inline_no_predicates_functions, inline_infos); + let new_function = function.inlined(&self, &should_inline_call); (entry_point, new_function) }); self } + + pub(crate) fn inline_simple_functions(mut self: Ssa) -> Ssa { + let should_inline_call = |callee: &Function| { + if let RuntimeType::Acir(_) = callee.runtime() { + // Functions marked to not have predicates should be preserved. + if callee.is_no_predicates() { + return false; + } + } + + let entry_block_id = callee.entry_block(); + let entry_block = &callee.dfg[entry_block_id]; + + // Only inline functions with a single block + if entry_block.successors().next().is_some() { + return false; + } + + // Only inline functions with 0 or 1 instructions + entry_block.instructions().len() <= 1 + }; + + self.functions = btree_map(self.functions.iter(), |(id, function)| { + (*id, function.inlined(&self, &should_inline_call)) + }); + + self + } +} + +impl Function { + /// Create a new function which has the functions called by this one inlined into its body. + pub(super) fn inlined( + &self, + ssa: &Ssa, + should_inline_call: &impl Fn(&Function) -> bool, + ) -> Function { + InlineContext::new(ssa, self.id()).inline_all(ssa, &should_inline_call) + } } impl Function { @@ -146,6 +200,9 @@ struct InlineContext { /// inline into. The same goes for ValueIds, InstructionIds, and for storing other data like /// parameter to argument mappings. struct PerFunctionContext<'function> { + /// The function that we are inlining calls into. + entry_function: &'function Function, + /// The source function is the function we're currently inlining into the function being built. source_function: &'function Function, @@ -170,7 +227,7 @@ struct PerFunctionContext<'function> { /// True if we're currently working on the entry point function. 
inlining_entry: bool, - globals: &'function Function, + globals: &'function GlobalsGraph, } /// Utility function to find out the direct calls of a function. @@ -205,7 +262,7 @@ pub(super) struct InlineInfo { is_brillig_entry_point: bool, is_acir_entry_point: bool, is_recursive: bool, - should_inline: bool, + pub(super) should_inline: bool, weight: i64, cost: i64, } @@ -218,6 +275,10 @@ impl InlineInfo { || self.is_recursive || !self.should_inline } + + pub(super) fn should_inline(inline_infos: &InlineInfos, called_func_id: FunctionId) -> bool { + inline_infos.get(&called_func_id).map(|info| info.should_inline).unwrap_or_default() + } } type InlineInfos = BTreeMap; @@ -519,7 +580,7 @@ fn mark_brillig_functions_to_retain( inline_no_predicates_functions: bool, aggressiveness: i64, times_called: &HashMap, - inline_infos: &mut BTreeMap, + inline_infos: &mut InlineInfos, ) { let brillig_entry_points = inline_infos .iter() @@ -574,11 +635,12 @@ impl InlineContext { fn inline_all( mut self, ssa: &Ssa, - should_inline_call: &impl Fn(&PerFunctionContext, &Ssa, FunctionId) -> bool, + should_inline_call: &impl Fn(&Function) -> bool, ) -> Function { let entry_point = &ssa.functions[&self.entry_point]; - let mut context = PerFunctionContext::new(&mut self, entry_point, &ssa.globals); + let globals = &entry_point.dfg.globals; + let mut context = PerFunctionContext::new(&mut self, entry_point, entry_point, globals); context.inlining_entry = true; for (_, value) in entry_point.dfg.globals.values_iter() { @@ -617,7 +679,7 @@ impl InlineContext { ssa: &Ssa, id: FunctionId, arguments: &[ValueId], - should_inline_call: &impl Fn(&PerFunctionContext, &Ssa, FunctionId) -> bool, + should_inline_call: &impl Fn(&Function) -> bool, ) -> Vec { self.recursion_level += 1; @@ -629,7 +691,9 @@ impl InlineContext { ); } - let mut context = PerFunctionContext::new(self, source_function, &ssa.globals); + let entry_point = &ssa.functions[&self.entry_point]; + let globals = 
&source_function.dfg.globals; + let mut context = PerFunctionContext::new(self, entry_point, source_function, globals); let parameters = source_function.parameters(); assert_eq!(parameters.len(), arguments.len()); @@ -651,11 +715,13 @@ impl<'function> PerFunctionContext<'function> { /// the arguments of the destination function. fn new( context: &'function mut InlineContext, + entry_function: &'function Function, source_function: &'function Function, - globals: &'function Function, + globals: &'function GlobalsGraph, ) -> Self { Self { context, + entry_function, source_function, blocks: HashMap::default(), values: HashMap::default(), @@ -679,8 +745,7 @@ impl<'function> PerFunctionContext<'function> { value @ Value::Instruction { instruction, .. } => { if self.source_function.dfg.is_global(id) { if self.context.builder.current_function.dfg.runtime().is_acir() { - let Instruction::MakeArray { elements, typ } = - &self.globals.dfg[*instruction] + let Instruction::MakeArray { elements, typ } = &self.globals[*instruction] else { panic!("Only expect Instruction::MakeArray for a global"); }; @@ -777,7 +842,7 @@ impl<'function> PerFunctionContext<'function> { fn inline_blocks( &mut self, ssa: &Ssa, - should_inline_call: &impl Fn(&PerFunctionContext, &Ssa, FunctionId) -> bool, + should_inline_call: &impl Fn(&Function) -> bool, ) -> Vec { let mut seen_blocks = HashSet::new(); let mut block_queue = VecDeque::new(); @@ -844,7 +909,7 @@ impl<'function> PerFunctionContext<'function> { &mut self, ssa: &Ssa, block_id: BasicBlockId, - should_inline_call: &impl Fn(&PerFunctionContext, &Ssa, FunctionId) -> bool, + should_inline_call: &impl Fn(&Function) -> bool, ) { let mut side_effects_enabled: Option = None; @@ -853,19 +918,29 @@ impl<'function> PerFunctionContext<'function> { match &self.source_function.dfg[*id] { Instruction::Call { func, arguments } => match self.get_function(*func) { Some(func_id) => { - if should_inline_call(self, ssa, func_id) { - self.inline_function(ssa, 
*id, func_id, arguments, should_inline_call); - - // This is only relevant during handling functions with `InlineType::NoPredicates` as these - // can pollute the function they're being inlined into with `Instruction::EnabledSideEffects`, - // resulting in predicates not being applied properly. - // - // Note that this doesn't cover the case in which there exists an `Instruction::EnabledSideEffects` - // within the function being inlined whilst the source function has not encountered one yet. - // In practice this isn't an issue as the last `Instruction::EnabledSideEffects` in the - // function being inlined will be to turn off predicates rather than to create one. - if let Some(condition) = side_effects_enabled { - self.context.builder.insert_enable_side_effects_if(condition); + if let Some(callee) = self.should_inline_call(ssa, func_id) { + if should_inline_call(callee) { + self.inline_function( + ssa, + *id, + func_id, + arguments, + should_inline_call, + ); + + // This is only relevant during handling functions with `InlineType::NoPredicates` as these + // can pollute the function they're being inlined into with `Instruction::EnabledSideEffects`, + // resulting in predicates not being applied properly. + // + // Note that this doesn't cover the case in which there exists an `Instruction::EnabledSideEffects` + // within the function being inlined whilst the source function has not encountered one yet. + // In practice this isn't an issue as the last `Instruction::EnabledSideEffects` in the + // function being inlined will be to turn off predicates rather than to create one. 
+ if let Some(condition) = side_effects_enabled { + self.context.builder.insert_enable_side_effects_if(condition); + } + } else { + self.push_instruction(*id); } } else { self.push_instruction(*id); @@ -882,6 +957,38 @@ impl<'function> PerFunctionContext<'function> { } } + fn should_inline_call<'a>( + &self, + ssa: &'a Ssa, + called_func_id: FunctionId, + ) -> Option<&'a Function> { + // Do not inline self-recursive functions on the top level. + // Inlining a self-recursive function works when there is something to inline into + // by importing all the recursive blocks, but for the entry function there is no wrapper. + if self.entry_function.id() == called_func_id { + return None; + } + + let callee = &ssa.functions[&called_func_id]; + + match callee.runtime() { + RuntimeType::Acir(inline_type) => { + // If the called function is acir, we inline if it's not an entry point + if inline_type.is_entry_point() { + return None; + } + } + RuntimeType::Brillig(_) => { + if self.entry_function.runtime().is_acir() { + // We never inline a brillig function into an ACIR function. 
+ return None; + } + } + } + + Some(callee) + } + /// Inline a function call and remember the inlined return values in the values map fn inline_function( &mut self, @@ -889,7 +996,7 @@ impl<'function> PerFunctionContext<'function> { call_id: InstructionId, function: FunctionId, arguments: &[ValueId], - should_inline_call: &impl Fn(&PerFunctionContext, &Ssa, FunctionId) -> bool, + should_inline_call: &impl Fn(&Function) -> bool, ) { let old_results = self.source_function.dfg.instruction_results(call_id); let arguments = vecmap(arguments, |arg| self.translate_value(*arg)); @@ -1093,6 +1200,7 @@ mod test { map::Id, types::{NumericType, Type}, }, + opt::assert_normalized_ssa_equals, Ssa, }; @@ -1567,4 +1675,76 @@ mod test { ); assert!(tws[3] > max(tws[1], tws[2]), "ideally 'main' has the most weight"); } + + #[test] + fn inline_simple_functions_with_zero_instructions() { + let src = " + acir(inline) fn main f0 { + b0(v0: Field): + v2 = call f1(v0) -> Field + v3 = call f1(v0) -> Field + v4 = add v2, v3 + return v4 + } + + acir(inline) fn foo f1 { + b0(v0: Field): + return v0 + } + "; + let ssa = Ssa::from_str(src).unwrap(); + + let expected = " + acir(inline) fn main f0 { + b0(v0: Field): + v1 = add v0, v0 + return v1 + } + acir(inline) fn foo f1 { + b0(v0: Field): + return v0 + } + "; + + let ssa = ssa.inline_simple_functions(); + assert_normalized_ssa_equals(ssa, expected); + } + + #[test] + fn inline_simple_functions_with_one_instruction() { + let src = " + acir(inline) fn main f0 { + b0(v0: Field): + v2 = call f1(v0) -> Field + v3 = call f1(v0) -> Field + v4 = add v2, v3 + return v4 + } + + acir(inline) fn foo f1 { + b0(v0: Field): + v2 = add v0, Field 1 + return v2 + } + "; + let ssa = Ssa::from_str(src).unwrap(); + + let expected = " + acir(inline) fn main f0 { + b0(v0: Field): + v2 = add v0, Field 1 + v3 = add v0, Field 1 + v4 = add v2, v3 + return v4 + } + acir(inline) fn foo f1 { + b0(v0: Field): + v2 = add v0, Field 1 + return v2 + } + "; + + let ssa = 
ssa.inline_simple_functions(); + assert_normalized_ssa_equals(ssa, expected); + } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/loop_invariant.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/loop_invariant.rs index 224916c95e9..1e2e783d516 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/loop_invariant.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/loop_invariant.rs @@ -16,6 +16,7 @@ use crate::ssa::{ function::Function, function_inserter::FunctionInserter, instruction::{binary::eval_constant_binary_op, BinaryOp, Instruction, InstructionId}, + post_order::PostOrder, types::Type, value::ValueId, }, @@ -272,8 +273,10 @@ impl<'f> LoopInvariantContext<'f> { /// correct new value IDs based upon the `FunctionInserter` internal map. /// Leaving out this mapping could lead to instructions with values that do not exist. fn map_dependent_instructions(&mut self) { - let blocks = self.inserter.function.reachable_blocks(); - for block in blocks { + let mut block_order = PostOrder::with_function(self.inserter.function).into_vec(); + block_order.reverse(); + + for block in block_order { for instruction_id in self.inserter.function.dfg[block].take_instructions() { self.inserter.push_instruction(instruction_id, block); } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/preprocess_fns.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/preprocess_fns.rs index 439c2da5a2d..ae20c9b8b4a 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/preprocess_fns.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/preprocess_fns.rs @@ -1,8 +1,11 @@ //! Pre-process functions before inlining them into others. -use crate::ssa::Ssa; +use crate::ssa::{ + ir::function::{Function, RuntimeType}, + Ssa, +}; -use super::inlining; +use super::inlining::{self, InlineInfo}; impl Ssa { /// Run pre-processing steps on functions in isolation. 
@@ -10,31 +13,40 @@ impl Ssa { // Bottom-up order, starting with the "leaf" functions, so we inline already optimized code into the ones that call them. let bottom_up = inlining::compute_bottom_up_order(&self); - // As a heuristic to avoid optimizing functions near the entry point, find a cutoff weight. - let total_weight = - bottom_up.iter().fold(0usize, |acc, (_, (_, w))| (acc.saturating_add(*w))); - let mean_weight = total_weight / bottom_up.len(); - let cutoff_weight = mean_weight; - // Preliminary inlining decisions. let inline_infos = inlining::compute_inline_infos(&self, false, aggressiveness); + let should_inline_call = |callee: &Function| -> bool { + match callee.runtime() { + RuntimeType::Acir(_) => { + // Functions marked to not have predicates should be preserved. + !callee.is_no_predicates() + } + RuntimeType::Brillig(_) => { + // We inline inline if the function called wasn't ruled out as too costly or recursive. + InlineInfo::should_inline(&inline_infos, callee.id()) + } + } + }; + for (id, (own_weight, transitive_weight)) in bottom_up { - // Skip preprocessing heavy functions that gained most of their weight from transitive accumulation. + let function = &self.functions[&id]; + + // Skip preprocessing heavy functions that gained most of their weight from transitive accumulation, which tend to be near the entry. // These can be processed later by the regular SSA passes. - if transitive_weight >= cutoff_weight && transitive_weight > own_weight * 2 { - continue; - } + let is_heavy = transitive_weight > own_weight * 10; + // Functions which are inline targets will be processed in later passes. // Here we want to treat the functions which will be inlined into them. 
- if let Some(info) = inline_infos.get(&id) { - if info.is_inline_target() { - continue; - } + let is_target = + inline_infos.get(&id).map(|info| info.is_inline_target()).unwrap_or_default(); + + if is_heavy || is_target { + continue; } - let function = &self.functions[&id]; + // Start with an inline pass. - let mut function = function.inlined(&self, false, &inline_infos); + let mut function = function.inlined(&self, &should_inline_call); // Help unrolling determine bounds. function.as_slice_optimization(); // Prepare for unrolling diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs index a6e5c96d638..eb0bbd8c532 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs @@ -79,11 +79,11 @@ impl Ssa { if has_unrolled && is_brillig { if let Some(max_incr_pct) = max_bytecode_increase_percent { if global_cache.is_none() { + let globals = (*function.dfg.globals).clone(); // DIE is run at the end of our SSA optimizations, so we mark all globals as in use here. 
- let used_globals = - &self.globals.dfg.values_iter().map(|(id, _)| id).collect(); - let (_, brillig_globals) = - convert_ssa_globals(false, &self.globals, used_globals); + let used_globals = &globals.values_iter().map(|(id, _)| id).collect(); + let (_, brillig_globals, _) = + convert_ssa_globals(false, globals, used_globals); global_cache = Some(brillig_globals); } let brillig_globals = global_cache.as_ref().unwrap(); diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/ast.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/ast.rs index 6c7608a2f16..05743ffd7ca 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/ast.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/ast.rs @@ -7,9 +7,28 @@ use crate::ssa::ir::{function::RuntimeType, instruction::BinaryOp, types::Type}; #[derive(Debug)] pub(crate) struct ParsedSsa { + pub(crate) globals: Vec, pub(crate) functions: Vec, } +#[derive(Debug)] +pub(crate) struct ParsedGlobal { + pub(crate) name: Identifier, + pub(crate) value: ParsedGlobalValue, +} + +#[derive(Debug)] +pub(crate) enum ParsedGlobalValue { + NumericConstant(ParsedNumericConstant), + MakeArray(ParsedMakeArray), +} + +#[derive(Debug)] +pub(crate) struct ParsedMakeArray { + pub(crate) elements: Vec, + pub(crate) typ: Type, +} + #[derive(Debug)] pub(crate) struct ParsedFunction { pub(crate) runtime_type: RuntimeType, @@ -145,6 +164,12 @@ pub(crate) enum ParsedTerminator { #[derive(Debug, Clone)] pub(crate) enum ParsedValue { - NumericConstant { constant: FieldElement, typ: Type }, + NumericConstant(ParsedNumericConstant), Variable(Identifier), } + +#[derive(Debug, Clone)] +pub(crate) struct ParsedNumericConstant { + pub(crate) value: FieldElement, + pub(crate) typ: Type, +} diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/into_ssa.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/into_ssa.rs index e2eea234dc7..37d2cd720f9 100644 --- 
a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/into_ssa.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/into_ssa.rs @@ -1,4 +1,4 @@ -use std::collections::HashMap; +use std::{collections::HashMap, sync::Arc}; use acvm::acir::circuit::ErrorSelector; @@ -6,15 +6,17 @@ use crate::ssa::{ function_builder::FunctionBuilder, ir::{ basic_block::BasicBlockId, + call_stack::CallStackId, + dfg::GlobalsGraph, function::{Function, FunctionId}, - instruction::ConstrainError, + instruction::{ConstrainError, Instruction}, value::ValueId, }, }; use super::{ - ast::AssertMessage, Identifier, ParsedBlock, ParsedFunction, ParsedInstruction, ParsedSsa, - ParsedTerminator, ParsedValue, RuntimeType, Ssa, SsaError, + ast::AssertMessage, Identifier, ParsedBlock, ParsedFunction, ParsedGlobal, ParsedGlobalValue, + ParsedInstruction, ParsedSsa, ParsedTerminator, ParsedValue, RuntimeType, Ssa, SsaError, Type, }; impl ParsedSsa { @@ -39,6 +41,17 @@ struct Translator { /// will recreate the SSA step by step, which can result in a new ID layout. variables: HashMap>, + /// The function that will hold the actual SSA globals. + globals_function: Function, + + /// The types of globals in the parsed SSA, in the order they were defined. + global_types: Vec, + + /// Maps names (e.g. "g0") in the parsed SSA to global IDs. + global_values: HashMap, + + globals_graph: Arc, + error_selector_counter: u64, } @@ -74,13 +87,26 @@ impl Translator { functions.insert(function.internal_name.clone(), function_id); } + // Does not matter what ID we use here. 
+ let globals = Function::new("globals".to_owned(), main_id); + let mut translator = Self { builder, functions, variables: HashMap::new(), blocks: HashMap::new(), + globals_function: globals, + global_types: Vec::new(), + global_values: HashMap::new(), + globals_graph: Arc::new(GlobalsGraph::default()), error_selector_counter: 0, }; + + translator.translate_globals(std::mem::take(&mut parsed_ssa.globals))?; + + translator.globals_graph = + Arc::new(GlobalsGraph::from_dfg(translator.globals_function.dfg.clone())); + translator.translate_function_body(main_function)?; Ok(translator) @@ -103,6 +129,8 @@ impl Translator { } fn translate_function_body(&mut self, function: ParsedFunction) -> Result<(), SsaError> { + self.builder.set_globals(self.globals_graph.clone()); + // First define all blocks so that they are known (a block might jump to a block that comes next) for (index, block) in function.blocks.iter().enumerate() { // The first block is the entry block and it was automatically created by the builder @@ -297,8 +325,8 @@ impl Translator { fn translate_value(&mut self, value: ParsedValue) -> Result { match value { - ParsedValue::NumericConstant { constant, typ } => { - Ok(self.builder.numeric_constant(constant, typ.unwrap_numeric())) + ParsedValue::NumericConstant(constant) => { + Ok(self.builder.numeric_constant(constant.value, constant.typ.unwrap_numeric())) } ParsedValue::Variable(identifier) => self.lookup_variable(&identifier).or_else(|e| { self.lookup_function(&identifier) @@ -311,6 +339,45 @@ impl Translator { } } + fn translate_globals(&mut self, globals: Vec) -> Result<(), SsaError> { + for global in globals { + self.translate_global(global)?; + } + Ok(()) + } + + fn translate_global(&mut self, global: ParsedGlobal) -> Result<(), SsaError> { + let value_id = match global.value { + ParsedGlobalValue::NumericConstant(constant) => self + .globals_function + .dfg + .make_constant(constant.value, constant.typ.unwrap_numeric()), + 
ParsedGlobalValue::MakeArray(make_array) => { + let mut elements = im::Vector::new(); + for element in make_array.elements { + let element_id = match element { + ParsedValue::NumericConstant(constant) => self + .globals_function + .dfg + .make_constant(constant.value, constant.typ.unwrap_numeric()), + ParsedValue::Variable(identifier) => self.lookup_global(identifier)?, + }; + elements.push_back(element_id); + } + + let instruction = Instruction::MakeArray { elements, typ: make_array.typ.clone() }; + let block = self.globals_function.entry_block(); + let call_stack = CallStackId::root(); + self.globals_function + .dfg + .insert_instruction_and_results(instruction, block, None, call_stack) + .first() + } + }; + + self.define_global(global.name, value_id) + } + fn define_variable( &mut self, identifier: Identifier, @@ -329,13 +396,40 @@ impl Translator { } fn lookup_variable(&mut self, identifier: &Identifier) -> Result { - if let Some(value_id) = self.variables[&self.current_function_id()].get(&identifier.name) { + if let Some(value_id) = self + .variables + .get(&self.current_function_id()) + .and_then(|hash| hash.get(&identifier.name)) + { + Ok(*value_id) + } else if let Some(value_id) = self.global_values.get(&identifier.name) { Ok(*value_id) } else { Err(SsaError::UnknownVariable(identifier.clone())) } } + fn define_global(&mut self, identifier: Identifier, value_id: ValueId) -> Result<(), SsaError> { + if self.global_values.contains_key(&identifier.name) { + return Err(SsaError::GlobalAlreadyDefined(identifier)); + } + + self.global_values.insert(identifier.name, value_id); + + let typ = self.globals_function.dfg.type_of_value(value_id); + self.global_types.push(typ); + + Ok(()) + } + + fn lookup_global(&mut self, identifier: Identifier) -> Result { + if let Some(value_id) = self.global_values.get(&identifier.name) { + Ok(*value_id) + } else { + Err(SsaError::UnknownGlobal(identifier)) + } + } + fn lookup_block(&mut self, identifier: &Identifier) -> Result { if 
let Some(block_id) = self.blocks[&self.current_function_id()].get(&identifier.name) { Ok(*block_id) @@ -354,13 +448,13 @@ impl Translator { fn finish(self) -> Ssa { let mut ssa = self.builder.finish(); + // Normalize the IDs so we have a better chance of matching the SSA we parsed // after the step-by-step reconstruction done during translation. This assumes // that the SSA we parsed was printed by the `SsaBuilder`, which normalizes // before each print. ssa.normalize_ids(); - // Does not matter what ID we use here. - ssa.globals = Function::new("globals".to_owned(), ssa.main_id); + ssa } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/mod.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/mod.rs index 143ba511879..cc660355bbd 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/mod.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/mod.rs @@ -13,8 +13,9 @@ use super::{ use acvm::{AcirField, FieldElement}; use ast::{ - AssertMessage, Identifier, ParsedBlock, ParsedFunction, ParsedInstruction, ParsedParameter, - ParsedSsa, ParsedValue, + AssertMessage, Identifier, ParsedBlock, ParsedFunction, ParsedGlobal, ParsedGlobalValue, + ParsedInstruction, ParsedMakeArray, ParsedNumericConstant, ParsedParameter, ParsedSsa, + ParsedValue, }; use lexer::{Lexer, LexerError}; use noirc_errors::Span; @@ -99,6 +100,8 @@ pub(crate) enum SsaError { ParserError(ParserError), #[error("Unknown variable '{0}'")] UnknownVariable(Identifier), + #[error("Unknown global '{0}'")] + UnknownGlobal(Identifier), #[error("Unknown block '{0}'")] UnknownBlock(Identifier), #[error("Unknown function '{0}'")] @@ -107,6 +110,8 @@ pub(crate) enum SsaError { MismatchedReturnValues { returns: Vec, expected: usize }, #[error("Variable '{0}' already defined")] VariableAlreadyDefined(Identifier), + #[error("Global '{0}' already defined")] + GlobalAlreadyDefined(Identifier), } impl SsaError { @@ -114,8 +119,10 @@ impl SsaError { match self { 
SsaError::ParserError(parser_error) => parser_error.span(), SsaError::UnknownVariable(identifier) + | SsaError::UnknownGlobal(identifier) | SsaError::UnknownBlock(identifier) | SsaError::VariableAlreadyDefined(identifier) + | SsaError::GlobalAlreadyDefined(identifier) | SsaError::UnknownFunction(identifier) => identifier.span, SsaError::MismatchedReturnValues { returns, expected: _ } => returns[0].span, } @@ -138,12 +145,39 @@ impl<'a> Parser<'a> { } pub(crate) fn parse_ssa(&mut self) -> ParseResult { + let globals = self.parse_globals()?; + let mut functions = Vec::new(); while !self.at(Token::Eof) { let function = self.parse_function()?; functions.push(function); } - Ok(ParsedSsa { functions }) + Ok(ParsedSsa { globals, functions }) + } + + fn parse_globals(&mut self) -> ParseResult> { + let mut globals = Vec::new(); + + while let Some(name) = self.eat_identifier()? { + self.eat_or_error(Token::Assign)?; + + let value = self.parse_global_value()?; + globals.push(ParsedGlobal { name, value }); + } + + Ok(globals) + } + + fn parse_global_value(&mut self) -> ParseResult { + if let Some(constant) = self.parse_numeric_constant()? { + return Ok(ParsedGlobalValue::NumericConstant(constant)); + } + + if let Some(make_array) = self.parse_make_array()? { + return Ok(ParsedGlobalValue::MakeArray(make_array)); + } + + self.expected_global_value() } fn parse_function(&mut self) -> ParseResult { @@ -461,40 +495,12 @@ impl<'a> Parser<'a> { return Ok(ParsedInstruction::Load { target, value, typ }); } - if self.eat_keyword(Keyword::MakeArray)? { - if self.eat(Token::Ampersand)? { - let Some(string) = self.eat_byte_str()? 
else { - return self.expected_byte_string(); - }; - let u8 = Type::Numeric(NumericType::Unsigned { bit_size: 8 }); - let typ = Type::Slice(Arc::new(vec![u8.clone()])); - let elements = string - .bytes() - .map(|byte| ParsedValue::NumericConstant { - constant: FieldElement::from(byte as u128), - typ: u8.clone(), - }) - .collect(); - return Ok(ParsedInstruction::MakeArray { target, elements, typ }); - } else if let Some(string) = self.eat_byte_str()? { - let u8 = Type::Numeric(NumericType::Unsigned { bit_size: 8 }); - let typ = Type::Array(Arc::new(vec![u8.clone()]), string.len() as u32); - let elements = string - .bytes() - .map(|byte| ParsedValue::NumericConstant { - constant: FieldElement::from(byte as u128), - typ: u8.clone(), - }) - .collect(); - return Ok(ParsedInstruction::MakeArray { target, elements, typ }); - } else { - self.eat_or_error(Token::LeftBracket)?; - let elements = self.parse_comma_separated_values()?; - self.eat_or_error(Token::RightBracket)?; - self.eat_or_error(Token::Colon)?; - let typ = self.parse_type()?; - return Ok(ParsedInstruction::MakeArray { target, elements, typ }); - } + if let Some(make_array) = self.parse_make_array()? { + return Ok(ParsedInstruction::MakeArray { + target, + elements: make_array.elements, + typ: make_array.typ, + }); } if self.eat_keyword(Keyword::Not)? { @@ -524,6 +530,52 @@ impl<'a> Parser<'a> { self.expected_instruction_or_terminator() } + fn parse_make_array(&mut self) -> ParseResult> { + if !self.eat_keyword(Keyword::MakeArray)? { + return Ok(None); + } + + let make_array = if self.eat(Token::Ampersand)? { + let Some(string) = self.eat_byte_str()? 
else { + return self.expected_byte_string(); + }; + let u8 = Type::Numeric(NumericType::Unsigned { bit_size: 8 }); + let typ = Type::Slice(Arc::new(vec![u8.clone()])); + let elements = string + .bytes() + .map(|byte| { + ParsedValue::NumericConstant(ParsedNumericConstant { + value: FieldElement::from(byte as u128), + typ: u8.clone(), + }) + }) + .collect(); + ParsedMakeArray { elements, typ } + } else if let Some(string) = self.eat_byte_str()? { + let u8 = Type::Numeric(NumericType::Unsigned { bit_size: 8 }); + let typ = Type::Array(Arc::new(vec![u8.clone()]), string.len() as u32); + let elements = string + .bytes() + .map(|byte| { + ParsedValue::NumericConstant(ParsedNumericConstant { + value: FieldElement::from(byte as u128), + typ: u8.clone(), + }) + }) + .collect(); + ParsedMakeArray { elements, typ } + } else { + self.eat_or_error(Token::LeftBracket)?; + let elements = self.parse_comma_separated_values()?; + self.eat_or_error(Token::RightBracket)?; + self.eat_or_error(Token::Colon)?; + let typ = self.parse_type()?; + ParsedMakeArray { elements, typ } + }; + + Ok(Some(make_array)) + } + fn parse_terminator(&mut self) -> ParseResult { if let Some(terminator) = self.parse_return()? { return Ok(terminator); @@ -617,12 +669,8 @@ impl<'a> Parser<'a> { } fn parse_value(&mut self) -> ParseResult> { - if let Some(value) = self.parse_field_value()? { - return Ok(Some(value)); - } - - if let Some(value) = self.parse_int_value()? { - return Ok(Some(value)); + if let Some(constant) = self.parse_numeric_constant()? { + return Ok(Some(ParsedValue::NumericConstant(constant))); } if let Some(identifier) = self.eat_identifier()? { @@ -632,23 +680,35 @@ impl<'a> Parser<'a> { Ok(None) } - fn parse_field_value(&mut self) -> ParseResult> { + fn parse_numeric_constant(&mut self) -> ParseResult> { + if let Some(constant) = self.parse_field_value()? { + return Ok(Some(constant)); + } + + if let Some(constant) = self.parse_int_value()? 
{ + return Ok(Some(constant)); + } + + Ok(None) + } + + fn parse_field_value(&mut self) -> ParseResult> { if self.eat_keyword(Keyword::Field)? { - let constant = self.eat_int_or_error()?; - Ok(Some(ParsedValue::NumericConstant { constant, typ: Type::field() })) + let value = self.eat_int_or_error()?; + Ok(Some(ParsedNumericConstant { value, typ: Type::field() })) } else { Ok(None) } } - fn parse_int_value(&mut self) -> ParseResult> { + fn parse_int_value(&mut self) -> ParseResult> { if let Some(int_type) = self.eat_int_type()? { - let constant = self.eat_int_or_error()?; + let value = self.eat_int_or_error()?; let typ = match int_type { IntType::Unsigned(bit_size) => Type::unsigned(bit_size), IntType::Signed(bit_size) => Type::signed(bit_size), }; - Ok(Some(ParsedValue::NumericConstant { constant, typ })) + Ok(Some(ParsedNumericConstant { value, typ })) } else { Ok(None) } @@ -932,6 +992,13 @@ impl<'a> Parser<'a> { }) } + fn expected_global_value(&mut self) -> ParseResult { + Err(ParserError::ExpectedGlobalValue { + found: self.token.token().clone(), + span: self.token.to_span(), + }) + } + fn expected_token(&mut self, token: Token) -> ParseResult { Err(ParserError::ExpectedToken { token, @@ -971,6 +1038,10 @@ pub(crate) enum ParserError { ExpectedByteString { found: Token, span: Span }, #[error("Expected a value, found '{found}'")] ExpectedValue { found: Token, span: Span }, + #[error( + "Expected a global value (Field literal, integer literal or make_array), found '{found}'" + )] + ExpectedGlobalValue { found: Token, span: Span }, #[error("Multiple return values only allowed for call")] MultipleReturnValuesOnlyAllowedForCall { second_target: Identifier }, } @@ -987,7 +1058,8 @@ impl ParserError { | ParserError::ExpectedInstructionOrTerminator { span, .. } | ParserError::ExpectedStringOrData { span, .. } | ParserError::ExpectedByteString { span, .. } - | ParserError::ExpectedValue { span, .. } => *span, + | ParserError::ExpectedValue { span, .. 
} + | ParserError::ExpectedGlobalValue { span, .. } => *span, ParserError::MultipleReturnValuesOnlyAllowedForCall { second_target, .. } => { second_target.span } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/tests.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/tests.rs index 8c24b2ec458..c803e2a94fe 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/tests.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/tests.rs @@ -530,3 +530,19 @@ fn test_does_not_simplify() { "; assert_ssa_roundtrip(src); } + +#[test] +fn parses_globals() { + let src = " + g0 = Field 0 + g1 = u32 1 + g2 = make_array [] : [Field; 0] + g3 = make_array [g2] : [[Field; 0]; 1] + + acir(inline) fn main f0 { + b0(): + return g3 + } + "; + assert_ssa_roundtrip(src); +} diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs index fbb2b306bdf..d31d3865d4b 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs @@ -48,7 +48,10 @@ pub(crate) fn generate_ssa(program: Program) -> Result { let is_return_data = matches!(program.return_visibility, Visibility::ReturnData); let return_location = program.return_location; - let context = SharedContext::new(program); + let mut context = SharedContext::new(program); + + let globals_dfg = std::mem::take(&mut context.globals_context.dfg); + let globals = GlobalsGraph::from_dfg(globals_dfg); let globals = GlobalsGraph::from_dfg(context.globals_context.dfg.clone()); diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/program.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/program.rs index 7cd5c5c3990..04986bd8db1 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/program.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/program.rs @@ -20,7 +20,6 @@ use super::ValueId; 
pub(crate) struct Ssa { #[serde_as(as = "Vec<(_, _)>")] pub(crate) functions: BTreeMap, - pub(crate) globals: Function, pub(crate) used_global_values: HashSet, pub(crate) main_id: FunctionId, #[serde(skip)] @@ -58,9 +57,6 @@ impl Ssa { next_id: AtomicCounter::starting_after(max_id), entry_point_to_generated_index: BTreeMap::new(), error_selector_to_type: error_types, - // These fields should be set afterwards as globals are generated - // outside of the FunctionBuilder, which is where the `Ssa` is instantiated. - globals: Function::new_for_globals(), // This field is set only after running DIE and is utilized // for optimizing implementation of globals post-SSA. used_global_values: HashSet::default(), diff --git a/noir/noir-repo/compiler/noirc_frontend/src/ast/expression.rs b/noir/noir-repo/compiler/noirc_frontend/src/ast/expression.rs index 9d521545e7a..1f7a37428b2 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/ast/expression.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/ast/expression.rs @@ -8,7 +8,7 @@ use crate::ast::{ UnresolvedTraitConstraint, UnresolvedType, UnresolvedTypeData, Visibility, }; use crate::node_interner::{ - ExprId, InternedExpressionKind, InternedStatementKind, QuotedTypeId, StructId, + ExprId, InternedExpressionKind, InternedStatementKind, QuotedTypeId, TypeId, }; use crate::token::{Attributes, FmtStrFragment, FunctionAttribute, Token, Tokens}; use crate::{Kind, Type}; @@ -559,7 +559,7 @@ pub struct ConstructorExpression { /// This may be filled out during macro expansion /// so that we can skip re-resolving the type name since it /// would be lost at that point. 
- pub struct_type: Option, + pub struct_type: Option, } #[derive(Debug, PartialEq, Eq, Clone)] diff --git a/noir/noir-repo/compiler/noirc_frontend/src/ast/statement.rs b/noir/noir-repo/compiler/noirc_frontend/src/ast/statement.rs index 57572e80d1e..02715e8c2d3 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/ast/statement.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/ast/statement.rs @@ -12,6 +12,7 @@ use super::{ }; use crate::ast::UnresolvedTypeData; use crate::elaborator::types::SELF_TYPE_NAME; +use crate::elaborator::Turbofish; use crate::lexer::token::SpannedToken; use crate::node_interner::{ InternedExpressionKind, InternedPattern, InternedStatementKind, NodeInterner, @@ -45,7 +46,7 @@ pub enum StatementKind { Expression(Expression), Assign(AssignStatement), For(ForLoopStatement), - Loop(Expression), + Loop(Expression, Span /* loop keyword span */), Break, Continue, /// This statement should be executed at compile-time @@ -307,6 +308,7 @@ pub struct ModuleDeclaration { pub visibility: ItemVisibility, pub ident: Ident, pub outer_attributes: Vec, + pub has_semicolon: bool, } impl std::fmt::Display for ModuleDeclaration { @@ -535,6 +537,12 @@ impl PathSegment { pub fn turbofish_span(&self) -> Span { Span::from(self.ident.span().end()..self.span.end()) } + + pub fn turbofish(&self) -> Option { + self.generics + .as_ref() + .map(|generics| Turbofish { span: self.turbofish_span(), generics: generics.clone() }) + } } impl From for PathSegment { @@ -965,7 +973,7 @@ impl Display for StatementKind { StatementKind::Expression(expression) => expression.fmt(f), StatementKind::Assign(assign) => assign.fmt(f), StatementKind::For(for_loop) => for_loop.fmt(f), - StatementKind::Loop(block) => write!(f, "loop {}", block), + StatementKind::Loop(block, _) => write!(f, "loop {}", block), StatementKind::Break => write!(f, "break"), StatementKind::Continue => write!(f, "continue"), StatementKind::Comptime(statement) => write!(f, "comptime {}", statement.kind), diff 
--git a/noir/noir-repo/compiler/noirc_frontend/src/ast/visitor.rs b/noir/noir-repo/compiler/noirc_frontend/src/ast/visitor.rs index 5c4781df7a5..d7fe63a6a45 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/ast/visitor.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/ast/visitor.rs @@ -1135,7 +1135,7 @@ impl Statement { StatementKind::For(for_loop_statement) => { for_loop_statement.accept(visitor); } - StatementKind::Loop(block) => { + StatementKind::Loop(block, _) => { if visitor.visit_loop_statement(block) { block.accept(visitor); } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/comptime.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/comptime.rs index 9f5eef6e785..c13c74f44cb 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/comptime.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/comptime.rs @@ -19,7 +19,7 @@ use crate::{ resolution::errors::ResolverError, }, hir_def::expr::{HirExpression, HirIdent}, - node_interner::{DefinitionKind, DependencyId, FuncId, NodeInterner, StructId, TraitId}, + node_interner::{DefinitionKind, DependencyId, FuncId, NodeInterner, TraitId, TypeId}, parser::{Item, ItemKind}, token::{MetaAttribute, SecondaryAttribute}, Type, TypeBindings, UnificationError, @@ -512,7 +512,7 @@ impl<'context> Elaborator<'context> { pub(super) fn run_attributes( &mut self, traits: &BTreeMap, - types: &BTreeMap, + types: &BTreeMap, functions: &[UnresolvedFunctions], module_attributes: &[ModuleAttribute], ) { diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/expressions.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/expressions.rs index ef2ae9c4df0..0cee880e781 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/expressions.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/expressions.rs @@ -29,7 +29,7 @@ use crate::{ }, node_interner::{DefinitionKind, ExprId, FuncId, InternedStatementKind, TraitMethodId}, token::{FmtStrFragment, 
Tokens}, - Kind, QuotedType, Shared, StructType, Type, + DataType, Kind, QuotedType, Shared, Type, }; use super::{Elaborator, LambdaContext, UnsafeBlockStatus}; @@ -614,12 +614,12 @@ impl<'context> Elaborator<'context> { let is_self_type = last_segment.ident.is_self_type_name(); let (r#type, struct_generics) = if let Some(struct_id) = constructor.struct_type { - let typ = self.interner.get_struct(struct_id); + let typ = self.interner.get_type(struct_id); let generics = typ.borrow().instantiate(self.interner); (typ, generics) } else { match self.lookup_type_or_error(path) { - Some(Type::Struct(r#type, struct_generics)) => (r#type, struct_generics), + Some(Type::DataType(r#type, struct_generics)) => (r#type, struct_generics), Some(typ) => { self.push_err(ResolverError::NonStructUsedInConstructor { typ: typ.to_string(), @@ -659,10 +659,10 @@ impl<'context> Elaborator<'context> { let reference_location = Location::new(last_segment.ident.span(), self.file); self.interner.add_struct_reference(struct_id, reference_location, is_self_type); - (expr, Type::Struct(struct_type, generics)) + (expr, Type::DataType(struct_type, generics)) } - pub(super) fn mark_struct_as_constructed(&mut self, struct_type: Shared) { + pub(super) fn mark_struct_as_constructed(&mut self, struct_type: Shared) { let struct_type = struct_type.borrow(); let parent_module_id = struct_type.id.parent_module_id(self.def_maps); self.usage_tracker.mark_as_used(parent_module_id, &struct_type.name); @@ -673,7 +673,7 @@ impl<'context> Elaborator<'context> { /// are part of the struct. 
fn resolve_constructor_expr_fields( &mut self, - struct_type: Shared, + struct_type: Shared, field_types: Vec<(String, ItemVisibility, Type)>, fields: Vec<(Ident, Expression)>, span: Span, diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/mod.rs index 79f6be444ce..1e792ada677 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/mod.rs @@ -32,7 +32,7 @@ use crate::{ }, node_interner::{ DefinitionKind, DependencyId, ExprId, FuncId, FunctionModifiers, GlobalId, NodeInterner, - ReferenceId, StructId, TraitId, TraitImplId, TypeAliasId, + ReferenceId, TraitId, TraitImplId, TypeAliasId, TypeId, }, token::SecondaryAttribute, Shared, Type, TypeVariable, @@ -43,7 +43,7 @@ use crate::{ hir_def::traits::ResolvedTraitBound, node_interner::GlobalValue, usage_tracker::UsageTracker, - StructField, StructType, TypeBindings, + DataType, StructField, TypeBindings, }; mod comptime; @@ -61,6 +61,7 @@ mod unquote; use fm::FileId; use iter_extended::vecmap; use noirc_errors::{Location, Span, Spanned}; +pub use path_resolution::Turbofish; use path_resolution::{PathResolution, PathResolutionItem}; use types::bind_ordered_generics; @@ -93,6 +94,11 @@ enum UnsafeBlockStatus { InUnsafeBlockWithConstrainedCalls, } +pub struct Loop { + pub is_for: bool, + pub has_break: bool, +} + pub struct Elaborator<'context> { scopes: ScopeForest, @@ -106,7 +112,7 @@ pub struct Elaborator<'context> { pub(crate) file: FileId, unsafe_block_status: UnsafeBlockStatus, - nested_loops: usize, + current_loop: Option, /// Contains a mapping of the current struct or functions's generics to /// unique type variables if we're resolving a struct. Empty otherwise. 
@@ -146,7 +152,7 @@ pub struct Elaborator<'context> { /// struct Wrapped { /// } /// ``` - resolving_ids: BTreeSet, + resolving_ids: BTreeSet, /// Each constraint in the `where` clause of the function currently being resolved. trait_bounds: Vec, @@ -229,7 +235,7 @@ impl<'context> Elaborator<'context> { crate_graph, file: FileId::dummy(), unsafe_block_status: UnsafeBlockStatus::NotInUnsafeBlock, - nested_loops: 0, + current_loop: None, generics: Vec::new(), lambda_stack: Vec::new(), self_type: None, @@ -976,7 +982,7 @@ impl<'context> Elaborator<'context> { let statements = std::mem::take(&mut func.def.body.statements); let body = BlockExpression { statements }; - let struct_id = if let Some(Type::Struct(struct_type, _)) = &self.self_type { + let struct_id = if let Some(Type::DataType(struct_type, _)) = &self.self_type { Some(struct_type.borrow().id) } else { None @@ -1024,7 +1030,7 @@ impl<'context> Elaborator<'context> { self.mark_type_as_used(typ); } } - Type::Struct(struct_type, generics) => { + Type::DataType(struct_type, generics) => { self.mark_struct_as_constructed(struct_type.clone()); for generic in generics { self.mark_type_as_used(generic); @@ -1501,7 +1507,7 @@ impl<'context> Elaborator<'context> { let function_ids = functions.function_ids(); - if let Type::Struct(struct_type, _) = &self_type { + if let Type::DataType(struct_type, _) = &self_type { let struct_ref = struct_type.borrow(); // `impl`s are only allowed on types defined within the current crate @@ -1596,7 +1602,7 @@ impl<'context> Elaborator<'context> { } /// Find the struct in the parent module so we can know its visibility - fn find_struct_visibility(&self, struct_type: &StructType) -> Option { + fn find_struct_visibility(&self, struct_type: &DataType) -> Option { let parent_module_id = struct_type.id.parent_module_id(self.def_maps); let parent_module_data = self.get_module(parent_module_id); let per_ns = parent_module_data.find_name(&struct_type.name); @@ -1619,7 +1625,7 @@ impl<'context> 
Elaborator<'context> { } // Public struct functions should not expose private types. if let Some(struct_visibility) = func_meta.struct_id.and_then(|id| { - let struct_def = self.get_struct(id); + let struct_def = self.get_type(id); let struct_def = struct_def.borrow(); self.find_struct_visibility(&struct_def) }) { @@ -1638,7 +1644,7 @@ impl<'context> Elaborator<'context> { span: Span, ) { match typ { - Type::Struct(struct_type, generics) => { + Type::DataType(struct_type, generics) => { let struct_type = struct_type.borrow(); let struct_module_id = struct_type.id.module_id(); @@ -1708,7 +1714,7 @@ impl<'context> Elaborator<'context> { } } - fn collect_struct_definitions(&mut self, structs: &BTreeMap) { + fn collect_struct_definitions(&mut self, structs: &BTreeMap) { // This is necessary to avoid cloning the entire struct map // when adding checks after each struct field is resolved. let struct_ids = structs.keys().copied().collect::>(); @@ -1760,7 +1766,7 @@ impl<'context> Elaborator<'context> { // We need to check after all structs are resolved to // make sure every struct's fields is accurately set. 
for id in struct_ids { - let struct_type = self.interner.get_struct(id); + let struct_type = self.interner.get_type(id); // Only handle structs without generics as any generics args will be checked // after monomorphization when performing SSA codegen @@ -1780,14 +1786,14 @@ impl<'context> Elaborator<'context> { pub fn resolve_struct_fields( &mut self, unresolved: &NoirStruct, - struct_id: StructId, + struct_id: TypeId, ) -> Vec { self.recover_generics(|this| { this.current_item = Some(DependencyId::Struct(struct_id)); this.resolving_ids.insert(struct_id); - let struct_def = this.interner.get_struct(struct_id); + let struct_def = this.interner.get_type(struct_id); this.add_existing_generics(&unresolved.generics, &struct_def.borrow().generics); let fields = vecmap(&unresolved.fields, |field| { diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/path_resolution.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/path_resolution.rs index 0d0b153b6b6..bae26535e01 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/path_resolution.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/path_resolution.rs @@ -9,7 +9,7 @@ use crate::hir::resolution::errors::ResolverError; use crate::hir::resolution::visibility::item_in_module_is_visible; use crate::locations::ReferencesTracker; -use crate::node_interner::{FuncId, GlobalId, StructId, TraitId, TypeAliasId}; +use crate::node_interner::{FuncId, GlobalId, TraitId, TypeAliasId, TypeId}; use crate::{Shared, Type, TypeAlias}; use super::types::SELF_TYPE_NAME; @@ -27,12 +27,12 @@ pub(crate) struct PathResolution { #[derive(Debug, Clone)] pub enum PathResolutionItem { Module(ModuleId), - Struct(StructId), + Type(TypeId), TypeAlias(TypeAliasId), Trait(TraitId), Global(GlobalId), ModuleFunction(FuncId), - StructFunction(StructId, Option, FuncId), + Method(TypeId, Option, FuncId), TypeAliasFunction(TypeAliasId, Option, FuncId), TraitFunction(TraitId, Option, FuncId), } @@ -41,7 +41,7 @@ impl 
PathResolutionItem { pub fn function_id(&self) -> Option { match self { PathResolutionItem::ModuleFunction(func_id) - | PathResolutionItem::StructFunction(_, _, func_id) + | PathResolutionItem::Method(_, _, func_id) | PathResolutionItem::TypeAliasFunction(_, _, func_id) | PathResolutionItem::TraitFunction(_, _, func_id) => Some(*func_id), _ => None, @@ -58,12 +58,12 @@ impl PathResolutionItem { pub fn description(&self) -> &'static str { match self { PathResolutionItem::Module(..) => "module", - PathResolutionItem::Struct(..) => "type", + PathResolutionItem::Type(..) => "type", PathResolutionItem::TypeAlias(..) => "type alias", PathResolutionItem::Trait(..) => "trait", PathResolutionItem::Global(..) => "global", PathResolutionItem::ModuleFunction(..) - | PathResolutionItem::StructFunction(..) + | PathResolutionItem::Method(..) | PathResolutionItem::TypeAliasFunction(..) | PathResolutionItem::TraitFunction(..) => "function", } @@ -80,19 +80,19 @@ pub struct Turbofish { #[derive(Debug)] enum IntermediatePathResolutionItem { Module, - Struct(StructId, Option), + Type(TypeId, Option), TypeAlias(TypeAliasId, Option), Trait(TraitId, Option), } pub(crate) type PathResolutionResult = Result; -enum StructMethodLookupResult { +enum MethodLookupResult { /// The method could not be found. There might be trait methods that could be imported, /// but none of them are. NotFound(Vec), - /// Found a struct method. - FoundStructMethod(PerNs), + /// Found a method. + FoundMethod(PerNs), /// Found a trait method and it's currently in scope. 
FoundTraitMethod(PerNs, TraitId), /// There's only one trait method that matches, but it's not in scope @@ -124,16 +124,16 @@ impl<'context> Elaborator<'context> { let mut module_id = self.module_id(); if path.kind == PathKind::Plain && path.first_name() == Some(SELF_TYPE_NAME) { - if let Some(Type::Struct(struct_type, _)) = &self.self_type { - let struct_type = struct_type.borrow(); + if let Some(Type::DataType(datatype, _)) = &self.self_type { + let datatype = datatype.borrow(); if path.segments.len() == 1 { return Ok(PathResolution { - item: PathResolutionItem::Struct(struct_type.id), + item: PathResolutionItem::Type(datatype.id), errors: Vec::new(), }); } - module_id = struct_type.id.module_id(); + module_id = datatype.id.module_id(); path.segments.remove(0); } } @@ -211,9 +211,9 @@ impl<'context> Elaborator<'context> { last_segment.ident.is_self_type_name(), ); - let current_module_id_is_struct; + let current_module_id_is_type; - (current_module_id, current_module_id_is_struct, intermediate_item) = match typ { + (current_module_id, current_module_id_is_type, intermediate_item) = match typ { ModuleDefId::ModuleId(id) => { if last_segment_generics.is_some() { errors.push(PathResolutionError::TurbofishNotAllowedOnItem { @@ -227,13 +227,7 @@ impl<'context> Elaborator<'context> { ModuleDefId::TypeId(id) => ( id.module_id(), true, - IntermediatePathResolutionItem::Struct( - id, - last_segment_generics.as_ref().map(|generics| Turbofish { - generics: generics.clone(), - span: last_segment.turbofish_span(), - }), - ), + IntermediatePathResolutionItem::Type(id, last_segment.turbofish()), ), ModuleDefId::TypeAliasId(id) => { let type_alias = self.interner.get_type_alias(id); @@ -244,25 +238,13 @@ impl<'context> Elaborator<'context> { ( module_id, true, - IntermediatePathResolutionItem::TypeAlias( - id, - last_segment_generics.as_ref().map(|generics| Turbofish { - generics: generics.clone(), - span: last_segment.turbofish_span(), - }), - ), + 
IntermediatePathResolutionItem::TypeAlias(id, last_segment.turbofish()), ) } ModuleDefId::TraitId(id) => ( id.0, false, - IntermediatePathResolutionItem::Trait( - id, - last_segment_generics.as_ref().map(|generics| Turbofish { - generics: generics.clone(), - span: last_segment.turbofish_span(), - }), - ), + IntermediatePathResolutionItem::Trait(id, last_segment.turbofish()), ), ModuleDefId::FunctionId(_) => panic!("functions cannot be in the type namespace"), ModuleDefId::GlobalId(_) => panic!("globals cannot be in the type namespace"), @@ -284,10 +266,9 @@ impl<'context> Elaborator<'context> { current_module = self.get_module(current_module_id); // Check if namespace - let found_ns = if current_module_id_is_struct { - match self.resolve_struct_function(importing_module, current_module, current_ident) - { - StructMethodLookupResult::NotFound(vec) => { + let found_ns = if current_module_id_is_type { + match self.resolve_method(importing_module, current_module, current_ident) { + MethodLookupResult::NotFound(vec) => { if vec.is_empty() { return Err(PathResolutionError::Unresolved(current_ident.clone())); } else { @@ -303,16 +284,13 @@ impl<'context> Elaborator<'context> { ); } } - StructMethodLookupResult::FoundStructMethod(per_ns) => per_ns, - StructMethodLookupResult::FoundTraitMethod(per_ns, trait_id) => { + MethodLookupResult::FoundMethod(per_ns) => per_ns, + MethodLookupResult::FoundTraitMethod(per_ns, trait_id) => { let trait_ = self.interner.get_trait(trait_id); self.usage_tracker.mark_as_used(importing_module, &trait_.name); per_ns } - StructMethodLookupResult::FoundOneTraitMethodButNotInScope( - per_ns, - trait_id, - ) => { + MethodLookupResult::FoundOneTraitMethodButNotInScope(per_ns, trait_id) => { let trait_ = self.interner.get_trait(trait_id); let trait_name = self.fully_qualified_trait_path(trait_); errors.push(PathResolutionError::TraitMethodNotInScope { @@ -321,7 +299,7 @@ impl<'context> Elaborator<'context> { }); per_ns } - 
StructMethodLookupResult::FoundMultipleTraitMethods(vec) => { + MethodLookupResult::FoundMultipleTraitMethods(vec) => { let traits = vecmap(vec, |trait_id| { let trait_ = self.interner.get_trait(trait_id); self.usage_tracker.mark_as_used(importing_module, &trait_.name); @@ -373,32 +351,29 @@ impl<'context> Elaborator<'context> { } fn self_type_module_id(&self) -> Option { - if let Some(Type::Struct(struct_type, _)) = &self.self_type { - Some(struct_type.borrow().id.module_id()) + if let Some(Type::DataType(datatype, _)) = &self.self_type { + Some(datatype.borrow().id.module_id()) } else { None } } - fn resolve_struct_function( + fn resolve_method( &self, importing_module_id: ModuleId, current_module: &ModuleData, ident: &Ident, - ) -> StructMethodLookupResult { - // If the current module is a struct, next we need to find a function for it. - // The function could be in the struct itself, or it could be defined in traits. + ) -> MethodLookupResult { + // If the current module is a type, next we need to find a function for it. + // The function could be in the type itself, or it could be defined in traits. let item_scope = current_module.scope(); let Some(values) = item_scope.values().get(ident) else { - return StructMethodLookupResult::NotFound(vec![]); + return MethodLookupResult::NotFound(vec![]); }; - // First search if the function is defined in the struct itself + // First search if the function is defined in the type itself if let Some(item) = values.get(&None) { - return StructMethodLookupResult::FoundStructMethod(PerNs { - types: None, - values: Some(*item), - }); + return MethodLookupResult::FoundMethod(PerNs { types: None, values: Some(*item) }); } // Otherwise, the function could be defined in zero, one or more traits. 
@@ -427,25 +402,23 @@ impl<'context> Elaborator<'context> { let (trait_id, item) = values.iter().next().expect("Expected an item"); let trait_id = trait_id.expect("The None option was already considered before"); let per_ns = PerNs { types: None, values: Some(*item) }; - return StructMethodLookupResult::FoundOneTraitMethodButNotInScope( - per_ns, trait_id, - ); + return MethodLookupResult::FoundOneTraitMethodButNotInScope(per_ns, trait_id); } else { let trait_ids = vecmap(values, |(trait_id, _)| { trait_id.expect("The none option was already considered before") }); - return StructMethodLookupResult::NotFound(trait_ids); + return MethodLookupResult::NotFound(trait_ids); } } if results.len() > 1 { let trait_ids = vecmap(results, |(trait_id, _)| trait_id); - return StructMethodLookupResult::FoundMultipleTraitMethods(trait_ids); + return MethodLookupResult::FoundMultipleTraitMethods(trait_ids); } let (trait_id, item) = results.remove(0); let per_ns = PerNs { types: None, values: Some(*item) }; - StructMethodLookupResult::FoundTraitMethod(per_ns, trait_id) + MethodLookupResult::FoundTraitMethod(per_ns, trait_id) } } @@ -455,14 +428,14 @@ fn merge_intermediate_path_resolution_item_with_module_def_id( ) -> PathResolutionItem { match module_def_id { ModuleDefId::ModuleId(module_id) => PathResolutionItem::Module(module_id), - ModuleDefId::TypeId(struct_id) => PathResolutionItem::Struct(struct_id), + ModuleDefId::TypeId(type_id) => PathResolutionItem::Type(type_id), ModuleDefId::TypeAliasId(type_alias_id) => PathResolutionItem::TypeAlias(type_alias_id), ModuleDefId::TraitId(trait_id) => PathResolutionItem::Trait(trait_id), ModuleDefId::GlobalId(global_id) => PathResolutionItem::Global(global_id), ModuleDefId::FunctionId(func_id) => match intermediate_item { IntermediatePathResolutionItem::Module => PathResolutionItem::ModuleFunction(func_id), - IntermediatePathResolutionItem::Struct(struct_id, generics) => { - PathResolutionItem::StructFunction(struct_id, generics, func_id) 
+ IntermediatePathResolutionItem::Type(type_id, generics) => { + PathResolutionItem::Method(type_id, generics, func_id) } IntermediatePathResolutionItem::TypeAlias(alias_id, generics) => { PathResolutionItem::TypeAliasFunction(alias_id, generics, func_id) @@ -478,13 +451,13 @@ fn get_type_alias_module_def_id(type_alias: &Shared) -> Option Some(struct_id.borrow().id.module_id()), + Type::DataType(type_id, _generics) => Some(type_id.borrow().id.module_id()), Type::Alias(type_alias, _generics) => get_type_alias_module_def_id(type_alias), Type::Error => None, _ => { - // For now we only allow type aliases that point to structs. + // For now we only allow type aliases that point to data types. // The more general case is captured here: https://github.com/noir-lang/noir/issues/6398 - panic!("Type alias in path not pointing to struct not yet supported") + panic!("Type alias in path not pointing to a data type is not yet supported") } } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/patterns.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/patterns.rs index 6a672866d7e..6ab12d1e537 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/patterns.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/patterns.rs @@ -17,7 +17,7 @@ use crate::{ stmt::HirPattern, }, node_interner::{DefinitionId, DefinitionKind, ExprId, FuncId, GlobalId, TraitImplKind}, - Kind, Shared, StructType, Type, TypeAlias, TypeBindings, + DataType, Kind, Shared, Type, TypeAlias, TypeBindings, }; use super::{path_resolution::PathResolutionItem, Elaborator, ResolverMeta}; @@ -192,7 +192,7 @@ impl<'context> Elaborator<'context> { }; let (struct_type, generics) = match self.lookup_type_or_error(name) { - Some(Type::Struct(struct_type, struct_generics)) => (struct_type, struct_generics), + Some(Type::DataType(struct_type, struct_generics)) => (struct_type, struct_generics), None => return error_identifier(self), Some(typ) => { let typ = typ.to_string(); @@ 
-210,7 +210,7 @@ impl<'context> Elaborator<'context> { turbofish_span, ); - let actual_type = Type::Struct(struct_type.clone(), generics); + let actual_type = Type::DataType(struct_type.clone(), generics); let location = Location::new(span, self.file); self.unify(&actual_type, &expected_type, || TypeCheckError::TypeMismatchWithSource { @@ -250,7 +250,7 @@ impl<'context> Elaborator<'context> { #[allow(clippy::too_many_arguments)] fn resolve_constructor_pattern_fields( &mut self, - struct_type: Shared, + struct_type: Shared, fields: Vec<(Ident, Pattern)>, span: Span, expected_type: Type, @@ -434,7 +434,7 @@ impl<'context> Elaborator<'context> { pub(super) fn resolve_struct_turbofish_generics( &mut self, - struct_type: &StructType, + struct_type: &DataType, generics: Vec, unresolved_turbofish: Option>, span: Span, @@ -574,8 +574,8 @@ impl<'context> Elaborator<'context> { /// solve these fn resolve_item_turbofish(&mut self, item: PathResolutionItem) -> Vec { match item { - PathResolutionItem::StructFunction(struct_id, Some(generics), _func_id) => { - let struct_type = self.interner.get_struct(struct_id); + PathResolutionItem::Method(struct_id, Some(generics), _func_id) => { + let struct_type = self.interner.get_type(struct_id); let struct_type = struct_type.borrow(); let struct_generics = struct_type.instantiate(self.interner); self.resolve_struct_turbofish_generics( @@ -886,7 +886,7 @@ impl<'context> Elaborator<'context> { fn get_type_alias_generics(type_alias: &TypeAlias, generics: &[Type]) -> Vec { let typ = type_alias.get_type(generics); match typ { - Type::Struct(_, generics) => generics, + Type::DataType(_, generics) => generics, Type::Alias(type_alias, generics) => { get_type_alias_generics(&type_alias.borrow(), &generics) } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/scope.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/scope.rs index fe01e3cb7f3..327ae02b204 100644 --- 
a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/scope.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/scope.rs @@ -10,8 +10,8 @@ use crate::{ expr::{HirCapturedVar, HirIdent}, traits::Trait, }, - node_interner::{DefinitionId, StructId, TraitId}, - Shared, StructType, + node_interner::{DefinitionId, TraitId, TypeId}, + DataType, Shared, }; use crate::{Type, TypeAlias}; @@ -37,8 +37,8 @@ impl<'context> Elaborator<'context> { current_module } - pub(super) fn get_struct(&self, type_id: StructId) -> Shared { - self.interner.get_struct(type_id) + pub(super) fn get_type(&self, type_id: TypeId) -> Shared { + self.interner.get_type(type_id) } pub(super) fn get_trait_mut(&mut self, trait_id: TraitId) -> &mut Trait { @@ -160,12 +160,12 @@ impl<'context> Elaborator<'context> { } /// Lookup a given struct type by name. - pub fn lookup_struct_or_error(&mut self, path: Path) -> Option> { + pub fn lookup_datatype_or_error(&mut self, path: Path) -> Option> { let span = path.span(); match self.resolve_path_or_error(path) { Ok(item) => { - if let PathResolutionItem::Struct(struct_id) = item { - Some(self.get_struct(struct_id)) + if let PathResolutionItem::Type(struct_id) = item { + Some(self.get_type(struct_id)) } else { self.push_err(ResolverError::Expected { expected: "type", @@ -194,10 +194,10 @@ impl<'context> Elaborator<'context> { let span = path.span; match self.resolve_path_or_error(path) { - Ok(PathResolutionItem::Struct(struct_id)) => { - let struct_type = self.get_struct(struct_id); + Ok(PathResolutionItem::Type(struct_id)) => { + let struct_type = self.get_type(struct_id); let generics = struct_type.borrow().instantiate(self.interner); - Some(Type::Struct(struct_type, generics)) + Some(Type::DataType(struct_type, generics)) } Ok(PathResolutionItem::TypeAlias(alias_id)) => { let alias = self.interner.get_type_alias(alias_id); diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/statements.rs 
b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/statements.rs index a01b24c2f0f..a95e260b6a5 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/statements.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/statements.rs @@ -21,10 +21,10 @@ use crate::{ }, }, node_interner::{DefinitionId, DefinitionKind, GlobalId, StmtId}, - StructType, Type, + DataType, Type, }; -use super::{lints, Elaborator}; +use super::{lints, Elaborator, Loop}; impl<'context> Elaborator<'context> { fn elaborate_statement_value(&mut self, statement: Statement) -> (HirStatement, Type) { @@ -33,7 +33,7 @@ impl<'context> Elaborator<'context> { StatementKind::Constrain(constrain) => self.elaborate_constrain(constrain), StatementKind::Assign(assign) => self.elaborate_assign(assign), StatementKind::For(for_stmt) => self.elaborate_for(for_stmt), - StatementKind::Loop(block) => self.elaborate_loop(block, statement.span), + StatementKind::Loop(block, span) => self.elaborate_loop(block, span), StatementKind::Break => self.elaborate_jump(true, statement.span), StatementKind::Continue => self.elaborate_jump(false, statement.span), StatementKind::Comptime(statement) => self.elaborate_comptime_statement(*statement), @@ -227,7 +227,9 @@ impl<'context> Elaborator<'context> { let (end_range, end_range_type) = self.elaborate_expression(end); let (identifier, block) = (for_loop.identifier, for_loop.block); - self.nested_loops += 1; + let old_loop = std::mem::take(&mut self.current_loop); + + self.current_loop = Some(Loop { is_for: true, has_break: false }); self.push_scope(); // TODO: For loop variables are currently mutable by default since we haven't @@ -261,7 +263,7 @@ impl<'context> Elaborator<'context> { let (block, _block_type) = self.elaborate_expression(block); self.pop_scope(); - self.nested_loops -= 1; + self.current_loop = old_loop; let statement = HirStatement::For(HirForStatement { start_range, end_range, block, identifier }); @@ -279,13 +281,19 @@ impl<'context> 
Elaborator<'context> { self.push_err(ResolverError::LoopInConstrainedFn { span }); } - self.nested_loops += 1; + let old_loop = std::mem::take(&mut self.current_loop); + self.current_loop = Some(Loop { is_for: false, has_break: false }); self.push_scope(); let (block, _block_type) = self.elaborate_expression(block); self.pop_scope(); - self.nested_loops -= 1; + + let last_loop = + std::mem::replace(&mut self.current_loop, old_loop).expect("Expected a loop"); + if !last_loop.has_break { + self.push_err(ResolverError::LoopWithoutBreak { span }); + } let statement = HirStatement::Loop(block); @@ -298,7 +306,12 @@ impl<'context> Elaborator<'context> { if in_constrained_function { self.push_err(ResolverError::JumpInConstrainedFn { is_break, span }); } - if self.nested_loops == 0 { + + if let Some(current_loop) = &mut self.current_loop { + if is_break { + current_loop.has_break = true; + } + } else { self.push_err(ResolverError::JumpOutsideLoop { is_break, span }); } @@ -478,7 +491,7 @@ impl<'context> Elaborator<'context> { let lhs_type = lhs_type.follow_bindings(); match &lhs_type { - Type::Struct(s, args) => { + Type::DataType(s, args) => { let s = s.borrow(); if let Some((field, visibility, index)) = s.get_field(field_name, args) { let reference_location = Location::new(span, self.file); @@ -542,7 +555,7 @@ impl<'context> Elaborator<'context> { pub(super) fn check_struct_field_visibility( &mut self, - struct_type: &StructType, + struct_type: &DataType, field_name: &str, visibility: ItemVisibility, span: Span, diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/trait_impls.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/trait_impls.rs index 20f048bed05..aa27ac29fa6 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/trait_impls.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/trait_impls.rs @@ -217,7 +217,7 @@ impl<'context> Elaborator<'context> { self.file = trait_impl.file_id; let object_crate = match 
&trait_impl.resolved_object_type { - Some(Type::Struct(struct_type, _)) => struct_type.borrow().id.krate(), + Some(Type::DataType(struct_type, _)) => struct_type.borrow().id.krate(), _ => CrateId::Dummy, }; diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/types.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/types.rs index a1b63910a3e..7fc403ebf23 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/types.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/types.rs @@ -154,12 +154,12 @@ impl<'context> Elaborator<'context> { let location = Location::new(named_path_span.unwrap_or(typ.span), self.file); match resolved_type { - Type::Struct(ref struct_type, _) => { + Type::DataType(ref data_type, _) => { // Record the location of the type reference self.interner.push_type_ref_location(resolved_type.clone(), location); if !is_synthetic { self.interner.add_struct_reference( - struct_type.borrow().id, + data_type.borrow().id, location, is_self_type_name, ); @@ -259,11 +259,11 @@ impl<'context> Elaborator<'context> { return Type::Alias(type_alias, args); } - match self.lookup_struct_or_error(path) { - Some(struct_type) => { - if self.resolving_ids.contains(&struct_type.borrow().id) { - self.push_err(ResolverError::SelfReferentialStruct { - span: struct_type.borrow().name.span(), + match self.lookup_datatype_or_error(path) { + Some(data_type) => { + if self.resolving_ids.contains(&data_type.borrow().id) { + self.push_err(ResolverError::SelfReferentialType { + span: data_type.borrow().name.span(), }); return Type::Error; @@ -272,23 +272,23 @@ impl<'context> Elaborator<'context> { if !self.in_contract() && self .interner - .struct_attributes(&struct_type.borrow().id) + .struct_attributes(&data_type.borrow().id) .iter() .any(|attr| matches!(attr, SecondaryAttribute::Abi(_))) { self.push_err(ResolverError::AbiAttributeOutsideContract { - span: struct_type.borrow().name.span(), + span: data_type.borrow().name.span(), }); } - 
let (args, _) = self.resolve_type_args(args, struct_type.borrow(), span); + let (args, _) = self.resolve_type_args(args, data_type.borrow(), span); if let Some(current_item) = self.current_item { - let dependency_id = struct_type.borrow().id; + let dependency_id = data_type.borrow().id; self.interner.add_type_dependency(current_item, dependency_id); } - Type::Struct(struct_type, args) + Type::DataType(data_type, args) } None => Type::Error, } @@ -684,6 +684,60 @@ impl<'context> Elaborator<'context> { None } + /// This resolves a method in the form `Type::method` where `method` is a trait method + fn resolve_type_trait_method(&mut self, path: &Path) -> Option { + if path.segments.len() < 2 { + return None; + } + + let mut path = path.clone(); + let span = path.span(); + let last_segment = path.pop(); + let before_last_segment = path.last_segment(); + + let path_resolution = self.resolve_path(path).ok()?; + let PathResolutionItem::Type(type_id) = path_resolution.item else { + return None; + }; + + let datatype = self.get_type(type_id); + let generics = datatype.borrow().instantiate(self.interner); + let typ = Type::DataType(datatype, generics); + let method_name = &last_segment.ident.0.contents; + + // If we can find a method on the type, this is definitely not a trait method + if self.interner.lookup_direct_method(&typ, method_name, false).is_some() { + return None; + } + + let trait_methods = self.interner.lookup_trait_methods(&typ, method_name, false); + if trait_methods.is_empty() { + return None; + } + + let (hir_method_reference, error) = + self.get_trait_method_in_scope(&trait_methods, method_name, last_segment.span); + let hir_method_reference = hir_method_reference?; + let func_id = hir_method_reference.func_id(self.interner)?; + let HirMethodReference::TraitMethodId(trait_method_id, _, _) = hir_method_reference else { + return None; + }; + + let trait_id = trait_method_id.trait_id; + let trait_ = self.interner.get_trait(trait_id); + let mut constraint = 
trait_.as_constraint(span); + constraint.typ = typ; + + let method = TraitMethod { method_id: trait_method_id, constraint, assumed: false }; + let turbofish = before_last_segment.turbofish(); + let item = PathResolutionItem::TraitFunction(trait_id, turbofish, func_id); + let mut errors = path_resolution.errors; + if let Some(error) = error { + errors.push(error); + } + Some(TraitPathResolution { method, item: Some(item), errors }) + } + // Try to resolve the given trait method path. // // Returns the trait method, trait constraint, and whether the impl is assumed to exist by a where clause or not @@ -695,6 +749,7 @@ impl<'context> Elaborator<'context> { self.resolve_trait_static_method_by_self(path) .or_else(|| self.resolve_trait_static_method(path)) .or_else(|| self.resolve_trait_method_by_named_generic(path)) + .or_else(|| self.resolve_type_trait_method(path)) } pub(super) fn unify( @@ -1368,12 +1423,12 @@ impl<'context> Elaborator<'context> { self.lookup_method_in_trait_constraints(object_type, method_name, span) } // Mutable references to another type should resolve to methods of their element type. - // This may be a struct or a primitive type. + // This may be a data type or a primitive type. 
Type::MutableReference(element) => { self.lookup_method(&element, method_name, span, has_self_arg) } - // If we fail to resolve the object to a struct type, we have no way of type + // If we fail to resolve the object to a data type, we have no way of type // checking its arguments as we can't even resolve the name of the function Type::Error => None, @@ -1383,13 +1438,11 @@ impl<'context> Elaborator<'context> { None } - other => { - self.lookup_struct_or_primitive_method(&other, method_name, span, has_self_arg) - } + other => self.lookup_type_or_primitive_method(&other, method_name, span, has_self_arg), } } - fn lookup_struct_or_primitive_method( + fn lookup_type_or_primitive_method( &mut self, object_type: &Type, method_name: &str, @@ -1420,12 +1473,16 @@ impl<'context> Elaborator<'context> { return self.return_trait_method_in_scope(&generic_methods, method_name, span); } - if let Type::Struct(struct_type, _) = object_type { - let has_field_with_function_type = struct_type - .borrow() - .get_fields_as_written() - .into_iter() - .any(|field| field.name.0.contents == method_name && field.typ.is_function()); + if let Type::DataType(datatype, _) = object_type { + let datatype = datatype.borrow(); + let mut has_field_with_function_type = false; + + if let Some(fields) = datatype.try_fields_raw() { + has_field_with_function_type = fields + .iter() + .any(|field| field.name.0.contents == method_name && field.typ.is_function()); + } + if has_field_with_function_type { self.push_err(TypeCheckError::CannotInvokeStructFieldFunctionType { method_name: method_name.to_string(), @@ -1456,6 +1513,19 @@ impl<'context> Elaborator<'context> { method_name: &str, span: Span, ) -> Option { + let (method, error) = self.get_trait_method_in_scope(trait_methods, method_name, span); + if let Some(error) = error { + self.push_err(error); + } + method + } + + fn get_trait_method_in_scope( + &mut self, + trait_methods: &[(FuncId, TraitId)], + method_name: &str, + span: Span, + ) -> (Option, 
Option) { let module_id = self.module_id(); let module_data = self.get_module(module_id); @@ -1489,28 +1559,24 @@ impl<'context> Elaborator<'context> { let trait_id = *traits.iter().next().unwrap(); let trait_ = self.interner.get_trait(trait_id); let trait_name = self.fully_qualified_trait_path(trait_); - - self.push_err(PathResolutionError::TraitMethodNotInScope { + let method = + self.trait_hir_method_reference(trait_id, trait_methods, method_name, span); + let error = PathResolutionError::TraitMethodNotInScope { ident: Ident::new(method_name.into(), span), trait_name, - }); - - return Some(self.trait_hir_method_reference( - trait_id, - trait_methods, - method_name, - span, - )); + }; + return (Some(method), Some(error)); } else { let traits = vecmap(traits, |trait_id| { let trait_ = self.interner.get_trait(trait_id); self.fully_qualified_trait_path(trait_) }); - self.push_err(PathResolutionError::UnresolvedWithPossibleTraitsToImport { + let method = None; + let error = PathResolutionError::UnresolvedWithPossibleTraitsToImport { ident: Ident::new(method_name.into(), span), traits, - }); - return None; + }; + return (method, Some(error)); } } @@ -1519,15 +1585,18 @@ impl<'context> Elaborator<'context> { let trait_ = self.interner.get_trait(trait_id); self.fully_qualified_trait_path(trait_) }); - self.push_err(PathResolutionError::MultipleTraitsInScope { + let method = None; + let error = PathResolutionError::MultipleTraitsInScope { ident: Ident::new(method_name.into(), span), traits, - }); - return None; + }; + return (method, Some(error)); } let trait_id = traits_in_scope[0].0; - Some(self.trait_hir_method_reference(trait_id, trait_methods, method_name, span)) + let method = self.trait_hir_method_reference(trait_id, trait_methods, method_name, span); + let error = None; + (Some(method), error) } fn trait_hir_method_reference( @@ -1545,7 +1614,7 @@ impl<'context> Elaborator<'context> { // Return a TraitMethodId with unbound generics. 
These will later be bound by the type-checker. let trait_ = self.interner.get_trait(trait_id); - let generics = trait_.as_constraint(span).trait_bound.trait_generics; + let generics = trait_.get_trait_generics(span); let trait_method_id = trait_.find_method(method_name).unwrap(); HirMethodReference::TraitMethodId(trait_method_id, generics, false) } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/display.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/display.rs index ccdfdf00e72..cbcf8b02d03 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/display.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/display.rs @@ -357,7 +357,7 @@ impl<'value, 'interner> Display for ValuePrinter<'value, 'interner> { } Value::Struct(fields, typ) => { let typename = match typ.follow_bindings() { - Type::Struct(def, _) => def.borrow().name.to_string(), + Type::DataType(def, _) => def.borrow().name.to_string(), other => other.to_string(), }; let fields = vecmap(fields, |(name, value)| { @@ -376,7 +376,7 @@ impl<'value, 'interner> Display for ValuePrinter<'value, 'interner> { } Value::Quoted(tokens) => display_quoted(tokens, 0, self.interner, f), Value::StructDefinition(id) => { - let def = self.interner.get_struct(*id); + let def = self.interner.get_type(*id); let def = def.borrow(); write!(f, "{}", def.name) } @@ -732,8 +732,8 @@ fn remove_interned_in_statement_kind( block: remove_interned_in_expression(interner, for_loop.block), ..for_loop }), - StatementKind::Loop(block) => { - StatementKind::Loop(remove_interned_in_expression(interner, block)) + StatementKind::Loop(block, span) => { + StatementKind::Loop(remove_interned_in_expression(interner, block), span) } StatementKind::Comptime(statement) => { StatementKind::Comptime(Box::new(remove_interned_in_statement(interner, *statement))) diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/hir_to_display_ast.rs 
b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/hir_to_display_ast.rs index 71a462c9066..806360c3683 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/hir_to_display_ast.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/hir_to_display_ast.rs @@ -59,7 +59,7 @@ impl HirStatement { block: for_stmt.block.to_display_ast(interner), span, }), - HirStatement::Loop(block) => StatementKind::Loop(block.to_display_ast(interner)), + HirStatement::Loop(block) => StatementKind::Loop(block.to_display_ast(interner), span), HirStatement::Break => StatementKind::Break, HirStatement::Continue => StatementKind::Continue, HirStatement::Expression(expr) => { @@ -246,7 +246,7 @@ impl HirPattern { (name.clone(), pattern.to_display_ast(interner)) }); let name = match typ.follow_bindings() { - Type::Struct(struct_def, _) => { + Type::DataType(struct_def, _) => { let struct_def = struct_def.borrow(); struct_def.name.0.contents.clone() } @@ -301,7 +301,7 @@ impl Type { let fields = vecmap(fields, |field| field.to_display_ast()); UnresolvedTypeData::Tuple(fields) } - Type::Struct(def, generics) => { + Type::DataType(def, generics) => { let struct_def = def.borrow(); let ordered_args = vecmap(generics, |generic| generic.to_display_ast()); let generics = diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs index d48a27c4181..5080a1b2068 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs @@ -1724,22 +1724,68 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { let (end, _) = get_index(self, for_.end_range)?; let was_in_loop = std::mem::replace(&mut self.in_loop, true); + let mut result = Ok(Value::Unit); + for i in start..end { self.push_scope(); self.current_scope_mut().insert(for_.identifier.id, make_value(i)); - match 
self.evaluate(for_.block) { - Ok(_) => (), - Err(InterpreterError::Break) => break, - Err(InterpreterError::Continue) => continue, - Err(other) => return Err(other), + let must_break = match self.evaluate(for_.block) { + Ok(_) => false, + Err(InterpreterError::Break) => true, + Err(InterpreterError::Continue) => false, + Err(error) => { + result = Err(error); + true + } + }; + + self.pop_scope(); + + if must_break { + break; } + } + + self.in_loop = was_in_loop; + result + } + + fn evaluate_loop(&mut self, expr: ExprId) -> IResult { + let was_in_loop = std::mem::replace(&mut self.in_loop, true); + let in_lsp = self.elaborator.interner.is_in_lsp_mode(); + let mut counter = 0; + let mut result = Ok(Value::Unit); + + loop { + self.push_scope(); + + let must_break = match self.evaluate(expr) { + Ok(_) => false, + Err(InterpreterError::Break) => true, + Err(InterpreterError::Continue) => false, + Err(error) => { + result = Err(error); + true + } + }; self.pop_scope(); + + if must_break { + break; + } + + counter += 1; + if in_lsp && counter == 10_000 { + let location = self.elaborator.interner.expr_location(&expr); + result = Err(InterpreterError::LoopHaltedForUiResponsiveness { location }); + break; + } } self.in_loop = was_in_loop; - Ok(Value::Unit) + result } fn evaluate_loop(&mut self, expr: ExprId) -> IResult { diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs index 3506b63919c..7909a423bef 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs @@ -66,7 +66,7 @@ impl<'local, 'context> Interpreter<'local, 'context> { "array_as_str_unchecked" => array_as_str_unchecked(interner, arguments, location), "array_len" => array_len(interner, arguments, location), "array_refcount" => Ok(Value::U32(0)), - "assert_constant" => 
Ok(Value::Bool(true)), + "assert_constant" => Ok(Value::Unit), "as_slice" => as_slice(interner, arguments, location), "ctstring_eq" => ctstring_eq(arguments, location), "ctstring_hash" => ctstring_hash(arguments, location), @@ -175,6 +175,7 @@ impl<'local, 'context> Interpreter<'local, 'context> { "slice_push_front" => slice_push_front(interner, arguments, location), "slice_refcount" => Ok(Value::U32(0)), "slice_remove" => slice_remove(interner, arguments, location, call_stack), + "static_assert" => static_assert(interner, arguments, location, call_stack), "str_as_bytes" => str_as_bytes(interner, arguments, location), "str_as_ctstring" => str_as_ctstring(interner, arguments, location), "struct_def_add_attribute" => struct_def_add_attribute(interner, arguments, location), @@ -327,6 +328,28 @@ fn slice_push_back( Ok(Value::Slice(values, typ)) } +// static_assert(predicate: bool, message: str) +fn static_assert( + interner: &NodeInterner, + arguments: Vec<(Value, Location)>, + location: Location, + call_stack: &im::Vector, +) -> IResult { + let (predicate, message) = check_two_arguments(arguments, location)?; + let predicate = get_bool(predicate)?; + let message = get_str(interner, message)?; + + if predicate { + Ok(Value::Unit) + } else { + failing_constraint( + format!("static_assert failed: {}", message).clone(), + location, + call_stack, + ) + } +} + fn str_as_bytes( interner: &NodeInterner, arguments: Vec<(Value, Location)>, @@ -403,7 +426,7 @@ fn struct_def_add_generic( }; let struct_id = get_struct(self_argument)?; - let the_struct = interner.get_struct(struct_id); + let the_struct = interner.get_type(struct_id); let mut the_struct = the_struct.borrow_mut(); let name = Rc::new(generic_name); @@ -436,7 +459,7 @@ fn struct_def_as_type( ) -> IResult { let argument = check_one_argument(arguments, location)?; let struct_id = get_struct(argument)?; - let struct_def_rc = interner.get_struct(struct_id); + let struct_def_rc = interner.get_type(struct_id); let struct_def 
= struct_def_rc.borrow(); let generics = vecmap(&struct_def.generics, |generic| { @@ -444,7 +467,7 @@ fn struct_def_as_type( }); drop(struct_def); - Ok(Value::Type(Type::Struct(struct_def_rc, generics))) + Ok(Value::Type(Type::DataType(struct_def_rc, generics))) } /// fn generics(self) -> [(Type, Option)] @@ -456,7 +479,7 @@ fn struct_def_generics( ) -> IResult { let argument = check_one_argument(arguments, location)?; let struct_id = get_struct(argument)?; - let struct_def = interner.get_struct(struct_id); + let struct_def = interner.get_type(struct_id); let struct_def = struct_def.borrow(); let expected = Type::Slice(Box::new(Type::Tuple(vec![ @@ -526,7 +549,7 @@ fn struct_def_fields( ) -> IResult { let (typ, generic_args) = check_two_arguments(arguments, location)?; let struct_id = get_struct(typ)?; - let struct_def = interner.get_struct(struct_id); + let struct_def = interner.get_type(struct_id); let struct_def = struct_def.borrow(); let args_location = generic_args.1; @@ -569,7 +592,7 @@ fn struct_def_fields_as_written( ) -> IResult { let argument = check_one_argument(arguments, location)?; let struct_id = get_struct(argument)?; - let struct_def = interner.get_struct(struct_id); + let struct_def = interner.get_type(struct_id); let struct_def = struct_def.borrow(); let mut fields = im::Vector::new(); @@ -607,7 +630,7 @@ fn struct_def_name( ) -> IResult { let self_argument = check_one_argument(arguments, location)?; let struct_id = get_struct(self_argument)?; - let the_struct = interner.get_struct(struct_id); + let the_struct = interner.get_type(struct_id); let name = Token::Ident(the_struct.borrow().name.to_string()); Ok(Value::Quoted(Rc::new(vec![name]))) @@ -623,7 +646,7 @@ fn struct_def_set_fields( let (the_struct, fields) = check_two_arguments(arguments, location)?; let struct_id = get_struct(the_struct)?; - let struct_def = interner.get_struct(struct_id); + let struct_def = interner.get_type(struct_id); let mut struct_def = struct_def.borrow_mut(); let 
field_location = fields.1; @@ -1057,7 +1080,7 @@ fn type_as_struct( location: Location, ) -> IResult { type_as(arguments, return_type, location, |typ| { - if let Type::Struct(struct_type, generics) = typ { + if let Type::DataType(struct_type, generics) = typ { Some(Value::Tuple(vec![ Value::StructDefinition(struct_type.borrow().id), Value::Slice( @@ -1432,7 +1455,7 @@ fn zeroed(return_type: Type, span: Span) -> IResult { } Type::Unit => Ok(Value::Unit), Type::Tuple(fields) => Ok(Value::Tuple(try_vecmap(fields, |field| zeroed(field, span))?)), - Type::Struct(struct_type, generics) => { + Type::DataType(struct_type, generics) => { let fields = struct_type.borrow().get_fields(&generics); let mut values = HashMap::default(); @@ -1441,7 +1464,7 @@ fn zeroed(return_type: Type, span: Span) -> IResult { values.insert(Rc::new(field_name), field_value); } - let typ = Type::Struct(struct_type, generics); + let typ = Type::DataType(struct_type, generics); Ok(Value::Struct(values, typ)) } Type::Alias(alias, generics) => zeroed(alias.borrow().get_type(&generics), span), @@ -2890,7 +2913,7 @@ pub(crate) fn option(option_type: Type, value: Option, span: Span) -> IRe /// Given a type, assert that it's an Option and return the Type for T pub(crate) fn extract_option_generic_type(typ: Type) -> Type { - let Type::Struct(struct_type, mut generics) = typ else { + let Type::DataType(struct_type, mut generics) = typ else { panic!("Expected type to be a struct"); }; diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin/builtin_helpers.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin/builtin_helpers.rs index a3f84a00bfb..342f494023d 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin/builtin_helpers.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin/builtin_helpers.rs @@ -28,11 +28,11 @@ use crate::{ function::{FuncMeta, FunctionBody}, stmt::HirPattern, 
}, - node_interner::{FuncId, NodeInterner, StructId, TraitId, TraitImplId}, + node_interner::{FuncId, NodeInterner, TraitId, TraitImplId, TypeId}, token::{SecondaryAttribute, Token, Tokens}, QuotedType, Type, }; -use crate::{Kind, Shared, StructType}; +use crate::{DataType, Kind, Shared}; use rustc_hash::FxHashMap as HashMap; pub(crate) fn check_argument_count( @@ -108,14 +108,13 @@ pub(crate) fn get_struct_fields( match value { Value::Struct(fields, typ) => Ok((fields, typ)), _ => { - let expected = StructType::new( - StructId::dummy_id(), + let expected = DataType::new( + TypeId::dummy_id(), Ident::new(name.to_string(), location.span), location, Vec::new(), - Vec::new(), ); - let expected = Type::Struct(Shared::new(expected), Vec::new()); + let expected = Type::DataType(Shared::new(expected), Vec::new()); type_mismatch(value, expected, location) } } @@ -327,7 +326,7 @@ pub(crate) fn get_module((value, location): (Value, Location)) -> IResult IResult { +pub(crate) fn get_struct((value, location): (Value, Location)) -> IResult { match value { Value::StructDefinition(id) => Ok(id), _ => type_mismatch(value, Type::Quoted(QuotedType::StructDefinition), location), @@ -434,7 +433,7 @@ fn gather_hir_pattern_tokens( tokens.push(Token::RightParen); } HirPattern::Struct(typ, fields, _) => { - let Type::Struct(struct_type, _) = typ.follow_bindings() else { + let Type::DataType(struct_type, _) = typ.follow_bindings() else { panic!("Expected type to be a struct"); }; diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/value.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/value.rs index 77933ba9361..c5ec7d861cd 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/value.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/value.rs @@ -18,7 +18,7 @@ use crate::{ HirArrayLiteral, HirConstructorExpression, HirExpression, HirIdent, HirLambda, HirLiteral, ImplKind, }, - node_interner::{ExprId, FuncId, NodeInterner, StmtId, 
StructId, TraitId, TraitImplId}, + node_interner::{ExprId, FuncId, NodeInterner, StmtId, TraitId, TraitImplId, TypeId}, parser::{Item, Parser}, token::{SpannedToken, Token, Tokens}, Kind, QuotedType, Shared, Type, TypeBindings, @@ -62,7 +62,7 @@ pub enum Value { /// tokens can cause larger spans to be before lesser spans, causing an assert. They may also /// be inserted into separate files entirely. Quoted(Rc>), - StructDefinition(StructId), + StructDefinition(TypeId), TraitConstraint(TraitId, TraitGenerics), TraitDefinition(TraitId), TraitImpl(TraitImplId), @@ -234,7 +234,7 @@ impl Value { })?; let struct_type = match typ.follow_bindings() { - Type::Struct(def, _) => Some(def.borrow().id), + Type::DataType(def, _) => Some(def.borrow().id), _ => return Err(InterpreterError::NonStructInConstructor { typ, location }), }; @@ -388,7 +388,7 @@ impl Value { })?; let (r#type, struct_generics) = match typ.follow_bindings() { - Type::Struct(def, generics) => (def, generics), + Type::DataType(def, generics) => (def, generics), _ => return Err(InterpreterError::NonStructInConstructor { typ, location }), }; diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs index 10866f4b309..9aad806bb3c 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs @@ -16,8 +16,8 @@ use crate::hir::Context; use crate::ast::{Expression, NoirEnumeration}; use crate::node_interner::{ - FuncId, GlobalId, ModuleAttributes, NodeInterner, ReferenceId, StructId, TraitId, TraitImplId, - TypeAliasId, + FuncId, GlobalId, ModuleAttributes, NodeInterner, ReferenceId, TraitId, TraitImplId, + TypeAliasId, TypeId, }; use crate::ast::{ @@ -147,8 +147,8 @@ pub struct DefCollector { #[derive(Default)] pub struct CollectedItems { pub functions: Vec, - pub(crate) structs: BTreeMap, - pub(crate) enums: 
BTreeMap, + pub(crate) structs: BTreeMap, + pub(crate) enums: BTreeMap, pub(crate) type_aliases: BTreeMap, pub(crate) traits: BTreeMap, pub globals: Vec, diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs index 41234980942..b3047d66989 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs @@ -17,7 +17,7 @@ use crate::ast::{ UnresolvedTypeData, }; use crate::hir::resolution::errors::ResolverError; -use crate::node_interner::{ModuleAttributes, NodeInterner, ReferenceId, StructId}; +use crate::node_interner::{ModuleAttributes, NodeInterner, ReferenceId, TypeId}; use crate::token::SecondaryAttribute; use crate::usage_tracker::{UnusedItem, UsageTracker}; use crate::{ @@ -819,7 +819,7 @@ impl<'a> ModCollector<'a> { inner_attributes: Vec, add_to_parent_scope: bool, is_contract: bool, - is_struct: bool, + is_type: bool, ) -> Result { push_child_module( &mut context.def_interner, @@ -832,7 +832,7 @@ impl<'a> ModCollector<'a> { inner_attributes, add_to_parent_scope, is_contract, - is_struct, + is_type, ) } @@ -868,7 +868,7 @@ fn push_child_module( inner_attributes: Vec, add_to_parent_scope: bool, is_contract: bool, - is_struct: bool, + is_type: bool, ) -> Result { // Note: the difference between `location` and `mod_location` is: // - `mod_location` will point to either the token "foo" in `mod foo { ... 
}` @@ -884,7 +884,7 @@ fn push_child_module( outer_attributes, inner_attributes, is_contract, - is_struct, + is_type, ); let module_id = def_map.modules.insert(new_module); @@ -998,7 +998,7 @@ pub fn collect_struct( module_id: LocalModuleId, krate: CrateId, definition_errors: &mut Vec<(CompilationError, FileId)>, -) -> Option<(StructId, UnresolvedStruct)> { +) -> Option<(TypeId, UnresolvedStruct)> { let doc_comments = struct_definition.doc_comments; let struct_definition = struct_definition.item; @@ -1031,7 +1031,11 @@ pub fn collect_struct( true, // is struct ) { Ok(module_id) => { - interner.new_struct(&unresolved, resolved_generics, krate, module_id.local_id, file_id) + let name = unresolved.struct_def.name.clone(); + let span = unresolved.struct_def.span; + let attributes = unresolved.struct_def.attributes.clone(); + let local_id = module_id.local_id; + interner.new_type(name, span, attributes, resolved_generics, krate, local_id, file_id) } Err(error) => { definition_errors.push((error.into(), file_id)); @@ -1050,7 +1054,7 @@ pub fn collect_struct( // Add the struct to scope so its path can be looked up later let visibility = unresolved.struct_def.visibility; - let result = def_map.modules[module_id.0].declare_struct(name.clone(), visibility, id); + let result = def_map.modules[module_id.0].declare_type(name.clone(), visibility, id); let parent_module_id = ModuleId { krate, local_id: module_id }; @@ -1081,16 +1085,97 @@ pub fn collect_struct( #[allow(clippy::too_many_arguments)] pub fn collect_enum( - _interner: &mut NodeInterner, - _def_map: &mut CrateDefMap, - _usage_tracker: &mut UsageTracker, - _enum_definition: Documented, - _file_id: FileId, - _module_id: LocalModuleId, - _krate: CrateId, - _definition_errors: &mut [(CompilationError, FileId)], -) -> Option<(StructId, UnresolvedEnum)> { - todo!("Implement collect_enum") + interner: &mut NodeInterner, + def_map: &mut CrateDefMap, + usage_tracker: &mut UsageTracker, + enum_def: Documented, + file_id: 
FileId, + module_id: LocalModuleId, + krate: CrateId, + definition_errors: &mut Vec<(CompilationError, FileId)>, +) -> Option<(TypeId, UnresolvedEnum)> { + let doc_comments = enum_def.doc_comments; + let enum_def = enum_def.item; + + check_duplicate_variant_names(&enum_def, file_id, definition_errors); + + let name = enum_def.name.clone(); + + let unresolved = UnresolvedEnum { file_id, module_id, enum_def }; + + let resolved_generics = Context::resolve_generics( + interner, + &unresolved.enum_def.generics, + definition_errors, + file_id, + ); + + // Create the corresponding module for the enum namespace + let location = Location::new(name.span(), file_id); + let id = match push_child_module( + interner, + def_map, + module_id, + &name, + ItemVisibility::Public, + location, + Vec::new(), + Vec::new(), + false, // add to parent scope + false, // is contract + true, // is type + ) { + Ok(module_id) => { + let name = unresolved.enum_def.name.clone(); + let span = unresolved.enum_def.span; + let attributes = unresolved.enum_def.attributes.clone(); + let local_id = module_id.local_id; + interner.new_type(name, span, attributes, resolved_generics, krate, local_id, file_id) + } + Err(error) => { + definition_errors.push((error.into(), file_id)); + return None; + } + }; + + interner.set_doc_comments(ReferenceId::Enum(id), doc_comments); + + for (index, variant) in unresolved.enum_def.variants.iter().enumerate() { + if !variant.doc_comments.is_empty() { + let id = ReferenceId::EnumVariant(id, index); + interner.set_doc_comments(id, variant.doc_comments.clone()); + } + } + + // Add the enum to scope so its path can be looked up later + let visibility = unresolved.enum_def.visibility; + let result = def_map.modules[module_id.0].declare_type(name.clone(), visibility, id); + + let parent_module_id = ModuleId { krate, local_id: module_id }; + + if !unresolved.enum_def.is_abi() { + usage_tracker.add_unused_item( + parent_module_id, + name.clone(), + UnusedItem::Enum(id), + 
visibility, + ); + } + + if let Err((first_def, second_def)) = result { + let error = DefCollectorErrorKind::Duplicate { + typ: DuplicateType::TypeDefinition, + first_def, + second_def, + }; + definition_errors.push((error.into(), file_id)); + } + + if interner.is_in_lsp_mode() { + interner.register_enum(id, name.to_string(), visibility, parent_module_id); + } + + Some((id, unresolved)) } pub fn collect_impl( @@ -1333,14 +1418,35 @@ fn check_duplicate_field_names( } let previous_field_name = *seen_field_names.get(field_name).unwrap(); - definition_errors.push(( - DefCollectorErrorKind::DuplicateField { - first_def: previous_field_name.clone(), - second_def: field_name.clone(), - } - .into(), - file, - )); + let error = DefCollectorErrorKind::Duplicate { + typ: DuplicateType::StructField, + first_def: previous_field_name.clone(), + second_def: field_name.clone(), + }; + definition_errors.push((error.into(), file)); + } +} + +fn check_duplicate_variant_names( + enum_def: &NoirEnumeration, + file: FileId, + definition_errors: &mut Vec<(CompilationError, FileId)>, +) { + let mut seen_variant_names = std::collections::HashSet::new(); + for variant in &enum_def.variants { + let variant_name = &variant.item.name; + + if seen_variant_names.insert(variant_name) { + continue; + } + + let previous_variant_name = *seen_variant_names.get(variant_name).unwrap(); + let error = DefCollectorErrorKind::Duplicate { + typ: DuplicateType::EnumVariant, + first_def: previous_variant_name.clone(), + second_def: variant_name.clone(), + }; + definition_errors.push((error.into(), file)); } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/errors.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/errors.rs index 1582e297144..1ca62acd29b 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/errors.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/errors.rs @@ -21,21 +21,21 @@ pub enum DuplicateType { 
TraitAssociatedType, TraitAssociatedConst, TraitAssociatedFunction, + StructField, + EnumVariant, } #[derive(Error, Debug, Clone)] pub enum DefCollectorErrorKind { - #[error("duplicate {typ} found in namespace")] + #[error("Duplicate {typ}")] Duplicate { typ: DuplicateType, first_def: Ident, second_def: Ident }, - #[error("duplicate struct field {first_def}")] - DuplicateField { first_def: Ident, second_def: Ident }, - #[error("unresolved import")] + #[error("Unresolved import")] UnresolvedModuleDecl { mod_name: Ident, expected_path: String, alternative_path: String }, - #[error("overlapping imports")] + #[error("Overlapping imports")] OverlappingModuleDecls { mod_name: Ident, expected_path: String, alternative_path: String }, - #[error("path resolution error")] + #[error("Path resolution error")] PathResolutionError(PathResolutionError), - #[error("cannot re-export {item_name} because it has less visibility than this use statement")] + #[error("Cannot re-export {item_name} because it has less visibility than this use statement")] CannotReexportItemWithLessVisibility { item_name: Ident, desired_visibility: ItemVisibility }, #[error("Non-struct type used in impl")] NonStructTypeInImpl { span: Span }, @@ -120,6 +120,8 @@ impl fmt::Display for DuplicateType { DuplicateType::TraitAssociatedType => write!(f, "trait associated type"), DuplicateType::TraitAssociatedConst => write!(f, "trait associated constant"), DuplicateType::TraitAssociatedFunction => write!(f, "trait associated function"), + DuplicateType::StructField => write!(f, "struct field"), + DuplicateType::EnumVariant => write!(f, "enum variant"), } } } @@ -144,23 +146,6 @@ impl<'a> From<&'a DefCollectorErrorKind> for Diagnostic { diag } } - DefCollectorErrorKind::DuplicateField { first_def, second_def } => { - let primary_message = format!( - "Duplicate definitions of struct field with name {} found", - &first_def.0.contents - ); - { - let first_span = first_def.0.span(); - let second_span = 
second_def.0.span(); - let mut diag = Diagnostic::simple_error( - primary_message, - "First definition found here".to_string(), - first_span, - ); - diag.add_secondary("Second definition found here".to_string(), second_span); - diag - } - } DefCollectorErrorKind::UnresolvedModuleDecl { mod_name, expected_path, alternative_path } => { let span = mod_name.0.span(); let mod_name = &mod_name.0.contents; diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_map/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_map/mod.rs index f7fc6ca08ea..3d4049a1738 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_map/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_map/mod.rs @@ -1,7 +1,7 @@ use crate::graph::{CrateGraph, CrateId}; use crate::hir::def_collector::dc_crate::{CompilationError, DefCollector}; use crate::hir::Context; -use crate::node_interner::{FuncId, GlobalId, NodeInterner, StructId}; +use crate::node_interner::{FuncId, GlobalId, NodeInterner, TypeId}; use crate::parse_program; use crate::parser::{ParsedModule, ParserError}; use crate::token::{FunctionAttribute, SecondaryAttribute, TestScope}; @@ -356,7 +356,7 @@ pub struct ContractFunctionMeta { } pub struct ContractOutputs { - pub structs: HashMap>, + pub structs: HashMap>, pub globals: HashMap>, } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_map/module_data.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_map/module_data.rs index 06188f3920b..b048aed214a 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_map/module_data.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_map/module_data.rs @@ -4,7 +4,7 @@ use noirc_errors::Location; use super::{ItemScope, LocalModuleId, ModuleDefId, ModuleId, PerNs}; use crate::ast::{Ident, ItemVisibility}; -use crate::node_interner::{FuncId, GlobalId, StructId, TraitId, TypeAliasId}; +use crate::node_interner::{FuncId, GlobalId, TraitId, TypeAliasId, TypeId}; use 
crate::token::SecondaryAttribute; /// Contains the actual contents of a module: its parent (if one exists), @@ -120,11 +120,11 @@ impl ModuleData { self.declare(name, visibility, id.into(), None) } - pub fn declare_struct( + pub fn declare_type( &mut self, name: Ident, visibility: ItemVisibility, - id: StructId, + id: TypeId, ) -> Result<(), (Ident, Ident)> { self.declare(name, visibility, ModuleDefId::TypeId(id), None) } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_map/module_def.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_map/module_def.rs index a751eacd2dd..40d57ae2e23 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_map/module_def.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_map/module_def.rs @@ -1,4 +1,4 @@ -use crate::node_interner::{FuncId, GlobalId, StructId, TraitId, TypeAliasId}; +use crate::node_interner::{FuncId, GlobalId, TraitId, TypeAliasId, TypeId}; use super::ModuleId; @@ -7,7 +7,7 @@ use super::ModuleId; pub enum ModuleDefId { ModuleId(ModuleId), FunctionId(FuncId), - TypeId(StructId), + TypeId(TypeId), TypeAliasId(TypeAliasId), TraitId(TraitId), GlobalId(GlobalId), @@ -21,7 +21,7 @@ impl ModuleDefId { } } - pub fn as_type(&self) -> Option { + pub fn as_type(&self) -> Option { match self { ModuleDefId::TypeId(type_id) => Some(*type_id), _ => None, diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/mod.rs index b231f8c9698..fea52be88bc 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/mod.rs @@ -9,7 +9,7 @@ use crate::ast::UnresolvedGenerics; use crate::debug::DebugInstrumenter; use crate::graph::{CrateGraph, CrateId}; use crate::hir_def::function::FuncMeta; -use crate::node_interner::{FuncId, NodeInterner, StructId}; +use crate::node_interner::{FuncId, NodeInterner, TypeId}; use crate::parser::ParserError; use crate::usage_tracker::UsageTracker; use 
crate::{Generics, Kind, ParsedModule, ResolvedGeneric, TypeVariable}; @@ -151,7 +151,7 @@ impl Context<'_, '_> { /// /// For example, if you project contains a `main.nr` and `foo.nr` and you provide the `main_crate_id` and the /// `bar_struct_id` where the `Bar` struct is inside `foo.nr`, this function would return `foo::Bar` as a [String]. - pub fn fully_qualified_struct_path(&self, crate_id: &CrateId, id: StructId) -> String { + pub fn fully_qualified_struct_path(&self, crate_id: &CrateId, id: TypeId) -> String { fully_qualified_module_path(&self.def_maps, &self.crate_graph, crate_id, id.module_id()) } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/errors.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/errors.rs index 77ba76a0595..6298ef796b4 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/errors.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/errors.rs @@ -98,8 +98,10 @@ pub enum ResolverError { DependencyCycle { span: Span, item: String, cycle: String }, #[error("break/continue are only allowed in unconstrained functions")] JumpInConstrainedFn { is_break: bool, span: Span }, - #[error("loop is only allowed in unconstrained functions")] + #[error("`loop` is only allowed in unconstrained functions")] LoopInConstrainedFn { span: Span }, + #[error("`loop` must have at least one `break` in it")] + LoopWithoutBreak { span: Span }, #[error("break/continue are only allowed within loops")] JumpOutsideLoop { is_break: bool, span: Span }, #[error("Only `comptime` globals can be mutable")] @@ -114,8 +116,8 @@ pub enum ResolverError { NonIntegralGlobalType { span: Span, global_value: Value }, #[error("Global value `{global_value}` is larger than its kind's maximum value")] GlobalLargerThanKind { span: Span, global_value: FieldElement, kind: Kind }, - #[error("Self-referential structs are not supported")] - SelfReferentialStruct { span: Span }, + #[error("Self-referential types are not 
supported")] + SelfReferentialType { span: Span }, #[error("#[no_predicates] attribute is only allowed on constrained functions")] NoPredicatesAttributeOnUnconstrained { ident: Ident }, #[error("#[fold] attribute is only allowed on constrained functions")] @@ -443,6 +445,13 @@ impl<'a> From<&'a ResolverError> for Diagnostic { *span, ) }, + ResolverError::LoopWithoutBreak { span } => { + Diagnostic::simple_error( + "`loop` must have at least one `break` in it".into(), + "Infinite loops are disallowed".into(), + *span, + ) + }, ResolverError::JumpOutsideLoop { is_break, span } => { let item = if *is_break { "break" } else { "continue" }; Diagnostic::simple_error( @@ -493,9 +502,9 @@ impl<'a> From<&'a ResolverError> for Diagnostic { *span, ) } - ResolverError::SelfReferentialStruct { span } => { + ResolverError::SelfReferentialType { span } => { Diagnostic::simple_error( - "Self-referential structs are not supported".into(), + "Self-referential types are not supported".into(), "".into(), *span, ) diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/visibility.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/visibility.rs index 557f799df89..99860d86c82 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/visibility.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/visibility.rs @@ -1,5 +1,5 @@ use crate::graph::CrateId; -use crate::node_interner::{FuncId, NodeInterner, StructId, TraitId}; +use crate::node_interner::{FuncId, NodeInterner, TraitId, TypeId}; use crate::Type; use std::collections::BTreeMap; @@ -75,7 +75,25 @@ fn module_is_parent_of_struct_module( } pub fn struct_member_is_visible( - struct_id: StructId, + struct_id: TypeId, + visibility: ItemVisibility, + current_module_id: ModuleId, + def_maps: &BTreeMap, +) -> bool { + type_member_is_visible(struct_id.module_id(), visibility, current_module_id, def_maps) +} + +pub fn trait_member_is_visible( + trait_id: TraitId, + visibility: 
ItemVisibility, + current_module_id: ModuleId, + def_maps: &BTreeMap, +) -> bool { + type_member_is_visible(trait_id.0, visibility, current_module_id, def_maps) +} + +fn type_member_is_visible( + type_module_id: ModuleId, visibility: ItemVisibility, current_module_id: ModuleId, def_maps: &BTreeMap, diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/generics.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/generics.rs index 370223f1f11..f823b495040 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/generics.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/generics.rs @@ -5,7 +5,7 @@ use iter_extended::vecmap; use crate::{ hir_def::traits::NamedType, node_interner::{FuncId, NodeInterner, TraitId, TypeAliasId}, - ResolvedGeneric, StructType, Type, + DataType, ResolvedGeneric, Type, }; /// Represents something that can be generic over type variables @@ -74,7 +74,7 @@ impl Generic for TypeAliasId { } } -impl Generic for Ref<'_, StructType> { +impl Generic for Ref<'_, DataType> { fn item_kind(&self) -> &'static str { "struct" } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir_def/expr.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir_def/expr.rs index 5ac228a56d6..14a3c6fc5e9 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir_def/expr.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir_def/expr.rs @@ -12,7 +12,7 @@ use crate::Shared; use super::stmt::HirPattern; use super::traits::{ResolvedTraitBound, TraitConstraint}; -use super::types::{StructType, Type}; +use super::types::{DataType, Type}; /// A HirExpression is the result of an Expression in the AST undergoing /// name resolution. 
It is almost identical to the Expression AST node, but @@ -273,7 +273,7 @@ impl HirMethodCallExpression { #[derive(Debug, Clone)] pub struct HirConstructorExpression { - pub r#type: Shared, + pub r#type: Shared, pub struct_generics: Vec, // NOTE: It is tempting to make this a BTreeSet to force ordering of field diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir_def/function.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir_def/function.rs index aa04738733f..33515d4888e 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir_def/function.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir_def/function.rs @@ -8,7 +8,7 @@ use super::traits::TraitConstraint; use crate::ast::{BlockExpression, FunctionKind, FunctionReturnType, Visibility}; use crate::graph::CrateId; use crate::hir::def_map::LocalModuleId; -use crate::node_interner::{ExprId, NodeInterner, StructId, TraitId, TraitImplId}; +use crate::node_interner::{ExprId, NodeInterner, TraitId, TraitImplId, TypeId}; use crate::{ResolvedGeneric, Type}; @@ -133,7 +133,7 @@ pub struct FuncMeta { pub trait_constraints: Vec, /// The struct this function belongs to, if any - pub struct_id: Option, + pub struct_id: Option, // The trait this function belongs to, if any pub trait_id: Option, diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir_def/traits.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir_def/traits.rs index ff0cac027b1..a80c25492a3 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir_def/traits.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir_def/traits.rs @@ -186,22 +186,22 @@ impl Trait { (ordered, named) } - /// Returns a TraitConstraint for this trait using Self as the object - /// type and the uninstantiated generics for any trait generics. 
- pub fn as_constraint(&self, span: Span) -> TraitConstraint { + pub fn get_trait_generics(&self, span: Span) -> TraitGenerics { let ordered = vecmap(&self.generics, |generic| generic.clone().as_named_generic()); let named = vecmap(&self.associated_types, |generic| { let name = Ident::new(generic.name.to_string(), span); NamedType { name, typ: generic.clone().as_named_generic() } }); + TraitGenerics { ordered, named } + } + /// Returns a TraitConstraint for this trait using Self as the object + /// type and the uninstantiated generics for any trait generics. + pub fn as_constraint(&self, span: Span) -> TraitConstraint { + let trait_generics = self.get_trait_generics(span); TraitConstraint { typ: Type::TypeVariable(self.self_type_typevar.clone()), - trait_bound: ResolvedTraitBound { - trait_generics: TraitGenerics { ordered, named }, - trait_id: self.id, - span, - }, + trait_bound: ResolvedTraitBound { trait_generics, trait_id: self.id, span }, } } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir_def/types.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir_def/types.rs index 4eeec314917..2e665fde3f3 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir_def/types.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir_def/types.rs @@ -21,7 +21,7 @@ use noirc_printable_type::PrintableType; use crate::{ ast::{Ident, Signedness}, - node_interner::StructId, + node_interner::TypeId, }; use super::{ @@ -67,7 +67,7 @@ pub enum Type { /// A user-defined struct type. The `Shared` field here refers to /// the shared definition for each instance of this struct type. The `Vec` /// represents the generic arguments (if any) to this struct type. - Struct(Shared, Vec), + DataType(Shared, Vec), /// A user-defined alias to another type. Similar to a Struct, this carries a shared /// reference to the definition of the alias along with any generics that may have @@ -330,31 +330,46 @@ pub enum QuotedType { /// the binding to later be undone if needed. 
pub type TypeBindings = HashMap; -/// Represents a struct type in the type system. Each instance of this -/// rust struct will be shared across all Type::Struct variants that represent -/// the same struct type. -pub struct StructType { - /// A unique id representing this struct type. Used to check if two - /// struct types are equal. - pub id: StructId, +/// Represents a struct or enum type in the type system. Each instance of this +/// rust struct will be shared across all Type::DataType variants that represent +/// the same struct or enum type. +pub struct DataType { + /// A unique id representing this type. Used to check if two types are equal. + pub id: TypeId, pub name: Ident, - /// Fields are ordered and private, they should only - /// be accessed through get_field(), get_fields(), or instantiate() + /// A type's body is private to force struct fields or enum variants to only be + /// accessed through get_field(), get_fields(), instantiate(), or similar functions /// since these will handle applying generic arguments to fields as well. - fields: Vec, + body: TypeBody, pub generics: Generics, pub location: Location, } +enum TypeBody { + /// A type with no body is still in the process of being created + None, + Struct(Vec), + + #[allow(unused)] + Enum(Vec), +} + +#[derive(Clone)] pub struct StructField { pub visibility: ItemVisibility, pub name: Ident, pub typ: Type, } +#[derive(Clone)] +pub struct EnumVariant { + pub name: Ident, + pub params: Vec, +} + /// Corresponds to generic lists such as `` in the source program. 
/// Used mainly for resolved types which no longer need information such /// as names or kinds @@ -388,42 +403,35 @@ enum FunctionCoercionResult { UnconstrainedMismatch(Type), } -impl std::hash::Hash for StructType { +impl std::hash::Hash for DataType { fn hash(&self, state: &mut H) { self.id.hash(state); } } -impl Eq for StructType {} +impl Eq for DataType {} -impl PartialEq for StructType { +impl PartialEq for DataType { fn eq(&self, other: &Self) -> bool { self.id == other.id } } -impl PartialOrd for StructType { +impl PartialOrd for DataType { fn partial_cmp(&self, other: &Self) -> Option { Some(self.cmp(other)) } } -impl Ord for StructType { +impl Ord for DataType { fn cmp(&self, other: &Self) -> std::cmp::Ordering { self.id.cmp(&other.id) } } -impl StructType { - pub fn new( - id: StructId, - name: Ident, - - location: Location, - fields: Vec, - generics: Generics, - ) -> StructType { - StructType { id, fields, name, location, generics } +impl DataType { + pub fn new(id: TypeId, name: Ident, location: Location, generics: Generics) -> DataType { + DataType { id, name, location, generics, body: TypeBody::None } } /// To account for cyclic references between structs, a struct's @@ -431,14 +439,46 @@ impl StructType { /// created. Therefore, this method is used to set the fields once they /// become known. pub fn set_fields(&mut self, fields: Vec) { - self.fields = fields; + self.body = TypeBody::Struct(fields); } - pub fn num_fields(&self) -> usize { - self.fields.len() + pub fn is_struct(&self) -> bool { + matches!(&self.body, TypeBody::Struct(_)) + } + + /// Retrieve the fields of this type with no modifications. + /// Returns None if this is not a struct type. + pub fn try_fields_raw(&self) -> Option<&[StructField]> { + match &self.body { + TypeBody::Struct(fields) => Some(fields), + _ => None, + } + } + + /// Retrieve the fields of this type with no modifications. + /// Panics if this is not a struct type. 
+ fn fields_raw(&self) -> &[StructField] { + match &self.body { + TypeBody::Struct(fields) => fields, + // Turns out we call `fields_raw` in a few places before a type may be fully finished. + // One of these is when checking for nested slices, so that check will have false + // negatives. + TypeBody::None => &[], + _ => panic!("Called DataType::fields_raw on a non-struct type: {}", self.name), + } + } + + /// Retrieve the variants of this type with no modifications. + /// Panics if this is not an enum type. + fn variants_raw(&self) -> &[EnumVariant] { + match &self.body { + TypeBody::Enum(variants) => variants, + _ => panic!("Called DataType::variants_raw on a non-enum type: {}", self.name), + } } /// Returns the field matching the given field name, as well as its visibility and field index. + /// Panics if this is not a struct type. pub fn get_field( &self, field_name: &str, @@ -446,42 +486,38 @@ impl StructType { ) -> Option<(Type, ItemVisibility, usize)> { assert_eq!(self.generics.len(), generic_args.len()); - self.fields.iter().enumerate().find(|(_, field)| field.name.0.contents == field_name).map( - |(i, field)| { - let substitutions = self - .generics - .iter() - .zip(generic_args) - .map(|(old, new)| { - ( - old.type_var.id(), - (old.type_var.clone(), old.type_var.kind(), new.clone()), - ) - }) - .collect(); + let mut fields = self.fields_raw().iter().enumerate(); + fields.find(|(_, field)| field.name.0.contents == field_name).map(|(i, field)| { + let generics = self.generics.iter().zip(generic_args); + let substitutions = generics + .map(|(old, new)| { + (old.type_var.id(), (old.type_var.clone(), old.type_var.kind(), new.clone())) + }) + .collect(); - (field.typ.substitute(&substitutions), field.visibility, i) - }, - ) + (field.typ.substitute(&substitutions), field.visibility, i) + }) } /// Returns all the fields of this type, after being applied to the given generic arguments. + /// Panics if this is not a struct type. 
pub fn get_fields_with_visibility( &self, generic_args: &[Type], ) -> Vec<(String, ItemVisibility, Type)> { let substitutions = self.get_fields_substitutions(generic_args); - vecmap(&self.fields, |field| { + vecmap(self.fields_raw(), |field| { let name = field.name.0.contents.clone(); (name, field.visibility, field.typ.substitute(&substitutions)) }) } + /// Retrieve the fields of this type. Panics if this is not a struct type pub fn get_fields(&self, generic_args: &[Type]) -> Vec<(String, Type)> { let substitutions = self.get_fields_substitutions(generic_args); - vecmap(&self.fields, |field| { + vecmap(self.fields_raw(), |field| { let name = field.name.0.contents.clone(); (name, field.typ.substitute(&substitutions)) }) @@ -508,21 +544,35 @@ impl StructType { /// /// This method is almost never what is wanted for type checking or monomorphization, /// prefer to use `get_fields` whenever possible. + /// + /// Panics if this is not a struct type. pub fn get_fields_as_written(&self) -> Vec { - vecmap(&self.fields, |field| StructField { - visibility: field.visibility, - name: field.name.clone(), - typ: field.typ.clone(), - }) + self.fields_raw().to_vec() } - /// Returns the field at the given index. Panics if no field exists at the given index. + /// Returns the name and raw parameters of each variant of this type. + /// This will not substitute any generic arguments so a generic variant like `X` + /// in `enum Foo { X(T) }` will return a `("X", Vec)` pair. + /// + /// Panics if this is not an enum type. + pub fn get_variants_as_written(&self) -> Vec { + self.variants_raw().to_vec() + } + + /// Returns the field at the given index. Panics if no field exists at the given index or this + /// is not a struct type. pub fn field_at(&self, index: usize) -> &StructField { - &self.fields[index] + &self.fields_raw()[index] + } + + /// Returns the enum variant at the given index. Panics if no field exists at the given index + /// or this is not an enum type. 
+ pub fn variant_at(&self, index: usize) -> &EnumVariant { + &self.variants_raw()[index] } pub fn field_names(&self) -> BTreeSet { - self.fields.iter().map(|field| field.name.clone()).collect() + self.fields_raw().iter().map(|field| field.name.clone()).collect() } /// Instantiate this struct type, returning a Vec of the new generic args (in @@ -530,9 +580,27 @@ impl StructType { pub fn instantiate(&self, interner: &mut NodeInterner) -> Vec { vecmap(&self.generics, |generic| interner.next_type_variable_with_kind(generic.kind())) } + + /// Returns the function type of the variant at the given index of this enum. + /// Requires the `Shared` handle of self to create the given function type. + /// Panics if this is not an enum. + /// + /// The function type uses the variant "as written" ie. no generic substitutions. + /// Although the returned function is technically generic, Type::Function is returned + /// instead of Type::Forall. + pub fn variant_function_type(&self, variant_index: usize, this: Shared) -> Type { + let variant = self.variant_at(variant_index); + let args = variant.params.clone(); + assert_eq!(this.borrow().id, self.id); + let generics = vecmap(&self.generics, |generic| { + Type::NamedGeneric(generic.type_var.clone(), generic.name.clone()) + }); + let ret = Box::new(Type::DataType(this, generics)); + Type::Function(args, ret, Box::new(Type::Unit), false) + } } -impl std::fmt::Display for StructType { +impl std::fmt::Display for DataType { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{}", self.name) } @@ -850,7 +918,7 @@ impl std::fmt::Display for Type { } } } - Type::Struct(s, args) => { + Type::DataType(s, args) => { let args = vecmap(args, |arg| arg.to_string()); if args.is_empty() { write!(f, "{}", s.borrow()) @@ -1084,7 +1152,7 @@ impl Type { alias_type.borrow().get_type(&generics).is_primitive() } Type::MutableReference(typ) => typ.is_primitive(), - Type::Struct(..) + Type::DataType(..) | Type::TypeVariable(..) 
| Type::TraitAsType(..) | Type::NamedGeneric(..) @@ -1145,7 +1213,7 @@ impl Type { } Type::String(length) => length.is_valid_for_program_input(), Type::Tuple(elements) => elements.iter().all(|elem| elem.is_valid_for_program_input()), - Type::Struct(definition, generics) => definition + Type::DataType(definition, generics) => definition .borrow() .get_fields(generics) .into_iter() @@ -1200,7 +1268,7 @@ impl Type { } Type::String(length) => length.is_valid_non_inlined_function_input(), Type::Tuple(elements) => elements.iter().all(|elem| elem.is_valid_non_inlined_function_input()), - Type::Struct(definition, generics) => definition + Type::DataType(definition, generics) => definition .borrow() .get_fields(generics) .into_iter() @@ -1252,7 +1320,7 @@ impl Type { Type::Tuple(elements) => { elements.iter().all(|elem| elem.is_valid_for_unconstrained_boundary()) } - Type::Struct(definition, generics) => definition + Type::DataType(definition, generics) => definition .borrow() .get_fields(generics) .into_iter() @@ -1324,7 +1392,7 @@ impl Type { | Type::FmtString(..) | Type::Unit | Type::Tuple(..) - | Type::Struct(..) + | Type::DataType(..) | Type::TraitAsType(..) | Type::Function(..) | Type::MutableReference(..) @@ -1398,7 +1466,7 @@ impl Type { let typ = typ.as_ref(); length * typ.field_count(location) } - Type::Struct(def, args) => { + Type::DataType(def, args) => { let struct_type = def.borrow(); let fields = struct_type.get_fields(args); fields.iter().fold(0, |acc, (_, field_type)| acc + field_type.field_count(location)) @@ -1439,7 +1507,7 @@ impl Type { pub(crate) fn contains_slice(&self) -> bool { match self { Type::Slice(_) => true, - Type::Struct(struct_typ, generics) => { + Type::DataType(struct_typ, generics) => { let fields = struct_typ.borrow().get_fields(generics); for field in fields.iter() { if field.1.contains_slice() { @@ -1687,7 +1755,7 @@ impl Type { // No recursive try_unify call for struct fields. 
Don't want // to mutate shared type variables within struct definitions. // This isn't possible currently but will be once noir gets generic types - (Struct(id_a, args_a), Struct(id_b, args_b)) => { + (DataType(id_a, args_a), DataType(id_b, args_b)) => { if id_a == id_b && args_a.len() == args_b.len() { for (a, b) in args_a.iter().zip(args_b) { a.try_unify(b, bindings)?; @@ -2037,28 +2105,6 @@ impl Type { } } - /// Iterate over the fields of this type. - /// Panics if the type is not a struct or tuple. - pub fn iter_fields(&self) -> impl Iterator { - let fields: Vec<_> = match self { - // Unfortunately the .borrow() here forces us to collect into a Vec - // only to have to call .into_iter again afterward. Trying to elide - // collecting to a Vec leads to us dropping the temporary Ref before - // the iterator is returned - Type::Struct(def, args) => vecmap(&def.borrow().fields, |field| { - let name = &field.name.0.contents; - let typ = def.borrow().get_field(name, args).unwrap().0; - (name.clone(), typ) - }), - Type::Tuple(fields) => { - let fields = fields.iter().enumerate(); - vecmap(fields, |(i, field)| (i.to_string(), field.clone())) - } - other => panic!("Tried to iterate over the fields of '{other}', which has none"), - }; - fields.into_iter() - } - /// Retrieves the type of the given field name /// Panics if the type is not a struct or tuple. pub fn get_field_type_and_visibility( @@ -2066,7 +2112,7 @@ impl Type { field_name: &str, ) -> Option<(Type, ItemVisibility)> { match self.follow_bindings() { - Type::Struct(def, args) => def + Type::DataType(def, args) => def .borrow() .get_field(field_name, &args) .map(|(typ, visibility, _)| (typ, visibility)), @@ -2266,11 +2312,11 @@ impl Type { // Do not substitute_helper fields, it can lead to infinite recursion // and we should not match fields when type checking anyway. 
- Type::Struct(fields, args) => { + Type::DataType(fields, args) => { let args = vecmap(args, |arg| { arg.substitute_helper(type_bindings, substitute_bound_typevars) }); - Type::Struct(fields.clone(), args) + Type::DataType(fields.clone(), args) } Type::Alias(alias, args) => { let args = vecmap(args, |arg| { @@ -2342,7 +2388,7 @@ impl Type { let field_occurs = fields.occurs(target_id); len_occurs || field_occurs } - Type::Struct(_, generic_args) | Type::Alias(_, generic_args) => { + Type::DataType(_, generic_args) | Type::Alias(_, generic_args) => { generic_args.iter().any(|arg| arg.occurs(target_id)) } Type::TraitAsType(_, _, args) => { @@ -2399,9 +2445,9 @@ impl Type { let args = Box::new(args.follow_bindings()); FmtString(size, args) } - Struct(def, args) => { + DataType(def, args) => { let args = vecmap(args, |arg| arg.follow_bindings()); - Struct(def.clone(), args) + DataType(def.clone(), args) } Alias(def, args) => { // We don't need to vecmap(args, follow_bindings) since we're recursively @@ -2499,7 +2545,7 @@ impl Type { field.replace_named_generics_with_type_variables(); } } - Type::Struct(_, generics) => { + Type::DataType(_, generics) => { for generic in generics { generic.replace_named_generics_with_type_variables(); } @@ -2601,7 +2647,7 @@ impl Type { | Type::FmtString(..) | Type::Unit | Type::Tuple(..) - | Type::Struct(..) + | Type::DataType(..) | Type::TraitAsType(..) | Type::Function(..) | Type::Forall(..) 
@@ -2759,7 +2805,7 @@ impl From<&Type> for PrintableType { Type::Error => unreachable!(), Type::Unit => PrintableType::Unit, Type::Constant(_, _) => unreachable!(), - Type::Struct(def, ref args) => { + Type::DataType(def, ref args) => { let struct_type = def.borrow(); let fields = struct_type.get_fields(args); let fields = vecmap(fields, |(name, typ)| (name, typ.into())); @@ -2815,7 +2861,7 @@ impl std::fmt::Debug for Type { write!(f, "{}", binding.borrow()) } } - Type::Struct(s, args) => { + Type::DataType(s, args) => { let args = vecmap(args, |arg| format!("{:?}", arg)); if args.is_empty() { write!(f, "{}", s.borrow()) @@ -2897,7 +2943,7 @@ impl std::fmt::Debug for TypeVariable { } } -impl std::fmt::Debug for StructType { +impl std::fmt::Debug for DataType { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{}", self.name) } @@ -2934,7 +2980,7 @@ impl std::hash::Hash for Type { env.hash(state); } Type::Tuple(elems) => elems.hash(state), - Type::Struct(def, args) => { + Type::DataType(def, args) => { def.hash(state); args.hash(state); } @@ -3005,7 +3051,7 @@ impl PartialEq for Type { lhs_len == rhs_len && lhs_env == rhs_env } (Tuple(lhs_types), Tuple(rhs_types)) => lhs_types == rhs_types, - (Struct(lhs_struct, lhs_generics), Struct(rhs_struct, rhs_generics)) => { + (DataType(lhs_struct, lhs_generics), DataType(rhs_struct, rhs_generics)) => { lhs_struct == rhs_struct && lhs_generics == rhs_generics } (Alias(lhs_alias, lhs_generics), Alias(rhs_alias, rhs_generics)) => { diff --git a/noir/noir-repo/compiler/noirc_frontend/src/locations.rs b/noir/noir-repo/compiler/noirc_frontend/src/locations.rs index ecae5b19a95..33c37172b50 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/locations.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/locations.rs @@ -7,7 +7,7 @@ use crate::{ ast::{FunctionDefinition, ItemVisibility}, hir::def_map::{ModuleDefId, ModuleId}, node_interner::{ - DefinitionId, FuncId, GlobalId, NodeInterner, 
ReferenceId, StructId, TraitId, TypeAliasId, + DefinitionId, FuncId, GlobalId, NodeInterner, ReferenceId, TraitId, TypeAliasId, TypeId, }, }; use petgraph::prelude::NodeIndex as PetGraphIndex; @@ -60,18 +60,22 @@ impl NodeInterner { match reference { ReferenceId::Module(id) => self.module_attributes(&id).location, ReferenceId::Function(id) => self.function_modifiers(&id).name_location, - ReferenceId::Struct(id) => { - let struct_type = self.get_struct(id); - let struct_type = struct_type.borrow(); - Location::new(struct_type.name.span(), struct_type.location.file) + ReferenceId::Struct(id) | ReferenceId::Enum(id) => { + let typ = self.get_type(id); + let typ = typ.borrow(); + Location::new(typ.name.span(), typ.location.file) } ReferenceId::StructMember(id, field_index) => { - let struct_type = self.get_struct(id); + let struct_type = self.get_type(id); let struct_type = struct_type.borrow(); - Location::new( - struct_type.field_at(field_index).name.span(), - struct_type.location.file, - ) + let file = struct_type.location.file; + Location::new(struct_type.field_at(field_index).name.span(), file) + } + ReferenceId::EnumVariant(id, variant_index) => { + let typ = self.get_type(id); + let typ = typ.borrow(); + let file = typ.location.file; + Location::new(typ.variant_at(variant_index).name.span(), file) } ReferenceId::Trait(id) => { let trait_type = self.get_trait(id); @@ -126,7 +130,7 @@ impl NodeInterner { pub(crate) fn add_struct_reference( &mut self, - id: StructId, + id: TypeId, location: Location, is_self_type: bool, ) { @@ -135,7 +139,7 @@ impl NodeInterner { pub(crate) fn add_struct_member_reference( &mut self, - id: StructId, + id: TypeId, member_index: usize, location: Location, ) { @@ -324,7 +328,7 @@ impl NodeInterner { pub(crate) fn register_struct( &mut self, - id: StructId, + id: TypeId, name: String, visibility: ItemVisibility, parent_module_id: ModuleId, @@ -333,6 +337,17 @@ impl NodeInterner { self.register_name_for_auto_import(name, 
ModuleDefId::TypeId(id), visibility, None); } + pub(crate) fn register_enum( + &mut self, + id: TypeId, + name: String, + visibility: ItemVisibility, + parent_module_id: ModuleId, + ) { + self.add_definition_location(ReferenceId::Enum(id), Some(parent_module_id)); + self.register_name_for_auto_import(name, ModuleDefId::TypeId(id), visibility, None); + } + pub(crate) fn register_trait( &mut self, id: TraitId, diff --git a/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/mod.rs index 191c3937e4b..3e4b478c23a 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/mod.rs @@ -1159,7 +1159,7 @@ impl<'interner> Monomorphizer<'interner> { monomorphized_default } - HirType::Struct(def, args) => { + HirType::DataType(def, args) => { // Not all generic arguments may be used in a struct's fields so we have to check // the arguments as well as the fields in case any need to be defaulted or are unbound. for arg in args { @@ -1279,7 +1279,7 @@ impl<'interner> Monomorphizer<'interner> { Self::check_type(&default, location) } - HirType::Struct(_def, args) => { + HirType::DataType(_def, args) => { for arg in args { Self::check_type(arg, location)?; } @@ -2133,7 +2133,7 @@ fn unwrap_struct_type( location: Location, ) -> Result, MonomorphizationError> { match typ.follow_bindings() { - HirType::Struct(def, args) => { + HirType::DataType(def, args) => { // Some of args might not be mentioned in fields, so we need to check that they aren't unbound. 
for arg in &args { Monomorphizer::check_type(arg, location)?; diff --git a/noir/noir-repo/compiler/noirc_frontend/src/node_interner.rs b/noir/noir-repo/compiler/noirc_frontend/src/node_interner.rs index 431bed3b604..fbf933f59dd 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/node_interner.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/node_interner.rs @@ -18,7 +18,7 @@ use crate::ast::{ use crate::graph::CrateId; use crate::hir::comptime; use crate::hir::def_collector::dc_crate::CompilationError; -use crate::hir::def_collector::dc_crate::{UnresolvedStruct, UnresolvedTrait, UnresolvedTypeAlias}; +use crate::hir::def_collector::dc_crate::{UnresolvedTrait, UnresolvedTypeAlias}; use crate::hir::def_map::DefMaps; use crate::hir::def_map::{LocalModuleId, ModuleDefId, ModuleId}; use crate::hir::type_check::generics::TraitGenerics; @@ -32,7 +32,7 @@ use crate::hir_def::expr::HirIdent; use crate::hir_def::stmt::HirLetStatement; use crate::hir_def::traits::TraitImpl; use crate::hir_def::traits::{Trait, TraitConstraint}; -use crate::hir_def::types::{Kind, StructType, Type}; +use crate::hir_def::types::{DataType, Kind, Type}; use crate::hir_def::{ expr::HirExpression, function::{FuncMeta, HirFunction}, @@ -106,14 +106,14 @@ pub struct NodeInterner { // Similar to `id_to_type` but maps definitions to their type definition_to_type: HashMap, - // Struct map. + // Struct and Enum map. // - // Each struct definition is possibly shared across multiple type nodes. + // Each type definition is possibly shared across multiple type nodes. // It is also mutated through the RefCell during name resolution to append // methods from impls to the type. 
- structs: HashMap>, + data_types: HashMap>, - struct_attributes: HashMap, + type_attributes: HashMap, // Maps TypeAliasId -> Shared // @@ -286,7 +286,7 @@ pub struct NodeInterner { /// ``` #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub enum DependencyId { - Struct(StructId), + Struct(TypeId), Global(GlobalId), Function(FuncId), Alias(TypeAliasId), @@ -299,8 +299,10 @@ pub enum DependencyId { #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub enum ReferenceId { Module(ModuleId), - Struct(StructId), - StructMember(StructId, usize), + Struct(TypeId), + StructMember(TypeId, usize), + Enum(TypeId), + EnumVariant(TypeId, usize), Trait(TraitId), Global(GlobalId), Function(FuncId), @@ -465,14 +467,14 @@ impl fmt::Display for FuncId { } #[derive(Debug, Eq, PartialEq, Hash, Copy, Clone, PartialOrd, Ord)] -pub struct StructId(ModuleId); +pub struct TypeId(ModuleId); -impl StructId { +impl TypeId { //dummy id for error reporting // This can be anything, as the program will ultimately fail // after resolution - pub fn dummy_id() -> StructId { - StructId(ModuleId { krate: CrateId::dummy_id(), local_id: LocalModuleId::dummy_id() }) + pub fn dummy_id() -> TypeId { + TypeId(ModuleId { krate: CrateId::dummy_id(), local_id: LocalModuleId::dummy_id() }) } pub fn module_id(self) -> ModuleId { @@ -652,8 +654,8 @@ impl Default for NodeInterner { definitions: vec![], id_to_type: HashMap::default(), definition_to_type: HashMap::default(), - structs: HashMap::default(), - struct_attributes: HashMap::default(), + data_types: HashMap::default(), + type_attributes: HashMap::default(), type_aliases: Vec::new(), traits: HashMap::default(), trait_implementations: HashMap::default(), @@ -747,25 +749,25 @@ impl NodeInterner { self.traits.insert(type_id, new_trait); } - pub fn new_struct( + /// Creates a new struct or enum type with no fields or variants. 
+ #[allow(clippy::too_many_arguments)] + pub fn new_type( &mut self, - typ: &UnresolvedStruct, + name: Ident, + span: Span, + attributes: Vec, generics: Generics, krate: CrateId, local_id: LocalModuleId, file_id: FileId, - ) -> StructId { - let struct_id = StructId(ModuleId { krate, local_id }); - let name = typ.struct_def.name.clone(); + ) -> TypeId { + let type_id = TypeId(ModuleId { krate, local_id }); - // Fields will be filled in later - let no_fields = Vec::new(); - - let location = Location::new(typ.struct_def.span, file_id); - let new_struct = StructType::new(struct_id, name, location, no_fields, generics); - self.structs.insert(struct_id, Shared::new(new_struct)); - self.struct_attributes.insert(struct_id, typ.struct_def.attributes.clone()); - struct_id + let location = Location::new(span, file_id); + let new_type = DataType::new(type_id, name, location, generics); + self.data_types.insert(type_id, Shared::new(new_type)); + self.type_attributes.insert(type_id, attributes); + type_id } pub fn push_type_alias( @@ -791,8 +793,8 @@ impl NodeInterner { pub fn add_type_alias_ref(&mut self, type_id: TypeAliasId, location: Location) { self.type_alias_ref.push((type_id, location)); } - pub fn update_struct(&mut self, type_id: StructId, f: impl FnOnce(&mut StructType)) { - let mut value = self.structs.get_mut(&type_id).unwrap().borrow_mut(); + pub fn update_struct(&mut self, type_id: TypeId, f: impl FnOnce(&mut DataType)) { + let mut value = self.data_types.get_mut(&type_id).unwrap().borrow_mut(); f(&mut value); } @@ -803,10 +805,10 @@ impl NodeInterner { pub fn update_struct_attributes( &mut self, - type_id: StructId, + type_id: TypeId, f: impl FnOnce(&mut StructAttributes), ) { - let value = self.struct_attributes.get_mut(&type_id).unwrap(); + let value = self.type_attributes.get_mut(&type_id).unwrap(); f(value); } @@ -1096,8 +1098,8 @@ impl NodeInterner { &self.function_modifiers[func_id].attributes } - pub fn struct_attributes(&self, struct_id: &StructId) -> 
&StructAttributes { - &self.struct_attributes[struct_id] + pub fn struct_attributes(&self, struct_id: &TypeId) -> &StructAttributes { + &self.type_attributes[struct_id] } pub fn add_module_attributes(&mut self, module_id: ModuleId, attributes: ModuleAttributes) { @@ -1213,8 +1215,8 @@ impl NodeInterner { self.id_to_location.insert(id.into(), Location::new(span, file)); } - pub fn get_struct(&self, id: StructId) -> Shared { - self.structs[&id].clone() + pub fn get_type(&self, id: TypeId) -> Shared { + self.data_types[&id].clone() } pub fn get_type_methods(&self, typ: &Type) -> Option<&HashMap> { @@ -1387,7 +1389,7 @@ impl NodeInterner { unreachable!("Cannot add a method to the unsupported type '{}'", self_type) }); - if trait_id.is_none() && matches!(self_type, Type::Struct(..)) { + if trait_id.is_none() && matches!(self_type, Type::DataType(..)) { if let Some(existing) = self.lookup_direct_method(self_type, &method_name, true) { return Some(existing); @@ -1980,7 +1982,7 @@ impl NodeInterner { /// Register that `dependent` depends on `dependency`. /// This is usually because `dependent` refers to `dependency` in one of its struct fields. - pub fn add_type_dependency(&mut self, dependent: DependencyId, dependency: StructId) { + pub fn add_type_dependency(&mut self, dependent: DependencyId, dependency: TypeId) { self.add_dependency(dependent, DependencyId::Struct(dependency)); } @@ -2033,7 +2035,7 @@ impl NodeInterner { for (i, index) in scc.iter().enumerate() { match self.dependency_graph[*index] { DependencyId::Struct(struct_id) => { - let struct_type = self.get_struct(struct_id); + let struct_type = self.get_type(struct_id); let struct_type = struct_type.borrow(); push_error(struct_type.name.to_string(), &scc, i, struct_type.location); break; @@ -2080,7 +2082,7 @@ impl NodeInterner { /// element at the given start index. 
fn get_cycle_error_string(&self, scc: &[PetGraphIndex], start_index: usize) -> String { let index_to_string = |index: PetGraphIndex| match self.dependency_graph[index] { - DependencyId::Struct(id) => Cow::Owned(self.get_struct(id).borrow().name.to_string()), + DependencyId::Struct(id) => Cow::Owned(self.get_type(id).borrow().name.to_string()), DependencyId::Function(id) => Cow::Borrowed(self.function_name(&id)), DependencyId::Alias(id) => { Cow::Owned(self.get_type_alias(id).borrow().name.to_string()) @@ -2422,7 +2424,7 @@ enum TypeMethodKey { Function, Generic, Quoted(QuotedType), - Struct(StructId), + Struct(TypeId), } fn get_type_method_key(typ: &Type) -> Option { @@ -2450,7 +2452,7 @@ fn get_type_method_key(typ: &Type) -> Option { Type::Quoted(quoted) => Some(Quoted(*quoted)), Type::MutableReference(element) => get_type_method_key(element), Type::Alias(alias, _) => get_type_method_key(&alias.borrow().typ), - Type::Struct(struct_type, _) => Some(Struct(struct_type.borrow().id)), + Type::DataType(struct_type, _) => Some(Struct(struct_type.borrow().id)), // We do not support adding methods to these types Type::Forall(_, _) diff --git a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/module.rs b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/module.rs index da733168099..1bc3d7b5beb 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/module.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/module.rs @@ -25,6 +25,7 @@ impl<'a> Parser<'a> { visibility, ident: Ident::default(), outer_attributes, + has_semicolon: false, }); }; @@ -41,10 +42,16 @@ impl<'a> Parser<'a> { is_contract, }) } else { - if !self.eat_semicolons() { + let has_semicolon = self.eat_semicolons(); + if !has_semicolon { self.expected_token(Token::Semicolon); } - ItemKind::ModuleDecl(ModuleDeclaration { visibility, ident, outer_attributes }) + ItemKind::ModuleDecl(ModuleDeclaration { + visibility, + ident, + outer_attributes, + has_semicolon, + }) } } 
} diff --git a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/statement.rs b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/statement.rs index 8c6d18d90ef..005216b1deb 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/statement.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/statement.rs @@ -157,8 +157,8 @@ impl<'a> Parser<'a> { return Some(StatementKind::For(for_loop)); } - if let Some(block) = self.parse_loop() { - return Some(StatementKind::Loop(block)); + if let Some((block, span)) = self.parse_loop() { + return Some(StatementKind::Loop(block, span)); } if let Some(kind) = self.parse_if_expr() { @@ -293,7 +293,7 @@ impl<'a> Parser<'a> { } /// LoopStatement = 'loop' Block - fn parse_loop(&mut self) -> Option { + fn parse_loop(&mut self) -> Option<(Expression, Span)> { let start_span = self.current_token_span; if !self.eat_keyword(Keyword::Loop) { return None; @@ -312,7 +312,7 @@ impl<'a> Parser<'a> { Expression { kind: ExpressionKind::Error, span: self.span_since(block_start_span) } }; - Some(block) + Some((block, start_span)) } /// ForRange @@ -824,13 +824,15 @@ mod tests { let src = "loop { }"; let mut parser = Parser::for_str(src); let statement = parser.parse_statement_or_error(); - let StatementKind::Loop(block) = statement.kind else { + let StatementKind::Loop(block, span) = statement.kind else { panic!("Expected loop"); }; let ExpressionKind::Block(block) = block.kind else { panic!("Expected block"); }; assert!(block.statements.is_empty()); + assert_eq!(span.start(), 0); + assert_eq!(span.end(), 4); } #[test] @@ -838,7 +840,7 @@ mod tests { let src = "loop { 1; 2 }"; let mut parser = Parser::for_str(src); let statement = parser.parse_statement_or_error(); - let StatementKind::Loop(block) = statement.kind else { + let StatementKind::Loop(block, _) = statement.kind else { panic!("Expected loop"); }; let ExpressionKind::Block(block) = block.kind else { diff --git 
a/noir/noir-repo/compiler/noirc_frontend/src/resolve_locations.rs b/noir/noir-repo/compiler/noirc_frontend/src/resolve_locations.rs index b9e86bf0ef7..1b904f653bd 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/resolve_locations.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/resolve_locations.rs @@ -93,7 +93,7 @@ impl NodeInterner { fn get_type_location_from_index(&self, index: impl Into) -> Option { match self.id_type(index.into()) { - Type::Struct(struct_type, _) => Some(struct_type.borrow().location), + Type::DataType(struct_type, _) => Some(struct_type.borrow().location), _ => None, } } @@ -150,7 +150,7 @@ impl NodeInterner { let expr_rhs = &expr_member_access.rhs; let lhs_self_struct = match self.id_type(expr_lhs) { - Type::Struct(struct_type, _) => struct_type, + Type::DataType(struct_type, _) => struct_type, _ => return None, }; @@ -217,7 +217,7 @@ impl NodeInterner { .iter() .find(|(_typ, type_ref_location)| type_ref_location.contains(&location)) .and_then(|(typ, _)| match typ { - Type::Struct(struct_typ, _) => Some(struct_typ.borrow().location), + Type::DataType(struct_typ, _) => Some(struct_typ.borrow().location), _ => None, }) } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/tests.rs b/noir/noir-repo/compiler/noirc_frontend/src/tests.rs index 087e34fcc64..064068dba7d 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/tests.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/tests.rs @@ -2818,12 +2818,13 @@ fn duplicate_struct_field() { let errors = get_program_errors(src); assert_eq!(errors.len(), 1); - let CompilationError::DefinitionError(DefCollectorErrorKind::DuplicateField { + let CompilationError::DefinitionError(DefCollectorErrorKind::Duplicate { + typ: _, first_def, second_def, }) = &errors[0].0 else { - panic!("Expected a duplicate field error, got {:?}", errors[0].0); + panic!("Expected a 'duplicate' error, got {:?}", errors[0].0); }; assert_eq!(first_def.to_string(), "x"); @@ -4073,3 +4074,85 @@ fn regression_7088() 
{ "#; assert_no_errors(src); } + +#[test] +fn errors_on_empty_loop_no_break() { + let src = r#" + fn main() { + /// Safety: test + unsafe { + foo() + } + } + + unconstrained fn foo() { + loop {} + } + "#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + assert!(matches!( + &errors[0].0, + CompilationError::ResolverError(ResolverError::LoopWithoutBreak { .. }) + )); +} + +#[test] +fn errors_on_loop_without_break() { + let src = r#" + fn main() { + /// Safety: test + unsafe { + foo() + } + } + + unconstrained fn foo() { + let mut x = 1; + loop { + x += 1; + bar(x); + } + } + + fn bar(_: Field) {} + "#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + assert!(matches!( + &errors[0].0, + CompilationError::ResolverError(ResolverError::LoopWithoutBreak { .. }) + )); +} + +#[test] +fn errors_on_loop_without_break_with_nested_loop() { + let src = r#" + fn main() { + /// Safety: test + unsafe { + foo() + } + } + + unconstrained fn foo() { + let mut x = 1; + loop { + x += 1; + bar(x); + loop { + x += 2; + break; + } + } + } + + fn bar(_: Field) {} + "#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + assert!(matches!( + &errors[0].0, + CompilationError::ResolverError(ResolverError::LoopWithoutBreak { .. 
}) + )); +} diff --git a/noir/noir-repo/compiler/noirc_frontend/src/tests/metaprogramming.rs b/noir/noir-repo/compiler/noirc_frontend/src/tests/metaprogramming.rs index 8256744e18f..b42342fa47d 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/tests/metaprogramming.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/tests/metaprogramming.rs @@ -3,6 +3,7 @@ use noirc_errors::Spanned; use crate::{ ast::Ident, hir::{ + comptime::InterpreterError, def_collector::{ dc_crate::CompilationError, errors::{DefCollectorErrorKind, DuplicateType}, @@ -26,6 +27,26 @@ fn comptime_let() { assert_eq!(errors.len(), 0); } +#[test] +fn comptime_code_rejects_dynamic_variable() { + let src = r#"fn main(x: Field) { + comptime let my_var = (x - x) + 2; + assert_eq(my_var, 2); + }"#; + let errors = get_program_errors(src); + + assert_eq!(errors.len(), 1); + match &errors[0].0 { + CompilationError::InterpreterError(InterpreterError::NonComptimeVarReferenced { + name, + .. + }) => { + assert_eq!(name, "x"); + } + _ => panic!("expected an InterpreterError"), + } +} + #[test] fn comptime_type_in_runtime_code() { let source = "pub fn foo(_f: FunctionDefinition) {}"; diff --git a/noir/noir-repo/compiler/noirc_frontend/src/tests/traits.rs b/noir/noir-repo/compiler/noirc_frontend/src/tests/traits.rs index 212dca5fd39..7f252b556c2 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/tests/traits.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/tests/traits.rs @@ -1341,9 +1341,7 @@ fn regression_6530() { assert_eq!(errors.len(), 0); } -// See https://github.com/noir-lang/noir/issues/7090 #[test] -#[should_panic] fn calls_trait_method_using_struct_name_when_multiple_impls_exist() { let src = r#" trait From2 { @@ -1367,3 +1365,32 @@ fn calls_trait_method_using_struct_name_when_multiple_impls_exist() { "#; assert_no_errors(src); } + +#[test] +fn calls_trait_method_using_struct_name_when_multiple_impls_exist_and_errors_turbofish() { + let src = r#" + trait From2 { + fn from2(input: T) -> 
Self; + } + struct U60Repr {} + impl From2<[Field; 3]> for U60Repr { + fn from2(_: [Field; 3]) -> Self { + U60Repr {} + } + } + impl From2 for U60Repr { + fn from2(_: Field) -> Self { + U60Repr {} + } + } + fn main() { + let _ = U60Repr::::from2([1, 2, 3]); + } + "#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + assert!(matches!( + errors[0].0, + CompilationError::TypeError(TypeCheckError::TypeMismatch { .. }) + )); +} diff --git a/noir/noir-repo/compiler/noirc_frontend/src/usage_tracker.rs b/noir/noir-repo/compiler/noirc_frontend/src/usage_tracker.rs index 6987358ddb7..ea4919096c0 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/usage_tracker.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/usage_tracker.rs @@ -3,14 +3,15 @@ use std::collections::HashMap; use crate::{ ast::{Ident, ItemVisibility}, hir::def_map::ModuleId, - node_interner::{FuncId, GlobalId, StructId, TraitId, TypeAliasId}, + node_interner::{FuncId, GlobalId, TraitId, TypeAliasId, TypeId}, }; #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub enum UnusedItem { Import, Function(FuncId), - Struct(StructId), + Struct(TypeId), + Enum(TypeId), Trait(TraitId), TypeAlias(TypeAliasId), Global(GlobalId), @@ -22,6 +23,7 @@ impl UnusedItem { UnusedItem::Import => "import", UnusedItem::Function(_) => "function", UnusedItem::Struct(_) => "struct", + UnusedItem::Enum(_) => "enum", UnusedItem::Trait(_) => "trait", UnusedItem::TypeAlias(_) => "type alias", UnusedItem::Global(_) => "global", diff --git a/noir/noir-repo/cspell.json b/noir/noir-repo/cspell.json index 25a0cc91f52..1174a56dd33 100644 --- a/noir/noir-repo/cspell.json +++ b/noir/noir-repo/cspell.json @@ -32,6 +32,7 @@ "boilerplates", "bridgekeeper", "brillig", + "brillig_", "bunx", "bytecount", "cachix", diff --git a/noir/noir-repo/docs/docs/noir/concepts/control_flow.md b/noir/noir-repo/docs/docs/noir/concepts/control_flow.md index a11db545e32..3e2d913ec96 100644 --- 
a/noir/noir-repo/docs/docs/noir/concepts/control_flow.md +++ b/noir/noir-repo/docs/docs/noir/concepts/control_flow.md @@ -79,8 +79,9 @@ The iteration variable `i` is still increased by one as normal when `continue` i ## Loops -In unconstrained code, `loop` is allowed for loops that end with a `break`. This is only allowed -in unconstrained code since normal constrained code requires that Noir knows exactly how many iterations +In unconstrained code, `loop` is allowed for loops that end with a `break`. +A `loop` must have at least one `break` in it. +This is only allowed in unconstrained code since normal constrained code requires that Noir knows exactly how many iterations a loop may have. ```rust diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.36.0/noir/standard_library/cryptographic_primitives/ec_primitives.md b/noir/noir-repo/docs/versioned_docs/version-v0.36.0/noir/standard_library/cryptographic_primitives/ec_primitives.md index f262d8160d6..b93e3704c4e 100644 --- a/noir/noir-repo/docs/versioned_docs/version-v0.36.0/noir/standard_library/cryptographic_primitives/ec_primitives.md +++ b/noir/noir-repo/docs/versioned_docs/version-v0.36.0/noir/standard_library/cryptographic_primitives/ec_primitives.md @@ -18,7 +18,7 @@ curve you want to use, which would be specified using any one of the methods `std::ec::{tecurve,montcurve,swcurve}::{affine,curvegroup}::new` which take the coefficients in the defining equation together with a generator point as parameters. 
You can find more detail in the comments in -[`noir_stdlib/src/ec/mod.nr`](https://github.com/noir-lang/noir/blob/master/noir_stdlib/src/ec/mod.nr), but +[`noir_stdlib/src/ec/mod.nr`](https://github.com/noir-lang/ec/blob/master/src/lib.nr), but the gist of it is that the elliptic curves of interest are usually expressed in one of the standard forms implemented here (Twisted Edwards, Montgomery and Short Weierstraß), and in addition to that, you could choose to use `affine` coordinates (Cartesian coordinates - the usual (x,y) - possibly @@ -67,12 +67,12 @@ does indeed lie on `c` by calling `c.contains(p1)`. the curve configurations, the SWU map-to-curve method may be called as `c.swu_map(z,n)`, where `z: Field` depends on `Field` and `c` and must be chosen by the user (the conditions it needs to satisfy are specified in the comments - [here](https://github.com/noir-lang/noir/blob/master/noir_stdlib/src/ec/mod.nr)). + [here](https://github.com/noir-lang/ec/blob/master/src/lib.nr)). ## Examples The -[ec_baby_jubjub test](https://github.com/noir-lang/noir/blob/master/test_programs/compile_success_empty/ec_baby_jubjub/src/main.nr) +[ec_baby_jubjub test](https://github.com/noir-lang/ec/blob/460dff3cc6a1c0c5d9449f99a0a158bde21c19a8/src/lib.nr#L210) illustrates all of the above primitives on various forms of the Baby Jubjub curve. 
A couple of more interesting examples in Noir would be: diff --git a/noir/noir-repo/docs/versioned_docs/version-v1.0.0-beta.0/noir/standard_library/cryptographic_primitives/ec_primitives.md b/noir/noir-repo/docs/versioned_docs/version-v1.0.0-beta.0/noir/standard_library/cryptographic_primitives/ec_primitives.md index 00b8071487e..e4a73d57ab6 100644 --- a/noir/noir-repo/docs/versioned_docs/version-v1.0.0-beta.0/noir/standard_library/cryptographic_primitives/ec_primitives.md +++ b/noir/noir-repo/docs/versioned_docs/version-v1.0.0-beta.0/noir/standard_library/cryptographic_primitives/ec_primitives.md @@ -18,7 +18,7 @@ curve you want to use, which would be specified using any one of the methods `std::ec::{tecurve,montcurve,swcurve}::{affine,curvegroup}::new` which take the coefficients in the defining equation together with a generator point as parameters. You can find more detail in the comments in -[`noir_stdlib/src/ec/mod.nr`](https://github.com/noir-lang/noir/blob/master/noir_stdlib/src/ec/mod.nr), but +[`noir_stdlib/src/ec/mod.nr`](https://github.com/noir-lang/ec/blob/master/src/lib.nr), but the gist of it is that the elliptic curves of interest are usually expressed in one of the standard forms implemented here (Twisted Edwards, Montgomery and Short Weierstraß), and in addition to that, you could choose to use `affine` coordinates (Cartesian coordinates - the usual (x,y) - possibly @@ -67,12 +67,12 @@ does indeed lie on `c` by calling `c.contains(p1)`. the curve configurations, the SWU map-to-curve method may be called as `c.swu_map(z,n)`, where `z: Field` depends on `Field` and `c` and must be chosen by the user (the conditions it needs to satisfy are specified in the comments - [here](https://github.com/noir-lang/noir/blob/master/noir_stdlib/src/ec/mod.nr)). + [here](https://github.com/noir-lang/ec/blob/master/src/lib.nr)). 
## Examples The -[ec_baby_jubjub test](https://github.com/noir-lang/noir/blob/master/test_programs/compile_success_empty/ec_baby_jubjub/src/main.nr) +[ec_baby_jubjub test](https://github.com/noir-lang/ec/blob/460dff3cc6a1c0c5d9449f99a0a158bde21c19a8/src/lib.nr#L210) illustrates all of the above primitives on various forms of the Baby Jubjub curve. A couple of more interesting examples in Noir would be: diff --git a/noir/noir-repo/noir_stdlib/src/collections/bounded_vec.nr b/noir/noir-repo/noir_stdlib/src/collections/bounded_vec.nr index a1befdd58ec..c030544e791 100644 --- a/noir/noir-repo/noir_stdlib/src/collections/bounded_vec.nr +++ b/noir/noir-repo/noir_stdlib/src/collections/bounded_vec.nr @@ -1,4 +1,4 @@ -use crate::{cmp::Eq, convert::From, runtime::is_unconstrained}; +use crate::{cmp::Eq, convert::From, runtime::is_unconstrained, static_assert}; /// A `BoundedVec` is a growable storage similar to a `Vec` except that it /// is bounded with a maximum possible length. Unlike `Vec`, `BoundedVec` is not implemented @@ -345,7 +345,7 @@ impl BoundedVec { /// let bounded_vec: BoundedVec = BoundedVec::from_array([1, 2, 3]) /// ``` pub fn from_array(array: [T; Len]) -> Self { - assert(Len <= MaxLen, "from array out of bounds"); + static_assert(Len <= MaxLen, "from array out of bounds"); let mut vec: BoundedVec = BoundedVec::new(); vec.extend_from_array(array); vec diff --git a/noir/noir-repo/noir_stdlib/src/field/mod.nr b/noir/noir-repo/noir_stdlib/src/field/mod.nr index 7ebeb29b05b..411a9e6a376 100644 --- a/noir/noir-repo/noir_stdlib/src/field/mod.nr +++ b/noir/noir-repo/noir_stdlib/src/field/mod.nr @@ -1,5 +1,5 @@ pub mod bn254; -use crate::runtime::is_unconstrained; +use crate::{runtime::is_unconstrained, static_assert}; use bn254::lt as bn254_lt; impl Field { @@ -10,7 +10,10 @@ impl Field { // docs:start:assert_max_bit_size pub fn assert_max_bit_size(self) { // docs:end:assert_max_bit_size - assert(BIT_SIZE < modulus_num_bits() as u32); + static_assert( + BIT_SIZE 
< modulus_num_bits() as u32, + "BIT_SIZE must be less than modulus_num_bits", + ); self.__assert_max_bit_size(BIT_SIZE); } @@ -29,9 +32,7 @@ impl Field { /// (e.g. 254 for the BN254 field) allow for multiple bit decompositions. This is due to how the `Field` will /// wrap around due to overflow when verifying the decomposition. #[builtin(to_le_bits)] - // docs:start:to_le_bits - pub fn to_le_bits(self: Self) -> [u1; N] {} - // docs:end:to_le_bits + fn _to_le_bits(self: Self) -> [u1; N] {} /// Decomposes `self` into its big endian bit decomposition as a `[u1; N]` array. /// This array will be zero padded should not all bits be necessary to represent `self`. @@ -45,9 +46,71 @@ impl Field { /// (e.g. 254 for the BN254 field) allow for multiple bit decompositions. This is due to how the `Field` will /// wrap around due to overflow when verifying the decomposition. #[builtin(to_be_bits)] + fn _to_be_bits(self: Self) -> [u1; N] {} + + /// Decomposes `self` into its little endian bit decomposition as a `[u1; N]` array. + /// This slice will be zero padded should not all bits be necessary to represent `self`. + /// + /// # Failures + /// Causes a constraint failure for `Field` values exceeding `2^N` as the resulting slice will not + /// be able to represent the original `Field`. + /// + /// # Safety + /// The bit decomposition returned is canonical and is guaranteed to not overflow the modulus. + // docs:start:to_le_bits + pub fn to_le_bits(self: Self) -> [u1; N] { + // docs:end:to_le_bits + let bits = self._to_le_bits(); + + if !is_unconstrained() { + // Ensure that the byte decomposition does not overflow the modulus + let p = modulus_le_bits(); + assert(bits.len() <= p.len()); + let mut ok = bits.len() != p.len(); + for i in 0..N { + if !ok { + if (bits[N - 1 - i] != p[N - 1 - i]) { + assert(p[N - 1 - i] == 1); + ok = true; + } + } + } + assert(ok); + } + bits + } + + /// Decomposes `self` into its big endian bit decomposition as a `[u1; N]` array. 
+ /// This array will be zero padded should not all bits be necessary to represent `self`. + /// + /// # Failures + /// Causes a constraint failure for `Field` values exceeding `2^N` as the resulting slice will not + /// be able to represent the original `Field`. + /// + /// # Safety + /// The bit decomposition returned is canonical and is guaranteed to not overflow the modulus. // docs:start:to_be_bits - pub fn to_be_bits(self: Self) -> [u1; N] {} - // docs:end:to_be_bits + pub fn to_be_bits(self: Self) -> [u1; N] { + // docs:end:to_be_bits + let bits = self._to_be_bits(); + + if !is_unconstrained() { + // Ensure that the decomposition does not overflow the modulus + let p = modulus_be_bits(); + assert(bits.len() <= p.len()); + let mut ok = bits.len() != p.len(); + for i in 0..N { + if !ok { + if (bits[i] != p[i]) { + assert(p[i] == 1); + ok = true; + } + } + } + assert(ok); + } + bits + } /// Decomposes `self` into its little endian byte decomposition as a `[u8;N]` array /// This array will be zero padded should not all bytes be necessary to represent `self`. 
@@ -61,6 +124,10 @@ impl Field { // docs:start:to_le_bytes pub fn to_le_bytes(self: Self) -> [u8; N] { // docs:end:to_le_bytes + static_assert( + N <= modulus_le_bytes().len(), + "N must be less than or equal to modulus_le_bytes().len()", + ); // Compute the byte decomposition let bytes = self.to_le_radix(256); @@ -94,6 +161,10 @@ impl Field { // docs:start:to_be_bytes pub fn to_be_bytes(self: Self) -> [u8; N] { // docs:end:to_be_bytes + static_assert( + N <= modulus_le_bytes().len(), + "N must be less than or equal to modulus_le_bytes().len()", + ); // Compute the byte decomposition let bytes = self.to_be_radix(256); @@ -119,7 +190,9 @@ impl Field { pub fn to_le_radix(self: Self, radix: u32) -> [u8; N] { // Brillig does not need an immediate radix if !crate::runtime::is_unconstrained() { - crate::assert_constant(radix); + static_assert(1 < radix, "radix must be greater than 1"); + static_assert(radix <= 256, "radix must be less than or equal to 256"); + static_assert(radix & (radix - 1) == 0, "radix must be a power of 2"); } self.__to_le_radix(radix) } @@ -139,6 +212,7 @@ impl Field { #[builtin(to_le_radix)] fn __to_le_radix(self, radix: u32) -> [u8; N] {} + // `_radix` must be less than 256 #[builtin(to_be_radix)] fn __to_be_radix(self, radix: u32) -> [u8; N] {} @@ -172,6 +246,10 @@ impl Field { /// Convert a little endian byte array to a field element. /// If the provided byte array overflows the field modulus then the Field will silently wrap around. 
pub fn from_le_bytes(bytes: [u8; N]) -> Field { + static_assert( + N <= modulus_le_bytes().len(), + "N must be less than or equal to modulus_le_bytes().len()", + ); let mut v = 1; let mut result = 0; @@ -263,6 +341,7 @@ fn lt_fallback(x: Field, y: Field) -> bool { } mod tests { + use crate::{panic::panic, runtime}; use super::field_less_than; #[test] @@ -323,6 +402,75 @@ mod tests { } // docs:end:to_le_radix_example + #[test(should_fail_with = "radix must be greater than 1")] + fn test_to_le_radix_1() { + // this test should only fail in constrained mode + if !runtime::is_unconstrained() { + let field = 2; + let _: [u8; 8] = field.to_le_radix(1); + } else { + panic(f"radix must be greater than 1"); + } + } + + #[test] + fn test_to_le_radix_brillig_1() { + // this test should only fail in constrained mode + if runtime::is_unconstrained() { + let field = 1; + let out: [u8; 8] = field.to_le_radix(1); + crate::println(out); + let expected = [0; 8]; + assert(out == expected, "unexpected result"); + } + } + + #[test(should_fail_with = "radix must be a power of 2")] + fn test_to_le_radix_3() { + // this test should only fail in constrained mode + if !runtime::is_unconstrained() { + let field = 2; + let _: [u8; 8] = field.to_le_radix(3); + } else { + panic(f"radix must be a power of 2"); + } + } + + #[test] + fn test_to_le_radix_brillig_3() { + // this test should only fail in constrained mode + if runtime::is_unconstrained() { + let field = 1; + let out: [u8; 8] = field.to_le_radix(3); + let mut expected = [0; 8]; + expected[0] = 1; + assert(out == expected, "unexpected result"); + } + } + + #[test(should_fail_with = "radix must be less than or equal to 256")] + fn test_to_le_radix_512() { + // this test should only fail in constrained mode + if !runtime::is_unconstrained() { + let field = 2; + let _: [u8; 8] = field.to_le_radix(512); + } else { + panic(f"radix must be less than or equal to 256") + } + } + + #[test] + fn test_to_le_radix_brillig_512() { + // this test 
should only fail in constrained mode + if runtime::is_unconstrained() { + let field = 1; + let out: [u8; 8] = field.to_le_radix(512); + let mut expected = [0; 8]; + expected[0] = 1; + assert(out == expected, "unexpected result"); + } + } + #[test] unconstrained fn test_field_less_than() { assert(field_less_than(0, 1)); diff --git a/noir/noir-repo/noir_stdlib/src/meta/ctstring.nr b/noir/noir-repo/noir_stdlib/src/meta/ctstring.nr index e23567ece7d..00b4f1fdb6f 100644 --- a/noir/noir-repo/noir_stdlib/src/meta/ctstring.nr +++ b/noir/noir-repo/noir_stdlib/src/meta/ctstring.nr @@ -7,7 +7,8 @@ impl CtString { "".as_ctstring() } - // Bug: using &mut self as the object results in this method not being found + // TODO(https://github.com/noir-lang/noir/issues/6980): Bug: using &mut self + // as the object results in this method not being found // docs:start:append_str pub comptime fn append_str(self, s: str) -> Self { // docs:end:append_str diff --git a/noir/noir-repo/noir_stdlib/src/uint128.nr b/noir/noir-repo/noir_stdlib/src/uint128.nr index bcb0746832e..6c9b802f5b3 100644 --- a/noir/noir-repo/noir_stdlib/src/uint128.nr +++ b/noir/noir-repo/noir_stdlib/src/uint128.nr @@ -1,5 +1,6 @@ use crate::cmp::{Eq, Ord, Ordering}; use crate::ops::{Add, BitAnd, BitOr, BitXor, Div, Mul, Not, Rem, Shl, Shr, Sub}; +use crate::static_assert; use super::{convert::AsPrimitive, default::Default}; global pow64: Field = 18446744073709551616; //2^64; @@ -67,11 +68,10 @@ impl U128 { } pub fn from_hex(hex: str) -> U128 { - let N = N as u32; let bytes = hex.as_bytes(); // string must starts with "0x" assert((bytes[0] == 48) & (bytes[1] == 120), "Invalid hexadecimal string"); - assert(N < 35, "Input does not fit into a U128"); + static_assert(N < 35, "Input does not fit into a U128"); let mut lo = 0; let mut hi = 0; diff --git a/noir/noir-repo/test_programs/compile_failure/comptime_static_assert_failure/Nargo.toml 
b/noir/noir-repo/test_programs/compile_failure/comptime_static_assert_failure/Nargo.toml new file mode 100644 index 00000000000..006fd9f7ffe --- /dev/null +++ b/noir/noir-repo/test_programs/compile_failure/comptime_static_assert_failure/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "comptime_static_assert_failure" +type = "bin" +authors = [""] + +[dependencies] \ No newline at end of file diff --git a/noir/noir-repo/test_programs/compile_failure/comptime_static_assert_failure/src/main.nr b/noir/noir-repo/test_programs/compile_failure/comptime_static_assert_failure/src/main.nr new file mode 100644 index 00000000000..fcd757f4c94 --- /dev/null +++ b/noir/noir-repo/test_programs/compile_failure/comptime_static_assert_failure/src/main.nr @@ -0,0 +1,13 @@ +use std::static_assert; + +comptime fn foo(x: Field) -> bool { + static_assert(x == 4, "x != 4"); + x == 4 +} + +fn main() { + comptime { + static_assert(foo(3), "expected message"); + } +} + diff --git a/noir/noir-repo/test_programs/compile_success_empty/comptime_static_assert/Nargo.toml b/noir/noir-repo/test_programs/compile_success_empty/comptime_static_assert/Nargo.toml new file mode 100644 index 00000000000..4c969fe7a79 --- /dev/null +++ b/noir/noir-repo/test_programs/compile_success_empty/comptime_static_assert/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "comptime_static_assert" +type = "bin" +authors = [""] + +[dependencies] \ No newline at end of file diff --git a/noir/noir-repo/test_programs/compile_success_empty/comptime_static_assert/src/main.nr b/noir/noir-repo/test_programs/compile_success_empty/comptime_static_assert/src/main.nr new file mode 100644 index 00000000000..2ddbba7b0de --- /dev/null +++ b/noir/noir-repo/test_programs/compile_success_empty/comptime_static_assert/src/main.nr @@ -0,0 +1,19 @@ +use std::static_assert; + +comptime fn foo(x: Field) -> bool { + static_assert(x == 4, "x != 4"); + x == 4 +} + +global C: bool = { + let out = foo(2 + 2); + static_assert(out, "foo did not pass in C"); + 
out +}; + +fn main() { + comptime { + static_assert(foo(4), "foo did not pass in main"); + static_assert(C, "C did not pass") + } +} diff --git a/noir/noir-repo/test_programs/compile_success_no_bug/check_unconstrained_regression/src/main.nr b/noir/noir-repo/test_programs/compile_success_no_bug/check_unconstrained_regression/src/main.nr deleted file mode 100644 index 174b68fd162..00000000000 --- a/noir/noir-repo/test_programs/compile_success_no_bug/check_unconstrained_regression/src/main.nr +++ /dev/null @@ -1,32 +0,0 @@ -struct Trigger { - x: u32, - y: Field, - z: [Field; 3], -} -struct ResultType { - a: u32, - b: Field, - c: [Field; 3], -} - -unconstrained fn convert(trigger: Trigger) -> ResultType { - let result = ResultType { - a: trigger.x + 1, - b: trigger.y - 1 + trigger.z[2], - c: [trigger.z[0], 0, trigger.z[1]], - }; - result -} -impl Trigger { - fn execute(self) -> ResultType { - /// Safety: testing context - let result = unsafe { convert(self) }; - assert(result.a == self.x + 1); - assert(result.b == self.y - 1 + self.z[2]); - assert(result.c[1] == 0); - result - } -} -fn main(x: Trigger) -> pub ResultType { - x.execute() -} diff --git a/noir/noir-repo/test_programs/execution_failure/regression_7128/Nargo.toml b/noir/noir-repo/test_programs/execution_failure/regression_7128/Nargo.toml new file mode 100644 index 00000000000..4d7b621526a --- /dev/null +++ b/noir/noir-repo/test_programs/execution_failure/regression_7128/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "regression_7128" +type = "bin" +authors = [""] + +[dependencies] \ No newline at end of file diff --git a/noir/noir-repo/test_programs/execution_failure/regression_7128/Prover.toml b/noir/noir-repo/test_programs/execution_failure/regression_7128/Prover.toml new file mode 100644 index 00000000000..dd9b68d125e --- /dev/null +++ b/noir/noir-repo/test_programs/execution_failure/regression_7128/Prover.toml @@ -0,0 +1 @@ +in0 = "1" diff --git 
a/noir/noir-repo/test_programs/execution_failure/regression_7128/src/main.nr b/noir/noir-repo/test_programs/execution_failure/regression_7128/src/main.nr new file mode 100644 index 00000000000..46759fe90a2 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_failure/regression_7128/src/main.nr @@ -0,0 +1,26 @@ +fn main(in0: Field) -> pub Field { + let mut out0: Field = 0; + let tmp1: Field = in0; + + if (out0 == out0) // <== changing out0 to in0 or removing + { + // the comparison changes the result + let in0_as_bytes: [u8; 32] = in0.to_be_bytes(); + let mut result: [u8; 32] = [0; 32]; + for i in 0..32 { + result[i] = in0_as_bytes[i]; + } + } + + let mut tmp2: Field = 0; // <== moving this to the top of main, + if (0.lt(in0)) // changes the result + { + tmp2 = 1; + } + + out0 = (tmp2 - tmp1); + + assert(out0 != 0, "soundness violation"); + + out0 +} diff --git a/noir/noir-repo/test_programs/execution_success/regression_11294/Nargo.toml b/noir/noir-repo/test_programs/execution_success/regression_11294/Nargo.toml new file mode 100644 index 00000000000..42fcd7432ff --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/regression_11294/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "regression_11294" +version = "0.1.0" +type = "bin" +authors = [""] + +[dependencies] diff --git a/noir/noir-repo/test_programs/execution_success/regression_11294/Prover.toml b/noir/noir-repo/test_programs/execution_success/regression_11294/Prover.toml new file mode 100644 index 00000000000..c0bc12aeed9 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/regression_11294/Prover.toml @@ -0,0 +1,47 @@ +[[previous_kernel_public_inputs.end.private_call_stack]] +args_hash = "0x0c78b411fc893c51d446c08daa5741b9ba6103126c9e450bed90fcde8793168a" +returns_hash = "0x0000000000000000000000000000000000000000000000000000000000000000" +start_side_effect_counter = "0x0000000000000000000000000000000000000000000000000000000000000002" +end_side_effect_counter = 
"0x0000000000000000000000000000000000000000000000000000000000000007" + +[[previous_kernel_public_inputs.end.private_call_stack]] +args_hash = "0x0000000000000000000000000000000000000000000000000000000000000000" +returns_hash = "0x0000000000000000000000000000000000000000000000000000000000000000" +start_side_effect_counter = "0x0000000000000000000000000000000000000000000000000000000000000000" +end_side_effect_counter = "0x0000000000000000000000000000000000000000000000000000000000000000" + +[[previous_kernel_public_inputs.end.private_call_stack]] +args_hash = "0x0000000000000000000000000000000000000000000000000000000000000000" +returns_hash = "0x0000000000000000000000000000000000000000000000000000000000000000" +start_side_effect_counter = "0x0000000000000000000000000000000000000000000000000000000000000000" +end_side_effect_counter = "0x0000000000000000000000000000000000000000000000000000000000000000" + +[[previous_kernel_public_inputs.end.private_call_stack]] +args_hash = "0x0000000000000000000000000000000000000000000000000000000000000000" +returns_hash = "0x0000000000000000000000000000000000000000000000000000000000000000" +start_side_effect_counter = "0x0000000000000000000000000000000000000000000000000000000000000000" +end_side_effect_counter = "0x0000000000000000000000000000000000000000000000000000000000000000" + +[[previous_kernel_public_inputs.end.private_call_stack]] +args_hash = "0x0000000000000000000000000000000000000000000000000000000000000000" +returns_hash = "0x0000000000000000000000000000000000000000000000000000000000000000" +start_side_effect_counter = "0x0000000000000000000000000000000000000000000000000000000000000000" +end_side_effect_counter = "0x0000000000000000000000000000000000000000000000000000000000000000" + +[[previous_kernel_public_inputs.end.private_call_stack]] +args_hash = "0x0000000000000000000000000000000000000000000000000000000000000000" +returns_hash = "0x0000000000000000000000000000000000000000000000000000000000000000" 
+start_side_effect_counter = "0x0000000000000000000000000000000000000000000000000000000000000000" +end_side_effect_counter = "0x0000000000000000000000000000000000000000000000000000000000000000" + +[[previous_kernel_public_inputs.end.private_call_stack]] +args_hash = "0x0000000000000000000000000000000000000000000000000000000000000000" +returns_hash = "0x0000000000000000000000000000000000000000000000000000000000000000" +start_side_effect_counter = "0x0000000000000000000000000000000000000000000000000000000000000000" +end_side_effect_counter = "0x0000000000000000000000000000000000000000000000000000000000000000" + +[[previous_kernel_public_inputs.end.private_call_stack]] +args_hash = "0x0000000000000000000000000000000000000000000000000000000000000000" +returns_hash = "0x0000000000000000000000000000000000000000000000000000000000000000" +start_side_effect_counter = "0x0000000000000000000000000000000000000000000000000000000000000000" +end_side_effect_counter = "0x0000000000000000000000000000000000000000000000000000000000000000" diff --git a/noir/noir-repo/test_programs/execution_success/regression_11294/src/main.nr b/noir/noir-repo/test_programs/execution_success/regression_11294/src/main.nr new file mode 100644 index 00000000000..9440a8d1482 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/regression_11294/src/main.nr @@ -0,0 +1,186 @@ +// Capture the "attempt to subtract with overflow" from https://github.com/AztecProtocol/aztec-packages/pull/11294 + +pub global MAX_PRIVATE_CALL_STACK_LENGTH_PER_TX: u32 = 8; + +unconstrained fn main( + previous_kernel_public_inputs: PrivateKernelCircuitPublicInputs, +) -> pub PrivateKernelCircuitPublicInputs { + let private_inputs = PrivateKernelInnerCircuitPrivateInputs::new(previous_kernel_public_inputs); + private_inputs.execute() +} + +pub struct PrivateKernelCircuitPublicInputs { + pub end: PrivateAccumulatedData, +} + +pub struct PrivateKernelData { + pub public_inputs: PrivateKernelCircuitPublicInputs, +} + +pub 
struct PrivateAccumulatedData { + pub private_call_stack: [PrivateCallRequest; MAX_PRIVATE_CALL_STACK_LENGTH_PER_TX], +} + +pub struct PrivateCallRequest { + pub args_hash: Field, + pub returns_hash: Field, + pub start_side_effect_counter: u32, + pub end_side_effect_counter: u32, +} + +pub struct PrivateKernelCircuitPublicInputsComposer { + pub public_inputs: PrivateKernelCircuitPublicInputsBuilder, +} + +impl PrivateKernelCircuitPublicInputsComposer { + pub unconstrained fn new_from_previous_kernel( + previous_kernel_public_inputs: PrivateKernelCircuitPublicInputs, + ) -> Self { + let mut public_inputs = PrivateKernelCircuitPublicInputsBuilder { + end: PrivateAccumulatedDataBuilder { private_call_stack: BoundedVec::new() }, + }; + + let start = previous_kernel_public_inputs.end; + public_inputs.end.private_call_stack = array_to_bounded_vec(start.private_call_stack); + + PrivateKernelCircuitPublicInputsComposer { public_inputs } + } + + pub fn pop_top_call_request(&mut self) -> Self { + // Pop the top item in the call stack, which is the caller of the current call, and shouldn't be propagated to the output. 
+ let _call_request = self.public_inputs.end.private_call_stack.pop(); + *self + } + + pub fn finish(self) -> PrivateKernelCircuitPublicInputs { + self.public_inputs.finish() + } +} + +pub struct PrivateKernelCircuitPublicInputsBuilder { + pub end: PrivateAccumulatedDataBuilder, +} + +impl PrivateKernelCircuitPublicInputsBuilder { + pub fn finish(self) -> PrivateKernelCircuitPublicInputs { + PrivateKernelCircuitPublicInputs { end: self.end.finish() } + } +} + +pub struct PrivateAccumulatedDataBuilder { + pub private_call_stack: BoundedVec, +} + +impl PrivateAccumulatedDataBuilder { + pub fn finish(self) -> PrivateAccumulatedData { + PrivateAccumulatedData { private_call_stack: self.private_call_stack.storage() } + } +} + +pub struct PrivateKernelInnerCircuitPrivateInputs { + previous_kernel: PrivateKernelData, +} + +impl PrivateKernelInnerCircuitPrivateInputs { + pub fn new(public_inputs: PrivateKernelCircuitPublicInputs) -> Self { + Self { previous_kernel: PrivateKernelData { public_inputs } } + } + + unconstrained fn generate_output(self) -> PrivateKernelCircuitPublicInputs { + // XXX: Declaring `let mut composer = ` would make the circuit pass. + PrivateKernelCircuitPublicInputsComposer::new_from_previous_kernel( + self.previous_kernel.public_inputs, + ) + .pop_top_call_request() + .finish() + } + + pub fn execute(self) -> PrivateKernelCircuitPublicInputs { + // XXX: Running both this and the bottom assertion would make the circuit pass. + // assert(!is_empty(self.previous_kernel.public_inputs.end.private_call_stack[0]), "not empty before"); + + // Safety: This is where the program treated the input as mutable. 
+ let output = unsafe { self.generate_output() }; + + assert( + !is_empty(self.previous_kernel.public_inputs.end.private_call_stack[0]), + "not empty after", + ); + + output + } +} + +pub trait Empty { + fn empty() -> Self; +} + +pub fn is_empty(item: T) -> bool +where + T: Empty + Eq, +{ + item.eq(T::empty()) +} + +impl Eq for PrivateCallRequest { + fn eq(self, other: PrivateCallRequest) -> bool { + (self.args_hash == other.args_hash) + & (self.returns_hash == other.returns_hash) + & (self.start_side_effect_counter == other.start_side_effect_counter) + & (self.end_side_effect_counter == other.end_side_effect_counter) + } +} + +impl Empty for PrivateCallRequest { + fn empty() -> Self { + PrivateCallRequest { + args_hash: 0, + returns_hash: 0, + start_side_effect_counter: 0, + end_side_effect_counter: 0, + } + } +} + +// Copy of https://github.com/AztecProtocol/aztec-packages/blob/f1fd2d104d01a4582d8a48a6ab003d8791010967/noir-projects/noir-protocol-circuits/crates/types/src/utils/arrays.nr#L110 +pub fn array_length(array: [T; N]) -> u32 +where + T: Empty + Eq, +{ + // We get the length by checking the index of the first empty element. + + // Safety: This is safe because we have validated the array (see function doc above) and the emptiness + // of the element and non-emptiness of the previous element is checked below. + let length = unsafe { find_index_hint(array, |elem: T| is_empty(elem)) }; + // if length != 0 { + // assert(!is_empty(array[length - 1])); + // } + // if length != N { + // assert(is_empty(array[length])); + // } + length +} + +// Helper function to find the index of the first element in an array that satisfies a given predicate. If the element +// is not found, the function returns N as the index. +pub unconstrained fn find_index_hint( + array: [T; N], + find: fn[Env](T) -> bool, +) -> u32 { + let mut index = N; + for i in 0..N { + // We check `index == N` to ensure that we only update the index if we haven't found a match yet. 
+ if (index == N) & find(array[i]) { + index = i; + } + } + index +} + +pub unconstrained fn array_to_bounded_vec(array: [T; N]) -> BoundedVec +where + T: Empty + Eq, +{ + let len = array_length(array); + BoundedVec::from_parts_unchecked(array, len) +} diff --git a/noir/noir-repo/test_programs/execution_success/regression_7062/Nargo.toml b/noir/noir-repo/test_programs/execution_success/regression_7062/Nargo.toml new file mode 100644 index 00000000000..0e11219ad98 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/regression_7062/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "regression_7062" +type = "bin" +authors = [""] +compiler_version = ">=0.31.0" + +[dependencies] diff --git a/noir/noir-repo/test_programs/execution_success/regression_7062/Prover.toml b/noir/noir-repo/test_programs/execution_success/regression_7062/Prover.toml new file mode 100644 index 00000000000..08608e6b3ba --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/regression_7062/Prover.toml @@ -0,0 +1,2 @@ +index = 1 +value = 1 diff --git a/noir/noir-repo/test_programs/execution_success/regression_7062/src/main.nr b/noir/noir-repo/test_programs/execution_success/regression_7062/src/main.nr new file mode 100644 index 00000000000..47e7593c0e6 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/regression_7062/src/main.nr @@ -0,0 +1,10 @@ +fn main(value: Field, index: u32) { + let mut args = &[0, 1]; + args[index] = value; + /// Safety: n/a + unsafe { store(args) }; + // Dummy test to remove the 'underconstraint bug' + assert(args[0] + args[1] != 0); +} + +pub unconstrained fn store(_: [Field]) {} diff --git a/noir/noir-repo/test_programs/execution_success/regression_7128/Nargo.toml b/noir/noir-repo/test_programs/execution_success/regression_7128/Nargo.toml new file mode 100644 index 00000000000..4d7b621526a --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/regression_7128/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "regression_7128" 
+type = "bin" +authors = [""] + +[dependencies] \ No newline at end of file diff --git a/noir/noir-repo/test_programs/execution_success/regression_7128/Prover.toml b/noir/noir-repo/test_programs/execution_success/regression_7128/Prover.toml new file mode 100644 index 00000000000..dd9b68d125e --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/regression_7128/Prover.toml @@ -0,0 +1 @@ +in0 = "1" diff --git a/noir/noir-repo/test_programs/execution_success/regression_7128/src/main.nr b/noir/noir-repo/test_programs/execution_success/regression_7128/src/main.nr new file mode 100644 index 00000000000..454c2220b88 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/regression_7128/src/main.nr @@ -0,0 +1,26 @@ +fn main(in0: Field) -> pub Field { + let mut out0: Field = 0; + let tmp1: Field = in0; + + if (out0 == out0) // <== changing out0 to in0 or removing + { + // the comparison changes the result + let in0_as_bytes: [u8; 32] = in0.to_be_bytes(); + let mut result: [u8; 32] = [0; 32]; + for i in 0..32 { + result[i] = in0_as_bytes[i]; + } + } + + let mut tmp2: Field = 0; // <== moving this to the top of main, + if (0.lt(in0)) // changes the result + { + tmp2 = 1; + } + + out0 = (tmp2 - tmp1); + + assert(out0 == 0, "completeness violation"); + + out0 +} diff --git a/noir/noir-repo/test_programs/execution_success/regression_7143/Nargo.toml b/noir/noir-repo/test_programs/execution_success/regression_7143/Nargo.toml new file mode 100644 index 00000000000..1f581c8b24d --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/regression_7143/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "regression_7143" +type = "bin" +authors = [""] + +[dependencies] diff --git a/noir/noir-repo/test_programs/execution_success/regression_7143/Prover.toml b/noir/noir-repo/test_programs/execution_success/regression_7143/Prover.toml new file mode 100644 index 00000000000..f2f801df886 --- /dev/null +++ 
b/noir/noir-repo/test_programs/execution_success/regression_7143/Prover.toml @@ -0,0 +1,3 @@ +array = [0] +x = 0 +return = 1 diff --git a/noir/noir-repo/test_programs/execution_success/regression_7143/src/main.nr b/noir/noir-repo/test_programs/execution_success/regression_7143/src/main.nr new file mode 100644 index 00000000000..396ddf1a633 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/regression_7143/src/main.nr @@ -0,0 +1,3 @@ +fn main(x: u32, array: call_data(0) [bool; 1]) -> pub bool { + !array[x] +} diff --git a/noir/noir-repo/tooling/inspector/Cargo.toml b/noir/noir-repo/tooling/inspector/Cargo.toml new file mode 100644 index 00000000000..2124f7e9a28 --- /dev/null +++ b/noir/noir-repo/tooling/inspector/Cargo.toml @@ -0,0 +1,28 @@ +[package] +name = "noir_inspector" +description = "Inspector for noir build artifacts" +version.workspace = true +authors.workspace = true +edition.workspace = true +license.workspace = true +rust-version.workspace = true +repository.workspace = true + +[lints] +workspace = true + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[[bin]] +name = "noir-inspector" +path = "src/main.rs" + +[dependencies] +clap.workspace = true +serde.workspace = true +serde_json.workspace = true +color-eyre.workspace = true +const_format.workspace = true +acir.workspace = true +noirc_artifacts.workspace = true +noirc_artifacts_info.workspace = true diff --git a/noir/noir-repo/tooling/inspector/src/cli/info_cmd.rs b/noir/noir-repo/tooling/inspector/src/cli/info_cmd.rs new file mode 100644 index 00000000000..6a9db2676f2 --- /dev/null +++ b/noir/noir-repo/tooling/inspector/src/cli/info_cmd.rs @@ -0,0 +1,35 @@ +use std::path::PathBuf; + +use clap::Args; +use color_eyre::eyre; +use noirc_artifacts::program::ProgramArtifact; +use noirc_artifacts_info::{count_opcodes_and_gates_in_program, show_info_report, InfoReport}; + +#[derive(Debug, Clone, Args)] +pub(crate) struct InfoCommand { + /// 
The artifact to inspect + artifact: PathBuf, + + /// Output a JSON formatted report. Changes to this format are not currently considered breaking. + #[clap(long, hide = true)] + json: bool, +} + +pub(crate) fn run(args: InfoCommand) -> eyre::Result<()> { + let file = std::fs::File::open(args.artifact.clone())?; + let artifact: ProgramArtifact = serde_json::from_reader(file)?; + + let package_name = args + .artifact + .with_extension("") + .file_name() + .map(|s| s.to_string_lossy().to_string()) + .unwrap_or_else(|| "artifact".to_string()); + + let program_info = count_opcodes_and_gates_in_program(artifact, package_name.to_string(), None); + + let info_report = InfoReport { programs: vec![program_info] }; + show_info_report(info_report, args.json); + + Ok(()) +} diff --git a/noir/noir-repo/tooling/inspector/src/cli/mod.rs b/noir/noir-repo/tooling/inspector/src/cli/mod.rs new file mode 100644 index 00000000000..8cce6ec3a6f --- /dev/null +++ b/noir/noir-repo/tooling/inspector/src/cli/mod.rs @@ -0,0 +1,33 @@ +use clap::{command, Parser, Subcommand}; +use color_eyre::eyre; +use const_format::formatcp; + +mod info_cmd; +mod print_acir_cmd; + +const INSPECTOR_VERSION: &str = env!("CARGO_PKG_VERSION"); + +static VERSION_STRING: &str = formatcp!("version = {}\n", INSPECTOR_VERSION,); + +#[derive(Parser, Debug)] +#[command(name="Noir inspector", author, version=VERSION_STRING, about, long_about = None)] +struct InspectorCli { + #[command(subcommand)] + command: InspectorCommand, +} + +#[non_exhaustive] +#[derive(Subcommand, Clone, Debug)] +enum InspectorCommand { + Info(info_cmd::InfoCommand), + PrintAcir(print_acir_cmd::PrintAcirCommand), +} + +pub(crate) fn start_cli() -> eyre::Result<()> { + let InspectorCli { command } = InspectorCli::parse(); + + match command { + InspectorCommand::Info(args) => info_cmd::run(args), + InspectorCommand::PrintAcir(args) => print_acir_cmd::run(args), + } +} diff --git a/noir/noir-repo/tooling/inspector/src/cli/print_acir_cmd.rs 
b/noir/noir-repo/tooling/inspector/src/cli/print_acir_cmd.rs new file mode 100644 index 00000000000..f3dfe528973 --- /dev/null +++ b/noir/noir-repo/tooling/inspector/src/cli/print_acir_cmd.rs @@ -0,0 +1,21 @@ +use std::path::PathBuf; + +use clap::Args; +use color_eyre::eyre; +use noirc_artifacts::program::ProgramArtifact; + +#[derive(Debug, Clone, Args)] +pub(crate) struct PrintAcirCommand { + /// The artifact to print + artifact: PathBuf, +} + +pub(crate) fn run(args: PrintAcirCommand) -> eyre::Result<()> { + let file = std::fs::File::open(args.artifact.clone())?; + let artifact: ProgramArtifact = serde_json::from_reader(file)?; + + println!("Compiled ACIR for main:"); + println!("{}", artifact.bytecode); + + Ok(()) +} diff --git a/noir/noir-repo/tooling/inspector/src/main.rs b/noir/noir-repo/tooling/inspector/src/main.rs new file mode 100644 index 00000000000..8270fedbf2c --- /dev/null +++ b/noir/noir-repo/tooling/inspector/src/main.rs @@ -0,0 +1,8 @@ +mod cli; + +fn main() { + if let Err(report) = cli::start_cli() { + eprintln!("{report:?}"); + std::process::exit(1); + } +} diff --git a/noir/noir-repo/tooling/lsp/src/requests/code_action/fill_struct_fields.rs b/noir/noir-repo/tooling/lsp/src/requests/code_action/fill_struct_fields.rs index 739f0bf4a21..7a4d562e402 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/code_action/fill_struct_fields.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/code_action/fill_struct_fields.rs @@ -24,7 +24,7 @@ impl<'a> CodeActionFinder<'a> { return; }; - let struct_type = self.interner.get_struct(struct_id); + let struct_type = self.interner.get_type(struct_id); let struct_type = struct_type.borrow(); // First get all of the struct's fields diff --git a/noir/noir-repo/tooling/lsp/src/requests/completion.rs b/noir/noir-repo/tooling/lsp/src/requests/completion.rs index a845fd4496f..9948a29691e 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/completion.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/completion.rs @@ -18,9 
+18,9 @@ use noirc_frontend::{ AsTraitPath, AttributeTarget, BlockExpression, CallExpression, ConstructorExpression, Expression, ExpressionKind, ForLoopStatement, GenericTypeArgs, Ident, IfExpression, IntegerBitSize, ItemVisibility, LValue, Lambda, LetStatement, MemberAccessExpression, - MethodCallExpression, NoirFunction, NoirStruct, NoirTraitImpl, Path, PathKind, Pattern, - Signedness, Statement, TraitBound, TraitImplItemKind, TypeImpl, TypePath, - UnresolvedGeneric, UnresolvedGenerics, UnresolvedType, UnresolvedTypeData, + MethodCallExpression, ModuleDeclaration, NoirFunction, NoirStruct, NoirTraitImpl, Path, + PathKind, Pattern, Signedness, Statement, TraitBound, TraitImplItemKind, TypeImpl, + TypePath, UnresolvedGeneric, UnresolvedGenerics, UnresolvedType, UnresolvedTypeData, UnresolvedTypeExpression, UseTree, UseTreeKind, Visitor, }, graph::{CrateId, Dependency}, @@ -31,10 +31,10 @@ use noirc_frontend::{ }, }, hir_def::traits::Trait, - node_interner::{FuncId, NodeInterner, ReferenceId, StructId}, + node_interner::{FuncId, NodeInterner, ReferenceId, TypeId}, parser::{Item, ItemKind, ParsedSubModule}, token::{MetaAttribute, Token, Tokens}, - Kind, ParsedModule, StructType, Type, TypeBinding, + DataType, Kind, ParsedModule, Type, TypeBinding, }; use sort_text::underscore_sort_text; @@ -203,7 +203,7 @@ impl<'a> NodeFinder<'a> { return; }; - let struct_type = self.interner.get_struct(struct_id); + let struct_type = self.interner.get_type(struct_id); let struct_type = struct_type.borrow(); // First get all of the struct's fields @@ -318,9 +318,9 @@ impl<'a> NodeFinder<'a> { match module_def_id { ModuleDefId::ModuleId(id) => module_id = id, ModuleDefId::TypeId(struct_id) => { - let struct_type = self.interner.get_struct(struct_id); + let struct_type = self.interner.get_type(struct_id); self.complete_type_methods( - &Type::Struct(struct_type, vec![]), + &Type::DataType(struct_type, vec![]), &prefix, FunctionKind::Any, function_completion_kind, @@ -568,7 +568,7 @@ 
impl<'a> NodeFinder<'a> { ) { let typ = &typ; match typ { - Type::Struct(struct_type, generics) => { + Type::DataType(struct_type, generics) => { self.complete_struct_fields(&struct_type.borrow(), generics, prefix, self_prefix); } Type::MutableReference(typ) => { @@ -800,7 +800,7 @@ impl<'a> NodeFinder<'a> { fn complete_struct_fields( &mut self, - struct_type: &StructType, + struct_type: &DataType, generics: &[Type], prefix: &str, self_prefix: bool, @@ -1111,7 +1111,55 @@ impl<'a> NodeFinder<'a> { } } - /// Determine where each segment in a `use` statement is located. + /// Try to suggest the name of a module to declare based on which + /// files exist in the filesystem, excluding modules that are already declared. + fn complete_module_delcaration(&mut self, module: &ModuleDeclaration) -> Option<()> { + let filename = self.files.get_absolute_name(self.file).ok()?.into_path_buf(); + + let is_main_lib_or_mod = filename.ends_with("main.nr") + || filename.ends_with("lib.nr") + || filename.ends_with("mod.nr"); + + let paths = if is_main_lib_or_mod { + // For a "main" file we list sibling files + std::fs::read_dir(filename.parent()?) 
+ } else { + // For a non-main files we list directory children + std::fs::read_dir(filename.with_extension("")) + }; + let paths = paths.ok()?; + + // See which modules are already defined via `mod ...;` + let module_data = + &self.def_maps[&self.module_id.krate].modules()[self.module_id.local_id.0]; + let existing_children: HashSet = + module_data.children.keys().map(|ident| ident.to_string()).collect(); + + for path in paths { + let Ok(path) = path else { + continue; + }; + let file_name = path.file_name().to_string_lossy().to_string(); + let Some(name) = file_name.strip_suffix(".nr") else { + continue; + }; + if name == "main" || name == "mod" || name == "lib" { + continue; + } + if existing_children.contains(name) { + continue; + } + + let label = if module.has_semicolon { name.to_string() } else { format!("{};", name) }; + self.completion_items.push(simple_completion_item( + label, + CompletionItemKind::MODULE, + None, + )); + } + + Some(()) + } fn includes_span(&self, span: Span) -> bool { span.start() as usize <= self.byte_index && self.byte_index <= span.end() as usize @@ -1795,11 +1843,19 @@ impl<'a> Visitor for NodeFinder<'a> { trait_bound.trait_generics.accept(self); false } + + fn visit_module_declaration(&mut self, module: &ModuleDeclaration, _: Span) { + if !self.includes_span(module.ident.span()) { + return; + } + + self.complete_module_delcaration(module); + } } fn get_field_type(typ: &Type, name: &str) -> Option { match typ { - Type::Struct(struct_type, generics) => { + Type::DataType(struct_type, generics) => { Some(struct_type.borrow().get_field(name, generics)?.0) } Type::Tuple(types) => { @@ -1839,9 +1895,9 @@ fn get_array_element_type(typ: Type) -> Option { } } -fn get_type_struct_id(typ: &Type) -> Option { +fn get_type_struct_id(typ: &Type) -> Option { match typ { - Type::Struct(struct_type, _) => Some(struct_type.borrow().id), + Type::DataType(struct_type, _) => Some(struct_type.borrow().id), Type::Alias(type_alias, generics) => { let 
type_alias = type_alias.borrow(); let typ = type_alias.get_type(generics); @@ -1898,10 +1954,12 @@ fn module_def_id_from_reference_id(reference_id: ReferenceId) -> Option Some(ModuleDefId::ModuleId(module_id)), ReferenceId::Struct(struct_id) => Some(ModuleDefId::TypeId(struct_id)), + ReferenceId::Enum(enum_id) => Some(ModuleDefId::TypeId(enum_id)), ReferenceId::Trait(trait_id) => Some(ModuleDefId::TraitId(trait_id)), ReferenceId::Function(func_id) => Some(ModuleDefId::FunctionId(func_id)), ReferenceId::Alias(type_alias_id) => Some(ModuleDefId::TypeAliasId(type_alias_id)), ReferenceId::StructMember(_, _) + | ReferenceId::EnumVariant(_, _) | ReferenceId::Global(_) | ReferenceId::Local(_) | ReferenceId::Reference(_, _) => None, diff --git a/noir/noir-repo/tooling/lsp/src/requests/completion/completion_items.rs b/noir/noir-repo/tooling/lsp/src/requests/completion/completion_items.rs index db31683d51a..c8ae16bf1f4 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/completion/completion_items.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/completion/completion_items.rs @@ -6,7 +6,7 @@ use noirc_frontend::{ ast::AttributeTarget, hir::def_map::{ModuleDefId, ModuleId}, hir_def::{function::FuncMeta, stmt::HirPattern}, - node_interner::{FuncId, GlobalId, ReferenceId, StructId, TraitId, TypeAliasId}, + node_interner::{FuncId, GlobalId, ReferenceId, TraitId, TypeAliasId, TypeId}, QuotedType, Type, }; @@ -110,7 +110,7 @@ impl<'a> NodeFinder<'a> { self.completion_item_with_doc_comments(ReferenceId::Module(id), completion_item) } - fn struct_completion_item(&self, name: String, struct_id: StructId) -> CompletionItem { + fn struct_completion_item(&self, name: String, struct_id: TypeId) -> CompletionItem { let completion_item = simple_completion_item(name.clone(), CompletionItemKind::STRUCT, Some(name)); self.completion_item_with_doc_comments(ReferenceId::Struct(struct_id), completion_item) @@ -120,7 +120,7 @@ impl<'a> NodeFinder<'a> { &self, field: &str, typ: &Type, - 
struct_id: StructId, + struct_id: TypeId, field_index: usize, self_type: bool, ) -> CompletionItem { diff --git a/noir/noir-repo/tooling/lsp/src/requests/hover.rs b/noir/noir-repo/tooling/lsp/src/requests/hover.rs index 5d8c50fa47b..8d845dce13d 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/hover.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/hover.rs @@ -13,10 +13,10 @@ use noirc_frontend::{ traits::Trait, }, node_interner::{ - DefinitionId, DefinitionKind, ExprId, FuncId, GlobalId, NodeInterner, ReferenceId, - StructId, TraitId, TraitImplKind, TypeAliasId, + DefinitionId, DefinitionKind, ExprId, FuncId, GlobalId, NodeInterner, ReferenceId, TraitId, + TraitImplKind, TypeAliasId, TypeId, }, - Generics, Shared, StructType, Type, TypeAlias, TypeBinding, TypeVariable, + DataType, Generics, Shared, Type, TypeAlias, TypeBinding, TypeVariable, }; use crate::{ @@ -77,6 +77,10 @@ fn format_reference(reference: ReferenceId, args: &ProcessRequestCallbackArgs) - ReferenceId::StructMember(id, field_index) => { Some(format_struct_member(id, field_index, args)) } + ReferenceId::Enum(id) => Some(format_enum(id, args)), + ReferenceId::EnumVariant(id, variant_index) => { + Some(format_enum_variant(id, variant_index, args)) + } ReferenceId::Trait(id) => Some(format_trait(id, args)), ReferenceId::Global(id) => Some(format_global(id, args)), ReferenceId::Function(id) => Some(format_function(id, args)), @@ -122,8 +126,8 @@ fn format_module(id: ModuleId, args: &ProcessRequestCallbackArgs) -> Option String { - let struct_type = args.interner.get_struct(id); +fn format_struct(id: TypeId, args: &ProcessRequestCallbackArgs) -> String { + let struct_type = args.interner.get_type(id); let struct_type = struct_type.borrow(); let mut string = String::new(); @@ -149,12 +153,45 @@ fn format_struct(id: StructId, args: &ProcessRequestCallbackArgs) -> String { string } +fn format_enum(id: TypeId, args: &ProcessRequestCallbackArgs) -> String { + let typ = args.interner.get_type(id); + let typ 
= typ.borrow(); + + let mut string = String::new(); + if format_parent_module(ReferenceId::Enum(id), args, &mut string) { + string.push('\n'); + } + string.push_str(" "); + string.push_str("enum "); + string.push_str(&typ.name.0.contents); + format_generics(&typ.generics, &mut string); + string.push_str(" {\n"); + for field in typ.get_variants_as_written() { + string.push_str(" "); + string.push_str(&field.name.0.contents); + + if !field.params.is_empty() { + let types = field.params.iter().map(ToString::to_string).collect::>(); + string.push('('); + string.push_str(&types.join(", ")); + string.push(')'); + } + + string.push_str(",\n"); + } + string.push_str(" }"); + + append_doc_comments(args.interner, ReferenceId::Enum(id), &mut string); + + string +} + fn format_struct_member( - id: StructId, + id: TypeId, field_index: usize, args: &ProcessRequestCallbackArgs, ) -> String { - let struct_type = args.interner.get_struct(id); + let struct_type = args.interner.get_type(id); let struct_type = struct_type.borrow(); let field = struct_type.field_at(field_index); @@ -175,6 +212,39 @@ fn format_struct_member( string } +fn format_enum_variant( + id: TypeId, + field_index: usize, + args: &ProcessRequestCallbackArgs, +) -> String { + let enum_type = args.interner.get_type(id); + let enum_type = enum_type.borrow(); + let variant = enum_type.variant_at(field_index); + + let mut string = String::new(); + if format_parent_module(ReferenceId::Enum(id), args, &mut string) { + string.push_str("::"); + } + string.push_str(&enum_type.name.0.contents); + string.push('\n'); + string.push_str(" "); + string.push_str(&variant.name.0.contents); + if !variant.params.is_empty() { + let types = variant.params.iter().map(ToString::to_string).collect::>(); + string.push('('); + string.push_str(&types.join(", ")); + string.push(')'); + } + + for typ in variant.params.iter() { + string.push_str(&go_to_type_links(typ, args.interner, args.files)); + } + + append_doc_comments(args.interner, 
ReferenceId::EnumVariant(id, field_index), &mut string); + + string +} + fn format_trait(id: TraitId, args: &ProcessRequestCallbackArgs) -> String { let a_trait = args.interner.get_trait(id); @@ -368,7 +438,7 @@ fn format_function(id: FuncId, args: &ProcessRequestCallbackArgs) -> String { true } else if let Some(struct_id) = func_meta.struct_id { - let struct_type = args.interner.get_struct(struct_id); + let struct_type = args.interner.get_type(struct_id); let struct_type = struct_type.borrow(); if formatted_parent_module { string.push_str("::"); @@ -421,14 +491,24 @@ fn format_function(id: FuncId, args: &ProcessRequestCallbackArgs) -> String { string.push('('); let parameters = &func_meta.parameters; for (index, (pattern, typ, visibility)) in parameters.iter().enumerate() { + let is_self = pattern_is_self(pattern, args.interner); + + // `&mut self` is represented as a mutable reference type, not as a mutable pattern + if is_self && matches!(typ, Type::MutableReference(..)) { + string.push_str("&mut "); + } + format_pattern(pattern, args.interner, &mut string); - if !pattern_is_self(pattern, args.interner) { + + // Don't add type for `self` param + if !is_self { string.push_str(": "); if matches!(visibility, Visibility::Public) { string.push_str("pub "); } string.push_str(&format!("{}", typ)); } + if index != parameters.len() - 1 { string.push_str(", "); } @@ -685,7 +765,7 @@ impl<'a> TypeLinksGatherer<'a> { self.gather_type_links(typ); } } - Type::Struct(struct_type, generics) => { + Type::DataType(struct_type, generics) => { self.gather_struct_type_links(struct_type); for generic in generics { self.gather_type_links(generic); @@ -739,7 +819,7 @@ impl<'a> TypeLinksGatherer<'a> { } } - fn gather_struct_type_links(&mut self, struct_type: &Shared) { + fn gather_struct_type_links(&mut self, struct_type: &Shared) { let struct_type = struct_type.borrow(); if let Some(lsp_location) = to_lsp_location(self.files, struct_type.location.file, struct_type.name.span()) @@ 
-1168,4 +1248,12 @@ mod hover_tests { .await; assert!(hover_text.contains("Some docs")); } + + #[test] + async fn hover_on_function_with_mut_self() { + let hover_text = + get_hover_text("workspace", "two/src/lib.nr", Position { line: 96, character: 10 }) + .await; + assert!(hover_text.contains("fn mut_self(&mut self)")); + } } diff --git a/noir/noir-repo/tooling/lsp/src/requests/inlay_hint.rs b/noir/noir-repo/tooling/lsp/src/requests/inlay_hint.rs index c6415acb545..1798f845a31 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/inlay_hint.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/inlay_hint.rs @@ -83,25 +83,34 @@ impl<'a> InlayHintCollector<'a> { let location = Location::new(ident.span(), self.file_id); if let Some(lsp_location) = to_lsp_location(self.files, self.file_id, span) { if let Some(referenced) = self.interner.find_referenced(location) { + let include_colon = true; match referenced { ReferenceId::Global(global_id) => { let global_info = self.interner.get_global(global_id); let definition_id = global_info.definition_id; let typ = self.interner.definition_type(definition_id); - self.push_type_hint(lsp_location, &typ, editable); + self.push_type_hint(lsp_location, &typ, editable, include_colon); } ReferenceId::Local(definition_id) => { let typ = self.interner.definition_type(definition_id); - self.push_type_hint(lsp_location, &typ, editable); + self.push_type_hint(lsp_location, &typ, editable, include_colon); } ReferenceId::StructMember(struct_id, field_index) => { - let struct_type = self.interner.get_struct(struct_id); + let struct_type = self.interner.get_type(struct_id); let struct_type = struct_type.borrow(); let field = struct_type.field_at(field_index); - self.push_type_hint(lsp_location, &field.typ, false); + self.push_type_hint(lsp_location, &field.typ, false, include_colon); + } + ReferenceId::EnumVariant(type_id, variant_index) => { + let typ = self.interner.get_type(type_id); + let shared_type = typ.clone(); + let typ = typ.borrow(); + 
let variant_type = typ.variant_function_type(variant_index, shared_type); + self.push_type_hint(lsp_location, &variant_type, false, include_colon); } ReferenceId::Module(_) | ReferenceId::Struct(_) + | ReferenceId::Enum(_) | ReferenceId::Trait(_) | ReferenceId::Function(_) | ReferenceId::Alias(_) @@ -111,11 +120,21 @@ impl<'a> InlayHintCollector<'a> { } } - fn push_type_hint(&mut self, location: lsp_types::Location, typ: &Type, editable: bool) { + fn push_type_hint( + &mut self, + location: lsp_types::Location, + typ: &Type, + editable: bool, + include_colon: bool, + ) { let position = location.range.end; let mut parts = Vec::new(); - parts.push(string_part(": ")); + if include_colon { + parts.push(string_part(": ")); + } else { + parts.push(string_part(" ")); + } push_type_parts(typ, &mut parts, self.files); self.inlay_hints.push(InlayHint { @@ -209,6 +228,36 @@ impl<'a> InlayHintCollector<'a> { } } + fn collect_method_call_chain_hints(&mut self, method: &MethodCallExpression) { + let Some(object_lsp_location) = + to_lsp_location(self.files, self.file_id, method.object.span) + else { + return; + }; + + let Some(name_lsp_location) = + to_lsp_location(self.files, self.file_id, method.method_name.span()) + else { + return; + }; + + if object_lsp_location.range.end.line >= name_lsp_location.range.start.line { + return; + } + + let object_location = Location::new(method.object.span, self.file_id); + let Some(typ) = self.interner.type_at_location(object_location) else { + return; + }; + + self.push_type_hint( + object_lsp_location, + &typ, + false, // not editable + false, // don't include colon + ); + } + fn get_pattern_name(&self, pattern: &HirPattern) -> Option { match pattern { HirPattern::Identifier(ident) => { @@ -349,6 +398,10 @@ impl<'a> Visitor for InlayHintCollector<'a> { &method_call_expression.arguments, ); + if self.options.chaining_hints.enabled { + self.collect_method_call_chain_hints(method_call_expression); + } + true } @@ -410,7 +463,7 @@ fn 
push_type_parts(typ: &Type, parts: &mut Vec, files: &File } parts.push(string_part(")")); } - Type::Struct(struct_type, generics) => { + Type::DataType(struct_type, generics) => { let struct_type = struct_type.borrow(); let location = Location::new(struct_type.name.span(), struct_type.location.file); parts.push(text_part_with_location(struct_type.name.to_string(), location, files)); @@ -540,7 +593,9 @@ fn get_expression_name(expression: &Expression) -> Option { #[cfg(test)] mod inlay_hints_tests { use crate::{ - requests::{ClosingBraceHintsOptions, ParameterHintsOptions, TypeHintsOptions}, + requests::{ + ChainingHintsOptions, ClosingBraceHintsOptions, ParameterHintsOptions, TypeHintsOptions, + }, test_utils, }; @@ -577,6 +632,7 @@ mod inlay_hints_tests { type_hints: TypeHintsOptions { enabled: false }, parameter_hints: ParameterHintsOptions { enabled: false }, closing_brace_hints: ClosingBraceHintsOptions { enabled: false, min_lines: 25 }, + chaining_hints: ChainingHintsOptions { enabled: false }, } } @@ -585,6 +641,7 @@ mod inlay_hints_tests { type_hints: TypeHintsOptions { enabled: true }, parameter_hints: ParameterHintsOptions { enabled: false }, closing_brace_hints: ClosingBraceHintsOptions { enabled: false, min_lines: 25 }, + chaining_hints: ChainingHintsOptions { enabled: false }, } } @@ -593,6 +650,7 @@ mod inlay_hints_tests { type_hints: TypeHintsOptions { enabled: false }, parameter_hints: ParameterHintsOptions { enabled: true }, closing_brace_hints: ClosingBraceHintsOptions { enabled: false, min_lines: 25 }, + chaining_hints: ChainingHintsOptions { enabled: false }, } } @@ -601,6 +659,16 @@ mod inlay_hints_tests { type_hints: TypeHintsOptions { enabled: false }, parameter_hints: ParameterHintsOptions { enabled: false }, closing_brace_hints: ClosingBraceHintsOptions { enabled: true, min_lines }, + chaining_hints: ChainingHintsOptions { enabled: false }, + } + } + + fn chaining_hints() -> InlayHintsOptions { + InlayHintsOptions { + type_hints: 
TypeHintsOptions { enabled: false }, + parameter_hints: ParameterHintsOptions { enabled: false }, + closing_brace_hints: ClosingBraceHintsOptions { enabled: false, min_lines: 0 }, + chaining_hints: ChainingHintsOptions { enabled: true }, } } @@ -955,4 +1023,39 @@ mod inlay_hints_tests { panic!("Expected InlayHintLabel::String, got {:?}", inlay_hint.label); } } + + #[test] + async fn test_shows_receiver_type_in_multiline_method_call() { + let mut inlay_hints = get_inlay_hints(125, 130, chaining_hints()).await; + assert_eq!(inlay_hints.len(), 3); + + inlay_hints.sort_by_key(|hint| hint.position.line); + + let inlay_hint = &inlay_hints[0]; + assert_eq!(inlay_hint.position.line, 125); + assert_eq!(inlay_hint.position.character, 59); + let InlayHintLabel::LabelParts(parts) = &inlay_hint.label else { + panic!("Expected label parts"); + }; + let label = parts.iter().map(|part| part.value.clone()).collect::>().join(""); + assert_eq!(label, " [u32; 14]"); + + let inlay_hint = &inlay_hints[1]; + assert_eq!(inlay_hint.position.line, 126); + assert_eq!(inlay_hint.position.character, 37); + let InlayHintLabel::LabelParts(parts) = &inlay_hint.label else { + panic!("Expected label parts"); + }; + let label = parts.iter().map(|part| part.value.clone()).collect::>().join(""); + assert_eq!(label, " [u32; 14]"); + + let inlay_hint = &inlay_hints[2]; + assert_eq!(inlay_hint.position.line, 127); + assert_eq!(inlay_hint.position.character, 23); + let InlayHintLabel::LabelParts(parts) = &inlay_hint.label else { + panic!("Expected label parts"); + }; + let label = parts.iter().map(|part| part.value.clone()).collect::>().join(""); + assert_eq!(label, " bool"); + } } diff --git a/noir/noir-repo/tooling/lsp/src/requests/mod.rs b/noir/noir-repo/tooling/lsp/src/requests/mod.rs index 80f4a167a04..334599e8f3d 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/mod.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/mod.rs @@ -90,6 +90,9 @@ pub(crate) struct InlayHintsOptions { #[serde(rename = 
"closingBraceHints", default = "default_closing_brace_hints")] pub(crate) closing_brace_hints: ClosingBraceHintsOptions, + + #[serde(rename = "chainingHints", default = "default_chaining_hints")] + pub(crate) chaining_hints: ChainingHintsOptions, } #[derive(Debug, Deserialize, Serialize, Copy, Clone)] @@ -113,6 +116,12 @@ pub(crate) struct ClosingBraceHintsOptions { pub(crate) min_lines: u32, } +#[derive(Debug, Deserialize, Serialize, Copy, Clone)] +pub(crate) struct ChainingHintsOptions { + #[serde(rename = "enabled", default = "default_chaining_hints_enabled")] + pub(crate) enabled: bool, +} + fn default_enable_code_lens() -> bool { true } @@ -126,6 +135,7 @@ fn default_inlay_hints() -> InlayHintsOptions { type_hints: default_type_hints(), parameter_hints: default_parameter_hints(), closing_brace_hints: default_closing_brace_hints(), + chaining_hints: default_chaining_hints(), } } @@ -160,6 +170,14 @@ fn default_closing_brace_min_lines() -> u32 { 25 } +fn default_chaining_hints() -> ChainingHintsOptions { + ChainingHintsOptions { enabled: default_chaining_hints_enabled() } +} + +fn default_chaining_hints_enabled() -> bool { + true +} + impl Default for LspInitializationOptions { fn default() -> Self { Self { diff --git a/noir/noir-repo/tooling/lsp/src/trait_impl_method_stub_generator.rs b/noir/noir-repo/tooling/lsp/src/trait_impl_method_stub_generator.rs index eb1709e34d0..4e505eb5e12 100644 --- a/noir/noir-repo/tooling/lsp/src/trait_impl_method_stub_generator.rs +++ b/noir/noir-repo/tooling/lsp/src/trait_impl_method_stub_generator.rs @@ -181,7 +181,7 @@ impl<'a> TraitImplMethodStubGenerator<'a> { } self.string.push(')'); } - Type::Struct(struct_type, generics) => { + Type::DataType(struct_type, generics) => { let struct_type = struct_type.borrow(); let current_module_data = diff --git a/noir/noir-repo/tooling/lsp/test_programs/inlay_hints/src/main.nr b/noir/noir-repo/tooling/lsp/test_programs/inlay_hints/src/main.nr index 46a6d3bc558..64eca72a667 100644 ---
a/noir/noir-repo/tooling/lsp/test_programs/inlay_hints/src/main.nr +++ b/noir/noir-repo/tooling/lsp/test_programs/inlay_hints/src/main.nr @@ -119,4 +119,12 @@ mod some_module { contract some_contract { +}} + +use std::ops::Not; +pub fn chain() { + let _ = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14] + .map(|x| x + 123456789012345) + .any(|x| x > 5) + .not(); } diff --git a/noir/noir-repo/tooling/lsp/test_programs/workspace/two/src/lib.nr b/noir/noir-repo/tooling/lsp/test_programs/workspace/two/src/lib.nr index d18a663b276..aacc4508756 100644 --- a/noir/noir-repo/tooling/lsp/test_programs/workspace/two/src/lib.nr +++ b/noir/noir-repo/tooling/lsp/test_programs/workspace/two/src/lib.nr @@ -93,3 +93,6 @@ impl TraitWithDocs for Field { fn foo() {} } +impl Foo { + fn mut_self(&mut self) {} +} diff --git a/noir/noir-repo/tooling/nargo_cli/Cargo.toml b/noir/noir-repo/tooling/nargo_cli/Cargo.toml index 001306bb162..92eeed1b391 100644 --- a/noir/noir-repo/tooling/nargo_cli/Cargo.toml +++ b/noir/noir-repo/tooling/nargo_cli/Cargo.toml @@ -43,6 +43,7 @@ noirc_frontend = { workspace = true, features = ["bn254"] } noirc_abi.workspace = true noirc_errors.workspace = true noirc_artifacts.workspace = true +noirc_artifacts_info.workspace = true acvm = { workspace = true, features = ["bn254"] } bn254_blackbox_solver.workspace = true toml.workspace = true diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/compile_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/compile_cmd.rs index 0af05703c9a..8a4b991a234 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/compile_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/compile_cmd.rs @@ -335,6 +335,7 @@ mod tests { use noirc_driver::{CompileOptions, CrateName}; use crate::cli::compile_cmd::{get_target_width, parse_workspace, read_workspace}; + use crate::cli::test_cmd::formatters::diagnostic_to_string; /// Try to find the directory that Cargo sets when it is running; /// otherwise fallback to assuming the CWD is the root of the 
repository @@ -414,7 +415,12 @@ mod tests { &CompileOptions::default(), None, ) - .expect("failed to compile"); + .unwrap_or_else(|err| { + for diagnostic in err { + println!("{}", diagnostic_to_string(&diagnostic, &file_manager)); + } + panic!("Failed to compile") + }); let width = get_target_width(package.expression_width, None); diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/execute_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/execute_cmd.rs index 4f8fec739f9..43927fb8b46 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/execute_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/execute_cmd.rs @@ -111,18 +111,9 @@ fn execute_program_and_decode( package_name: Option, pedantic_solving: bool, ) -> Result { - let read_inputs = - |format| read_inputs_from_file(&package.root_dir, prover_name, format, &program.abi); - - // Parse the initial witness values from Prover.toml or Prover.json - let (inputs_map, expected_return) = read_inputs(Format::Toml).or_else(|e1| match &e1 { - FilesystemError::MissingTomlFile(..) => read_inputs(Format::Json).map_err(|e2| match e2 { - FilesystemError::MissingTomlFile(..) 
=> e1, - _ => e2, - }), - _ => Err(e1), - })?; - + // Parse the initial witness values from Prover.toml + let (inputs_map, expected_return) = + read_inputs_from_file(&package.root_dir, prover_name, Format::Toml, &program.abi)?; let witness_stack = execute_program( &program, &inputs_map, diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/info_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/info_cmd.rs index 8d0fc257e1c..a41eb547e4f 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/info_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/info_cmd.rs @@ -8,16 +8,19 @@ use nargo::{ use nargo_toml::{get_package_manifest, resolve_workspace_from_toml}; use noirc_abi::input_parser::Format; use noirc_artifacts::program::ProgramArtifact; +use noirc_artifacts_info::{ + count_opcodes_and_gates_in_program, show_info_report, FunctionInfo, InfoReport, ProgramInfo, +}; use noirc_driver::{CompileOptions, NOIR_ARTIFACT_VERSION_STRING}; -use prettytable::{row, table, Row}; +use prettytable::{row, Row}; use rayon::prelude::*; use serde::Serialize; -use crate::{cli::fs::inputs::read_inputs_from_file, errors::CliError}; +use crate::errors::CliError; use super::{ compile_cmd::{compile_workspace_full, get_target_width}, - fs::program::read_program_from_file, + fs::{inputs::read_inputs_from_file, program::read_program_from_file}, NargoConfig, PackageOptions, }; @@ -94,74 +97,18 @@ pub(crate) fn run(mut args: InfoCommand, config: NargoConfig) -> Result<(), CliE package.expression_width, args.compile_options.expression_width, ); - count_opcodes_and_gates_in_program(program, &package, target_width) + let package_name = package.name.to_string(); + count_opcodes_and_gates_in_program(program, package_name, Some(target_width)) }) .collect() }; let info_report = InfoReport { programs: program_info }; - - if args.json { - // Expose machine-readable JSON data. - println!("{}", serde_json::to_string(&info_report).unwrap()); - } else { - // Otherwise print human-readable table. 
- if !info_report.programs.is_empty() { - let mut program_table = table!([Fm->"Package", Fm->"Function", Fm->"Expression Width", Fm->"ACIR Opcodes", Fm->"Brillig Opcodes"]); - - for program_info in info_report.programs { - let program_rows: Vec = program_info.into(); - for row in program_rows { - program_table.add_row(row); - } - } - program_table.printstd(); - } - } + show_info_report(info_report, args.json); Ok(()) } -#[derive(Debug, Default, Serialize)] -struct InfoReport { - programs: Vec, -} - -#[derive(Debug, Serialize)] -struct ProgramInfo { - package_name: String, - #[serde(skip)] - expression_width: ExpressionWidth, - functions: Vec, - #[serde(skip)] - unconstrained_functions_opcodes: usize, - unconstrained_functions: Vec, -} - -impl From for Vec { - fn from(program_info: ProgramInfo) -> Self { - let mut main = vecmap(program_info.functions, |function| { - row![ - Fm->format!("{}", program_info.package_name), - Fc->format!("{}", function.name), - format!("{:?}", program_info.expression_width), - Fc->format!("{}", function.opcodes), - Fc->format!("{}", program_info.unconstrained_functions_opcodes), - ] - }); - main.extend(vecmap(program_info.unconstrained_functions, |function| { - row![ - Fm->format!("{}", program_info.package_name), - Fc->format!("{}", function.name), - format!("N/A", ), - Fc->format!("N/A"), - Fc->format!("{}", function.opcodes), - ] - })); - main - } -} - #[derive(Debug, Serialize)] struct ContractInfo { name: String, @@ -171,12 +118,6 @@ struct ContractInfo { functions: Vec, } -#[derive(Debug, Serialize)] -struct FunctionInfo { - name: String, - opcodes: usize, -} - impl From for Vec { fn from(contract_info: ContractInfo) -> Self { vecmap(contract_info.functions, |function| { @@ -190,51 +131,6 @@ impl From for Vec { } } -fn count_opcodes_and_gates_in_program( - compiled_program: ProgramArtifact, - package: &Package, - expression_width: ExpressionWidth, -) -> ProgramInfo { - let functions = compiled_program - .bytecode - .functions - 
.into_par_iter() - .enumerate() - .map(|(i, function)| FunctionInfo { - name: compiled_program.names[i].clone(), - opcodes: function.opcodes.len(), - }) - .collect(); - - let opcodes_len: Vec = compiled_program - .bytecode - .unconstrained_functions - .iter() - .map(|func| func.bytecode.len()) - .collect(); - let unconstrained_functions_opcodes = compiled_program - .bytecode - .unconstrained_functions - .into_par_iter() - .map(|function| function.bytecode.len()) - .sum(); - let unconstrained_info: Vec = compiled_program - .brillig_names - .clone() - .iter() - .zip(opcodes_len) - .map(|(name, len)| FunctionInfo { name: name.clone(), opcodes: len }) - .collect(); - - ProgramInfo { - package_name: package.name.to_string(), - expression_width, - functions, - unconstrained_functions_opcodes, - unconstrained_functions: unconstrained_info, - } -} - fn profile_brillig_execution( binary_packages: Vec<(Package, ProgramArtifact)>, prover_name: &str, @@ -243,7 +139,7 @@ fn profile_brillig_execution( ) -> Result, CliError> { let mut program_info = Vec::new(); for (package, program_artifact) in binary_packages.iter() { - // Parse the initial witness values from Prover.toml + // Parse the initial witness values from Prover.toml or Prover.json let (inputs_map, _) = read_inputs_from_file( &package.root_dir, prover_name, @@ -270,7 +166,7 @@ fn profile_brillig_execution( program_info.push(ProgramInfo { package_name: package.name.to_string(), - expression_width, + expression_width: Some(expression_width), functions: vec![FunctionInfo { name: "main".to_string(), opcodes: 0 }], unconstrained_functions_opcodes: profiling_samples.len(), unconstrained_functions: vec![FunctionInfo { diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd.rs index 3072faee1dc..6c8e548309c 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd.rs @@ -26,7 +26,7 @@ use 
crate::{cli::check_cmd::check_crate_and_report_errors, errors::CliError}; use super::{NargoConfig, PackageOptions}; -mod formatters; +pub(crate) mod formatters; /// Run the tests for this program #[derive(Debug, Clone, Args)] diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd/formatters.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd/formatters.rs index 75cf14ba120..bc4621c92ea 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd/formatters.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd/formatters.rs @@ -514,7 +514,10 @@ fn package_start(package_name: &str, test_count: usize) -> std::io::Result<()> { Ok(()) } -fn diagnostic_to_string(file_diagnostic: &FileDiagnostic, file_manager: &FileManager) -> String { +pub(crate) fn diagnostic_to_string( + file_diagnostic: &FileDiagnostic, + file_manager: &FileManager, +) -> String { let file_map = file_manager.as_file_map(); let custom_diagnostic = &file_diagnostic.diagnostic; diff --git a/noir/noir-repo/tooling/nargo_fmt/src/formatter/statement.rs b/noir/noir-repo/tooling/nargo_fmt/src/formatter/statement.rs index 27d558ec92b..751bc419d4a 100644 --- a/noir/noir-repo/tooling/nargo_fmt/src/formatter/statement.rs +++ b/noir/noir-repo/tooling/nargo_fmt/src/formatter/statement.rs @@ -75,7 +75,7 @@ impl<'a, 'b> ChunkFormatter<'a, 'b> { StatementKind::For(for_loop_statement) => { group.group(self.format_for_loop(for_loop_statement)); } - StatementKind::Loop(block) => { + StatementKind::Loop(block, _) => { group.group(self.format_loop(block)); } StatementKind::Break => { diff --git a/noir/noir-repo/tooling/noirc_artifacts_info/Cargo.toml b/noir/noir-repo/tooling/noirc_artifacts_info/Cargo.toml new file mode 100644 index 00000000000..0b8d18eef2f --- /dev/null +++ b/noir/noir-repo/tooling/noirc_artifacts_info/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "noirc_artifacts_info" +description = "The logic for `nargo info` and `nargo-inspector`" +version.workspace = true +authors.workspace = true 
+edition.workspace = true +rust-version.workspace = true +license.workspace = true + +[lints] +workspace = true + +[dependencies] +clap.workspace = true +serde.workspace = true +serde_json.workspace = true +acir.workspace = true +noirc_artifacts.workspace = true +rayon.workspace = true +acvm = { workspace = true, features = ["bn254"] } +iter-extended.workspace = true +prettytable-rs = "0.10" diff --git a/noir/noir-repo/tooling/noirc_artifacts_info/src/lib.rs b/noir/noir-repo/tooling/noirc_artifacts_info/src/lib.rs new file mode 100644 index 00000000000..6f4c80accbd --- /dev/null +++ b/noir/noir-repo/tooling/noirc_artifacts_info/src/lib.rs @@ -0,0 +1,122 @@ +use acvm::acir::circuit::ExpressionWidth; +use iter_extended::vecmap; +use noirc_artifacts::program::ProgramArtifact; +use prettytable::{row, table, Row}; +use rayon::iter::{IndexedParallelIterator, IntoParallelIterator, ParallelIterator}; +use serde::Serialize; + +#[derive(Debug, Default, Serialize)] +pub struct InfoReport { + pub programs: Vec, +} + +#[derive(Debug, Serialize)] +pub struct ProgramInfo { + pub package_name: String, + #[serde(skip)] + pub expression_width: Option, + pub functions: Vec, + #[serde(skip)] + pub unconstrained_functions_opcodes: usize, + pub unconstrained_functions: Vec, +} + +impl From for Vec { + fn from(program_info: ProgramInfo) -> Self { + let expression_width = if let Some(expression_width) = program_info.expression_width { + format!("{:?}", expression_width) + } else { + "N/A".to_string() + }; + let mut main = vecmap(program_info.functions, |function| { + row![ + Fm->format!("{}", program_info.package_name), + Fc->format!("{}", function.name), + format!("{}", expression_width), + Fc->format!("{}", function.opcodes), + Fc->format!("{}", program_info.unconstrained_functions_opcodes), + ] + }); + main.extend(vecmap(program_info.unconstrained_functions, |function| { + row![ + Fm->format!("{}", program_info.package_name), + Fc->format!("{}", function.name), + format!("N/A", ), + 
Fc->format!("N/A"), + Fc->format!("{}", function.opcodes), + ] + })); + main + } +} + +#[derive(Debug, Serialize)] +pub struct FunctionInfo { + pub name: String, + pub opcodes: usize, +} + +pub fn count_opcodes_and_gates_in_program( + compiled_program: ProgramArtifact, + package_name: String, + expression_width: Option, +) -> ProgramInfo { + let functions = compiled_program + .bytecode + .functions + .into_par_iter() + .enumerate() + .map(|(i, function)| FunctionInfo { + name: compiled_program.names[i].clone(), + opcodes: function.opcodes.len(), + }) + .collect(); + + let opcodes_len: Vec = compiled_program + .bytecode + .unconstrained_functions + .iter() + .map(|func| func.bytecode.len()) + .collect(); + let unconstrained_functions_opcodes = compiled_program + .bytecode + .unconstrained_functions + .into_par_iter() + .map(|function| function.bytecode.len()) + .sum(); + let unconstrained_info: Vec = compiled_program + .brillig_names + .clone() + .iter() + .zip(opcodes_len) + .map(|(name, len)| FunctionInfo { name: name.clone(), opcodes: len }) + .collect(); + + ProgramInfo { + package_name, + expression_width, + functions, + unconstrained_functions_opcodes, + unconstrained_functions: unconstrained_info, + } +} + +pub fn show_info_report(info_report: InfoReport, json: bool) { + if json { + // Expose machine-readable JSON data. + println!("{}", serde_json::to_string(&info_report).unwrap()); + } else { + // Otherwise print human-readable table. + if !info_report.programs.is_empty() { + let mut program_table = table!([Fm->"Package", Fm->"Function", Fm->"Expression Width", Fm->"ACIR Opcodes", Fm->"Brillig Opcodes"]); + + for program_info in info_report.programs { + let program_rows: Vec = program_info.into(); + for row in program_rows { + program_table.add_row(row); + } + } + program_table.printstd(); + } + } +}