diff --git a/app-server/Cargo.lock b/app-server/Cargo.lock index 55dd7f2a..90dc168a 100644 --- a/app-server/Cargo.lock +++ b/app-server/Cargo.lock @@ -39,7 +39,7 @@ dependencies = [ "encoding_rs", "flate2", "futures-core", - "h2", + "h2 0.3.26", "http 0.2.12", "httparse", "httpdate", @@ -65,7 +65,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e01ed3140b2f8d422c68afa1ed2e85d996ea619c988ac834d255db32138655cb" dependencies = [ "quote", - "syn 2.0.77", + "syn", ] [[package]] @@ -104,7 +104,7 @@ dependencies = [ "parse-size", "proc-macro2", "quote", - "syn 2.0.77", + "syn", ] [[package]] @@ -221,7 +221,7 @@ dependencies = [ "actix-router", "proc-macro2", "quote", - "syn 2.0.77", + "syn", ] [[package]] @@ -241,9 +241,9 @@ dependencies = [ [[package]] name = "addr2line" -version = "0.24.1" +version = "0.24.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f5fb1d8e4442bd405fdfd1dacb42792696b0cf9cb15882e5d097b742a676d375" +checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" dependencies = [ "gimli", ] @@ -311,9 +311,9 @@ checksum = "5c6cb57a04249c6480766f7f7cef5467412af1490f8d1e243141daddada3264f" [[package]] name = "amq-protocol" -version = "7.2.1" +version = "7.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f0234884b3641db74d22ccc20fc2594db5f23d7d41ade5c93d7ee33d200960c" +checksum = "e3a41c091e49edfcc098b4f90d4d7706a8cf9158034e84ebfee7ff346092f67c" dependencies = [ "amq-protocol-tcp", "amq-protocol-types", @@ -325,9 +325,9 @@ dependencies = [ [[package]] name = "amq-protocol-tcp" -version = "7.2.1" +version = "7.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "265dca43d9dbb3d5bbb0b3ef1b0cd9044ce3aa5d697d5b66cde974d1f6063f09" +checksum = "3ed7a4a662472f88823ed2fc81babb0b00562f2c54284e3e7bffc02b6df649bf" dependencies = [ "amq-protocol-uri", "tcp-stream", @@ -336,9 +336,9 @@ dependencies = [ [[package]] name = "amq-protocol-types" -version = "7.2.1" +version = "7.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7412353b58923fa012feb9a64ccc0c811747babee2e5a2fd63eb102dc8054c3" +checksum = "bd6484fdc918c1b6e2ae8eda2914d19a5873e1975f93ad8d33d6a24d1d98df05" dependencies = [ "cookie-factory", "nom", @@ -348,9 +348,9 @@ dependencies = [ [[package]] name = "amq-protocol-uri" -version = "7.2.1" +version = "7.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2be91352c805d5704784e079117d5291fd5bf2569add53c914ebce6d1a795d33" +checksum = "7f7f2da69e0e1182765bf33407cd8a843f20791b5af2b57a2645818c4776c56c" dependencies = [ "amq-protocol-types", "percent-encoding", @@ -423,9 +423,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.88" +version = "1.0.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e1496f8fb1fbf272686b8d37f523dab3e4a7443300055e74cdaa449f3114356" +checksum = "86fdf8605db99b54d3cd748a44c6d04df638eb5dafb219b135d0149bd0db01f6" [[package]] name = "app-server" @@ -471,7 +471,7 @@ dependencies = [ "regex", "reqwest", "reqwest-eventsource", - "rustls 0.23.13", + "rustls 0.23.14", "serde", "serde-jsonlines", "serde_json", @@ -513,7 +513,7 @@ checksum = "965c2d33e53cb6b267e148a4cb0760bc01f4904c1cd4bb4002a085bb016d1490" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn", "synstructure", ] @@ -525,7 +525,7 @@ checksum = "7b18050c2cd6fe86c3a76584ef5e0baf286d038cda203eb6223df2cc413565f7" dependencies = [ "proc-macro2", 
"quote", - "syn 2.0.77", + "syn", ] [[package]] @@ -652,9 +652,9 @@ dependencies = [ [[package]] name = "async-stream" -version = "0.3.5" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd56dd203fef61ac097dd65721a419ddccb106b2d2b70ba60a6b529f03961a51" +checksum = "0b5a71a6f37880a80d1d7f19efd781e4b5de42c88f0722cc13bcb6cc2cfe8476" dependencies = [ "async-stream-impl", "futures-core", @@ -663,13 +663,13 @@ dependencies = [ [[package]] name = "async-stream-impl" -version = "0.3.5" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" +checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn", ] [[package]] @@ -680,13 +680,13 @@ checksum = "8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de" [[package]] name = "async-trait" -version = "0.1.82" +version = "0.1.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a27b8a3a6e1a44fa4c8baf1f653e4172e81486d4941f2237e20dc2d0cf4ddff1" +checksum = "721cae7de5c34fbb2acd27e21e6d2cf7b886dce0c27388d46c4e6c47ea4318dd" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn", ] [[package]] @@ -720,15 +720,15 @@ dependencies = [ [[package]] name = "autocfg" -version = "1.3.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" +checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" [[package]] name = "aws-config" -version = "1.5.6" +version = "1.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "848d7b9b605720989929279fa644ce8f244d0ce3146fcca5b70e4eb7b3c020fc" +checksum = "8191fb3091fa0561d1379ef80333c3c7191c6f0435d986e85821bcf7acbd1126" dependencies = [ "aws-credential-types", "aws-runtime", @@ -768,9 +768,9 @@ dependencies = [ [[package]] name = "aws-lc-rs" -version = "1.9.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f95446d919226d587817a7d21379e6eb099b97b45110a7f272a444ca5c54070" +checksum = "cdd82dba44d209fddb11c190e0a94b78651f95299598e472215667417a03ff1d" dependencies = [ "aws-lc-sys", "mirai-annotations", @@ -780,9 +780,9 @@ dependencies = [ [[package]] name = "aws-lc-sys" -version = "0.21.1" +version = "0.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "234314bd569802ec87011d653d6815c6d7b9ffb969e9fee5b8b20ef860e8dce9" +checksum = "df7a4168111d7eb622a31b214057b8509c0a7e1794f44c546d742330dc793972" dependencies = [ "bindgen", "cc", @@ -820,9 +820,9 @@ dependencies = [ [[package]] name = "aws-sdk-bedrockruntime" -version = "1.49.0" +version = "1.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84f36b4d7320eadeb25078fd82a2cf2022df9be5346690791e1ffa35839c0bfb" +checksum = "674caa49696129cc1d1b944e2bbd84842df37f090788696c08901d792ce420e0" dependencies = [ "aws-credential-types", "aws-runtime", @@ -843,9 +843,9 @@ dependencies = [ [[package]] name = "aws-sdk-sso" -version = "1.42.0" +version = "1.44.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "27bf24cd0d389daa923e974b0e7c38daf308fc21e963c049f57980235017175e" +checksum = "0b90cfe6504115e13c41d3ea90286ede5aa14da294f3fe077027a6e83850843c" dependencies = [ "aws-credential-types", "aws-runtime", @@ -865,9 
+865,9 @@ dependencies = [ [[package]] name = "aws-sdk-ssooidc" -version = "1.43.0" +version = "1.45.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b43b3220f1c46ac0e9dcc0a97d94b93305dacb36d1dd393996300c6b9b74364" +checksum = "167c0fad1f212952084137308359e8e4c4724d1c643038ce163f06de9662c1d0" dependencies = [ "aws-credential-types", "aws-runtime", @@ -887,9 +887,9 @@ dependencies = [ [[package]] name = "aws-sdk-sts" -version = "1.42.0" +version = "1.44.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d1c46924fb1add65bba55636e12812cae2febf68c0f37361766f627ddcca91ce" +checksum = "2cb5f98188ec1435b68097daa2a37d74b9d17c9caa799466338a8d1544e71b9d" dependencies = [ "aws-credential-types", "aws-runtime", @@ -1005,7 +1005,7 @@ dependencies = [ "aws-smithy-types", "bytes", "fastrand 2.1.1", - "h2", + "h2 0.3.26", "http 0.2.12", "http-body 0.4.6", "http-body 1.0.1", @@ -1039,9 +1039,9 @@ dependencies = [ [[package]] name = "aws-smithy-types" -version = "1.2.6" +version = "1.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03701449087215b5369c7ea17fef0dd5d24cb93439ec5af0c7615f58c3f22605" +checksum = "147100a7bea70fa20ef224a6bad700358305f5dc0f84649c53769761395b355b" dependencies = [ "base64-simd", "bytes", @@ -1088,18 +1088,17 @@ dependencies = [ [[package]] name = "axum" -version = "0.6.20" +version = "0.7.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b829e4e32b91e643de6eafe82b1d90675f5874230191a4ffbc1b336dec4d6bf" +checksum = "504e3947307ac8326a5437504c517c4b56716c9d98fac0028c2acc7ca47d70ae" dependencies = [ "async-trait", "axum-core", - "bitflags 1.3.2", "bytes", "futures-util", - "http 0.2.12", - "http-body 0.4.6", - "hyper 0.14.30", + "http 1.1.0", + "http-body 1.0.1", + "http-body-util", "itoa", "matchit", "memchr", @@ -1108,25 +1107,28 @@ dependencies = [ "pin-project-lite", "rustversion", "serde", - "sync_wrapper 0.1.2", - "tower", + "sync_wrapper 1.0.1", + "tower 0.5.1", "tower-layer", "tower-service", ] [[package]] name = "axum-core" -version = "0.3.4" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "759fa577a247914fd3f7f76d62972792636412fbfd634cd452f6a385a74d2d2c" +checksum = "09f2bd6146b97ae3359fa0cc6d6b376d9539582c7b4220f041a33ec24c226199" dependencies = [ "async-trait", "bytes", "futures-util", - "http 0.2.12", - "http-body 0.4.6", + "http 1.1.0", + "http-body 1.0.1", + "http-body-util", "mime", + "pin-project-lite", "rustversion", + "sync_wrapper 1.0.1", "tower-layer", "tower-service", ] @@ -1165,18 +1167,12 @@ name = "baml-types" version = "0.40.1" source = "git+https://github.com/lmnr-ai/lmnr-baml?branch=rust#f04273b61c115b2331733687e1270ee6289ae446" dependencies = [ - "indexmap 2.5.0", + "indexmap 2.6.0", "minijinja", "serde", "serde_json", ] -[[package]] -name = "base64" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" - [[package]] name = "base64" version = "0.21.7" @@ -1237,13 +1233,13 @@ dependencies = [ "lazy_static", "lazycell", "log", - "prettyplease 0.2.22", + "prettyplease", "proc-macro2", "quote", "regex", "rustc-hash 1.1.0", "shlex", - "syn 2.0.77", + "syn", "which", ] @@ -1336,7 +1332,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "40723b8fb387abc38f4f4a37c09073622e41dd12327033091ef8950659e6dc0c" dependencies = [ "memchr", - "regex-automata 
0.4.7", + "regex-automata 0.4.8", "serde", ] @@ -1354,9 +1350,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.7.1" +version = "1.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8318a53db07bb3f8dca91a600466bdb3f2eaadeedfdbcf02e1accbad9271ba50" +checksum = "428d9aa8fbc0670b7b8d6030a7fadd0f86151cae55e4dbbece15f3780a3dfaf3" [[package]] name = "bytes-utils" @@ -1388,9 +1384,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.1.18" +version = "1.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b62ac837cdb5cb22e10a256099b4fc502b1dfe560cb282963a974d7abd80e476" +checksum = "812acba72f0a070b003d3697490d2b55b837230ae7c6c6497f05cc2ddbb8d938" dependencies = [ "jobserver", "libc", @@ -1490,7 +1486,7 @@ dependencies = [ "proc-macro2", "quote", "serde_derive_internals", - "syn 2.0.77", + "syn", ] [[package]] @@ -1748,7 +1744,7 @@ dependencies = [ "proc-macro2", "quote", "strsim 0.11.1", - "syn 2.0.77", + "syn", ] [[package]] @@ -1759,7 +1755,7 @@ checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" dependencies = [ "darling_core", "quote", - "syn 2.0.77", + "syn", ] [[package]] @@ -1816,7 +1812,7 @@ checksum = "8034092389675178f570469e6c3b0465d3d30b4505c294a6550db47f3c17ad18" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn", ] [[package]] @@ -1846,7 +1842,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.77", + "syn", ] [[package]] @@ -1856,7 +1852,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4abae7035bf79b9877b779505d8cf3749285b80c43941eda66604841889451dc" dependencies = [ "derive_builder_core", - "syn 2.0.77", + "syn", ] [[package]] @@ -1869,7 +1865,7 @@ dependencies = [ "proc-macro2", "quote", "rustc_version", - "syn 2.0.77", + "syn", ] [[package]] @@ -1907,7 +1903,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn", ] [[package]] @@ -1970,7 +1966,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.77", + "syn", ] [[package]] @@ -1990,7 +1986,7 @@ checksum = "de0d48a183585823424a4ce1aa132d174a6a81bd540895822eb4c8373a8e49e8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn", ] [[package]] @@ -2118,8 +2114,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "531e46835a22af56d1e3b66f04844bed63158bc094a628bec1d321d9b4c44bf2" dependencies = [ "bit-set", - "regex-automata 0.4.7", - "regex-syntax 0.8.4", + "regex-automata 0.4.8", + "regex-syntax 0.8.5", ] [[package]] @@ -2151,9 +2147,9 @@ checksum = "b3ea1ec5f8307826a5b71094dd91fc04d4ae75d5709b20ad351c7fb4815c86ec" [[package]] name = "flate2" -version = "1.0.33" +version = "1.0.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "324a1be68054ef05ad64b861cc9eaf1d623d2d8cb25b4bf2cb9cdd902b4bf253" +checksum = "a1b589b4dc103969ad3cf85c950899926ec64300a1a46d76c03a6072957036f0" dependencies = [ "crc32fast", "miniz_oxide", @@ -2286,7 +2282,7 @@ checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn", ] [[package]] @@ -2348,9 +2344,9 @@ dependencies = [ [[package]] name = "gimli" -version = "0.31.0" +version = "0.31.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"32085ea23f3234fc7846555e85283ba4de91e21016dc0455a16286d87a292d64" +checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" [[package]] name = "glob" @@ -2370,7 +2366,26 @@ dependencies = [ "futures-sink", "futures-util", "http 0.2.12", - "indexmap 2.5.0", + "indexmap 2.6.0", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "h2" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "524e8ac6999421f49a846c2d4411f337e53497d8ec55d67753beffa43c5d9205" +dependencies = [ + "atomic-waker", + "bytes", + "fnv", + "futures-core", + "futures-sink", + "http 1.1.0", + "indexmap 2.6.0", "slab", "tokio", "tokio-util", @@ -2430,6 +2445,12 @@ dependencies = [ "allocator-api2", ] +[[package]] +name = "hashbrown" +version = "0.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e087f84d4f86bf4b218b927129862374b72199ae7d8657835f1e89000eea4fb" + [[package]] name = "hashlink" version = "0.9.1" @@ -2554,9 +2575,9 @@ dependencies = [ [[package]] name = "httparse" -version = "1.9.4" +version = "1.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fcc0b4a115bf80b728eb8ea024ad5bd707b615bfed49e0665b6e0f86fd082d9" +checksum = "7d71d3574edd2771538b901e6549113b4006ece66150fb69c0fb6d9a2adae946" [[package]] name = "httpdate" @@ -2580,7 +2601,7 @@ dependencies = [ "futures-channel", "futures-core", "futures-util", - "h2", + "h2 0.3.26", "http 0.2.12", "http-body 0.4.6", "httparse", @@ -2603,9 +2624,11 @@ dependencies = [ "bytes", "futures-channel", "futures-util", + "h2 0.4.6", "http 1.1.0", "http-body 1.0.1", "httparse", + "httpdate", "itoa", "pin-project-lite", "smallvec", @@ -2640,7 +2663,7 @@ dependencies = [ "hyper 1.4.1", "hyper-util", "log", - "rustls 0.23.13", + "rustls 0.23.14", "rustls-native-certs 0.8.0", "rustls-pki-types", "tokio", @@ -2651,21 +2674,22 @@ dependencies = [ [[package]] name = "hyper-timeout" -version = "0.4.1" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1" +checksum = "3203a961e5c83b6f5498933e78b6b263e208c197b63e9c6c53cc82ffd3f63793" dependencies = [ - "hyper 0.14.30", + "hyper 1.4.1", + "hyper-util", "pin-project-lite", "tokio", - "tokio-io-timeout", + "tower-service", ] [[package]] name = "hyper-util" -version = "0.1.8" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da62f120a8a37763efb0cf8fdf264b884c7b8b9ac8660b900c8661030c00e6ba" +checksum = "41296eb09f183ac68eec06e03cdbea2e759633d4067b2f6552fc2e009bcad08b" dependencies = [ "bytes", "futures-channel", @@ -2676,16 +2700,15 @@ dependencies = [ "pin-project-lite", "socket2 0.5.7", "tokio", - "tower", "tower-service", "tracing", ] [[package]] name = "iana-time-zone" -version = "0.1.60" +version = "0.1.61" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7ffbb5a1b541ea2561f8c41c087286cc091e21e556a4f09a8f6cbf17b69b141" +checksum = "235e081f3925a06703c2d0117ea8b91f042756fd6e7a6e5d901e8ca1a996b220" dependencies = [ "android_system_properties", "core-foundation-sys", @@ -2738,12 +2761,12 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.5.0" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68b900aa2f7301e21c36462b170ee99994de34dff39a4a6a528e80e7376d07e5" +checksum = "707907fe3c25f5424cce2cb7e1cbcafee6bdbe735ca90ef77c29e84591e5b9da" 
dependencies = [ "equivalent", - "hashbrown 0.14.5", + "hashbrown 0.15.0", "serde", ] @@ -2776,7 +2799,7 @@ dependencies = [ "derive_builder", "either", "enumflags2", - "indexmap 2.5.0", + "indexmap 2.6.0", "internal-baml-diagnostics", "internal-baml-jinja", "internal-baml-parser-database", @@ -2811,7 +2834,7 @@ dependencies = [ "anyhow", "baml-types", "colored", - "indexmap 2.5.0", + "indexmap 2.6.0", "log", "minijinja", "serde", @@ -2878,9 +2901,9 @@ dependencies = [ [[package]] name = "ipnet" -version = "2.10.0" +version = "2.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "187674a687eed5fe42285b40c6291f9a01517d415fad1c3cbc6a9f778af7fcd4" +checksum = "ddc24109865250148c2e0f3d25d4f0f479571723792d3802153c60922a4fb708" [[package]] name = "is-terminal" @@ -2901,27 +2924,27 @@ checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" [[package]] name = "itertools" -version = "0.10.5" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" +checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57" dependencies = [ "either", ] [[package]] name = "itertools" -version = "0.11.0" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57" +checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569" dependencies = [ "either", ] [[package]] name = "itertools" -version = "0.12.1" +version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569" +checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" dependencies = [ "either", ] @@ -3056,9 +3079,9 @@ checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" [[package]] name = "libc" -version = "0.2.158" +version = "0.2.159" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8adc4bb1803a324070e64a98ae98f38934d91957a99cfb3a43dcbc01bc56439" +checksum = "561d97a539a36e26a9a5fad1ea11a3039a67714694aaa379433e580854bc3dc5" [[package]] name = "libloading" @@ -3272,9 +3295,9 @@ dependencies = [ [[package]] name = "multimap" -version = "0.8.3" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a" +checksum = "defc4c55412d89136f966bbb339008b474350e5e6e78d2714439c386b3137a03" [[package]] name = "nom" @@ -3371,9 +3394,9 @@ dependencies = [ [[package]] name = "object" -version = "0.36.4" +version = "0.36.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "084f1a5821ac4c651660a94a7153d27ac9d8a53736203f58b31945ded098070a" +checksum = "aedf0a2d09c573ed1d8d85b30c119153926a2b36dce0ab28322c09a117a4683e" dependencies = [ "memchr", ] @@ -3389,9 +3412,12 @@ dependencies = [ [[package]] name = "once_cell" -version = "1.19.0" +version = "1.20.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" +checksum = "82881c4be219ab5faaf2ad5e5e5ecdff8c66bd7402ca3160975c93b24961afd1" +dependencies = [ + "portable-atomic", +] [[package]] name = "openssl-probe" @@ -3464,9 +3490,9 @@ dependencies = [ [[package]] name = "parse-size" -version = "1.0.0" +version = "1.1.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "944553dd59c802559559161f9816429058b869003836120e262e8caec061b7ae" +checksum = "487f2ccd1e17ce8c1bfab3a65c89525af41cfad4c8659021a1e9a2aacd73b89b" [[package]] name = "paste" @@ -3501,9 +3527,9 @@ checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "pest" -version = "2.7.12" +version = "2.7.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c73c26c01b8c87956cea613c907c9d6ecffd8d18a2a5908e5de0adfaa185cea" +checksum = "fdbef9d1d47087a895abd220ed25eb4ad973a5e26f6a4367b038c25e28dfc2d9" dependencies = [ "memchr", "thiserror", @@ -3512,9 +3538,9 @@ dependencies = [ [[package]] name = "pest_derive" -version = "2.7.12" +version = "2.7.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "664d22978e2815783adbdd2c588b455b1bd625299ce36b2a99881ac9627e6d8d" +checksum = "4d3a6e3394ec80feb3b6393c725571754c6188490265c61aaf260810d6b95aa0" dependencies = [ "pest", "pest_generator", @@ -3522,22 +3548,22 @@ dependencies = [ [[package]] name = "pest_generator" -version = "2.7.12" +version = "2.7.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2d5487022d5d33f4c30d91c22afa240ce2a644e87fe08caad974d4eab6badbe" +checksum = "94429506bde1ca69d1b5601962c73f4172ab4726571a59ea95931218cb0e930e" dependencies = [ "pest", "pest_meta", "proc-macro2", "quote", - "syn 2.0.77", + "syn", ] [[package]] name = "pest_meta" -version = "2.7.12" +version = "2.7.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0091754bbd0ea592c4deb3a122ce8ecbb0753b738aa82bc055fcc2eccc8d8174" +checksum = "ac8a071862e93690b6e34e9a5fb8e33ff3734473ac0245b27232222c4906a33f" dependencies = [ "once_cell", "pest", @@ -3551,7 +3577,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" dependencies = [ "fixedbitset", - "indexmap 2.5.0", + "indexmap 2.6.0", ] [[package]] @@ -3571,7 +3597,7 @@ checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn", ] [[package]] @@ -3662,9 +3688,9 @@ dependencies = [ [[package]] name = "pkg-config" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec" +checksum = "953ec861398dccce10c670dfeaf3ec4911ca479e9c02154b3a215178c5f566f2" [[package]] name = "polling" @@ -3697,6 +3723,12 @@ dependencies = [ "windows-sys 0.59.0", ] +[[package]] +name = "portable-atomic" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc9c68a3f6da06753e9335d63e27f6b9754dd1920d941135b7ea8224f141adb2" + [[package]] name = "powerfmt" version = "0.2.0" @@ -3712,16 +3744,6 @@ dependencies = [ "zerocopy", ] -[[package]] -name = "prettyplease" -version = "0.1.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c8646e95016a7a6c4adea95bafa8a16baab64b583356217f2c85db4a39d9a86" -dependencies = [ - "proc-macro2", - "syn 1.0.109", -] - [[package]] name = "prettyplease" version = "0.2.22" @@ -3729,7 +3751,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "479cf940fbbb3426c32c5d5176f62ad57549a0bb84773423ba8be9d089f5faba" dependencies = [ "proc-macro2", - "syn 2.0.77", + "syn", ] [[package]] @@ -3743,9 +3765,9 @@ 
dependencies = [ [[package]] name = "prost" -version = "0.11.9" +version = "0.13.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b82eaa1d779e9a4bc1c3217db8ffbeabaae1dca241bf70183242128d48681cd" +checksum = "7b0487d90e047de87f984913713b85c601c05609aad5b0df4b4573fbf69aa13f" dependencies = [ "bytes", "prost-derive", @@ -3753,44 +3775,43 @@ dependencies = [ [[package]] name = "prost-build" -version = "0.11.9" +version = "0.13.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "119533552c9a7ffacc21e099c24a0ac8bb19c2a2a3f363de84cd9b844feab270" +checksum = "0c1318b19085f08681016926435853bbf7858f9c082d0999b80550ff5d9abe15" dependencies = [ "bytes", - "heck 0.4.1", - "itertools 0.10.5", - "lazy_static", + "heck 0.5.0", + "itertools 0.13.0", "log", "multimap", + "once_cell", "petgraph", - "prettyplease 0.1.25", + "prettyplease", "prost", "prost-types", "regex", - "syn 1.0.109", + "syn", "tempfile", - "which", ] [[package]] name = "prost-derive" -version = "0.11.9" +version = "0.13.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5d2d8d10f3c6ded6da8b05b5fb3b8a5082514344d56c9f871412d29b4e075b4" +checksum = "e9552f850d5f0964a4e4d0bf306459ac29323ddfbae05e35a7c0d35cb0803cc5" dependencies = [ "anyhow", - "itertools 0.10.5", + "itertools 0.13.0", "proc-macro2", "quote", - "syn 1.0.109", + "syn", ] [[package]] name = "prost-types" -version = "0.11.9" +version = "0.13.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "213622a1460818959ac1181aaeb2dc9c7f63df720db7d788b3e24eacd1983e13" +checksum = "4759aa0d3a6232fb8dbdb97b61de2c20047c68aca932c7ed76da9d788508d670" dependencies = [ "prost", ] @@ -3821,7 +3842,7 @@ dependencies = [ "quinn-proto", "quinn-udp", "rustc-hash 2.0.0", - "rustls 0.23.13", + "rustls 0.23.14", "socket2 0.5.7", "thiserror", "tokio", @@ -3838,7 +3859,7 @@ dependencies = [ "rand", "ring", "rustc-hash 2.0.0", - "rustls 0.23.13", + "rustls 0.23.14", "slab", "thiserror", "tinyvec", @@ -3899,9 +3920,9 @@ dependencies = [ [[package]] name = "raw-cpuid" -version = "11.1.0" +version = "11.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb9ee317cfe3fbd54b36a511efc1edd42e216903c9cd575e686dd68a2ba90d8d" +checksum = "1ab240315c661615f2ee9f0f2cd32d5a7343a84d5ebcccb99d46e6637565e7b0" dependencies = [ "bitflags 2.6.0", ] @@ -3948,23 +3969,23 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.5.4" +version = "0.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0884ad60e090bf1345b93da0a5de8923c93884cd03f40dfcfddd3b4bee661853" +checksum = "9b6dfecf2c74bce2466cabf93f6664d6998a69eb21e39f4207930065b27b771f" dependencies = [ "bitflags 2.6.0", ] [[package]] name = "regex" -version = "1.10.6" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4219d74c6b67a3654a9fbebc4b419e22126d13d2f3c4a07ee0cb61ff79a79619" +checksum = "38200e5ee88914975b69f657f0801b6f6dccafd44fd9326302a4aaeecfacb1d8" dependencies = [ "aho-corasick", "memchr", - "regex-automata 0.4.7", - "regex-syntax 0.8.4", + "regex-automata 0.4.8", + "regex-syntax 0.8.5", ] [[package]] @@ -3978,13 +3999,13 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.7" +version = "0.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38caf58cc5ef2fed281f89292ef23f6365465ed9a41b7a7754eb4e26496c92df" +checksum = 
"368758f23274712b504848e9d5a6f010445cc8b87a7cdb4d7cbee666c1288da3" dependencies = [ "aho-corasick", "memchr", - "regex-syntax 0.8.4", + "regex-syntax 0.8.5", ] [[package]] @@ -4001,9 +4022,9 @@ checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" [[package]] name = "regex-syntax" -version = "0.8.4" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a66a03ae7c801facd77a29370b4faec201768915ac14a721ba36f20bc9c209b" +checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" [[package]] name = "replace_with" @@ -4013,9 +4034,9 @@ checksum = "e3a8614ee435691de62bcffcf4a66d91b3594bf1428a5722e79103249a095690" [[package]] name = "reqwest" -version = "0.12.7" +version = "0.12.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8f4955649ef5c38cc7f9e8aa41761d48fb9677197daea9984dc54f56aad5e63" +checksum = "f713147fbe92361e52392c73b8c9e48c04c6625bce969ef54dc901e58e042a7b" dependencies = [ "base64 0.22.1", "bytes", @@ -4036,8 +4057,8 @@ dependencies = [ "percent-encoding", "pin-project-lite", "quinn", - "rustls 0.23.13", - "rustls-pemfile 2.1.3", + "rustls 0.23.14", + "rustls-pemfile 2.2.0", "rustls-pki-types", "serde", "serde_json", @@ -4098,7 +4119,7 @@ checksum = "a5a11a05ee1ce44058fa3d5961d05194fdbe3ad6b40f904af764d81b86450e6b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn", ] [[package]] @@ -4227,9 +4248,9 @@ dependencies = [ [[package]] name = "rustls" -version = "0.23.13" +version = "0.23.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2dabaac7466917e566adb06783a81ca48944c6898a1b08b9374106dd671f4c8" +checksum = "415d9944693cb90382053259f89fbb077ea730ad7273047ec63b19bc9b160ba8" dependencies = [ "aws-lc-rs", "log", @@ -4248,7 +4269,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2a980454b497c439c274f2feae2523ed8138bbd3d323684e1435fec62f800481" dependencies = [ "log", - "rustls 0.23.13", + "rustls 0.23.14", "rustls-native-certs 0.7.3", "rustls-pki-types", "rustls-webpki 0.102.8", @@ -4273,7 +4294,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5bfb394eeed242e909609f56089eecfe5fda225042e8b171791b9c95f5931e5" dependencies = [ "openssl-probe", - "rustls-pemfile 2.1.3", + "rustls-pemfile 2.2.0", "rustls-pki-types", "schannel", "security-framework", @@ -4286,7 +4307,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fcaf18a4f2be7326cd874a5fa579fae794320a0f388d365dca7e480e55f83f8a" dependencies = [ "openssl-probe", - "rustls-pemfile 2.1.3", + "rustls-pemfile 2.2.0", "rustls-pki-types", "schannel", "security-framework", @@ -4303,19 +4324,18 @@ dependencies = [ [[package]] name = "rustls-pemfile" -version = "2.1.3" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "196fe16b00e106300d3e45ecfcb764fa292a535d7326a29a5875c579c7417425" +checksum = "dce314e5fee3f39953d46bb63bb8a46d40c2f8fb7cc5a3b6cab2bde9721d6e50" dependencies = [ - "base64 0.22.1", "rustls-pki-types", ] [[package]] name = "rustls-pki-types" -version = "1.8.0" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc0a2ce646f8655401bb81e7927b812614bd5d91dbc968696be50603510fcaf0" +checksum = "0e696e35370c65c9c541198af4543ccd580cf17fc25d8e05c5a242b202488c55" [[package]] name = "rustls-webpki" @@ -4405,7 +4425,7 @@ dependencies = [ "heck 0.4.1", "proc-macro2", "quote", - "syn 2.0.77", + "syn", ] 
[[package]] @@ -4423,9 +4443,9 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.11.1" +version = "2.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75da29fe9b9b08fe9d6b22b5b4bcbc75d8db3aa31e639aa56bb62e9d46bfceaf" +checksum = "ea4a292869320c0272d7bc55a5a6aafaff59b4f63404a003887b679a2e05b4b6" dependencies = [ "core-foundation-sys", "libc", @@ -4464,7 +4484,7 @@ checksum = "243902eda00fad750862fc144cea25caca5e20d615af0a81bee94ca738f1df1f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn", ] [[package]] @@ -4475,7 +4495,7 @@ checksum = "18d26a20a969b9e3fdf2fc2d9f21eda6c40e2de84c9408bb5d3b05d499aae711" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn", ] [[package]] @@ -4484,7 +4504,7 @@ version = "1.0.128" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6ff5456707a1de34e7e37f2a6fd3d3f808c318259cbd01ab6377795054b483d8" dependencies = [ - "indexmap 2.5.0", + "indexmap 2.6.0", "itoa", "memchr", "ryu", @@ -4508,14 +4528,14 @@ checksum = "6c64451ba24fc7a6a2d60fc75dd9c83c90903b19028d4eff35e88fc1e86564e9" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn", ] [[package]] name = "serde_spanned" -version = "0.6.7" +version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb5b1b31579f3811bf615c144393417496f152e12ac8b7663bf664f4a815306d" +checksum = "87607cb1398ed59d48732e575a4c28a7a8ebf2454b964fe3f224f2afc07909e1" dependencies = [ "serde", ] @@ -4538,7 +4558,7 @@ version = "0.9.34+deprecated" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47" dependencies = [ - "indexmap 2.5.0", + "indexmap 2.6.0", "itoa", "ryu", "serde", @@ -4742,7 +4762,7 @@ dependencies = [ "hashbrown 0.14.5", "hashlink", "hex", - "indexmap 2.5.0", + "indexmap 2.6.0", "log", "memchr", "once_cell", @@ -4771,7 +4791,7 @@ dependencies = [ "quote", "sqlx-core", "sqlx-macros-core", - "syn 2.0.77", + "syn", ] [[package]] @@ -4794,7 +4814,7 @@ dependencies = [ "sqlx-mysql", "sqlx-postgres", "sqlx-sqlite", - "syn 2.0.77", + "syn", "tempfile", "tokio", "url", @@ -4960,7 +4980,7 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn 2.0.77", + "syn", ] [[package]] @@ -4971,20 +4991,9 @@ checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" [[package]] name = "syn" -version = "1.0.109" +version = "2.0.79" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] - -[[package]] -name = "syn" -version = "2.0.77" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f35bcdf61fd8e7be6caf75f429fdca8beb3ed76584befb503b1569faee373ed" +checksum = "89132cd0bf050864e1d38dc3bbc07a0eb8e7530af26344d3d2bbbef83499f590" dependencies = [ "proc-macro2", "quote", @@ -5014,7 +5023,7 @@ checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn", ] [[package]] @@ -5032,14 +5041,14 @@ dependencies = [ "cfg-if", "p12-keystore", "rustls-connector", - "rustls-pemfile 2.1.3", + "rustls-pemfile 2.2.0", ] [[package]] name = "tempfile" -version = "3.12.0" +version = "3.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04cbcdd0c794ebb0d4cf35e88edd2f7d2c4c3e9a5a6dab322839b321c6a87a64" 
+checksum = "f0f2c9fc62d0beef6951ccffd757e241266a2c833136efbe35af6cd2567dca5b" dependencies = [ "cfg-if", "fastrand 2.1.1", @@ -5076,7 +5085,7 @@ checksum = "5999e24eaa32083191ba4e425deb75cdf25efefabe5aaccb7446dd0d4122a3f5" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn", ] [[package]] @@ -5101,22 +5110,22 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.63" +version = "1.0.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0342370b38b6a11b6cc11d6a805569958d54cfa061a29969c3b5ce2ea405724" +checksum = "d50af8abc119fb8bb6dbabcfa89656f46f84aa0ac7688088608076ad2b459a84" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.63" +version = "1.0.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261" +checksum = "08904e7672f5eb876eaaf87e0ce17857500934f4981c4a0ab2b4aa98baac7fc3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn", ] [[package]] @@ -5217,16 +5226,6 @@ dependencies = [ "windows-sys 0.52.0", ] -[[package]] -name = "tokio-io-timeout" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30b74022ada614a1b4834de765f9bb43877f910cc8ce4be40e89042c9223a8bf" -dependencies = [ - "pin-project-lite", - "tokio", -] - [[package]] name = "tokio-macros" version = "2.4.0" @@ -5235,7 +5234,7 @@ checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn", ] [[package]] @@ -5254,7 +5253,7 @@ version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0c7bc40d0e5a97695bb96e27995cd3a08538541b0a846f65bba7a359f36700d4" dependencies = [ - "rustls 0.23.13", + "rustls 0.23.14", "rustls-pki-types", "tokio", ] @@ -5289,7 +5288,7 @@ version = "0.8.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1ed1f98e3fdc28d6d910e6737ae6ab1a93bf1985935a1193e68f93eeb68d24e" dependencies = [ - "indexmap 2.5.0", + "indexmap 2.6.0", "serde", "serde_spanned", "toml_datetime", @@ -5307,11 +5306,11 @@ dependencies = [ [[package]] name = "toml_edit" -version = "0.22.20" +version = "0.22.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "583c44c02ad26b0c3f3066fe629275e50627026c51ac2e595cca4c230ce1ce1d" +checksum = "4ae48d6208a266e853d946088ed816055e556cc6028c5e8e2b84d9fa5dd7c7f5" dependencies = [ - "indexmap 2.5.0", + "indexmap 2.6.0", "serde", "serde_spanned", "toml_datetime", @@ -5320,47 +5319,46 @@ dependencies = [ [[package]] name = "tonic" -version = "0.8.3" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f219fad3b929bef19b1f86fbc0358d35daed8f2cac972037ac0dc10bbb8d5fb" +checksum = "877c5b330756d856ffcc4553ab34a5684481ade925ecc54bcd1bf02b1d0d4d52" dependencies = [ "async-stream", "async-trait", "axum", - "base64 0.13.1", + "base64 0.22.1", "bytes", - "futures-core", - "futures-util", - "h2", - "http 0.2.12", - "http-body 0.4.6", - "hyper 0.14.30", + "h2 0.4.6", + "http 1.1.0", + "http-body 1.0.1", + "http-body-util", + "hyper 1.4.1", "hyper-timeout", + "hyper-util", "percent-encoding", "pin-project", "prost", - "prost-derive", + "socket2 0.5.7", "tokio", "tokio-stream", - "tokio-util", - "tower", + "tower 0.4.13", "tower-layer", "tower-service", "tracing", - "tracing-futures", ] [[package]] name = "tonic-build" -version = "0.8.4" +version = "0.12.3" source 
= "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5bf5e9b9c0f7e0a7c027dcfaba7b2c60816c7049171f679d99ee2ff65d0de8c4" +checksum = "9557ce109ea773b399c9b9e5dca39294110b74f1f342cb347a80d1fce8c26a11" dependencies = [ - "prettyplease 0.1.25", + "prettyplease", "proc-macro2", "prost-build", + "prost-types", "quote", - "syn 1.0.109", + "syn", ] [[package]] @@ -5383,6 +5381,20 @@ dependencies = [ "tracing", ] +[[package]] +name = "tower" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2873938d487c3cfb9aed7546dc9f2711d867c9f90c46b889989a2cb84eba6b4f" +dependencies = [ + "futures-core", + "futures-util", + "pin-project-lite", + "sync_wrapper 0.1.2", + "tower-layer", + "tower-service", +] + [[package]] name = "tower-layer" version = "0.3.3" @@ -5415,7 +5427,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn", ] [[package]] @@ -5428,16 +5440,6 @@ dependencies = [ "valuable", ] -[[package]] -name = "tracing-futures" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97d095ae15e245a057c8e8451bab9b3ee1e1f68e9ba2b4fbc18d0ac5237835f2" -dependencies = [ - "pin-project", - "tracing", -] - [[package]] name = "tracing-log" version = "0.2.0" @@ -5486,9 +5488,9 @@ checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" [[package]] name = "ucd-trie" -version = "0.1.6" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed646292ffc8188ef8ea4d1e0e0150fb15a5c2e12ad9b8fc191ae7a8a7f3c4b9" +checksum = "2896d95c02a80c6d6a5d6e953d479f5ddf2dfdb6a244441010e373ac0fb88971" [[package]] name = "unicase" @@ -5501,9 +5503,9 @@ dependencies = [ [[package]] name = "unicode-bidi" -version = "0.3.15" +version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75" +checksum = "5ab17db44d7388991a428b2ee655ce0c212e862eff1768a455c58f9aad6e7893" [[package]] name = "unicode-ident" @@ -5519,30 +5521,30 @@ checksum = "3b09c83c3c29d37506a3e260c08c03743a6bb66a9cd432c6934ab501a190571f" [[package]] name = "unicode-normalization" -version = "0.1.23" +version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a56d1686db2308d901306f92a263857ef59ea39678a5458e7cb17f01415101f5" +checksum = "5033c97c4262335cded6d6fc3e5c18ab755e1a3dc96376350f3d8e9f009ad956" dependencies = [ "tinyvec", ] [[package]] name = "unicode-properties" -version = "0.1.2" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52ea75f83c0137a9b98608359a5f1af8144876eb67bcb1ce837368e906a9f524" +checksum = "e70f2a8b45122e719eb623c01822704c4e0907e7e426a05927e1a1cfff5b75d0" [[package]] name = "unicode-segmentation" -version = "1.11.0" +version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4c87d22b6e3f4a18d4d40ef354e97c90fcb14dd91d7dc0aa9d8a1172ebf7202" +checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" [[package]] name = "unicode-width" -version = "0.1.13" +version = "0.1.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0336d538f7abc86d282a4189614dfaa90810dfc2c6f6427eaf88e16311dd225d" +checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af" [[package]] name = "unicode_categories" @@ -5605,7 +5607,7 @@ checksum 
= "ee1cd046f83ea2c4e920d6ee9f7c3537ef928d75dce5d84a87c2c5d6b3999a3a" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn", ] [[package]] @@ -5681,7 +5683,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.77", + "syn", "wasm-bindgen-shared", ] @@ -5715,7 +5717,7 @@ checksum = "afc340c74d9005395cf9dd098506f7f44e38f2b4a21c6aaacf9a105ea5e1e836" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -5728,9 +5730,9 @@ checksum = "c62a0a307cb4a311d3a07867860911ca130c3494e8c2719593806c08bc5d0484" [[package]] name = "wasm-streams" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b65dc4c90b63b118468cf747d8bf3566c1913ef60be765b5730ead9e0a3ba129" +checksum = "4e072d4e72f700fb3443d8fe94a39315df013eef1104903cdb0a2abd322bbecd" dependencies = [ "futures-util", "js-sys", @@ -5751,9 +5753,9 @@ dependencies = [ [[package]] name = "webpki-roots" -version = "0.26.5" +version = "0.26.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bd24728e5af82c6c4ec1b66ac4844bdf8156257fccda846ec58b42cd0cdbe6a" +checksum = "841c67bff177718f1d4dfefde8d8f0e78f9b6589319ba88312f567fc5841a958" dependencies = [ "rustls-pki-types", ] @@ -6000,9 +6002,9 @@ checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "winnow" -version = "0.6.18" +version = "0.6.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68a9bda4691f099d435ad181000724da8e5899daa10713c2d432552b9ccd3a6f" +checksum = "36c1fec1a2bb5866f07c25f68c26e565c4c200aebb96d7e55710c19d3e8ac49b" dependencies = [ "memchr", ] @@ -6059,7 +6061,7 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn", ] [[package]] diff --git a/app-server/Cargo.toml b/app-server/Cargo.toml index f2aa6dd3..819534a9 100644 --- a/app-server/Cargo.toml +++ b/app-server/Cargo.toml @@ -8,7 +8,7 @@ env_logger = "0.10.0" actix-web = "4" anyhow = "1" futures-util = "0.3.28" -tonic = "0.8" +tonic = "0.12.3" # workaround [AFIT](https://rust-lang.github.io/rfcs/3185-static-async-fn-in-trait.html) # while rust people have hot [debates](https://github.com/rust-lang/rust/pull/115822#issuecomment-1718261458) @@ -16,7 +16,7 @@ tonic = "0.8" async-trait = "0.1" dotenv = "0.15" -prost = "0.11" +prost = "0.13" tokio = { version = "1.24", features = ["macros", "rt-multi-thread"] } tokio-stream = { version = "0.1", features = ["net"] } futures = "0.3" @@ -66,4 +66,4 @@ serde_repr = "0.1.19" num_cpus = "1.16.0" [build-dependencies] -tonic-build = "0.8" +tonic-build = "0.12.3" diff --git a/app-server/build.rs b/app-server/build.rs index ee27c172..2353e74c 100644 --- a/app-server/build.rs +++ b/app-server/build.rs @@ -6,7 +6,7 @@ fn main() -> Result<(), Box> { .build_client(true) .build_server(false) .out_dir("./src/semantic_search/") - .compile(&[proto_file], &["proto"])?; + .compile_protos(&[proto_file], &["proto"])?; // NOTE: Currently need to manually enable this, fix errors with super::super::..., whenever changing proto. 
diff --git a/app-server/src/api/v1/evaluations.rs b/app-server/src/api/v1/evaluations.rs
index 4b002bd2..7ed6bf08 100644
--- a/app-server/src/api/v1/evaluations.rs
+++ b/app-server/src/api/v1/evaluations.rs
@@ -1,12 +1,12 @@
 use actix_web::{post, web, HttpResponse};
 use serde::Deserialize;
-use serde_json::Value;
-use std::{collections::HashMap, sync::Arc};
+use std::sync::Arc;
 use uuid::Uuid;

 use crate::{
+    ch::evaluation_scores::{insert_evaluation_scores, EvaluationScore},
     db::{self, api_keys::ProjectApiKey, DB},
-    evaluations::stats::calculate_average_scores,
+    evaluations::utils::{get_columns_from_points, EvaluationDatapointResult},
     names::NameGenerator,
     routes::types::ResponseResult,
 };
@@ -15,160 +15,75 @@
 #[serde(rename_all = "camelCase")]
 struct CreateEvaluationRequest {
     name: Option<String>,
+    group_id: Option<String>,
+    points: Vec<EvaluationDatapointResult>,
 }

 #[post("evaluations")]
 async fn create_evaluation(
     req: web::Json<CreateEvaluationRequest>,
     db: web::Data<DB>,
+    clickhouse: web::Data<clickhouse::Client>,
     project_api_key: ProjectApiKey,
     name_generator: web::Data<Arc<NameGenerator>>,
 ) -> ResponseResult {
     let project_id = project_api_key.project_id;
     let req = req.into_inner();
+    let clickhouse = clickhouse.into_inner().as_ref().clone();
+    let db = db.into_inner();

     let name = if let Some(name) = req.name {
         name
     } else {
         name_generator.next().await
     };
+    let group_id = req.group_id.unwrap_or("default".to_string());
+    let points = req.points;

-    let evaluation = db::evaluations::create_evaluation(
-        &db.pool,
-        &name,
-        db::evaluations::EvaluationStatus::Started,
-        project_id,
-    )
-    .await?;
-    Ok(HttpResponse::Ok().json(evaluation))
-}
-
-#[derive(Deserialize)]
-#[serde(rename_all = "camelCase")]
-struct UpdateEvaluationRequest {
-    status: db::evaluations::EvaluationStatus,
-}
-
-#[post("evaluations/{evaluation_id}")]
-async fn update_evaluation(
-    path: web::Path<Uuid>,
-    req: web::Json<UpdateEvaluationRequest>,
-    db: web::Data<DB>,
-    project_api_key: ProjectApiKey,
-) -> ResponseResult {
-    let project_id = project_api_key.project_id;
-    let evaluation_id = path.into_inner();
-    let req = req.into_inner();
-
-    let mut average_scores = None;
-    if req.status == db::evaluations::EvaluationStatus::Finished {
-        // Calculate average scores only once, when the evaluation is finished, to avoid recalculating them on each update and query
-        let datapoint_scores =
-            db::evaluations::get_evaluation_datapoint_scores(&db.pool, evaluation_id).await?;
-        let average_scores_json = serde_json::to_value(calculate_average_scores(datapoint_scores))
-            .map_err(|e| anyhow::anyhow!("Failed to serialize average scores: {}", e))?;
-        average_scores = Some(average_scores_json);
+    if points.is_empty() {
+        return Err(anyhow::anyhow!("Evaluation must have at least one datapoint result").into());
     }
-    let evaluation = db::evaluations::update_evaluation_status(
-        &db.pool,
-        project_id,
-        evaluation_id,
-        req.status,
-        average_scores,
-    )
-    .await?;
-    Ok(HttpResponse::Ok().json(evaluation))
-}
-
-#[derive(Deserialize)]
-#[serde(rename_all = "camelCase")]
-struct RequestEvaluationDatapoint {
-    data: Value,
-    target: Value,
-    executor_output: Option<Value>,
-    #[serde(default)]
-    trace_id: Uuid,
-    error: Option<String>,
-    scores: HashMap<String, f64>,
-}
-
-#[derive(Deserialize)]
-#[serde(rename_all = "camelCase")]
-struct UploadEvaluationDatapointsRequest {
-    evaluation_id: Uuid,
-    points: Vec<RequestEvaluationDatapoint>,
-}
-#[post("evaluation-datapoints")] -async fn upload_evaluation_datapoints( - req: web::Json, - db: web::Data, - project_api_key: ProjectApiKey, -) -> ResponseResult { - let project_id = project_api_key.project_id; - let db = db.into_inner(); let evaluation = - db::evaluations::get_evaluation(db.clone(), project_id, req.evaluation_id).await?; - - let evaluation_id = evaluation.id; - let statuses = req - .points - .iter() - .map(|point| { - if point.error.is_some() { - db::evaluations::EvaluationDatapointStatus::Error - } else { - db::evaluations::EvaluationDatapointStatus::Success - } - }) - .collect::>(); - - let data = req - .points - .iter() - .map(|point| point.data.clone()) - .collect::>(); + db::evaluations::create_evaluation(&db.pool, &name, project_id, &group_id).await?; + + let columns = get_columns_from_points(&points); + let ids = points.iter().map(|_| Uuid::new_v4()).collect::>(); + + let ids_clone = ids.clone(); + let db_task = tokio::spawn(async move { + db::evaluations::set_evaluation_results( + db.clone(), + evaluation.id, + &ids_clone, + &columns.scores, + &columns.datas, + &columns.targets, + &columns.executor_outputs, + &columns.trace_ids, + ) + .await + }); + + // Flattened scores from all evaluators to be recorded to Clickhouse + // Its length can be longer than the amount of evaluation datapoints since each evaluator can return multiple scores + let ch_evaluation_scores = EvaluationScore::from_evaluation_datapoint_results( + &points, + ids, + project_id, + group_id, + evaluation.id, + ); - let target = req - .points - .iter() - .map(|point| point.target.clone()) - .collect::>(); + let ch_task = tokio::spawn(insert_evaluation_scores( + clickhouse.clone(), + ch_evaluation_scores, + )); - let executor_output = req - .points - .iter() - .map(|point| point.executor_output.clone()) - .collect::>(); + let (db_result, ch_result) = tokio::join!(db_task, ch_task); - let error = req - .points - .iter() - .map(|point| point.error.clone()) - .collect::>(); - let scores = req - .points - .iter() - .map(|point| point.scores.clone()) - .collect::>(); - let trace_ids = req - .points - .iter() - .map(|point| point.trace_id) - .collect::>(); + db_result.map_err(|e| anyhow::anyhow!("Database task failed: {}", e))??; + ch_result.map_err(|e| anyhow::anyhow!("Clickhouse task failed: {}", e))??; - let evaluation_datapoint = db::evaluations::set_evaluation_results( - &db.pool, - evaluation_id, - &statuses, - &scores, - &data, - &target, - &executor_output, - &trace_ids, - &error, - ) - .await?; - Ok(HttpResponse::Ok().json(evaluation_datapoint)) + Ok(HttpResponse::Ok().json(evaluation)) } diff --git a/app-server/src/api/v1/pipelines.rs b/app-server/src/api/v1/pipelines.rs index 1563397f..e45fe0e1 100644 --- a/app-server/src/api/v1/pipelines.rs +++ b/app-server/src/api/v1/pipelines.rs @@ -1,13 +1,13 @@ use std::{collections::HashMap, sync::Arc}; -use actix_web::{post, web, HttpResponse}; +use actix_web::{get, post, web, HttpResponse}; use serde::Deserialize; use uuid::Uuid; use crate::{ api::utils::query_target_pipeline_version, cache::Cache, - db::{api_keys::ProjectApiKey, DB}, + db::{api_keys::ProjectApiKey, trace::CurrentTraceAndSpan, DB}, pipeline::{ nodes::{GraphOutput, GraphRunOutput, NodeInput, RunEndpointEventError, StreamChunk}, runner::{PipelineRunner, PipelineRunnerError}, @@ -19,14 +19,6 @@ use crate::{ }, }; -#[derive(Deserialize)] -#[serde(rename_all = "camelCase")] -struct CurrentTraceAndSpan { - trace_id: Uuid, - #[serde(default)] - parent_span_id: Option, -} - 
 #[derive(Deserialize)]
 #[serde(rename_all = "camelCase")]
 struct GraphRequest {
@@ -58,11 +50,9 @@ async fn run_pipeline_graph(
     let inputs = req.inputs;
     let mut env = req.env;
     let metadata = req.metadata;
-    let parent_span_id = req
-        .current_trace_and_span
-        .as_ref()
-        .and_then(|t| t.parent_span_id);
-    let trace_id = req.current_trace_and_span.map(|t| t.trace_id);
+
+    let current_trace_and_span = req.current_trace_and_span;
+
     env.insert("collection_name".to_string(), project_id.to_string());

     let pipeline_version =
@@ -94,8 +84,7 @@ async fn run_pipeline_graph(
                 &run_result,
                 &project_id,
                 &pipeline_version_name,
-                parent_span_id,
-                trace_id,
+                current_trace_and_span,
                 None,
             )
             .await
@@ -158,8 +147,7 @@ async fn run_pipeline_graph(
             &run_result,
             &project_id,
             &pipeline_version_name,
-            parent_span_id,
-            trace_id,
+            current_trace_and_span,
             None,
         )
         .await?;
diff --git a/app-server/src/ch/evaluation_scores.rs b/app-server/src/ch/evaluation_scores.rs
new file mode 100644
index 00000000..62f8370e
--- /dev/null
+++ b/app-server/src/ch/evaluation_scores.rs
@@ -0,0 +1,202 @@
+use anyhow::Result;
+use clickhouse::Row;
+use serde::{Deserialize, Serialize};
+use uuid::Uuid;
+
+use crate::evaluations::utils::EvaluationDatapointResult;
+
+use super::utils::{execute_query, validate_string_against_injection};
+
+/// Evaluation score
+#[derive(Row, Serialize)]
+pub struct EvaluationScore {
+    /// Project id; used to validate that users only access evaluations from projects they belong to
+    #[serde(with = "clickhouse::serde::uuid")]
+    pub project_id: Uuid,
+    pub group_id: String,
+    #[serde(with = "clickhouse::serde::uuid")]
+    pub evaluation_id: Uuid,
+    #[serde(with = "clickhouse::serde::uuid")]
+    pub result_id: Uuid,
+    // Note that one evaluator can produce multiple scores
+    pub name: String,
+    pub value: f64,
+}
+
+impl EvaluationScore {
+    pub fn from_evaluation_datapoint_results(
+        points: &Vec<EvaluationDatapointResult>,
+        result_ids: Vec<Uuid>,
+        project_id: Uuid,
+        group_id: String,
+        evaluation_id: Uuid,
+    ) -> Vec<EvaluationScore> {
+        points
+            .iter()
+            .zip(result_ids.iter())
+            .flat_map(|(point, result_id)| {
+                point.scores.iter().map(|(name, value)| EvaluationScore {
+                    project_id,
+                    group_id: group_id.clone(),
+                    evaluation_id,
+                    result_id: *result_id,
+                    name: name.to_string(),
+                    value: *value,
+                })
+            })
+            .collect()
+    }
+}
+
+pub async fn insert_evaluation_scores(
+    clickhouse: clickhouse::Client,
+    evaluation_scores: Vec<EvaluationScore>,
+) -> Result<()> {
+    if evaluation_scores.is_empty() {
+        return Ok(());
+    }
+
+    let ch_insert = clickhouse.insert("evaluation_scores");
+    match ch_insert {
+        Ok(mut ch_insert) => {
+            for evaluation_score in evaluation_scores {
+                ch_insert.write(&evaluation_score).await?;
+            }
+            let ch_insert_end_res = ch_insert.end().await;
+            match ch_insert_end_res {
+                Ok(_) => Ok(()),
+                Err(e) => Err(anyhow::anyhow!(
+                    "Clickhouse evaluation scores insertion failed: {:?}",
+                    e
+                )),
+            }
+        }
+        Err(e) => Err(anyhow::anyhow!(
+            "Failed to insert evaluation scores into Clickhouse: {:?}",
+            e
+        )),
+    }
+}
+
+#[derive(Row, Deserialize)]
+struct AverageEvaluationScore {
+    average_value: f64,
+}
+
+pub async fn get_average_evaluation_score(
+    clickhouse: clickhouse::Client,
+    project_id: Uuid,
+    evaluation_id: Uuid,
+    name: String,
+) -> Result<f64> {
+    validate_string_against_injection(&name)?;
+
+    let query = format!(
+        "SELECT avg(value) as average_value FROM evaluation_scores WHERE project_id = '{}' AND evaluation_id = '{}' AND name = '{}'",
+        project_id,
+        evaluation_id,
+        name
+    );
+
+    let rows: Vec<AverageEvaluationScore> = execute_query(&clickhouse, &query).await?;
+    Ok(rows[0].average_value)
+}
+
+#[derive(Row, Deserialize)]
+pub struct EvaluationScoreBucket {
+    pub lower_bound: f64,
+    pub upper_bound: f64,
+    pub height: u64,
+}
+
+pub async fn get_evaluation_score_buckets_based_on_bounds(
+    clickhouse: clickhouse::Client,
+    project_id: Uuid,
+    evaluation_id: Uuid,
+    name: String,
+    lower_bound: f64,
+    upper_bound: f64,
+    bucket_count: u64,
+) -> Result<Vec<EvaluationScoreBucket>> {
+    validate_string_against_injection(&name)?;
+
+    let step_size = (upper_bound - lower_bound) / bucket_count as f64;
+    let interval_nums = (1..=bucket_count)
+        .map(|num| num.to_string())
+        .collect::<Vec<String>>()
+        .join(",");
+
+    // This query uses {:?} so that whole floats render as 1.0 rather than 1
+    let query = format!(
+        "
+WITH intervals AS (
+    SELECT
+        arrayJoin([{}]) AS interval_num,
+        {:?} + ((interval_num - 1) * {:?}) AS lower_bound,
+        CASE
+            WHEN interval_num = {} THEN {:?}
+            ELSE {:?} + (interval_num * {:?})
+        END AS upper_bound
+)
+SELECT
+    intervals.lower_bound,
+    intervals.upper_bound,
+    COUNT(CASE
+        WHEN value >= intervals.lower_bound AND value < intervals.upper_bound THEN 1
+        WHEN intervals.interval_num = {} AND value >= intervals.lower_bound AND value <= intervals.upper_bound THEN 1
+        ELSE NULL
+    END) AS height
+FROM evaluation_scores
+JOIN intervals ON 1 = 1
+WHERE project_id = '{}'
+AND evaluation_id = '{}'
+AND name = '{}'
+GROUP BY intervals.lower_bound, intervals.upper_bound, intervals.interval_num
+ORDER BY intervals.interval_num",
+        interval_nums, lower_bound, step_size, bucket_count, upper_bound, lower_bound, step_size, bucket_count, project_id, evaluation_id, name
+    );
+
+    let rows: Vec<EvaluationScoreBucket> = execute_query(&clickhouse, &query).await?;
+
+    Ok(rows)
+}
+
+#[derive(Row, Deserialize, Clone)]
+pub struct ComparedEvaluationScoresBounds {
+    pub upper_bound: f64,
+}
+
+pub async fn get_global_evaluation_scores_bounds(
+    clickhouse: clickhouse::Client,
+    project_id: Uuid,
+    evaluation_ids: &Vec<Uuid>,
+    name: String,
+) -> Result<ComparedEvaluationScoresBounds> {
+    validate_string_against_injection(&name)?;
+
+    let evaluation_ids_str = evaluation_ids
+        .iter()
+        .map(|id| format!("'{}'", id))
+        .collect::<Vec<String>>()
+        .join(",");
+
+    let query = format!(
+        "
+SELECT
+    MAX(value) AS upper_bound
+FROM evaluation_scores
+WHERE project_id = '{}'
+    AND evaluation_id IN ({})
+    AND name = '{}'",
+        project_id, evaluation_ids_str, name
+    );
+
+    let rows: Vec<ComparedEvaluationScoresBounds> = execute_query(&clickhouse, &query).await?;
+    Ok(rows[0].clone())
+}
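To sanity-check the interval arithmetic in `get_evaluation_score_buckets_based_on_bounds` above: with `lower_bound = 0.0`, `upper_bound = 1.0`, `bucket_count = 5`, the step size is 0.2, and every bucket is half-open except the last, which the CASE expressions clamp to the upper bound and count inclusively (so a score of exactly 1.0 lands in the fifth bucket). A small runnable mirror of that logic, with illustrative values:

```rust
// Mirrors the intervals CTE: bucket i spans [lower + (i-1)*step, lower + i*step),
// and the last bucket's upper edge is clamped to `upper` and made inclusive.
fn bucket_bounds(lower: f64, upper: f64, buckets: u64) -> Vec<(f64, f64)> {
    let step = (upper - lower) / buckets as f64;
    (1..=buckets)
        .map(|i| {
            let lo = lower + (i - 1) as f64 * step;
            let hi = if i == buckets { upper } else { lower + i as f64 * step };
            (lo, hi)
        })
        .collect()
}

fn main() {
    // Prints 0.0..0.2, 0.2..0.4, 0.4..0.6, 0.6..0.8, 0.8..1.0
    for (lo, hi) in bucket_bounds(0.0, 1.0, 5) {
        println!("{lo:.1}..{hi:.1}");
    }
}
```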
100644 --- a/app-server/src/ch/utils.rs +++ b/app-server/src/ch/utils.rs @@ -141,3 +141,15 @@ where Ok(res) } + +/// Trivial SQL injection protection +pub fn validate_string_against_injection(s: &str) -> Result<()> { + let invalid_chars = ["'", "\"", "\\", ";", "*", "/", "--"]; + if invalid_chars.iter().any(|&c| s.contains(c)) + || s.to_lowercase().contains("union") + || s.to_lowercase().contains("select") + { + return Err(anyhow::anyhow!("Invalid characters or SQL keywords")); + } + return Ok(()); +} diff --git a/app-server/src/db/evaluations.rs b/app-server/src/db/evaluations.rs index 6fec0011..400accd8 100644 --- a/app-server/src/db/evaluations.rs +++ b/app-server/src/db/evaluations.rs @@ -2,37 +2,24 @@ use std::{collections::HashMap, sync::Arc}; use anyhow::Result; use chrono::{DateTime, Utc}; -use serde::{Deserialize, Serialize}; +use serde::Serialize; use serde_json::Value; use sqlx::{prelude::FromRow, PgPool}; use uuid::Uuid; use super::DB; -#[derive(sqlx::Type, Deserialize, Serialize, PartialEq)] -#[sqlx(type_name = "evaluation_job_status")] -pub enum EvaluationStatus { - Started, - Finished, - Error, -} - -#[derive(sqlx::Type, Serialize, Clone, Deserialize)] -#[sqlx(type_name = "evaluation_status")] -pub enum EvaluationDatapointStatus { - Success, - Error, -} - #[derive(Serialize, FromRow)] #[serde(rename_all = "camelCase")] pub struct Evaluation { pub id: Uuid, pub created_at: DateTime, pub name: String, - pub status: EvaluationStatus, pub project_id: Uuid, - pub average_scores: Option, + /// Group id is used to group evaluations together within the same project + /// + /// Conceptually, evaluations with different group ids are used to test different features. + pub group_id: String, } #[derive(Serialize, FromRow)] @@ -44,32 +31,29 @@ pub struct EvaluationDatapointPreview { pub data: Value, pub target: Value, pub scores: Value, // HashMap - pub status: EvaluationDatapointStatus, pub executor_output: Option, - pub error: Option, pub trace_id: Uuid, } pub async fn create_evaluation( pool: &PgPool, name: &String, - status: EvaluationStatus, project_id: Uuid, + group_id: &str, ) -> Result { let evaluation = sqlx::query_as::<_, Evaluation>( - "INSERT INTO evaluations (name, status, project_id) - VALUES ($1, $2::evaluation_job_status, $3) + "INSERT INTO evaluations (name, project_id, group_id) + VALUES ($1, $2, $3) RETURNING id, created_at, name, - status, project_id, - average_scores", + group_id", ) .bind(name) - .bind(&status) .bind(project_id) + .bind(group_id) .fetch_one(pool) .await?; @@ -83,7 +67,7 @@ pub async fn get_evaluation( ) -> Result { let evaluation = sqlx::query_as::<_, Evaluation>( "SELECT - id, name, status, project_id, created_at, average_scores + id, name, project_id, created_at, group_id FROM evaluations WHERE id = $1 AND project_id = $2", ) .bind(evaluation_id) @@ -96,7 +80,7 @@ pub async fn get_evaluation( pub async fn get_evaluations(pool: &PgPool, project_id: Uuid) -> Result> { let evaluations = sqlx::query_as::<_, Evaluation>( - "SELECT id, name, status, project_id, created_at, average_scores + "SELECT id, name, project_id, created_at, group_id FROM evaluations WHERE project_id = $1 ORDER BY created_at DESC", ) @@ -107,61 +91,38 @@ pub async fn get_evaluations(pool: &PgPool, project_id: Uuid) -> Result Result> { let evaluations = sqlx::query_as::<_, Evaluation>( - "SELECT id, name, status, project_id, created_at, average_scores + "SELECT id, name, project_id, created_at, group_id FROM evaluations - WHERE project_id = $1 AND status = 
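A quick sketch of how validate_string_against_injection behaves; the substring checks are deliberately coarse, so some harmless names are rejected too:

fn main() {
    assert!(validate_string_against_injection("accuracy").is_ok());
    // Quote and comment tokens used by classic injection payloads are rejected.
    assert!(validate_string_against_injection("x'; DROP TABLE users; --").is_err());
    // Coarse substring matching also rejects names that merely contain a
    // keyword, e.g. a score named "selected_answer" (contains "select").
    assert!(validate_string_against_injection("selected_answer").is_err());
}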
'Finished'::evaluation_job_status AND id != $2 + WHERE project_id = $1 + AND group_id = (SELECT group_id FROM evaluations WHERE id = $2) ORDER BY created_at DESC", ) .bind(project_id) - .bind(exclude_id) + .bind(current_evaluation_id) .fetch_all(pool) .await?; Ok(evaluations) } -pub async fn update_evaluation_status( - pool: &PgPool, - project_id: Uuid, - evaluation_id: Uuid, - status: EvaluationStatus, - average_scores: Option<Value>, -) -> Result<Evaluation> { - let evaluation = sqlx::query_as::<_, Evaluation>( - "UPDATE evaluations - SET status = $3, average_scores = $4 - WHERE id = $2 AND project_id = $1 - RETURNING id, name, status, project_id, created_at, average_scores", - ) - .bind(project_id) - .bind(evaluation_id) - .bind(status) - .bind(average_scores) - .fetch_one(pool) - .await?; - - Ok(evaluation) -} - /// Record evaluation results in the database. /// /// Each target may be an empty JSON object if there is no target data. pub async fn set_evaluation_results( - pool: &PgPool, + db: Arc<DB>, evaluation_id: Uuid, - statuses: &Vec<EvaluationDatapointStatus>, + ids: &Vec<Uuid>, scores: &Vec<HashMap<String, f64>>, datas: &Vec<Value>, targets: &Vec<Value>, executor_outputs: &Vec<Option<Value>>, trace_ids: &Vec<Uuid>, - error: &Vec<Option<Value>>, ) -> Result<()> { let scores = scores .iter() @@ -170,40 +131,37 @@ pub async fn set_evaluation_results( let res = sqlx::query( r#"INSERT INTO evaluation_results ( + id, evaluation_id, - status, scores, data, target, executor_output, trace_id, - index_in_batch, - error + index_in_batch ) - SELECT - $9 as evaluation_id, - status as "status: EvaluationStatus", + SELECT + id, + $8 as evaluation_id, scores, data, target, executor_output, trace_id, - index_in_batch, - error + index_in_batch FROM - UNNEST ($1::evaluation_status[], $2::jsonb[], $3::jsonb[], $4::jsonb[], $5::jsonb[], $6::uuid[], $7::int8[], $8::jsonb[]) - AS tmp_table(status, scores, data, target, executor_output, trace_id, index_in_batch, error)"#, + UNNEST ($1::uuid[], $2::jsonb[], $3::jsonb[], $4::jsonb[], $5::jsonb[], $6::uuid[], $7::int8[]) + AS tmp_table(id, scores, data, target, executor_output, trace_id, index_in_batch)"#, ) - .bind(statuses) + .bind(ids) .bind(scores) .bind(datas) .bind(targets) .bind(executor_outputs) .bind(trace_ids) - .bind(Vec::from_iter(0..statuses.len() as i64)) - .bind(error) + .bind(Vec::from_iter(0..ids.len() as i64)) .bind(evaluation_id) - .execute(pool) + .execute(&db.pool) .await; if let Err(e) = res { @@ -222,13 +180,11 @@ pub async fn get_evaluation_results( id, created_at, evaluation_id, - status, data, target, executor_output, scores, - trace_id, - error + trace_id FROM evaluation_results WHERE evaluation_id = $1 ORDER BY created_at ASC, index_in_batch ASC NULLS FIRST", @@ -248,28 +204,6 @@ pub async fn delete_evaluation(pool: &PgPool, evaluation_id: &Uuid) -> Result<() Ok(()) } -#[derive(FromRow)] -pub struct EvaluationDatapointScores { - pub scores: Value, -} - -pub async fn get_evaluation_datapoint_scores( - pool: &PgPool, - evaluation_id: Uuid, -) -> Result<Vec<EvaluationDatapointScores>> { - let scores = sqlx::query_as::<_, EvaluationDatapointScores>( - "SELECT - scores - FROM evaluation_results - WHERE evaluation_id = $1", - ) - .bind(evaluation_id) - .fetch_all(pool) - .await?; - - Ok(scores) -} - pub async fn get_evaluation_datapoint( pool: &PgPool, evaluation_result_id: Uuid, @@ -279,13 +213,11 @@ pub async fn get_evaluation_datapoint( id, created_at, evaluation_id, - status, scores, data, target, trace_id, executor_output, - error FROM evaluation_results WHERE id = $1", ) diff --git a/app-server/src/db/labels.rs b/app-server/src/db/labels.rs index 43ecb719..0b483923
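A sketch of the calling convention for the rewritten set_evaluation_results (surrounding variables are illustrative): every column vector must have the same length, because UNNEST zips them positionally and index_in_batch is derived from 0..ids.len().

// `points` is a Vec<EvaluationDatapointResult>; get_columns_from_points is the
// new helper in evaluations/utils.rs later in this diff.
let ids: Vec<Uuid> = points.iter().map(|_| Uuid::new_v4()).collect();
let columns = get_columns_from_points(&points);
set_evaluation_results(
    db.clone(),
    evaluation.id,
    &ids,
    &columns.scores,
    &columns.datas,
    &columns.targets,
    &columns.executor_outputs,
    &columns.trace_ids,
)
.await?;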
100644 --- a/app-server/src/db/labels.rs +++ b/app-server/src/db/labels.rs @@ -14,7 +14,16 @@ pub enum LabelType { CATEGORICAL, } -#[derive(Serialize, sqlx::FromRow)] +#[derive(sqlx::Type, Serialize, Debug, Clone, PartialEq)] +#[sqlx(type_name = "label_source")] +pub enum LabelSource { + #[serde(rename = "Manual")] + MANUAL, + #[serde(rename = "Auto")] + AUTO, +} + +#[derive(Serialize, sqlx::FromRow, Debug, Clone)] #[serde(rename_all = "camelCase")] pub struct LabelClass { pub id: Uuid, @@ -23,10 +32,11 @@ pub struct LabelClass { pub project_id: Uuid, pub label_type: LabelType, pub value_map: Value, // Vec + pub description: Option, } // (type_id, span_id) is a unique constraint -#[derive(Serialize, sqlx::FromRow)] +#[derive(Serialize, sqlx::FromRow, Debug)] #[serde(rename_all = "camelCase")] pub struct DBSpanLabel { pub id: Uuid, @@ -35,8 +45,8 @@ pub struct DBSpanLabel { pub class_id: Uuid, pub created_at: DateTime, pub updated_at: DateTime, - pub last_updated_by: Uuid, // user_id - pub last_updated_email: String, + pub user_id: Option, + pub label_source: LabelSource, } #[derive(Serialize, sqlx::FromRow, Debug)] @@ -50,9 +60,12 @@ pub struct SpanLabel { pub label_type: LabelType, pub class_name: String, pub value_map: Value, // Vec + pub description: Option, + pub updated_at: DateTime, - pub last_updated_by: Uuid, // user_id - pub last_updated_email: String, + pub user_id: Option, + pub user_email: Option, + pub label_source: LabelSource, } pub async fn get_label_classes_by_project_id( @@ -60,7 +73,14 @@ pub async fn get_label_classes_by_project_id( project_id: Uuid, ) -> Result> { let label_classes = sqlx::query_as::<_, LabelClass>( - "SELECT id, created_at, name, project_id, label_type, value_map + "SELECT + id, + created_at, + name, + project_id, + label_type, + value_map, + description FROM label_classes WHERE project_id = $1", ) @@ -78,23 +98,53 @@ pub async fn create_label_class( project_id: Uuid, label_type: &LabelType, value_map: Vec, + description: Option, ) -> Result { let label_class = sqlx::query_as::<_, LabelClass>( - "INSERT INTO label_classes (id, name, project_id, label_type, value_map) - VALUES ($1, $2, $3, $4, $5) - RETURNING id, created_at, name, project_id, label_type, value_map", + "INSERT INTO label_classes ( + id, + name, + project_id, + label_type, + value_map, + description + ) + VALUES ($1, $2, $3, $4, $5, $6) + RETURNING id, created_at, name, project_id, label_type, value_map, description", ) .bind(id) .bind(name) .bind(project_id) .bind(label_type) .bind(serde_json::to_value(value_map).unwrap()) + .bind(description) .fetch_one(pool) .await?; Ok(label_class) } +pub async fn update_label_class( + pool: &PgPool, + project_id: Uuid, + class_id: Uuid, + description: Option, +) -> Result> { + let label_class = sqlx::query_as::<_, LabelClass>( + "UPDATE label_classes + SET description = $1 + WHERE project_id = $2 AND id = $3 + RETURNING id, created_at, name, project_id, label_type, value_map, description", + ) + .bind(description) + .bind(project_id) + .bind(class_id) + .fetch_optional(pool) + .await?; + + Ok(label_class) +} + pub async fn delete_span_label( pool: &PgPool, span_id: Uuid, @@ -103,7 +153,7 @@ pub async fn delete_span_label( let span_label = sqlx::query_as::<_, DBSpanLabel>( "DELETE FROM labels WHERE span_id = $1 AND id = $2 - RETURNING id, span_id, value, class_id, created_at, updated_at, last_updated_by, '' as last_updated_email", + RETURNING id, span_id, value, class_id, created_at, updated_at, user_id, label_source", ) .bind(span_id) 
.bind(class_id) @@ -117,20 +167,22 @@ pub async fn update_span_label( pool: &PgPool, span_id: Uuid, value: f64, - user_id: Uuid, + user_id: Option, class_id: Uuid, + label_source: LabelSource, ) -> Result { let span_label = sqlx::query_as::<_, DBSpanLabel>( - "INSERT INTO labels (span_id, value, class_id, last_updated_by, updated_at) - VALUES ($1, $2, $3, $4, now()) - ON CONFLICT (span_id, class_id) - DO UPDATE SET value = $2, last_updated_by = $4, updated_at = now() - RETURNING id, span_id, value, class_id, created_at, updated_at, last_updated_by, '' as last_updated_email", + "INSERT INTO labels (span_id, class_id, user_id, value, updated_at, label_source) + VALUES ($1, $2, $3, $4, now(), $5) + ON CONFLICT (span_id, class_id, user_id) + DO UPDATE SET value = $4, updated_at = now(), label_source = $5 + RETURNING id, span_id, value, class_id, created_at, updated_at, user_id, label_source", ) .bind(span_id) - .bind(value) .bind(class_id) .bind(user_id) + .bind(value) + .bind(label_source) .fetch_one(pool) .await?; @@ -146,14 +198,16 @@ pub async fn get_span_labels(pool: &PgPool, span_id: Uuid) -> Result '{user_id: filter_value}'`, this field will contain - /// Some(String::from("metadata"))`. Otherwise is `None`. - pub jsonb_column: Option, - /// The type of the value to filter on. This is used to cast the value to the correct type - pub filter_value_type: Option, } #[derive(Deserialize)] @@ -74,23 +67,11 @@ impl Filter { .into_iter() .filter_map(|value| { let filter: UrlParamFilter = serde_json::from_value(value).ok()?; - let (jsonb_column, filter_column) = if filter.column.starts_with("jsonb::") - { - let mut split = - filter.column.strip_prefix("jsonb::").unwrap().split("::"); - ( - Some(split.next().unwrap().to_string()), - split.next().unwrap().to_string(), - ) - } else { - (None, filter.column.to_string()) - }; + let filter_column = filter.column; Some(Self { filter_value: filter.value, filter_operator: FilterOperator::from_string(&filter.operator), filter_column, - jsonb_column, - filter_value_type: None, // TODO: add this to frontend? 
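A sketch of the new upsert semantics of update_span_label above (assuming a unique index matching the ON CONFLICT target): the same user re-labeling a span updates their row in place, while a different user, or an automatic label with user_id = None, gets its own row, since Postgres treats NULLs in a unique index as distinct by default.

let first = update_span_label(pool, span_id, 1.0, Some(user_id), class_id, LabelSource::MANUAL).await?;
let second = update_span_label(pool, span_id, 0.0, Some(user_id), class_id, LabelSource::MANUAL).await?;
assert_eq!(first.id, second.id); // hit the (span_id, class_id, user_id) conflict target: update, not insert

let auto = update_span_label(pool, span_id, 1.0, None, class_id, LabelSource::AUTO).await?;
assert_ne!(first.id, auto.id); // a NULL user_id never conflicts, so a new row is inserted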
}) }) .collect(), @@ -104,14 +85,6 @@ impl Filter { pub fn validate_column(&self) -> bool { validate_sql_string(&self.filter_column) } - - pub fn validate_cast_type(&self) -> bool { - if let Some(filter_value_type) = &self.filter_value_type { - validate_sql_string(filter_value_type) - } else { - true - } - } } #[derive(Deserialize, Debug, Clone)] diff --git a/app-server/src/db/projects.rs b/app-server/src/db/projects.rs index 18a34d16..e0805223 100644 --- a/app-server/src/db/projects.rs +++ b/app-server/src/db/projects.rs @@ -1,15 +1,8 @@ use anyhow::Result; -use serde::{Deserialize, Serialize}; -use sqlx::{prelude::FromRow, PgPool}; +use sqlx::PgPool; use uuid::Uuid; -#[derive(Debug, Deserialize, Serialize, FromRow, Clone)] -#[serde(rename_all = "camelCase")] -pub struct Project { - pub id: Option, - pub name: String, - pub workspace_id: Uuid, -} +use crate::projects::Project; pub async fn get_all_projects_for_user(pool: &PgPool, user_id: &Uuid) -> Result> { let projects = sqlx::query_as::<_, Project>( @@ -47,7 +40,12 @@ pub async fn get_project(pool: &PgPool, project_id: &Uuid) -> Result { Ok(project) } -pub async fn create_project(pool: &PgPool, user_id: &Uuid, project: &Project) -> Result { +pub async fn create_project( + pool: &PgPool, + user_id: &Uuid, + name: &str, + workspace_id: Uuid, +) -> Result { // create project only if user is part of the workspace which owns the project let project = sqlx::query_as::<_, Project>( "INSERT INTO projects (name, workspace_id) @@ -61,8 +59,8 @@ pub async fn create_project(pool: &PgPool, user_id: &Uuid, project: &Project) -> RETURNING id, name, workspace_id", ) - .bind(&project.name) - .bind(project.workspace_id) + .bind(name) + .bind(workspace_id) .bind(user_id) .fetch_one(pool) .await?; diff --git a/app-server/src/db/trace.rs b/app-server/src/db/trace.rs index e6bc6cd0..32eb6882 100644 --- a/app-server/src/db/trace.rs +++ b/app-server/src/db/trace.rs @@ -4,34 +4,43 @@ use anyhow::Result; use chrono::{DateTime, TimeZone, Utc}; use regex::Regex; use serde::{Deserialize, Serialize}; -use serde_json::{json, Map, Value}; +use serde_json::{json, Value}; use sqlx::{FromRow, PgPool, Postgres, QueryBuilder}; use uuid::Uuid; use crate::{ db::modifiers::DateRange, - language_model::{ - providers::anthropic::OtelChatMessageContentPart, ChatMessage, ChatMessageContent, - }, + language_model::{ChatMessage, ChatMessageContent, InstrumentationChatMessageContentPart}, opentelemetry::opentelemetry_proto_trace_v1::Span as OtelSpan, pipeline::{nodes::Message, trace::MetaLog}, traces::{ attributes::{ ASSOCIATION_PROPERTIES_PREFIX, GEN_AI_INPUT_COST, GEN_AI_INPUT_TOKENS, GEN_AI_OUTPUT_COST, GEN_AI_OUTPUT_TOKENS, GEN_AI_REQUEST_MODEL, GEN_AI_RESPONSE_MODEL, - GEN_AI_SYSTEM, GEN_AI_TOTAL_COST, SPAN_TYPE, + GEN_AI_SYSTEM, GEN_AI_TOTAL_COST, SPAN_PATH, SPAN_TYPE, }, SpanUsage, }, }; use super::{ - modifiers::Filter, + modifiers::{Filter, FilterOperator}, utils::{add_date_range_to_query, convert_any_value_to_json_value, span_id_to_uuid}, }; const DEFAULT_VERSION: &str = "0.1.0"; +/// Helper struct to pass current trace info, if exists, if pipeline is called from remote trace context +#[derive(Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct CurrentTraceAndSpan { + trace_id: Uuid, + parent_span_id: Uuid, + // Optional for backwards compatibility + #[serde(default)] + parent_span_path: Option, +} + #[derive(sqlx::Type, Deserialize, Serialize, PartialEq, Clone, Debug, Default)] #[sqlx(type_name = "span_type")] pub enum SpanType { @@ -105,7 +114,7 @@ pub struct 
Trace { #[derive(Serialize, sqlx::FromRow)] #[serde(rename_all = "camelCase")] -pub struct TraceWithEvents { +pub struct TraceWithParentSpanAndEvents { id: Uuid, start_time: DateTime<Utc>, end_time: Option<DateTime<Utc>>, @@ -121,6 +130,15 @@ pub struct TraceWithEvents { cost: f64, success: bool, project_id: Uuid, + + // TODO: maybe use `Span` struct with `sqlx::flatten`, + // after figuring out how to handle start_time and end_time + // (which are both in `spans` and `traces` tables) + parent_span_input: Option<Value>, + parent_span_output: Option<Value>, + parent_span_name: Option<String>, + parent_span_type: Option<SpanType>, + // 'events' is a list of partial event objects, using Option because of Coalesce events: Option<Value>, } @@ -192,26 +210,9 @@ pub struct Span { pub span_type: SpanType, pub start_time: DateTime<Utc>, pub end_time: DateTime<Utc>, - pub events: Option<Value>, } -/// List of available fields on the span. This is needed for "export to dataset query" -/// so the frontend can specify which fields to include in the dataset -#[derive(Deserialize)] -#[serde(rename_all = "camelCase")] -pub enum SpanField { - SpanId, - Name, - ParentSpanId, - TraceId, - StartTime, - EndTime, - Input, - Output, - SpanType, -} - pub struct SpanAttributes { pub attributes: HashMap<String, Value>, } @@ -306,7 +307,7 @@ impl SpanAttributes { serde_json::from_value::<SpanType>(span_type.clone()).unwrap_or_default() } else { // quick hack until we figure out how to set span type on auto-instrumentation - if self.attributes.contains_key("gen_ai.system") { + if self.attributes.contains_key(GEN_AI_SYSTEM) { SpanType::LLM } else { SpanType::DEFAULT @@ -314,6 +315,12 @@ } } + pub fn path(&self) -> Option<String> { + self.attributes + .get(SPAN_PATH) + .and_then(|p| p.as_str().map(|s| s.to_string())) + } + pub fn set_usage(&mut self, usage: &SpanUsage) { self.attributes .insert(GEN_AI_INPUT_TOKENS.to_string(), json!(usage.prompt_tokens)); @@ -341,6 +348,26 @@ impl SpanAttributes { .insert(GEN_AI_SYSTEM.to_string(), json!(provider_name)); } } + + /// Extend the span path. + /// + /// This is a hack that lets us avoid changing the Traceloop auto-instrumentation code. It + /// modifies auto-instrumented LLM spans so that they carry the correct span path. + /// + /// NOTE: It may not be precise for Langchain auto-instrumentation, which creates + /// nested spans of its own. It may also not work for auto-instrumented vector DB calls.
+ pub fn extend_span_path(&mut self, span_name: &str) { + if self.attributes.contains_key(GEN_AI_SYSTEM) { + match self.attributes.get(SPAN_PATH) { + Some(serde_json::Value::String(path)) => { + let span_path = format!("{}.{}", path, span_name); + self.attributes + .insert(SPAN_PATH.to_string(), Value::String(span_path)); + } + _ => {} + } + } + } } impl Span { @@ -424,13 +451,15 @@ impl Span { input_messages.push(ChatMessage { role, - content: serde_json::from_str::<Vec<OtelChatMessageContentPart>>(&content) - .map(|parts| { - ChatMessageContent::ContentPartList( - parts.into_iter().map(|part| part.into()).collect(), - ) - }) - .unwrap_or(ChatMessageContent::Text(content.clone())), + content: serde_json::from_str::<Vec<InstrumentationChatMessageContentPart>>( + &content, + ) + .map(|parts| { + ChatMessageContent::ContentPartList( + parts.into_iter().map(|part| part.into()).collect(), + ) + }) + .unwrap_or(ChatMessageContent::Text(content.clone())), }); i += 1; } @@ -461,13 +490,24 @@ impl Span { } pub fn create_parent_span_in_run_trace( - trace_id: Uuid, + current_trace_and_span: Option<CurrentTraceAndSpan>, run_stats: &crate::pipeline::trace::RunTraceStats, - parent_span_id: Option<Uuid>, name: &String, messages: &HashMap<Uuid, Message>, trace_type: TraceType, ) -> Self { + // First, process the current active context (current_trace_and_span), if any. + // If there is an active trace and span, use them; otherwise create a new trace id and leave the parent span id as None. + let trace_id = current_trace_and_span + .as_ref() + .map(|t| t.trace_id) + .unwrap_or_else(Uuid::new_v4); + let parent_span_id = current_trace_and_span.as_ref().map(|t| t.parent_span_id); + let parent_span_path = current_trace_and_span + .as_ref() + .map(|t| t.parent_span_path.clone()) + .flatten(); + let mut inputs = HashMap::new(); let mut outputs = HashMap::new(); messages @@ -481,11 +521,19 @@ impl Span { } _ => (), }); + + let path = if let Some(parent_span_path) = parent_span_path { + format!("{}.{}", parent_span_path, name) + } else { + name.clone() + }; let mut attributes = HashMap::new(); attributes.insert( format!("{ASSOCIATION_PROPERTIES_PREFIX}trace_type",), - trace_type, + json!(trace_type), ); + attributes.insert(SPAN_PATH.to_string(), json!(path)); + Self { span_id: Uuid::new_v4(), start_time: run_stats.start_time, @@ -502,14 +550,27 @@ } } + /// Create spans from messages. + /// + /// At this point, the whole pipeline run acts as a parent span. + /// So the trace id, parent span id, and parent span path are all present (not None).
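A sketch of what extend_span_path does to an auto-instrumented LLM span (values are illustrative); from_messages follows next.

use std::collections::HashMap;
use serde_json::json;

let mut attrs = SpanAttributes {
    attributes: HashMap::from([
        // gen_ai.system marks the span as auto-instrumented LLM output.
        (GEN_AI_SYSTEM.to_string(), json!("openai")),
        (SPAN_PATH.to_string(), json!("my_pipeline.agent")),
    ]),
};
attrs.extend_span_path("openai.chat");
assert_eq!(attrs.path(), Some("my_pipeline.agent.openai.chat".to_string()));
// Spans without gen_ai.system are left untouched.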
pub fn from_messages( messages: &HashMap, trace_id: Uuid, parent_span_id: Uuid, + parent_span_path: String, ) -> Vec { messages .iter() .filter_map(|(msg_id, message)| { + if !(message.node_type.as_str() == "LLM" + || message.node_type.as_str() == "SemanticSearch") + { + return None; + } + + let span_path = format!("{}.{}", parent_span_path, message.node_name); + let input_values = message .input_message_ids .iter() @@ -529,7 +590,7 @@ impl Span { trace_id, parent_span_id: Some(parent_span_id), name: message.node_name.clone(), - attributes: span_attributes_from_meta_log(message.meta_log.clone()), + attributes: span_attributes_from_data(message.meta_log.clone(), span_path), input: Some(serde_json::to_value(input_values).unwrap()), output: Some(message.value.clone().into()), span_type: match message.node_type.as_str() { @@ -538,56 +599,35 @@ impl Span { }, events: None, }; - match message.node_type.as_str() { - "LLM" | "SemanticSearch" => Some(span), - _ => None, - } + Some(span) }) .collect() } +} - pub fn to_json_value(&self, fields: &Vec) -> Value { - if fields.is_empty() { - return Value::Object(Map::new()); - } - - let mut json_map = Map::new(); - - for field in fields { - match field { - SpanField::SpanId => json_map.insert("spanId".to_string(), json!(self.span_id)), - SpanField::Name => json_map.insert("name".to_string(), json!(self.name)), - SpanField::ParentSpanId => { - json_map.insert("parentSpanId".to_string(), json!(self.parent_span_id)) - } - SpanField::TraceId => json_map.insert("traceId".to_string(), json!(self.trace_id)), - SpanField::StartTime => { - json_map.insert("startTime".to_string(), json!(self.start_time)) - } - SpanField::EndTime => json_map.insert("endTime".to_string(), json!(self.end_time)), - SpanField::Input => json_map.insert("input".to_string(), json!(self.input)), - SpanField::Output => json_map.insert("output".to_string(), json!(self.output)), - SpanField::SpanType => { - json_map.insert("spanType".to_string(), json!(self.span_type)) - } - }; - } +fn span_attributes_from_data(meta_log: Option, span_path: String) -> Value { + let mut attributes = HashMap::new(); - Value::Object(json_map) + if let Some(MetaLog::LLM(llm_log)) = meta_log { + attributes.insert( + GEN_AI_INPUT_TOKENS.to_string(), + json!(llm_log.input_token_count), + ); + attributes.insert( + GEN_AI_OUTPUT_TOKENS.to_string(), + json!(llm_log.output_token_count), + ); + attributes.insert(GEN_AI_RESPONSE_MODEL.to_string(), json!(llm_log.model)); + attributes.insert(GEN_AI_SYSTEM.to_string(), json!(llm_log.provider)); + attributes.insert( + GEN_AI_TOTAL_COST.to_string(), + json!(llm_log.approximate_cost), + ); } -} -fn span_attributes_from_meta_log(meta_log: Option) -> Value { - match meta_log { - Some(MetaLog::LLM(llm_log)) => serde_json::json!({ - GEN_AI_INPUT_TOKENS: llm_log.input_token_count, - GEN_AI_OUTPUT_TOKENS: llm_log.output_token_count, - GEN_AI_RESPONSE_MODEL: llm_log.model, - GEN_AI_SYSTEM: llm_log.provider, - GEN_AI_TOTAL_COST: llm_log.approximate_cost, - }), - _ => serde_json::json!({}), - } + attributes.insert(SPAN_PATH.to_string(), json!(span_path)); + + serde_json::to_value(attributes).unwrap() } #[derive(Serialize)] @@ -724,57 +764,7 @@ pub async fn record_span(pool: &PgPool, span: &Span) -> Result<()> { pub fn add_traces_info_expression( query: &mut QueryBuilder, date_range: &Option, -) -> Result<()> { - query.push( - " - traces_info( - id, - start_time, - end_time, - version, - release, - user_id, - session_id, - metadata, - project_id, - total_token_count, - cost, - success, - 
trace_type, - latency, - status - ) AS ( - SELECT - t.id, - t.start_time, - t.end_time, - t.version, - t.release, - t.user_id, - t.session_id, - t.metadata, - t.project_id, - t.total_token_count, - t.cost, - t.success, - t.trace_type, - EXTRACT(EPOCH FROM (t.end_time - t.start_time)), - CASE WHEN t.success = true THEN 'Success' ELSE 'Failed' END - FROM traces t - WHERE start_time IS NOT NULL AND end_time IS NOT NULL ", - ); - - add_date_range_to_query(query, date_range, "t.start_time", Some("t.end_time"))?; - - query.push(")"); - - Ok(()) -} - -pub fn add_traces_info_filtered_by_text( - query: &mut QueryBuilder, - date_range: &Option, - text_search_filter: String, + text_search_filter: Option, project_id: Uuid, ) -> Result<()> { query @@ -795,10 +785,12 @@ pub fn add_traces_info_filtered_by_text( traces.cost, traces.success, traces.trace_type, + spans.parent_span_id, spans.name as span_name, spans.attributes as span_attributes, spans.input as span_input, - spans.output as span_output + spans.output as span_output, + spans.span_type FROM spans JOIN @@ -818,8 +810,19 @@ pub fn add_traces_info_filtered_by_text( )?; query.push(" ),"); + query.push( + "parent_span AS ( + SELECT + span_input, + span_output, + span_name, + span_type, + id trace_id + FROM spans_with_trace + WHERE parent_span_id IS NULL + ),", + ); - // After pushing materialized CTE, we need to push the traces_info CTE query.push( " traces_info AS ( @@ -836,20 +839,24 @@ pub fn add_traces_info_filtered_by_text( total_token_count, cost, success, - EXTRACT(EPOCH FROM (end_time - start_time)) as latency - FROM spans_with_trace st WHERE ", + trace_type, + EXTRACT(EPOCH FROM (end_time - start_time)) as latency, + CASE WHEN success = true THEN 'Success' ELSE 'Failed' END status + FROM spans_with_trace st WHERE 1=1", ); - query - .push("(st.span_input::TEXT ILIKE ") - .push_bind(format!("%{}%", &text_search_filter)) - .push(" OR st.span_output::TEXT ILIKE ") - .push_bind(format!("%{}%", &text_search_filter)) - .push(" OR st.span_name::TEXT ILIKE ") - .push_bind(format!("%{}%", &text_search_filter)) - .push(" OR st.span_attributes::TEXT ILIKE ") - .push_bind(format!("%{}%", &text_search_filter)) - .push(")"); + if let Some(text_search_filter) = text_search_filter { + query + .push("AND (st.span_input::TEXT ILIKE ") + .push_bind(format!("%{text_search_filter}%")) + .push(" OR st.span_output::TEXT ILIKE ") + .push_bind(format!("%{text_search_filter}%")) + .push(" OR st.span_name::TEXT ILIKE ") + .push_bind(format!("%{text_search_filter}%")) + .push(" OR st.span_attributes::TEXT ILIKE ") + .push_bind(format!("%{text_search_filter}%")) + .push(")"); + } query.push(")"); @@ -887,23 +894,27 @@ fn add_filters_to_traces_query(query: &mut QueryBuilder, filters: &Opt log::warn!("Invalid column name: {}", filter.filter_column); return; } - query.push(" AND "); - if let Some(jsonb_prefix) = &filter.jsonb_column { - if &filter.filter_column == "event" && jsonb_prefix == "events" { - // temporary hack to allow for includes queries. - // Front-end hackily sends the array of `{"typeName": "my_event_name"}` objects - // as a stringified JSON array, which we parse here. 
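The text-search branch above relies on push_bind, so user input travels as a bind parameter rather than being spliced into the SQL string. A minimal sketch of the pattern (note the leading space before AND: push concatenates raw fragments verbatim, so "WHERE 1=1" followed directly by "AND ..." without a space would produce invalid SQL):

use sqlx::{Postgres, QueryBuilder};

let mut query = QueryBuilder::<Postgres>::new("SELECT 1 FROM spans st WHERE 1=1");
let text_search_filter = "capital of France"; // user-supplied
query
    .push(" AND (st.input::TEXT ILIKE ")
    .push_bind(format!("%{text_search_filter}%"))
    .push(")");
// The pattern is sent as a bind parameter; the SQL itself only contains $1.
assert_eq!(
    query.into_sql(),
    "SELECT 1 FROM spans st WHERE 1=1 AND (st.input::TEXT ILIKE $1)"
);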
- query.push("trace_events.events @> ").push_bind( - serde_json::from_str::(filter_value_str.as_str()).unwrap(), - ); - return; - } - let mut arg = HashMap::new(); - arg.insert(&filter.filter_column, &filter.filter_value); - let arg = serde_json::to_value(arg).unwrap(); - query.push(jsonb_prefix).push(" @> ").push_bind(arg); + if filter.filter_column.starts_with("event.") { + let template_name = filter.filter_column.strip_prefix("event.").unwrap(); + filter_by_event_value( + query, + template_name.to_string(), + filter.filter_operator.clone(), + filter.filter_value.clone(), + ); return; } + if filter.filter_column.starts_with("label.") { + let label_name = filter.filter_column.strip_prefix("label.").unwrap(); + filter_by_span_label_value( + query, + label_name.to_string(), + filter.filter_operator.clone(), + filter.filter_value.clone(), + ); + return; + } + query.push(" AND "); query.push(&filter.filter_column); query.push(filter.filter_operator.to_sql_operator()); if ["id"] @@ -916,19 +927,59 @@ fn add_filters_to_traces_query(query: &mut QueryBuilder, filters: &Opt .any(|col| col == &filter.filter_column.as_str()) { query.push_bind(filter_value_str.parse::().unwrap_or_default()); + } else if filter.filter_column == "trace_type" { + query.push_bind(filter_value_str); + query.push("::trace_type"); } else { query.push_bind(filter_value_str); } - - if filter.filter_value_type.is_some() && filter.validate_cast_type() { - query - .push("::") - .push(&filter.filter_value_type.clone().unwrap()); - } }); } } +fn filter_by_event_value( + query: &mut QueryBuilder, + template_name: String, + filter_operator: FilterOperator, + event_value: Value, +) { + query.push( + " AND id IN + (SELECT trace_id + FROM spans + JOIN events ON spans.span_id = events.span_id + JOIN event_templates ON events.template_id = event_templates.id + WHERE event_templates.name = + ", + ); + query.push_bind(template_name); + query.push(" AND events.value "); + query.push(filter_operator.to_sql_operator()); + query.push_bind(event_value); + query.push("::jsonb)"); +} + +fn filter_by_span_label_value( + query: &mut QueryBuilder, + label_name: String, + filter_operator: FilterOperator, + label_value: Value, +) { + query.push( + " AND id IN + (SELECT trace_id + FROM spans + JOIN labels ON spans.span_id = labels.span_id + JOIN label_classes ON labels.class_id = label_classes.id + WHERE label_classes.name = ", + ); + query.push_bind(label_name); + query.push(" AND label_classes.value_map ->> labels.value::int4 "); + query.push(filter_operator.to_sql_operator()); + query.push_bind(label_value); + query.push("::text)"); +} + /// Queries traces for a project which match the given filters, with given limit and offset pub async fn get_traces( pool: &PgPool, @@ -938,17 +989,12 @@ pub async fn get_traces( filters: &Option>, date_range: &Option, text_search_filter: Option, -) -> Result> { +) -> Result> { let mut query = QueryBuilder::::new("WITH "); - if let Some(text_search_filter) = text_search_filter { - add_traces_info_filtered_by_text(&mut query, date_range, text_search_filter, project_id)?; - } else { - add_traces_info_expression(&mut query, date_range)?; - }; + add_traces_info_expression(&mut query, date_range, text_search_filter, project_id)?; query.push(", "); query.push(TRACE_EVENTS_EXPRESSION); - // Filtering by project id may be redundant in case of text search filter, but ok for now for simplicity query.push( " SELECT @@ -964,9 +1010,15 @@ pub async fn get_traces( total_token_count, cost, success, - COALESCE(trace_events.events, 
'[]'::jsonb) AS events + COALESCE(trace_events.events, '[]'::jsonb) AS events, + status, + parent_span.span_input as parent_span_input, + parent_span.span_output as parent_span_output, + parent_span.span_name as parent_span_name, + parent_span.span_type as parent_span_type FROM traces_info LEFT JOIN trace_events ON trace_events.trace_id = traces_info.id + JOIN parent_span ON parent_span.trace_id = traces_info.id WHERE project_id = ", ); query.push_bind(project_id); @@ -980,7 +1032,7 @@ pub async fn get_traces( .push_bind(limit as i64); let traces = query - .build_query_as::<'_, TraceWithEvents>() + .build_query_as::<'_, TraceWithParentSpanAndEvents>() .fetch_all(pool) .await?; @@ -996,16 +1048,7 @@ pub async fn count_traces( text_search_filter: Option, ) -> Result { let mut base_query = QueryBuilder::::new("WITH "); - if let Some(text_search_filter) = text_search_filter { - add_traces_info_filtered_by_text( - &mut base_query, - date_range, - text_search_filter, - project_id, - )?; - } else { - add_traces_info_expression(&mut base_query, date_range)?; - }; + add_traces_info_expression(&mut base_query, date_range, text_search_filter, project_id)?; base_query.push(", "); base_query.push(TRACE_EVENTS_EXPRESSION); base_query.push( diff --git a/app-server/src/db/workspace.rs b/app-server/src/db/workspace.rs index ef742014..9208c313 100644 --- a/app-server/src/db/workspace.rs +++ b/app-server/src/db/workspace.rs @@ -2,10 +2,10 @@ use serde::{Deserialize, Serialize}; use sqlx::{FromRow, PgPool}; use uuid::Uuid; -use super::projects::Project; use super::stats::create_usage_stats_for_workspace; +use crate::projects::Project; -#[derive(Debug, Deserialize, Serialize, FromRow)] +#[derive(Deserialize, Serialize, FromRow)] #[serde(rename_all = "camelCase")] pub struct Workspace { pub id: Uuid, @@ -14,7 +14,6 @@ pub struct Workspace { pub is_free_tier: bool, } -// create an error type with multiple variants #[derive(thiserror::Error, Debug)] pub enum WorkspaceError { #[error("User with email {0} not found")] @@ -23,15 +22,9 @@ pub enum WorkspaceError { NotAllowed, #[error("{0}")] UnhandledError(#[from] anyhow::Error), - #[error("Hit limit of maximum {entity:?}: {limit:?}, current usage: {usage:?}")] - LimitReached { - entity: String, - limit: i64, - usage: i64, - }, } -#[derive(Debug, Serialize)] +#[derive(Serialize)] #[serde(rename_all = "camelCase")] pub struct WorkspaceWithProjects { pub id: Uuid, @@ -100,7 +93,7 @@ pub async fn get_owned_workspaces(pool: &PgPool, user_id: &Uuid) -> anyhow::Resu FROM workspaces JOIN subscription_tiers on workspaces.tier_id = subscription_tiers.id - WHERE id IN ( + WHERE workspaces.id IN ( SELECT workspace_id FROM members_of_workspaces WHERE user_id = $1 AND member_role = 'owner'::workspace_role diff --git a/app-server/src/evaluations/mod.rs b/app-server/src/evaluations/mod.rs index 9d34677f..b5614dd8 100644 --- a/app-server/src/evaluations/mod.rs +++ b/app-server/src/evaluations/mod.rs @@ -1 +1 @@ -pub mod stats; +pub mod utils; diff --git a/app-server/src/evaluations/stats.rs b/app-server/src/evaluations/stats.rs deleted file mode 100644 index 6622286a..00000000 --- a/app-server/src/evaluations/stats.rs +++ /dev/null @@ -1,36 +0,0 @@ -use std::collections::HashMap; - -use crate::db::evaluations::EvaluationDatapointScores; - -pub fn calculate_average_scores( - datapoint_scores: Vec, -) -> HashMap { - let mut values_per_score = HashMap::>::new(); - for score in datapoint_scores { - let score: HashMap = serde_json::from_value(score.scores).unwrap_or_default(); - for 
(name, value) in score { - values_per_score - .entry(name) - .and_modify(|values| { - values.push(value); - }) - .or_insert(vec![value]); - } - } - - // Map from score name to average value - let averages = values_per_score - .into_iter() - .map(|(name, values)| { - let length = values.len(); - let mean = if length == 0 { - 0.0 - } else { - values.iter().sum::<f64>() / length as f64 - }; - (name, mean) - }) - .collect::<HashMap<String, f64>>(); - - averages -} diff --git a/app-server/src/evaluations/utils.rs b/app-server/src/evaluations/utils.rs new file mode 100644 index 00000000..8fcfb6fa --- /dev/null +++ b/app-server/src/evaluations/utils.rs @@ -0,0 +1,59 @@ +use std::collections::HashMap; + +use serde::Deserialize; +use serde_json::Value; +use uuid::Uuid; + +#[derive(Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct EvaluationDatapointResult { + pub data: Value, + pub target: Value, + pub executor_output: Option<Value>, + #[serde(default)] + pub trace_id: Uuid, + pub scores: HashMap<String, f64>, +} + +pub struct DatapointColumns { + pub datas: Vec<Value>, + pub targets: Vec<Value>, + pub executor_outputs: Vec<Option<Value>>, + pub trace_ids: Vec<Uuid>, + pub scores: Vec<HashMap<String, f64>>, +} + +pub fn get_columns_from_points(points: &Vec<EvaluationDatapointResult>) -> DatapointColumns { + let datas = points + .iter() + .map(|point| point.data.clone()) + .collect::<Vec<_>>(); + + let targets = points + .iter() + .map(|point| point.target.clone()) + .collect::<Vec<_>>(); + + let executor_outputs = points + .iter() + .map(|point| point.executor_output.clone()) + .collect::<Vec<_>>(); + + let scores = points + .iter() + .map(|point| point.scores.clone()) + .collect::<Vec<_>>(); + + let trace_ids = points + .iter() + .map(|point| point.trace_id) + .collect::<Vec<_>>(); + + DatapointColumns { + datas, + targets, + executor_outputs, + trace_ids, + scores, + } +} diff --git a/app-server/src/language_model/chat_message.rs b/app-server/src/language_model/chat_message.rs index 38cfa0b0..16c72c6b 100644 --- a/app-server/src/language_model/chat_message.rs +++ b/app-server/src/language_model/chat_message.rs @@ -1,10 +1,13 @@ use serde::{Deserialize, Serialize}; + +use super::providers::openai::OpenAIImageUrl; + #[derive(Debug, Serialize, Deserialize, Clone, sqlx::FromRow, PartialEq)] pub struct ChatMessageText { pub text: String, } +/// Chat message image URL #[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] #[serde(rename_all = "camelCase")] pub struct ChatMessageImageUrl { @@ -44,21 +47,6 @@ pub struct ChatMessage { pub content: ChatMessageContent, } -#[derive(Debug, Deserialize)] -pub struct ChatChunkDelta { - pub content: Option<String>, -} - -#[derive(Debug, Deserialize)] -pub struct ChatChunkChoice { - pub delta: ChatChunkDelta, -} - -#[derive(Debug, Deserialize)] -pub struct ChatCompletionChunk { - pub choices: Vec<ChatChunkChoice>, -} - #[derive(Debug, Deserialize)] pub struct ChatChoice { message: ChatMessage, @@ -121,3 +109,58 @@ pub struct ChatUsage { #[serde(default)] pub approximate_cost: Option<f64>, } + +#[derive(Deserialize)] +pub struct InstrumentationChatMessageImageSource { + media_type: String, + data: String, +} + +#[derive(Deserialize)] +pub struct InstrumentationChatMessageImage { + source: InstrumentationChatMessageImageSource, +} + +#[derive(Deserialize)] +#[serde(untagged)] +pub enum InstrumentationImageUrl { + // TODO: Add support for other providers + OpenAIImageUrl(OpenAIImageUrl), +} + +/// Struct to decode any kind of chat message content part from automatic instrumentation by Traceloop. +/// +/// ImageUrl covers the different kinds of image URLs generated by auto-instrumentation.
+#[derive(Deserialize)] +#[serde(tag = "type")] +pub enum InstrumentationChatMessageContentPart { + #[serde(rename = "text")] + Text(ChatMessageText), + #[serde(rename = "image_url")] + ImageUrl(InstrumentationImageUrl), + #[serde(rename = "image")] + Image(InstrumentationChatMessageImage), +} + +impl Into for InstrumentationChatMessageContentPart { + fn into(self) -> ChatMessageContentPart { + match self { + InstrumentationChatMessageContentPart::Text(text) => ChatMessageContentPart::Text(text), + InstrumentationChatMessageContentPart::ImageUrl(image_url) => match image_url { + InstrumentationImageUrl::OpenAIImageUrl(image_url) => { + ChatMessageContentPart::ImageUrl(ChatMessageImageUrl { + url: image_url.image_url.url, + detail: image_url.image_url.detail, + }) + } + }, + InstrumentationChatMessageContentPart::Image(image) => { + let source = image.source; + ChatMessageContentPart::Image(ChatMessageImage { + media_type: source.media_type, + data: source.data, + }) + } + } + } +} diff --git a/app-server/src/language_model/providers/anthropic.rs b/app-server/src/language_model/providers/anthropic.rs index cd189550..ae1c86da 100644 --- a/app-server/src/language_model/providers/anthropic.rs +++ b/app-server/src/language_model/providers/anthropic.rs @@ -4,8 +4,7 @@ use crate::language_model::chat_message::{ChatChoice, ChatCompletion, ChatMessag use crate::language_model::providers::utils::calculate_cost; use crate::language_model::runner::ExecuteChatCompletion; use crate::language_model::{ - ChatMessageContent, ChatMessageContentPart, ChatMessageImage, ChatMessageImageUrl, - ChatMessageText, LanguageModelProviderName, NodeInfo, + ChatMessageContent, ChatMessageContentPart, LanguageModelProviderName, NodeInfo, }; use crate::pipeline::nodes::{NodeStreamChunk, StreamChunk}; use anyhow::Result; @@ -16,46 +15,6 @@ use serde::{Deserialize, Serialize}; use serde_json::{json, Value}; use tokio::sync::mpsc::Sender; -#[derive(Deserialize)] -pub struct OtelChatMessageImageSource { - media_type: String, - data: String, -} - -#[derive(Deserialize)] -pub struct OtelChatMessageImage { - source: OtelChatMessageImageSource, -} - -#[derive(Deserialize)] -#[serde(tag = "type")] -pub enum OtelChatMessageContentPart { - #[serde(rename = "text")] - Text(ChatMessageText), - #[serde(rename = "image_url")] - ImageUrl(ChatMessageImageUrl), - #[serde(rename = "image")] - Image(OtelChatMessageImage), -} - -impl Into for OtelChatMessageContentPart { - fn into(self) -> ChatMessageContentPart { - match self { - OtelChatMessageContentPart::Text(text) => ChatMessageContentPart::Text(text), - OtelChatMessageContentPart::ImageUrl(image_url) => { - ChatMessageContentPart::ImageUrl(image_url) - } - OtelChatMessageContentPart::Image(image) => { - let source = image.source; - ChatMessageContentPart::Image(ChatMessageImage { - media_type: source.media_type, - data: source.data, - }) - } - } - } -} - #[derive(Clone, Debug)] pub struct Anthropic { client: reqwest::Client, @@ -217,7 +176,6 @@ impl ExecuteChatCompletion for Anthropic { let json_user_message = to_value(&user_message)?; body["messages"] = serde_json::json!(vec![json_user_message]); } else { - // Assume message content enum->String will be serialized as string body["system"] = serde_json::json!(messages[0].content); let messages = messages diff --git a/app-server/src/language_model/providers/openai.rs b/app-server/src/language_model/providers/openai.rs index 278bc5e4..a9c8574e 100644 --- a/app-server/src/language_model/providers/openai.rs +++ 
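A sketch of the decode-and-normalize flow: a Traceloop-style content part is tagged with "type", and .into() converts it to the internal ChatMessageContentPart.

use serde_json::json;

let raw = json!({ "type": "text", "text": "hello" });
let part: InstrumentationChatMessageContentPart = serde_json::from_value(raw).unwrap();
let normalized: ChatMessageContentPart = part.into();
match normalized {
    ChatMessageContentPart::Text(t) => assert_eq!(t.text, "hello"),
    _ => unreachable!("a text part normalizes to Text"),
}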
b/app-server/src/language_model/providers/openai.rs @@ -33,6 +33,18 @@ impl OpenAI { } } +#[derive(Deserialize)] +pub struct ImageUrl { + pub url: String, + #[serde(default)] + pub detail: Option, +} + +#[derive(Deserialize)] +pub struct OpenAIImageUrl { + pub image_url: ImageUrl, +} + #[derive(Debug, Deserialize)] struct OpenAIChatCompletion { choices: Vec, @@ -203,11 +215,14 @@ impl ExecuteChatCompletion for OpenAI { let messages = if is_o1 { &messages .iter() - .filter_map(|message| { - if message.role != "system" { - Some(message.clone()) + .map(|message| { + if message.role == "system" { + ChatMessage { + role: "user".to_string(), + content: message.content.clone(), + } } else { - None + message.clone() } }) .collect() diff --git a/app-server/src/main.rs b/app-server/src/main.rs index 4aa9a544..cdd41c26 100644 --- a/app-server/src/main.rs +++ b/app-server/src/main.rs @@ -56,6 +56,7 @@ mod language_model; mod names; mod opentelemetry; mod pipeline; +mod projects; mod routes; mod runtime; mod semantic_search; @@ -63,7 +64,7 @@ mod traces; const DEFAULT_CACHE_SIZE: u64 = 100; // entries -pub fn tonic_error_to_io_error(err: tonic::transport::Error) -> io::Error { +fn tonic_error_to_io_error(err: tonic::transport::Error) -> io::Error { io::Error::new(io::ErrorKind::Other, err) } @@ -103,8 +104,7 @@ fn main() -> anyhow::Result<()> { Arc::new(MokaCache::new(DEFAULT_CACHE_SIZE)); caches.insert(TypeId::of::(), pipeline_version_cache); - let cache = Cache::new(caches); - let cache = Arc::new(cache); + let cache = Arc::new(Cache::new(caches)); let db_url = env::var("DATABASE_URL").expect("DATABASE_URL must be set"); @@ -239,6 +239,8 @@ fn main() -> anyhow::Result<()> { .await .unwrap(); + let name_generator = Arc::new(NameGenerator::new()); + HttpServer::new(move || { let auth = HttpAuthentication::bearer(auth::validator); let project_auth = HttpAuthentication::bearer(auth::project_validator); @@ -260,8 +262,6 @@ fn main() -> anyhow::Result<()> { clickhouse.clone(), )); - let name_generator = Arc::new(NameGenerator::new()); - App::new() .wrap(Logger::default()) .wrap(NormalizePath::trim()) @@ -287,8 +287,6 @@ fn main() -> anyhow::Result<()> { .service(api::v1::pipelines::run_pipeline_graph) .service(api::v1::traces::get_events_for_session) .service(api::v1::evaluations::create_evaluation) - .service(api::v1::evaluations::upload_evaluation_datapoints) - .service(api::v1::evaluations::update_evaluation) .service(api::v1::metrics::process_metrics) .service(api::v1::traces::process_traces) .app_data(PayloadConfig::new(10 * 1024 * 1024)), @@ -356,7 +354,6 @@ fn main() -> anyhow::Result<()> { .service(routes::pipelines::get_pipeline_version) .service(routes::pipelines::get_version) .service(routes::pipelines::get_templates) - .service(routes::pipelines::create_template) .service(routes::pipelines::run_pipeline_interrupt_graph) .service(routes::pipelines::update_target_pipeline_version) .service(routes::api_keys::create_project_api_key) @@ -365,7 +362,10 @@ fn main() -> anyhow::Result<()> { .service(routes::evaluations::get_evaluation) .service(routes::evaluations::delete_evaluation) .service(routes::evaluations::get_evaluation_datapoint) - .service(routes::evaluations::get_evaluation_stats) + .service(routes::evaluations::get_evaluation_score_stats) + .service( + routes::evaluations::get_evaluation_score_distribution, + ) .service(routes::datasets::get_datasets) .service(routes::datasets::create_dataset) .service(routes::datasets::get_dataset) @@ -379,20 +379,17 @@ fn main() -> anyhow::Result<()> { 
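A sketch of the new o1 branch in openai.rs above: system messages are rewritten as user messages instead of being dropped, since o1 models reject the system role.

let messages = vec![
    ChatMessage {
        role: "system".to_string(),
        content: ChatMessageContent::Text("be terse".to_string()),
    },
    ChatMessage {
        role: "user".to_string(),
        content: ChatMessageContent::Text("hi".to_string()),
    },
];
let mapped: Vec<ChatMessage> = messages
    .iter()
    .map(|m| {
        if m.role == "system" {
            ChatMessage { role: "user".to_string(), content: m.content.clone() }
        } else {
            m.clone()
        }
    })
    .collect();
assert!(mapped.iter().all(|m| m.role == "user"));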
.service(routes::datasets::delete_all_datapoints) .service(routes::datasets::index_dataset) .service(routes::evaluations::get_evaluations) - .service(routes::evaluations::get_finished_evaluation_infos) .service(routes::evaluations::get_evaluation) .service(routes::evaluations::get_evaluation_datapoint) .service(routes::traces::get_traces) .service(routes::traces::get_single_trace) .service(routes::traces::get_single_span) - .service(routes::traces::get_trace_id_for_span) .service(routes::traces::get_sessions) .service(routes::labels::create_label_class) .service(routes::labels::get_label_types) .service(routes::labels::get_span_labels) .service(routes::labels::update_span_label) .service(routes::labels::delete_span_label) - .service(routes::traces::export_span) .service(routes::events::get_event_templates) .service(routes::events::get_event_template) .service(routes::events::update_event_template) diff --git a/app-server/src/opentelemetry/mod.rs b/app-server/src/opentelemetry/mod.rs index c63fb49d..37329e20 100644 --- a/app-server/src/opentelemetry/mod.rs +++ b/app-server/src/opentelemetry/mod.rs @@ -1,3 +1,4 @@ +// This file is @generated by prost-build. pub mod opentelemetry { pub mod proto { pub mod collector { diff --git a/app-server/src/opentelemetry/opentelemetry.proto.collector.trace.v1.rs b/app-server/src/opentelemetry/opentelemetry.proto.collector.trace.v1.rs index 80c59b64..6bfa29bc 100644 --- a/app-server/src/opentelemetry/opentelemetry.proto.collector.trace.v1.rs +++ b/app-server/src/opentelemetry/opentelemetry.proto.collector.trace.v1.rs @@ -1,4 +1,4 @@ -#[allow(clippy::derive_partial_eq_without_eq)] +// This file is @generated by prost-build. #[derive(Clone, PartialEq, ::prost::Message)] pub struct ExportTraceServiceRequest { /// An array of ResourceSpans. @@ -11,7 +11,6 @@ pub struct ExportTraceServiceRequest { super::super::super::super::super::opentelemetry_proto_trace_v1::ResourceSpans, >, } -#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ExportTraceServiceResponse { /// The details of a partially successful export request. @@ -32,7 +31,6 @@ pub struct ExportTraceServiceResponse { #[prost(message, optional, tag = "1")] pub partial_success: ::core::option::Option, } -#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ExportTracePartialSuccess { /// The number of rejected spans. @@ -53,38 +51,49 @@ pub struct ExportTracePartialSuccess { } /// Generated server implementations. pub mod trace_service_server { - #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] + #![allow( + unused_variables, + dead_code, + missing_docs, + clippy::wildcard_imports, + clippy::let_unit_value, + )] use tonic::codegen::*; /// Generated trait containing gRPC methods that should be implemented for use with TraceServiceServer. #[async_trait] - pub trait TraceService: Send + Sync + 'static { + pub trait TraceService: std::marker::Send + std::marker::Sync + 'static { /// For performance reasons, it is recommended to keep this RPC /// alive for the entire life of the application. async fn export( &self, request: tonic::Request, - ) -> Result, tonic::Status>; + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; } /// Service that can be used to push spans between one Application instrumented with /// OpenTelemetry and a collector, or between a collector and a central collector (in this /// case spans are sent/received to/from multiple Applications). 
#[derive(Debug)] - pub struct TraceServiceServer { - inner: _Inner, + pub struct TraceServiceServer { + inner: Arc, accept_compression_encodings: EnabledCompressionEncodings, send_compression_encodings: EnabledCompressionEncodings, + max_decoding_message_size: Option, + max_encoding_message_size: Option, } - struct _Inner(Arc); - impl TraceServiceServer { + impl TraceServiceServer { pub fn new(inner: T) -> Self { Self::from_arc(Arc::new(inner)) } pub fn from_arc(inner: Arc) -> Self { - let inner = _Inner(inner); Self { inner, accept_compression_encodings: Default::default(), send_compression_encodings: Default::default(), + max_decoding_message_size: None, + max_encoding_message_size: None, } } pub fn with_interceptor( @@ -108,12 +117,28 @@ pub mod trace_service_server { self.send_compression_encodings.enable(encoding); self } + /// Limits the maximum size of a decoded message. + /// + /// Default: `4MB` + #[must_use] + pub fn max_decoding_message_size(mut self, limit: usize) -> Self { + self.max_decoding_message_size = Some(limit); + self + } + /// Limits the maximum size of an encoded message. + /// + /// Default: `usize::MAX` + #[must_use] + pub fn max_encoding_message_size(mut self, limit: usize) -> Self { + self.max_encoding_message_size = Some(limit); + self + } } impl tonic::codegen::Service> for TraceServiceServer where T: TraceService, - B: Body + Send + 'static, - B::Error: Into + Send + 'static, + B: Body + std::marker::Send + 'static, + B::Error: Into + std::marker::Send + 'static, { type Response = http::Response; type Error = std::convert::Infallible; @@ -121,11 +146,10 @@ pub mod trace_service_server { fn poll_ready( &mut self, _cx: &mut Context<'_>, - ) -> Poll> { + ) -> Poll> { Poll::Ready(Ok(())) } fn call(&mut self, req: http::Request) -> Self::Future { - let inner = self.inner.clone(); match req.uri().path() { "/opentelemetry.proto.collector.trace.v1.TraceService/Export" => { #[allow(non_camel_case_types)] @@ -143,22 +167,29 @@ pub mod trace_service_server { &mut self, request: tonic::Request, ) -> Self::Future { - let inner = self.0.clone(); - let fut = async move { (*inner).export(request).await }; + let inner = Arc::clone(&self.0); + let fut = async move { + ::export(&inner, request).await + }; Box::pin(fut) } } let accept_compression_encodings = self.accept_compression_encodings; let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { - let inner = inner.0; let method = ExportSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) .apply_compression_config( accept_compression_encodings, send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, ); let res = grpc.unary(method, req).await; Ok(res) @@ -167,40 +198,39 @@ pub mod trace_service_server { } _ => { Box::pin(async move { - Ok( - http::Response::builder() - .status(200) - .header("grpc-status", "12") - .header("content-type", "application/grpc") - .body(empty_body()) - .unwrap(), - ) + let mut response = http::Response::new(empty_body()); + let headers = response.headers_mut(); + headers + .insert( + tonic::Status::GRPC_STATUS, + (tonic::Code::Unimplemented as i32).into(), + ); + headers + .insert( + http::header::CONTENT_TYPE, + tonic::metadata::GRPC_CONTENT_TYPE, + ); + Ok(response) }) } } } } - impl 
Clone for TraceServiceServer { + impl Clone for TraceServiceServer { fn clone(&self) -> Self { let inner = self.inner.clone(); Self { inner, accept_compression_encodings: self.accept_compression_encodings, send_compression_encodings: self.send_compression_encodings, + max_decoding_message_size: self.max_decoding_message_size, + max_encoding_message_size: self.max_encoding_message_size, } } } - impl Clone for _Inner { - fn clone(&self) -> Self { - Self(self.0.clone()) - } - } - impl std::fmt::Debug for _Inner { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{:?}", self.0) - } - } - impl tonic::server::NamedService for TraceServiceServer { - const NAME: &'static str = "opentelemetry.proto.collector.trace.v1.TraceService"; + /// Generated gRPC service name + pub const SERVICE_NAME: &str = "opentelemetry.proto.collector.trace.v1.TraceService"; + impl tonic::server::NamedService for TraceServiceServer { + const NAME: &'static str = SERVICE_NAME; } } diff --git a/app-server/src/opentelemetry/opentelemetry_proto_common_v1.rs b/app-server/src/opentelemetry/opentelemetry_proto_common_v1.rs index f2450f76..e9c1a576 100644 --- a/app-server/src/opentelemetry/opentelemetry_proto_common_v1.rs +++ b/app-server/src/opentelemetry/opentelemetry_proto_common_v1.rs @@ -1,7 +1,7 @@ +// This file is @generated by prost-build. /// AnyValue is used to represent any type of attribute value. AnyValue may contain a /// primitive value such as a string or integer or it may contain an arbitrary nested /// object containing arrays, key-value lists and primitives. -#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct AnyValue { /// The value is one of the listed fields. It is valid for all values to be unspecified @@ -13,7 +13,6 @@ pub struct AnyValue { pub mod any_value { /// The value is one of the listed fields. It is valid for all values to be unspecified /// in which case this AnyValue is considered to be "empty". - #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum Value { #[prost(string, tag = "1")] @@ -34,7 +33,6 @@ pub mod any_value { } /// ArrayValue is a list of AnyValue messages. We need ArrayValue as a message /// since oneof in AnyValue does not allow repeated fields. -#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ArrayValue { /// Array of values. The array may be empty (contain 0 elements). @@ -46,7 +44,6 @@ pub struct ArrayValue { /// a list of KeyValue messages (e.g. in Span) we use `repeated KeyValue` directly to /// avoid unnecessary extra wrapping (which slows down the protocol). The 2 approaches /// are semantically equivalent. -#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct KeyValueList { /// A collection of key/value pairs of key-value pairs. The list may be empty (may @@ -58,7 +55,6 @@ pub struct KeyValueList { } /// KeyValue is a key-value pair that is used to store Span attributes, Link /// attributes, etc. -#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct KeyValue { #[prost(string, tag = "1")] @@ -68,7 +64,6 @@ pub struct KeyValue { } /// InstrumentationScope is a message representing the instrumentation scope information /// such as the fully qualified name and version. 
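A sketch of what the regenerated builder methods above enable at the call site; TraceServiceImpl stands in for this app's implementation of the generated TraceService trait, and the limits shown are illustrative:

let svc = TraceServiceServer::new(TraceServiceImpl::default())
    // OTLP trace batches can exceed tonic's 4MB decode default.
    .max_decoding_message_size(16 * 1024 * 1024)
    .max_encoding_message_size(16 * 1024 * 1024);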
-#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct InstrumentationScope { /// An empty instrumentation scope name means the name is unknown. diff --git a/app-server/src/opentelemetry/opentelemetry_proto_resource_v1.rs b/app-server/src/opentelemetry/opentelemetry_proto_resource_v1.rs index a6d41aa8..9b804594 100644 --- a/app-server/src/opentelemetry/opentelemetry_proto_resource_v1.rs +++ b/app-server/src/opentelemetry/opentelemetry_proto_resource_v1.rs @@ -1,5 +1,5 @@ +// This file is @generated by prost-build. /// Resource information. -#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Resource { /// Set of attributes that describe the resource. diff --git a/app-server/src/opentelemetry/opentelemetry_proto_trace_v1.rs b/app-server/src/opentelemetry/opentelemetry_proto_trace_v1.rs index acfc7e64..b989815d 100644 --- a/app-server/src/opentelemetry/opentelemetry_proto_trace_v1.rs +++ b/app-server/src/opentelemetry/opentelemetry_proto_trace_v1.rs @@ -1,3 +1,4 @@ +// This file is @generated by prost-build. /// TracesData represents the traces data that can be stored in a persistent storage, /// OR can be embedded by other protocols that transfer OTLP traces data but do /// not implement the OTLP protocol. @@ -8,7 +9,6 @@ /// /// When new fields are added into this message, the OTLP request MUST be updated /// as well. -#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct TracesData { /// An array of ResourceSpans. @@ -20,7 +20,6 @@ pub struct TracesData { pub resource_spans: ::prost::alloc::vec::Vec, } /// A collection of ScopeSpans from a Resource. -#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ResourceSpans { /// The resource for the spans in this message. @@ -41,7 +40,6 @@ pub struct ResourceSpans { pub schema_url: ::prost::alloc::string::String, } /// A collection of Spans produced by an InstrumentationScope. -#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ScopeSpans { /// The instrumentation scope information for the spans in this message. @@ -64,7 +62,6 @@ pub struct ScopeSpans { /// A Span represents a single operation performed by a single component of the system. /// /// The next available field id is 17. -#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Span { /// A unique identifier for a trace. All spans from the same trace share @@ -194,7 +191,6 @@ pub struct Span { pub mod span { /// Event is a time-stamped annotation of the span, consisting of user-supplied /// text description and key-value pairs. - #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Event { /// time_unix_nano is the time the event occurred. @@ -220,7 +216,6 @@ pub mod span { /// different trace. For example, this can be used in batching operations, /// where a single batch handler processes multiple requests from different /// traces or when the handler receives a request from a different project. - #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Link { /// A unique identifier of a trace that this linked span is part of. The ID is a @@ -307,12 +302,12 @@ pub mod span { /// (if the ProtoBuf definition does not change) and safe for programmatic use. 
pub fn as_str_name(&self) -> &'static str { match self { - SpanKind::Unspecified => "SPAN_KIND_UNSPECIFIED", - SpanKind::Internal => "SPAN_KIND_INTERNAL", - SpanKind::Server => "SPAN_KIND_SERVER", - SpanKind::Client => "SPAN_KIND_CLIENT", - SpanKind::Producer => "SPAN_KIND_PRODUCER", - SpanKind::Consumer => "SPAN_KIND_CONSUMER", + Self::Unspecified => "SPAN_KIND_UNSPECIFIED", + Self::Internal => "SPAN_KIND_INTERNAL", + Self::Server => "SPAN_KIND_SERVER", + Self::Client => "SPAN_KIND_CLIENT", + Self::Producer => "SPAN_KIND_PRODUCER", + Self::Consumer => "SPAN_KIND_CONSUMER", } } /// Creates an enum from field names used in the ProtoBuf definition. @@ -331,7 +326,6 @@ pub mod span { } /// The Status type defines a logical error model that is suitable for different /// programming environments, including REST APIs and RPC APIs. -#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Status { /// A developer-facing human readable error message. @@ -373,9 +367,9 @@ pub mod status { /// (if the ProtoBuf definition does not change) and safe for programmatic use. pub fn as_str_name(&self) -> &'static str { match self { - StatusCode::Unset => "STATUS_CODE_UNSET", - StatusCode::Ok => "STATUS_CODE_OK", - StatusCode::Error => "STATUS_CODE_ERROR", + Self::Unset => "STATUS_CODE_UNSET", + Self::Ok => "STATUS_CODE_OK", + Self::Error => "STATUS_CODE_ERROR", } } /// Creates an enum from field names used in the ProtoBuf definition. @@ -424,10 +418,10 @@ impl SpanFlags { /// (if the ProtoBuf definition does not change) and safe for programmatic use. pub fn as_str_name(&self) -> &'static str { match self { - SpanFlags::DoNotUse => "SPAN_FLAGS_DO_NOT_USE", - SpanFlags::TraceFlagsMask => "SPAN_FLAGS_TRACE_FLAGS_MASK", - SpanFlags::ContextHasIsRemoteMask => "SPAN_FLAGS_CONTEXT_HAS_IS_REMOTE_MASK", - SpanFlags::ContextIsRemoteMask => "SPAN_FLAGS_CONTEXT_IS_REMOTE_MASK", + Self::DoNotUse => "SPAN_FLAGS_DO_NOT_USE", + Self::TraceFlagsMask => "SPAN_FLAGS_TRACE_FLAGS_MASK", + Self::ContextHasIsRemoteMask => "SPAN_FLAGS_CONTEXT_HAS_IS_REMOTE_MASK", + Self::ContextIsRemoteMask => "SPAN_FLAGS_CONTEXT_IS_REMOTE_MASK", } } /// Creates an enum from field names used in the ProtoBuf definition. 
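[Note on the generated enum helpers above: the `SpanKind::* => ...` arms becoming `Self::* => ...` is a purely cosmetic change from newer prost-build; behavior is unchanged. A minimal round-trip sketch, assuming the usual prost-build output in which `from_str_name` is the generated inverse of `as_str_name` and returns `Option<Self>`:

    let kind = SpanKind::Server;
    assert_eq!(kind.as_str_name(), "SPAN_KIND_SERVER");
    assert_eq!(SpanKind::from_str_name("SPAN_KIND_SERVER"), Some(SpanKind::Server));
]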
diff --git a/app-server/src/pipeline/runner.rs b/app-server/src/pipeline/runner.rs index 02fdae56..9f6f4100 100644 --- a/app-server/src/pipeline/runner.rs +++ b/app-server/src/pipeline/runner.rs @@ -2,7 +2,7 @@ use std::{collections::HashSet, sync::Arc}; use crate::{ api::v1::traces::RabbitMqSpanMessage, - db::trace::{Span, TraceType}, + db::trace::{CurrentTraceAndSpan, Span, TraceType}, engine::{engine::EngineOutput, Engine}, routes::pipelines::GraphInterruptMessage, traces::{OBSERVATIONS_EXCHANGE, OBSERVATIONS_ROUTING_KEY}, @@ -197,8 +197,7 @@ impl PipelineRunner { run_output: &Result, project_id: &Uuid, pipeline_version_name: &String, - parent_span_id: Option, - trace_id: Option, + current_trace_and_span: Option, trace_type: Option, ) -> Result<()> { let engine_output = match run_output { @@ -208,9 +207,8 @@ impl PipelineRunner { }; let run_stats = RunTraceStats::from_messages(&engine_output.messages); let parent_span = Span::create_parent_span_in_run_trace( - trace_id.unwrap_or_else(Uuid::new_v4), + current_trace_and_span, &run_stats, - parent_span_id, pipeline_version_name, &engine_output.messages, trace_type.unwrap_or_default(), @@ -220,6 +218,7 @@ impl PipelineRunner { &engine_output.messages, parent_span.trace_id, parent_span.span_id, + parent_span.get_attributes().path().unwrap(), ); let parent_span_mq_message = RabbitMqSpanMessage { project_id: *project_id, diff --git a/app-server/src/projects/mod.rs b/app-server/src/projects/mod.rs new file mode 100644 index 00000000..0967c721 --- /dev/null +++ b/app-server/src/projects/mod.rs @@ -0,0 +1,59 @@ +use std::sync::Arc; + +use anyhow::Result; +use serde::{Deserialize, Serialize}; +use sqlx::{prelude::FromRow, PgPool}; +use uuid::Uuid; + +use crate::{ + cache::Cache, + db::{self, user::User}, + semantic_search::SemanticSearch, +}; + +#[derive(Deserialize, Serialize, FromRow, Clone)] +#[serde(rename_all = "camelCase")] +pub struct Project { + pub id: Uuid, + pub name: String, + pub workspace_id: Uuid, +} + +pub async fn create_project( + pool: &PgPool, + cache: Arc, + semantic_search: Arc, + user_id: &Uuid, + name: &str, + workspace_id: Uuid, +) -> Result { + let project = db::projects::create_project(pool, &user_id, name, workspace_id).await?; + log::info!( + "Created new project: id: {}, name: {}, workspace_id: {}", + project.id, + project.name, + project.workspace_id + ); + + let workspace_api_keys = + db::workspace::get_user_api_keys_in_workspace(pool, &project.workspace_id).await?; + + // Invalidate user cache for all users in workspace + for api_key in workspace_api_keys { + let remove_res = cache.remove::(&api_key).await; + match remove_res { + Ok(_) => log::info!( + "Invalidated user cache for user in workspace: {}", + project.workspace_id + ), + Err(e) => log::error!("Could not invalidate user cache for user: {}", e), + } + } + + semantic_search + .create_collection(project.id.to_string()) + .await?; + log::info!("Created new index collection for project: {}", project.id); + + Ok(project) +} diff --git a/app-server/src/routes/auth.rs b/app-server/src/routes/auth.rs index 3b7a76c1..c3768afd 100644 --- a/app-server/src/routes/auth.rs +++ b/app-server/src/routes/auth.rs @@ -1,11 +1,10 @@ use actix_web::{post, web, HttpResponse}; +use anyhow::Result; use log::info; use serde::{Deserialize, Serialize}; -use uuid::Uuid; use crate::{ db::{ - self, user::{get_api_key_for_user_from_email, write_api_key, write_user, ApiKey, User}, utils::generate_random_key, DB, @@ -55,23 +54,54 @@ async fn signin(params: web::Json, db: web::Data) -> 
ResponseR user_id, name: String::from("default"), }; + validate_user_email(&user.email)?; - // TODO: Validate email before creating user write_user(&db.pool, &user.id, &user.email, &user.name).await?; write_api_key(&db.pool, &api_key.api_key, &api_key.user_id, &api_key.name).await?; - // create new workspace for user - let workspace_id = Uuid::new_v4(); - let workspace_name = format!("{}'s workspace", name); - let workspace = - db::workspace::create_new_workspace(&db.pool, workspace_id, workspace_name).await?; - info!("Created new workspace: {:?}", workspace); - db::workspace::add_owner_to_workspace(&db.pool, &user_id, &workspace.id).await?; - info!("Added user to workspace: {:?}", workspace); - let res = SignInResponse { api_key: api_key.api_key, is_new_user_created: true, }; + Ok(HttpResponse::Ok().json(res)) } + +fn validate_user_email(email: &str) -> Result<()> { + let email_regex = + regex::Regex::new(r"^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$").unwrap(); + if !email_regex.is_match(email) { + return Err(anyhow::anyhow!("Invalid email format")); + } + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_validate_user_email_valid() { + assert!(validate_user_email("user@example.com").is_ok()); + assert!(validate_user_email("user.name+tag@example.co.uk").is_ok()); + assert!(validate_user_email("user123@subdomain.example.com").is_ok()); + } + + #[test] + fn test_validate_user_email_invalid() { + assert!(validate_user_email("").is_err()); + assert!(validate_user_email("smth").is_err()); + assert!(validate_user_email("user@").is_err()); + assert!(validate_user_email("user@.com").is_err()); + assert!(validate_user_email("@example.com").is_err()); + assert!(validate_user_email("user@example").is_err()); + assert!(validate_user_email("user@exam ple.com").is_err()); + } + + #[test] + fn test_validate_user_email_edge_cases() { + assert!(validate_user_email("a@b.co").is_ok()); + assert!(validate_user_email("user+tag@example.museum").is_ok()); + assert!(validate_user_email("user@123.456.789.0").is_err()); + } +} diff --git a/app-server/src/routes/error.rs b/app-server/src/routes/error.rs index 455bf597..e7e9c56a 100644 --- a/app-server/src/routes/error.rs +++ b/app-server/src/routes/error.rs @@ -84,27 +84,12 @@ Set the target version for the pipeline in the pipeline builder."), error_message: Some(Value::String(format!("User not found: {}", email))), } } - - pub fn limit_error(error_message: &str) -> Self { - Self::RequestError { - error_code: "api.LimitReached".to_string(), - error_message: Some(Value::String(error_message.to_string())), - } - } } pub fn workspace_error_to_http_error(e: WorkspaceError) -> Error { match e { WorkspaceError::UserNotFound(email) => Error::user_not_found(email), WorkspaceError::UnhandledError(e) => Error::InternalAnyhowError(e), - WorkspaceError::LimitReached { - entity, - limit, - usage, - } => Error::limit_error(&format!( - "Limit reached for {}. Limit: {}. 
Current {}: {}", - entity, limit, entity, usage - )), WorkspaceError::NotAllowed => Error::Forbidden, } } diff --git a/app-server/src/routes/evaluations.rs b/app-server/src/routes/evaluations.rs index 7782b5ce..311b783f 100644 --- a/app-server/src/routes/evaluations.rs +++ b/app-server/src/routes/evaluations.rs @@ -2,13 +2,22 @@ use actix_web::{delete, get, web, HttpResponse}; use serde::{Deserialize, Serialize}; use uuid::Uuid; -use crate::db::{ - evaluations::{self, Evaluation, EvaluationDatapointPreview}, - DB, +use crate::{ + ch::evaluation_scores::{ + get_average_evaluation_score, get_evaluation_score_buckets_based_on_bounds, + get_global_evaluation_scores_bounds, EvaluationScoreBucket, + }, + db::{ + evaluations::{self, Evaluation, EvaluationDatapointPreview}, + DB, + }, }; use super::ResponseResult; +const DEFAULT_LOWER_BOUND: f64 = 0.0; +const DEFAULT_BUCKET_COUNT: u64 = 10; + #[delete("evaluations/{evaluation_id}")] async fn delete_evaluation(path: web::Path<(Uuid, Uuid)>, db: web::Data) -> ResponseResult { let (_project_id, evaluation_id) = path.into_inner(); @@ -17,39 +26,37 @@ async fn delete_evaluation(path: web::Path<(Uuid, Uuid)>, db: web::Data) -> Ok(HttpResponse::Ok().finish()) } -#[get("evaluations")] -async fn get_evaluations(db: web::Data, path: web::Path) -> ResponseResult { - let project_id = path.into_inner(); - let evaluations = evaluations::get_evaluations(&db.pool, project_id).await?; - Ok(HttpResponse::Ok().json(evaluations)) -} - #[derive(Deserialize)] #[serde(rename_all = "camelCase")] -pub struct GetEvaluationInfos { - pub only_finished: bool, - pub exclude_id: Uuid, +pub struct GetEvaluationsQuery { + #[serde(default)] + current_evaluation_id: Option, } -#[get("evaluation-infos")] -async fn get_finished_evaluation_infos( - project_id: web::Path, +#[get("evaluations")] +async fn get_evaluations( db: web::Data, - req: web::Query, + path: web::Path, + query: web::Query, ) -> ResponseResult { - let project_id = project_id.into_inner(); - let req = req.into_inner(); - let only_finished = req.only_finished; - let exclude_id = req.exclude_id; - - if !only_finished { - return Err(anyhow::anyhow!("Only finished evaluations are supported").into()); - } - - let evaluation_infos = - evaluations::get_finished_evaluation_infos(&db.pool, project_id, exclude_id).await?; + let project_id = path.into_inner(); + let query = query.into_inner(); + let current_evaluation_id = query.current_evaluation_id; + + let evaluations = match current_evaluation_id { + Some(current_evaluation_id) => { + // TODO: Currently, this query takes care of filtering out by group id, need to make it more explicit + evaluations::get_evaluations_grouped_by_current_evaluation( + &db.pool, + project_id, + current_evaluation_id, + ) + .await? 
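+            // These rows are the evaluations sharing the current evaluation's
+            // group id, so the comparison picker only offers comparable runs.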
+ } + None => evaluations::get_evaluations(&db.pool, project_id).await?, + }; - Ok(HttpResponse::Ok().json(evaluation_infos)) + Ok(HttpResponse::Ok().json(evaluations)) } #[derive(Serialize)] @@ -101,14 +108,131 @@ async fn get_evaluation_datapoint( Ok(HttpResponse::Ok().json(result)) } -#[get("evaluations/{evaluation_id}/stats")] -async fn get_evaluation_stats(path: web::Path<(Uuid, Uuid)>, db: web::Data) -> ResponseResult { - let (project_id, evaluation_id) = path.into_inner(); - let db = db.into_inner(); +#[derive(Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct GetEvaluationScoreStatsQuery { + evaluation_id: Uuid, + score_name: String, +} + +#[derive(Serialize)] +#[serde(rename_all = "camelCase")] +pub struct GetEvaluationScoreStatsResponse { + average_value: f64, +} - let evaluation = evaluations::get_evaluation(db.clone(), project_id, evaluation_id).await?; - // For now, if the evaluation is not finished, just return the empty averages so that frontend doesn't crash - let averages = evaluation.average_scores.unwrap_or_default(); +#[get("evaluation-score-stats")] +async fn get_evaluation_score_stats( + path: web::Path, + clickhouse: web::Data, + query: web::Query, +) -> ResponseResult { + let project_id = path.into_inner(); + let clickhouse = clickhouse.into_inner().as_ref().clone(); + let query = query.into_inner(); + let evaluation_id = query.evaluation_id; + let score_name = query.score_name; + + let average_value = + get_average_evaluation_score(clickhouse, project_id, evaluation_id, score_name).await?; + + let response = GetEvaluationScoreStatsResponse { average_value }; + Ok(HttpResponse::Ok().json(response)) +} + +#[derive(Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct GetEvaluationScoreDistributionQuery { + evaluation_ids: String, + score_name: String, +} + +#[derive(Serialize)] +#[serde(rename_all = "camelCase")] +pub struct GetEvaluationScoreDistributionResponseBucket { + lower_bound: f64, + upper_bound: f64, + /// Heights in the same order as the evaluation ids provided in the request + heights: Vec, +} + +/// Get the score distribution where global lower and upper bounds for all requested evaluation ids are calculated +/// +/// Currently, distributes into 10 buckets +#[get("evaluation-score-distribution")] +async fn get_evaluation_score_distribution( + path: web::Path, + clickhouse: web::Data, + query: web::Query, +) -> ResponseResult { + let project_id = path.into_inner(); + let clickhouse = clickhouse.into_inner().as_ref().clone(); + let query = query.into_inner(); + let score_name = query.score_name; + let evaluation_ids_str = query.evaluation_ids; + + let evaluation_ids = evaluation_ids_str + .split(',') + .map(|id| Uuid::parse_str(id).unwrap()) + .collect::>(); + if evaluation_ids.is_empty() { + return Err(anyhow::anyhow!("No evaluation ids provided").into()); + } + + // Get bounds among all evaluations + let global_bounds = get_global_evaluation_scores_bounds( + clickhouse.clone(), + project_id, + &evaluation_ids, + score_name.clone(), + ) + .await?; + // TODO: Figure out better way to handle this in both backend and frontend + if global_bounds.upper_bound < DEFAULT_LOWER_BOUND { + return Err(anyhow::anyhow!( + "Upper bound is less than lower bound: {} < {}", + global_bounds.upper_bound, + DEFAULT_LOWER_BOUND + ) + .into()); + } + + let evaluation_buckets: Vec> = + futures::future::try_join_all(evaluation_ids.into_iter().map(|evaluation_id| { + let clickhouse = clickhouse.clone(); + let score_name = score_name.clone(); + async move { + 
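+                    // One bounded-bucket query per evaluation id; try_join_all
+                    // polls them concurrently, and the first failing ClickHouse
+                    // query aborts the whole distribution request.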
get_evaluation_score_buckets_based_on_bounds( + clickhouse, + project_id, + evaluation_id, + score_name, + DEFAULT_LOWER_BOUND, + global_bounds.upper_bound, + DEFAULT_BUCKET_COUNT, + ) + .await + } + })) + .await?; + + let mut res_buckets: Vec = Vec::new(); + + for i in 0..DEFAULT_BUCKET_COUNT as usize { + // Simply get the lower and upper bounds from the first evaluation, since they are the same for all evaluations + let lower_bound = evaluation_buckets[0][i].lower_bound; + let upper_bound = evaluation_buckets[0][i].upper_bound; + + let mut heights: Vec = Vec::new(); + for buckets in &evaluation_buckets { + heights.push(buckets[i].height); + } + res_buckets.push(GetEvaluationScoreDistributionResponseBucket { + lower_bound, + upper_bound, + heights, + }); + } - Ok(HttpResponse::Ok().json(averages)) + Ok(HttpResponse::Ok().json(res_buckets)) } diff --git a/app-server/src/routes/labels.rs b/app-server/src/routes/labels.rs index b5f6bb3e..32e1fd6d 100644 --- a/app-server/src/routes/labels.rs +++ b/app-server/src/routes/labels.rs @@ -3,7 +3,12 @@ use serde::Deserialize; use serde_json::Value; use uuid::Uuid; -use crate::db::{self, labels::LabelType, user::User, DB}; +use crate::db::{ + self, + labels::{LabelSource, LabelType}, + user::User, + DB, +}; use super::ResponseResult; @@ -21,6 +26,8 @@ struct CreateLabelClassRequest { name: String, label_type: LabelType, value_map: Vec, + #[serde(default)] + description: Option, } #[post("label-classes")] @@ -34,11 +41,41 @@ pub async fn create_label_class( let name = req.name; let label_type = req.label_type; let value_map = req.value_map; + let description = req.description; let id = Uuid::new_v4(); + let label_class = db::labels::create_label_class( + &db.pool, + id, + name, + project_id, + &label_type, + value_map, + description, + ) + .await?; + + Ok(HttpResponse::Ok().json(label_class)) +} + +#[derive(Deserialize)] +#[serde(rename_all = "camelCase")] +struct UpdateLabelClassRequest { + description: Option, +} + +#[post("label-classes/{class_id}")] +pub async fn update_label_class( + path: web::Path<(Uuid, Uuid)>, + req: web::Json, + db: web::Data, +) -> ResponseResult { + let (project_id, class_id) = path.into_inner(); + let req = req.into_inner(); + let description = req.description; + let label_class = - db::labels::create_label_class(&db.pool, id, name, project_id, &label_type, value_map) - .await?; + db::labels::update_label_class(&db.pool, project_id, class_id, description).await?; Ok(HttpResponse::Ok().json(label_class)) } @@ -63,7 +100,15 @@ pub async fn update_span_label( let value = req.value; let user_id = user.id; - let label = db::labels::update_span_label(&db.pool, span_id, value, user_id, class_id).await?; + let label = db::labels::update_span_label( + &db.pool, + span_id, + value, + Some(user_id), + class_id, + LabelSource::MANUAL, + ) + .await?; Ok(HttpResponse::Ok().json(label)) } diff --git a/app-server/src/routes/projects.rs b/app-server/src/routes/projects.rs index 5f0ea7de..e5da569a 100644 --- a/app-server/src/routes/projects.rs +++ b/app-server/src/routes/projects.rs @@ -2,11 +2,13 @@ use std::sync::Arc; use actix_web::{delete, get, post, web, HttpResponse}; use log::{error, info}; +use serde::Deserialize; use uuid::Uuid; use crate::{ cache::Cache, db::{self, user::User, DB}, + projects, routes::ResponseResult, semantic_search::SemanticSearch, }; @@ -39,7 +41,10 @@ async fn delete_project( let project = db::projects::get_project(&db.pool, &project_id).await?; db::projects::delete_project(&db.pool, &project_id).await?; - 
info!("Deleted project: {:?}", project); + info!( + "Deleted project: id: {}, name: {}, workspace_id: {}", + project.id, project.name, project.workspace_id + ); let user_keys = db::workspace::get_user_api_keys_in_workspace(&db.pool, &project.workspace_id).await?; @@ -63,39 +68,33 @@ async fn delete_project( Ok(HttpResponse::Ok().finish()) } +#[derive(Deserialize)] +#[serde(rename_all = "camelCase")] +struct CreateProjectRequest { + name: String, + workspace_id: Uuid, +} + #[post("")] async fn create_project( user: User, db: web::Data, cache: web::Data, - project: web::Json, semantic_search: web::Data>, + req: web::Json, ) -> ResponseResult { - let project = db::projects::create_project(&db.pool, &user.id, &project).await?; - info!("Created new project: {:?}", project); - - let workspace_api_keys = - db::workspace::get_user_api_keys_in_workspace(&db.pool, &project.workspace_id).await?; - - // Invalidate user cache for all users in workspace - for api_key in workspace_api_keys { - let remove_res = cache.remove::(&api_key).await; - match remove_res { - Ok(_) => info!( - "Invalidated user cache for user in workspace: {}", - project.workspace_id - ), - Err(e) => error!("Could not invalidate user cache for user: {}", e), - } - } - - semantic_search - .create_collection(project.id.unwrap().to_string()) - .await?; - info!( - "Created new index collection for project: {}", - project.id.unwrap() - ); - + let req = req.into_inner(); + let cache = cache.into_inner(); + let semantic_search = semantic_search.into_inner().as_ref().clone(); + + let project = projects::create_project( + &db.pool, + cache.clone(), + semantic_search.clone(), + &user.id, + &req.name, + req.workspace_id, + ) + .await?; Ok(HttpResponse::Ok().json(project)) } diff --git a/app-server/src/routes/traces.rs b/app-server/src/routes/traces.rs index 2f47e742..0b5360e2 100644 --- a/app-server/src/routes/traces.rs +++ b/app-server/src/routes/traces.rs @@ -1,15 +1,13 @@ use super::{GetMetricsQueryParams, ResponseResult}; use super::{PaginatedGetQueryParams, PaginatedResponse, DEFAULT_PAGE_SIZE}; use crate::ch::utils::get_bounds; -use crate::db::datasets; -use crate::db::trace::SpanField; use crate::{ ch::{self, modifiers::GroupByInterval, Aggregation}, db::{ self, events::EventWithTemplateName, modifiers::{DateRange, Filter, RelativeDateInterval}, - trace::{Session, Span, Trace, TraceWithEvents}, + trace::{Session, Span, Trace, TraceWithParentSpanAndEvents}, DB, }, }; @@ -35,8 +33,6 @@ pub async fn get_traces( filter_column: "trace_type".to_string(), filter_operator: db::modifiers::FilterOperator::Eq, filter_value: Value::String("DEFAULT".to_string()), - jsonb_column: None, - filter_value_type: Some(String::from("trace_type")), }); let date_range = query_params.date_range; let text_search_filter = query_params.search; @@ -65,7 +61,7 @@ pub async fn get_traces( true }; - let response = PaginatedResponse:: { + let response = PaginatedResponse:: { total_count, items: traces, any_in_project, @@ -120,63 +116,6 @@ pub async fn get_single_span(params: web::Path<(Uuid, Uuid)>, db: web::Data) Ok(HttpResponse::Ok().json(span_with_events)) } -#[derive(Deserialize)] -#[serde(rename_all = "camelCase")] -struct ExportSpanRequest { - /// Dataset ID to export the span to. If not provided, - /// name for new dataset must be provided. - #[serde(default)] - dataset_id: Option, - /// Name for new dataset to create and export the span to. Only read if - /// dataset_id is not provided. 
- #[serde(default)] - dataset_name: Option, - - fields: Vec, -} - -#[post("spans/{span_id}/export")] -pub async fn export_span( - path: web::Path<(Uuid, Uuid)>, - req: web::Json, - db: web::Data, -) -> ResponseResult { - let (project_id, span_id) = path.into_inner(); - let req = req.into_inner(); - if req.dataset_id.is_none() && req.dataset_name.is_none() { - return Err(anyhow::anyhow!("Either dataset_id or dataset_name must be provided").into()); - } - - let dataset_id = if let Some(dataset_id) = req.dataset_id { - dataset_id - } else { - let dataset = - datasets::create_dataset(&db.pool, &req.dataset_name.unwrap(), project_id).await?; - dataset.id - }; - - let span = db::trace::get_span(&db.pool, span_id).await?; - let json_span = span.to_json_value(&req.fields); - db::datapoints::insert_raw_data(&db.pool, &dataset_id, &[json_span].to_vec()).await?; - - Ok(HttpResponse::Ok().finish()) -} - -#[get("trace-id-for-span/{span_id}")] -pub async fn get_trace_id_for_span( - params: web::Path<(Uuid, Uuid)>, - db: web::Data, -) -> ResponseResult { - let (_project_id, span_id) = params.into_inner(); - - // TODO: if querying the entire span with input and output is inefficient, - // we can just query the trace_id in a separate db function - let span = db::trace::get_span(&db.pool, span_id).await?; - let trace_id = span.trace_id; - - Ok(HttpResponse::Ok().json(trace_id)) -} - #[derive(Deserialize)] #[serde(rename_all = "camelCase")] enum TraceMetric { diff --git a/app-server/src/routes/workspace.rs b/app-server/src/routes/workspace.rs index 4cff2351..f37377b9 100644 --- a/app-server/src/routes/workspace.rs +++ b/app-server/src/routes/workspace.rs @@ -1,3 +1,5 @@ +use std::sync::Arc; + use actix_web::{get, post, web, HttpResponse}; use serde::Deserialize; use uuid::Uuid; @@ -8,10 +10,12 @@ use crate::{ db::{ self, user::{get_by_email, User}, - workspace::WorkspaceError, + workspace::{WorkspaceError, WorkspaceWithProjects}, DB, }, + projects, routes::ResponseResult, + semantic_search::SemanticSearch, }; #[get("")] @@ -35,36 +39,63 @@ struct AddUserRequest { } #[derive(Deserialize)] +#[serde(rename_all = "camelCase")] struct CreateWorkspaceRequest { name: String, + #[serde(default)] + project_name: Option, } #[post("")] async fn create_workspace( user: User, db: web::Data, + cache: web::Data, + semantic_search: web::Data>, req: web::Json, ) -> ResponseResult { - let name = req.into_inner().name; - - let max_workspaces = 1; // for now, we only allow one workspace per user - let created_workspaces = db::workspace::get_owned_workspaces(&db.pool, &user.id) - .await? 
- .len() as i64; - - if max_workspaces > 0 && created_workspaces >= max_workspaces { - return Err(workspace_error_to_http_error( - WorkspaceError::LimitReached { - entity: "workspaces".to_string(), - limit: max_workspaces, - usage: created_workspaces, - }, - )); - } + let req = req.into_inner(); + let name = req.name; + let project_name = req.project_name; + + let cache = cache.into_inner(); + let semantic_search = semantic_search.into_inner().as_ref().clone(); + let workspace = db::workspace::create_new_workspace(&db.pool, Uuid::new_v4(), name).await?; + log::info!( + "Created new workspace: id {}, name {}, tier_name {}, is_free_tier {}", + workspace.id, + workspace.name, + workspace.tier_name, + workspace.is_free_tier + ); db::workspace::add_owner_to_workspace(&db.pool, &user.id, &workspace.id).await?; + log::info!("Added owner {} to workspace: {}", user.id, workspace.id); + + let projects = if let Some(project_name) = project_name { + let project = projects::create_project( + &db.pool, + cache.clone(), + semantic_search.clone(), + &user.id, + &project_name, + workspace.id, + ) + .await?; + + vec![project] + } else { + vec![] + }; - Ok(HttpResponse::Ok().json(workspace)) + let response = WorkspaceWithProjects { + id: workspace.id, + name: workspace.name, + tier_name: workspace.tier_name, + projects, + }; + + Ok(HttpResponse::Ok().json(response)) } #[post("{workspace_id}/users")] diff --git a/app-server/src/semantic_search/semantic_search_grpc.rs b/app-server/src/semantic_search/semantic_search_grpc.rs index 97843d99..32d023c8 100644 --- a/app-server/src/semantic_search/semantic_search_grpc.rs +++ b/app-server/src/semantic_search/semantic_search_grpc.rs @@ -1,4 +1,4 @@ -#[allow(clippy::derive_partial_eq_without_eq)] +// This file is @generated by prost-build. #[derive(Clone, PartialEq, ::prost::Message)] pub struct IndexRequest { #[prost(message, repeated, tag = "1")] @@ -10,7 +10,6 @@ pub struct IndexRequest { } /// Nested message and enum types in `IndexRequest`. 
pub mod index_request { - #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Datapoint { #[prost(string, tag = "1")] @@ -26,13 +25,11 @@ pub mod index_request { pub id: ::prost::alloc::string::String, } } -#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct IndexResponse { #[prost(string, tag = "1")] pub status: ::prost::alloc::string::String, } -#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct DeleteEmbeddingsRequest { #[prost(message, repeated, tag = "1")] @@ -42,13 +39,11 @@ pub struct DeleteEmbeddingsRequest { #[prost(enumeration = "Model", tag = "3")] pub model: i32, } -#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct DeleteEmbeddingsResponse { #[prost(string, tag = "1")] pub status: ::prost::alloc::string::String, } -#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct RequestPayload { #[prost(map = "string, string", tag = "1")] @@ -57,7 +52,6 @@ pub struct RequestPayload { ::prost::alloc::string::String, >, } -#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct QueryRequest { #[prost(string, tag = "1")] @@ -73,7 +67,6 @@ pub struct QueryRequest { #[prost(enumeration = "Model", tag = "6")] pub model: i32, } -#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct QueryResponse { #[prost(message, repeated, tag = "1")] @@ -81,7 +74,6 @@ pub struct QueryResponse { } /// Nested message and enum types in `QueryResponse`. pub mod query_response { - #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct QueryPoint { #[prost(float, tag = "1")] @@ -97,7 +89,6 @@ pub mod query_response { >, } } -#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct GenerateEmbeddingsRequest { #[prost(string, repeated, tag = "1")] @@ -105,7 +96,6 @@ pub struct GenerateEmbeddingsRequest { #[prost(enumeration = "Model", tag = "2")] pub model: i32, } -#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct GenerateEmbeddingsResponse { #[prost(message, repeated, tag = "1")] @@ -113,14 +103,12 @@ pub struct GenerateEmbeddingsResponse { } /// Nested message and enum types in `GenerateEmbeddingsResponse`. pub mod generate_embeddings_response { - #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Embeddings { #[prost(float, repeated, tag = "1")] pub values: ::prost::alloc::vec::Vec, } } -#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct CalculateSimilarityScoresRequest { #[prost(message, repeated, tag = "1")] @@ -132,7 +120,6 @@ pub struct CalculateSimilarityScoresRequest { } /// Nested message and enum types in `CalculateSimilarityScoresRequest`. 
pub mod calculate_similarity_scores_request { - #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ComparedContents { #[prost(string, tag = "1")] @@ -141,13 +128,11 @@ pub mod calculate_similarity_scores_request { pub second: ::prost::alloc::string::String, } } -#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct CalculateSimilarityScoresResponse { #[prost(float, repeated, tag = "1")] pub scores: ::prost::alloc::vec::Vec, } -#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct CreateCollectionRequest { #[prost(string, tag = "1")] @@ -155,19 +140,16 @@ pub struct CreateCollectionRequest { #[prost(enumeration = "Model", tag = "2")] pub model: i32, } -#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct CreateCollectionResponse { #[prost(string, tag = "1")] pub status: ::prost::alloc::string::String, } -#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct DeleteCollectionsRequest { #[prost(string, tag = "1")] pub collection_name: ::prost::alloc::string::String, } -#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct DeleteCollectionsResponse { #[prost(string, tag = "1")] @@ -187,8 +169,8 @@ impl Model { /// (if the ProtoBuf definition does not change) and safe for programmatic use. pub fn as_str_name(&self) -> &'static str { match self { - Model::GteBase => "GTE_BASE", - Model::CohereMultilingual => "COHERE_MULTILINGUAL", + Self::GteBase => "GTE_BASE", + Self::CohereMultilingual => "COHERE_MULTILINGUAL", } } /// Creates an enum from field names used in the ProtoBuf definition. @@ -202,7 +184,13 @@ impl Model { } /// Generated client implementations. pub mod semantic_search_client { - #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] + #![allow( + unused_variables, + dead_code, + missing_docs, + clippy::wildcard_imports, + clippy::let_unit_value, + )] use tonic::codegen::*; use tonic::codegen::http::Uri; #[derive(Debug, Clone)] @@ -213,7 +201,7 @@ pub mod semantic_search_client { /// Attempt to create a new client by connecting to a given endpoint. pub async fn connect(dst: D) -> Result where - D: std::convert::TryInto, + D: TryInto, D::Error: Into, { let conn = tonic::transport::Endpoint::new(dst)?.connect().await?; @@ -224,8 +212,8 @@ pub mod semantic_search_client { where T: tonic::client::GrpcService, T::Error: Into, - T::ResponseBody: Body + Send + 'static, - ::Error: Into + Send, + T::ResponseBody: Body + std::marker::Send + 'static, + ::Error: Into + std::marker::Send, { pub fn new(inner: T) -> Self { let inner = tonic::client::Grpc::new(inner); @@ -250,7 +238,7 @@ pub mod semantic_search_client { >, , - >>::Error: Into + Send + Sync, + >>::Error: Into + std::marker::Send + std::marker::Sync, { SemanticSearchClient::new(InterceptedService::new(inner, interceptor)) } @@ -269,17 +257,32 @@ pub mod semantic_search_client { self.inner = self.inner.accept_compressed(encoding); self } + /// Limits the maximum size of a decoded message. + /// + /// Default: `4MB` + #[must_use] + pub fn max_decoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_decoding_message_size(limit); + self + } + /// Limits the maximum size of an encoded message. 
+ /// + /// Default: `usize::MAX` + #[must_use] + pub fn max_encoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_encoding_message_size(limit); + self + } /// Embeds datapoints and adds them to the storage. pub async fn index( &mut self, request: impl tonic::IntoRequest, - ) -> Result, tonic::Status> { + ) -> std::result::Result, tonic::Status> { self.inner .ready() .await .map_err(|e| { - tonic::Status::new( - tonic::Code::Unknown, + tonic::Status::unknown( format!("Service was not ready: {}", e.into()), ) })?; @@ -287,19 +290,24 @@ pub mod semantic_search_client { let path = http::uri::PathAndQuery::from_static( "/semantic_search_grpc.SemanticSearch/Index", ); - self.inner.unary(request.into_request(), path, codec).await + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("semantic_search_grpc.SemanticSearch", "Index")); + self.inner.unary(req, path, codec).await } /// Deletes the embeddings pub async fn delete_embeddings( &mut self, request: impl tonic::IntoRequest, - ) -> Result, tonic::Status> { + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { self.inner .ready() .await .map_err(|e| { - tonic::Status::new( - tonic::Code::Unknown, + tonic::Status::unknown( format!("Service was not ready: {}", e.into()), ) })?; @@ -307,19 +315,26 @@ pub mod semantic_search_client { let path = http::uri::PathAndQuery::from_static( "/semantic_search_grpc.SemanticSearch/DeleteEmbeddings", ); - self.inner.unary(request.into_request(), path, codec).await + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new( + "semantic_search_grpc.SemanticSearch", + "DeleteEmbeddings", + ), + ); + self.inner.unary(req, path, codec).await } /// Queries the index for similar text. pub async fn query( &mut self, request: impl tonic::IntoRequest, - ) -> Result, tonic::Status> { + ) -> std::result::Result, tonic::Status> { self.inner .ready() .await .map_err(|e| { - tonic::Status::new( - tonic::Code::Unknown, + tonic::Status::unknown( format!("Service was not ready: {}", e.into()), ) })?; @@ -327,19 +342,24 @@ pub mod semantic_search_client { let path = http::uri::PathAndQuery::from_static( "/semantic_search_grpc.SemanticSearch/Query", ); - self.inner.unary(request.into_request(), path, codec).await + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("semantic_search_grpc.SemanticSearch", "Query")); + self.inner.unary(req, path, codec).await } /// Creates a new collection. pub async fn create_collection( &mut self, request: impl tonic::IntoRequest, - ) -> Result, tonic::Status> { + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { self.inner .ready() .await .map_err(|e| { - tonic::Status::new( - tonic::Code::Unknown, + tonic::Status::unknown( format!("Service was not ready: {}", e.into()), ) })?; @@ -347,19 +367,29 @@ pub mod semantic_search_client { let path = http::uri::PathAndQuery::from_static( "/semantic_search_grpc.SemanticSearch/CreateCollection", ); - self.inner.unary(request.into_request(), path, codec).await + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new( + "semantic_search_grpc.SemanticSearch", + "CreateCollection", + ), + ); + self.inner.unary(req, path, codec).await } /// Delete collection. 
pub async fn delete_collections( &mut self, request: impl tonic::IntoRequest, - ) -> Result, tonic::Status> { + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { self.inner .ready() .await .map_err(|e| { - tonic::Status::new( - tonic::Code::Unknown, + tonic::Status::unknown( format!("Service was not ready: {}", e.into()), ) })?; @@ -367,19 +397,29 @@ pub mod semantic_search_client { let path = http::uri::PathAndQuery::from_static( "/semantic_search_grpc.SemanticSearch/DeleteCollections", ); - self.inner.unary(request.into_request(), path, codec).await + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new( + "semantic_search_grpc.SemanticSearch", + "DeleteCollections", + ), + ); + self.inner.unary(req, path, codec).await } /// Generates embeddings for provided texts pub async fn generate_embeddings( &mut self, request: impl tonic::IntoRequest, - ) -> Result, tonic::Status> { + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { self.inner .ready() .await .map_err(|e| { - tonic::Status::new( - tonic::Code::Unknown, + tonic::Status::unknown( format!("Service was not ready: {}", e.into()), ) })?; @@ -387,13 +427,21 @@ pub mod semantic_search_client { let path = http::uri::PathAndQuery::from_static( "/semantic_search_grpc.SemanticSearch/GenerateEmbeddings", ); - self.inner.unary(request.into_request(), path, codec).await + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new( + "semantic_search_grpc.SemanticSearch", + "GenerateEmbeddings", + ), + ); + self.inner.unary(req, path, codec).await } /// Calculate similarity score for pairs of texts pub async fn calculate_similarity_scores( &mut self, request: impl tonic::IntoRequest, - ) -> Result< + ) -> std::result::Result< tonic::Response, tonic::Status, > { @@ -401,8 +449,7 @@ pub mod semantic_search_client { .ready() .await .map_err(|e| { - tonic::Status::new( - tonic::Code::Unknown, + tonic::Status::unknown( format!("Service was not ready: {}", e.into()), ) })?; @@ -410,7 +457,15 @@ pub mod semantic_search_client { let path = http::uri::PathAndQuery::from_static( "/semantic_search_grpc.SemanticSearch/CalculateSimilarityScores", ); - self.inner.unary(request.into_request(), path, codec).await + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new( + "semantic_search_grpc.SemanticSearch", + "CalculateSimilarityScores", + ), + ); + self.inner.unary(req, path, codec).await } } } diff --git a/app-server/src/traces/attributes.rs b/app-server/src/traces/attributes.rs index f8913f20..1d45e077 100644 --- a/app-server/src/traces/attributes.rs +++ b/app-server/src/traces/attributes.rs @@ -25,6 +25,7 @@ pub const GEN_AI_OUTPUT_COST: &str = "gen_ai.usage.output_cost"; // Custom lmnr attributes pub const ASSOCIATION_PROPERTIES_PREFIX: &str = "lmnr.association.properties."; pub const SPAN_TYPE: &str = "lmnr.span.type"; +pub const SPAN_PATH: &str = "lmnr.span.path"; pub const EVENT_TYPE: &str = "lmnr.event.type"; pub const EVENT_DATA: &str = "lmnr.event.data"; pub const EVENT_ENV: &str = "lmnr.event.env"; diff --git a/app-server/src/traces/events.rs b/app-server/src/traces/events.rs index f15bdb93..b944e517 100644 --- a/app-server/src/traces/events.rs +++ b/app-server/src/traces/events.rs @@ -222,7 +222,6 @@ pub async fn evaluate_event( &project_id, &format!("{}.{}", evaluate_event.evaluator, pipeline_version.name), None, - None, Some(db::trace::TraceType::EVENT), ) .await?; diff --git a/app-server/src/traces/mod.rs 
b/app-server/src/traces/mod.rs index d4f659df..a5d739e4 100644 --- a/app-server/src/traces/mod.rs +++ b/app-server/src/traces/mod.rs @@ -92,9 +92,11 @@ pub async fn observation_collector( trace_attributes.add_cost(span_usage.total_cost); trace_attributes.add_tokens(span_usage.total_tokens); span_attributes.set_usage(&span_usage); - span.set_attributes(&span_attributes); } + span_attributes.extend_span_path(&span.name); + span.set_attributes(&span_attributes); + let update_attrs_res = trace::update_trace_attributes( &db.pool, &rabbitmq_span_message.project_id, diff --git a/clickhouse/001000-initial.sql b/clickhouse/001000-initial.sql index dc527061..7235e7bb 100644 --- a/clickhouse/001000-initial.sql +++ b/clickhouse/001000-initial.sql @@ -16,12 +16,12 @@ CREATE TABLE spans prompt_tokens Int64, completion_tokens Int64, total_tokens Int64, - user_id String + user_id String, + path String DEFAULT '' ) ENGINE = MergeTree() ORDER BY (project_id, start_time, trace_id, span_id) -SETTINGS index_granularity = 8192 -SETTINGS flatten_nested=0; +SETTINGS index_granularity = 8192; CREATE TABLE events ( @@ -35,4 +35,16 @@ CREATE TABLE events ( ) ENGINE MergeTree() ORDER BY (project_id, template_id, id) -SETTINGS index_granularity = 8192 SETTINGS flatten_nested=0 +SETTINGS index_granularity = 8192 SETTINGS flatten_nested=0; + +CREATE TABLE evaluation_scores ( + project_id UUID, + group_id String, + evaluation_id UUID, + result_id UUID, + name String, + value Float64 +) ENGINE = MergeTree() +ORDER BY (project_id, group_id, evaluation_id, name) +SETTINGS index_granularity = 8192 +SETTINGS flatten_nested=0; diff --git a/frontend/app/api/limits/user/workspaces/route.ts b/frontend/app/api/limits/user/workspaces/route.ts deleted file mode 100644 index 07804b6d..00000000 --- a/frontend/app/api/limits/user/workspaces/route.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { getServerSession } from 'next-auth' -import { authOptions } from '@/lib/auth' -import { fetcher } from '@/lib/utils'; - -export async function GET(): Promise { - - const session = await getServerSession(authOptions) - const user = session!.user - - return await fetcher(`/limits/user/workspaces`, { - method: 'GET', - headers: { - 'Content-Type': 'application/json', - Authorization: `Bearer ${user.apiKey}` - }, - }) -} diff --git a/frontend/app/api/projects/[projectId]/evaluation-score-distribution/route.ts b/frontend/app/api/projects/[projectId]/evaluation-score-distribution/route.ts new file mode 100644 index 00000000..5b10e193 --- /dev/null +++ b/frontend/app/api/projects/[projectId]/evaluation-score-distribution/route.ts @@ -0,0 +1,17 @@ +import { getServerSession } from 'next-auth' +import { authOptions } from '@/lib/auth' +import { fetcher } from '@/lib/utils'; +import { NextRequest } from 'next/server'; + +export async function GET(req: NextRequest, { params }: { params: { projectId: string } }): Promise { + const projectId = params.projectId; + const session = await getServerSession(authOptions) + const user = session!.user + + return await fetcher(`/projects/${projectId}/evaluation-score-distribution?${req.nextUrl.searchParams.toString()}`, { + method: 'GET', + headers: { + Authorization: `Bearer ${user.apiKey}` + }, + }) +} diff --git a/frontend/app/api/projects/[projectId]/evaluations/[evaluationId]/stats/route.ts b/frontend/app/api/projects/[projectId]/evaluation-score-stats/route.ts similarity index 57% rename from frontend/app/api/projects/[projectId]/evaluations/[evaluationId]/stats/route.ts rename to 
frontend/app/api/projects/[projectId]/evaluation-score-stats/route.ts index 21d5f973..9480170f 100644 --- a/frontend/app/api/projects/[projectId]/evaluations/[evaluationId]/stats/route.ts +++ b/frontend/app/api/projects/[projectId]/evaluation-score-stats/route.ts @@ -1,14 +1,14 @@ import { getServerSession } from 'next-auth' import { authOptions } from '@/lib/auth' import { fetcher } from '@/lib/utils'; +import { NextRequest } from 'next/server'; -export async function GET(req: Request, { params }: { params: { projectId: string, evaluationId: string } }): Promise { +export async function GET(req: NextRequest, { params }: { params: { projectId: string } }): Promise { const projectId = params.projectId; - const evaluationId = params.evaluationId; const session = await getServerSession(authOptions) const user = session!.user - return await fetcher(`/projects/${projectId}/evaluations/${evaluationId}/stats`, { + return await fetcher(`/projects/${projectId}/evaluation-score-stats?${req.nextUrl.searchParams.toString()}`, { method: 'GET', headers: { Authorization: `Bearer ${user.apiKey}` diff --git a/frontend/app/api/projects/[projectId]/evaluations/route.ts b/frontend/app/api/projects/[projectId]/evaluations/route.ts index d6e1656f..d5f1aa77 100644 --- a/frontend/app/api/projects/[projectId]/evaluations/route.ts +++ b/frontend/app/api/projects/[projectId]/evaluations/route.ts @@ -14,19 +14,3 @@ export async function GET(req: Request, { params }: { params: { projectId: strin }, }) } - -export async function POST(req: Request, { params }: { params: { projectId: string } }): Promise { - const projectId = params.projectId; - const session = await getServerSession(authOptions) - const user = session!.user - const body = await req.json() - - return await fetcher(`/projects/${projectId}/evaluations`, { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - Authorization: `Bearer ${user.apiKey}` - }, - body: JSON.stringify(body) - }) -} diff --git a/frontend/app/api/projects/[projectId]/event-templates/route.ts b/frontend/app/api/projects/[projectId]/event-templates/route.ts index bc5bed69..8e2b8490 100644 --- a/frontend/app/api/projects/[projectId]/event-templates/route.ts +++ b/frontend/app/api/projects/[projectId]/event-templates/route.ts @@ -22,3 +22,22 @@ export async function POST(req: Request, { params }: { params: { projectId: stri return res } + +export async function GET(req: Request, { params }: { params: { projectId: string } }): Promise { + + const projectId = params.projectId; + + const session = await getServerSession(authOptions) + const user = session!.user + + const res = await fetcher(`/projects/${projectId}/event-templates`, { + method: 'GET', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${user.apiKey}` + }, + }) + + return res +} + diff --git a/frontend/app/api/projects/[projectId]/prompt-copilot/run/route.ts b/frontend/app/api/projects/[projectId]/prompt-copilot/run/route.ts deleted file mode 100644 index 4e7034c3..00000000 --- a/frontend/app/api/projects/[projectId]/prompt-copilot/run/route.ts +++ /dev/null @@ -1,32 +0,0 @@ -import { getServerSession } from 'next-auth' -import { authOptions } from '@/lib/auth' -import { type NextRequest } from 'next/server' - - -export async function POST(req: NextRequest, { params }: { params: { projectId: string } }): Promise { - - const session = await getServerSession(authOptions) - - if (!session) { - return new Response(null, { status: 401 }) - } - - const body = await req.json() - - const res = 
await fetch(`${process.env.BACKEND_URL}/v1/endpoint/run`, { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - Authorization: `Bearer ${process.env.LAMINAR_API_KEY}` - }, - body: JSON.stringify({ - ...body, - env: { - OPENAI_API_KEY: process.env.OPENAI_API_KEY, - }, - endpoint: 'prompt_copilot', - }) - }) - - return res -} diff --git a/frontend/app/api/projects/[projectId]/trace-id-for-span/[spanId]/route.ts b/frontend/app/api/projects/[projectId]/trace-id-for-span/[spanId]/route.ts deleted file mode 100644 index 8583f82a..00000000 --- a/frontend/app/api/projects/[projectId]/trace-id-for-span/[spanId]/route.ts +++ /dev/null @@ -1,18 +0,0 @@ -import { authOptions } from "@/lib/auth"; -import { fetcher } from "@/lib/utils"; -import { getServerSession } from "next-auth"; - -export async function GET(req: Request, { params }: { params: { projectId: string, spanId: string } }): Promise { - const projectId = params.projectId; - const spanId = params.spanId; - - const session = await getServerSession(authOptions) - const user = session!.user - - return fetcher(`/projects/${projectId}/trace-id-for-span/${spanId}`, { - method: 'GET', - headers: { - Authorization: `Bearer ${user.apiKey}` - }, - }) -} \ No newline at end of file diff --git a/frontend/app/on-sign-up/page.tsx b/frontend/app/on-sign-up/page.tsx new file mode 100644 index 00000000..1d58b81a --- /dev/null +++ b/frontend/app/on-sign-up/page.tsx @@ -0,0 +1,33 @@ +import { authOptions } from '@/lib/auth'; +import { getServerSession } from 'next-auth'; +import { redirect } from 'next/navigation'; + +import { UserContextProvider } from '@/contexts/user-context'; +import { Metadata } from 'next'; +import OnboardingHeader from '@/components/onboarding/onboarding-header'; +import CreateFirstWorkspaceAndProject from '@/components/onboarding/create-first-workspace-and-project'; + +export const metadata: Metadata = { + title: 'Create workspace and project', +} + +export default async function ProjectsPage() { + + const session = await getServerSession(authOptions); + if (!session) { + redirect('/sign-in'); + } + if (!session.user.isNewUserCreated) { + redirect('/projects'); + } + const user = session.user; + + return ( + +
+    <UserContextProvider>
+      <OnboardingHeader />
+      <CreateFirstWorkspaceAndProject />
+    </UserContextProvider>
+ ) +} diff --git a/frontend/app/project/[projectId]/evaluations/[evaluationId]/page.tsx b/frontend/app/project/[projectId]/evaluations/[evaluationId]/page.tsx index 53f56492..c40e6b0a 100644 --- a/frontend/app/project/[projectId]/evaluations/[evaluationId]/page.tsx +++ b/frontend/app/project/[projectId]/evaluations/[evaluationId]/page.tsx @@ -3,24 +3,13 @@ import { getServerSession } from 'next-auth'; import { redirect } from 'next/navigation'; import { Metadata } from 'next'; import { fetcherJSON } from '@/lib/utils'; -import { EvaluationResultsInfo } from '@/lib/evaluation/types'; import Evaluation from '@/components/evaluation/evaluation'; -const URL_QUERY_PARAMS = { - COMPARE_EVAL_ID: 'comparedEvaluationId', -} - export const metadata: Metadata = { title: 'Evaluation results', } -export default async function EvaluationPage({ - params, - searchParams, -}: { - params: { projectId: string, evaluationId: string }, - searchParams?: { [key: string]: string | string[] | undefined }, -}) { +export default async function EvaluationPage({params}: {params: { projectId: string, evaluationId: string }}) { const session = await getServerSession(authOptions); if (!session) { @@ -29,8 +18,6 @@ export default async function EvaluationPage({ const user = session.user; - const compareEvalId = searchParams?.[URL_QUERY_PARAMS.COMPARE_EVAL_ID] as string | undefined; - const getEvaluationInfo = fetcherJSON(`/projects/${params.projectId}/evaluations/${params.evaluationId}`, { method: 'GET', headers: { @@ -38,36 +25,19 @@ export default async function EvaluationPage({ } }); - function getComparedEvaluationInfo() { - return new Promise((resolve, reject) => { - if (compareEvalId) { - fetcherJSON(`/projects/${params.projectId}/evaluations/${compareEvalId}`, { - method: 'GET', - headers: { - Authorization: `Bearer ${user.apiKey}` - } - }) - .then(resolve) - .catch(reject); - } else { - resolve(undefined); - } - }); - } - - const getEvaluations = fetcherJSON(`/projects/${params.projectId}/evaluation-infos?excludeId=${params.evaluationId}&onlyFinished=true`, { + // Expect backend to return only evaluations from the current group based on the current evaluation id + const getEvaluations = fetcherJSON(`/projects/${params.projectId}/evaluations?currentEvaluationId=${params.evaluationId}`, { method: 'GET', headers: { Authorization: `Bearer ${user.apiKey}` } }); - const [evaluationInfo, comparedEvaluationInfo, evaluations] = await Promise.all([getEvaluationInfo, getComparedEvaluationInfo(), getEvaluations]); + const [evaluationInfo, evaluations] = await Promise.all([getEvaluationInfo, getEvaluations]); return ( ); diff --git a/frontend/app/sign-in/page.tsx b/frontend/app/sign-in/page.tsx index b9e6486d..dde48aef 100644 --- a/frontend/app/sign-in/page.tsx +++ b/frontend/app/sign-in/page.tsx @@ -12,7 +12,7 @@ export default async function SignInPage({ searchParams?: { [key: string]: string | string[] | undefined } }) { const session = await getServerSession() - let callbackUrl = searchParams?.callbackUrl ?? '/projects'; + let callbackUrl = searchParams?.callbackUrl ?? 
'/on-sign-up'; if (Array.isArray(callbackUrl)) { callbackUrl = callbackUrl[0] } diff --git a/frontend/components/dataset/add-datapoints-dialog.tsx b/frontend/components/dataset/add-datapoints-dialog.tsx index 6643d79e..32a3c24e 100644 --- a/frontend/components/dataset/add-datapoints-dialog.tsx +++ b/frontend/components/dataset/add-datapoints-dialog.tsx @@ -25,7 +25,7 @@ export default function AddDatapointsDialog({ datasetId, onUpdate }: AddDatapoin diff --git a/frontend/components/dataset/dataset-panel.tsx b/frontend/components/dataset/dataset-panel.tsx index d5b25260..896ec4b9 100644 --- a/frontend/components/dataset/dataset-panel.tsx +++ b/frontend/components/dataset/dataset-panel.tsx @@ -1,14 +1,13 @@ import { useProjectContext } from "@/contexts/project-context"; import { ChevronsRight } from "lucide-react"; import { Skeleton } from "../ui/skeleton"; -import Ide from "../ui/ide"; import { Label } from "../ui/label"; import { ScrollArea } from "../ui/scroll-area"; import { Button } from "../ui/button"; import Mono from "../ui/mono"; import { Datapoint } from "@/lib/dataset/types"; import Formatter from "../ui/formatter"; -import { use, useEffect, useState } from "react"; +import { useEffect, useState } from "react"; import { useRouter } from "next/navigation"; interface DatasetPanelProps { @@ -17,7 +16,9 @@ interface DatasetPanelProps { onClose: () => void; } -const deepEqual = (x: Object, y: Object): boolean => { +const deepEqual = (x: Object | null, y: Object | null): boolean => { + if (x == null && y == null) return true; + if (x == null || y == null) return false; const ok = Object.keys, tx = typeof x, ty = typeof y; return x && y && tx === 'object' && tx === ty ? ( ok(x).length === ok(y).length && @@ -30,18 +31,21 @@ export default function DatasetPanel({ datasetId, datapoint, onClose }: DatasetP // datapoint is DatasetDatapoint, i.e. result of one execution on a data point const [newData, setNewData] = useState>(datapoint.data); const [newTarget, setNewTarget] = useState>(datapoint.target); + const [newMetadata, setNewMetadata] = useState | null>(datapoint.metadata); const [isValidJsonData, setIsValidJsonData] = useState(true); const [isValidJsonTarget, setIsValidJsonTarget] = useState(true); + const [isValidJsonMetadata, setIsValidJsonMetadata] = useState(true); const router = useRouter(); useEffect(() => { setNewData(datapoint.data); setNewTarget(datapoint.target); + setNewMetadata(datapoint.metadata); }, [datapoint]) return (
-      [removed JSX omitted: the previous data and target editors]
+      [added JSX omitted: Formatter editors for data, target, and the new metadata
+       field, each wired to its isValidJson* flag and its setNew* state setter]
diff --git a/frontend/components/datasets/datasets.tsx b/frontend/components/datasets/datasets.tsx index d4ca4355..7ae906e1 100644 --- a/frontend/components/datasets/datasets.tsx +++ b/frontend/components/datasets/datasets.tsx @@ -23,6 +23,7 @@ import { DataTable } from '../ui/datatable'; import Header from '../ui/header'; import { TableCell, TableRow } from '../ui/table'; import { PaginatedResponse } from '@/lib/types'; +import Mono from '../ui/mono'; export default function Datasets() { @@ -50,6 +51,13 @@ export default function Datasets() { } const columns: ColumnDef[] = [ + { + cell: ({ row }) => { + return {row.original.id} + }, + size: 300, + header: "ID", + }, { accessorKey: "name", header: "name", diff --git a/frontend/components/evaluation/chart.tsx b/frontend/components/evaluation/chart.tsx new file mode 100644 index 00000000..dadf70a1 --- /dev/null +++ b/frontend/components/evaluation/chart.tsx @@ -0,0 +1,127 @@ +import { + ChartConfig, + ChartContainer, + ChartTooltip, + ChartTooltipContent, +} from "@/components/ui/chart" +import { useProjectContext } from "@/contexts/project-context" +import { cn, swrFetcher } from "@/lib/utils" +import { Bar, BarChart, CartesianGrid, XAxis } from "recharts" +import useSWR from "swr" +import { Skeleton } from "../ui/skeleton" +import React, { useEffect, useState } from "react" +import { usePathname, useSearchParams } from "next/navigation" + +const URL_QUERY_PARAMS = { + COMPARE_EVAL_ID: 'comparedEvaluationId', +} + +const getEvaluationIdFromPathname = (pathName: string) => { + if (pathName.endsWith('/')) { + pathName = pathName.slice(0, -1); + } + const pathParts = pathName.split('/'); + return pathParts[pathParts.length - 1]; +} + +type BucketRow = { + lowerBound: number; + upperBound: number; + heights: number[]; +} + +const getTransformedData = (data: []) => { + return data.map((row: BucketRow, index: number) => { + return { + index, + height: row.heights[0], + comparedHeight: row.heights.length > 1 ? row.heights[1] : undefined, + } + }) +} + +function renderTick(tickProps: any) { + const { x, y, payload } = tickProps; + const { value, offset } = payload; + // console.log(`x: ${x}, y: ${y}`) + // console.log(`Value: ${value}, ${typeof value}, offset: ${offset}`) + + // Value is equal to index starting from 0 + // So we calculate percentage ticks/marks by multiplying value by 10 + return ( + + + + {value * 10}% + + {value === 9 &&( + <> + + + 100% + + + )} + + ); +} + +interface ChartProps { + scoreName: string + className?: string +} + +export default function Chart({ scoreName, className }: ChartProps) { + const pathName = usePathname(); + const searchParams = new URLSearchParams(useSearchParams().toString()); + const { projectId } = useProjectContext(); + + const [evaluationId, setEvaluationId] = useState(getEvaluationIdFromPathname(pathName)); + const [comparedEvaluationId, setComparedEvaluationId] = useState(searchParams.get(URL_QUERY_PARAMS.COMPARE_EVAL_ID)); + + const { data, isLoading, error } = useSWR(`/api/projects/${projectId}/evaluation-score-distribution?evaluationIds=${evaluationId + (comparedEvaluationId ? `,${comparedEvaluationId}` : '')}&scoreName=${scoreName}`, swrFetcher); + + useEffect(() => { + setEvaluationId(getEvaluationIdFromPathname(pathName)); + }, [pathName]); + + useEffect(() => { + setComparedEvaluationId(searchParams.get(URL_QUERY_PARAMS.COMPARE_EVAL_ID)); + }, [searchParams]); + + const chartConfig = { + ["index"]: { + color: "hsl(var(--chart-1))", + }, + } satisfies ChartConfig + + return ( +
+ {/*
+ Score distribution: {scoreName} +
*/} +
+ + {(isLoading || !data || error) ? : ( + + + + } + /> + {comparedEvaluationId && } + + + )} + +
+
+ ) +} diff --git a/frontend/components/evaluation/evaluation-datapoint-error.tsx b/frontend/components/evaluation/evaluation-datapoint-error.tsx deleted file mode 100644 index 22609697..00000000 --- a/frontend/components/evaluation/evaluation-datapoint-error.tsx +++ /dev/null @@ -1,109 +0,0 @@ -import { EvaluationDatapoint, EvaluationDatapointError } from '@/lib/evaluation/types'; -import { Label } from '../ui/label'; -import { Button } from '../ui/button'; -import { useRouter } from 'next/navigation'; -import { useProjectContext } from '@/contexts/project-context'; - -// Checks if an object is empty -function isEmpty(obj: any) { - for (const prop in obj) { - if (Object.hasOwn(obj, prop)) { - return false; - } - } - - return true; -} - -interface EvaluationDatapointErrorProps { - datapoint: EvaluationDatapoint; -} - -/** - * Evaluation datapoint error component - * - * If it's related to wrong input-output matching or missing env vars, it will show the error. - * Otherwise, it will show nothing - */ -export default function EvaluationDatapointErr({ datapoint }: EvaluationDatapointErrorProps) { - const router = useRouter(); - const { projectId } = useProjectContext(); - const error = datapoint.error as EvaluationDatapointError; - - return ( - ( - (error.errorType === "GraphError" && (error.error.startsWith("Graph input is missing:"))) - || error.errorType === "InvalidSchemasError" - || error.errorType === "MissingEnvVarsError") ? - - ( -
- - -
- - {(error.errorType === "GraphError") && (error.error.startsWith("Graph input is missing:")) && (!!error.executorInputNodeNames) && ( -
-
Executor inputs do not match
-
Executor inputs
-
    - {error.executorInputNodeNames.map((name) => ( -
  • {name}
  • - ))} -
-
Incoming inputs
- {(isEmpty(datapoint.data)) ?
No data keys
: (
    - {Object.keys(datapoint.data).map((name) => ( -
  • {name} (data)
  • - ))} -
)} -
- )} - - {error.errorType === "GraphError" && (error.error.startsWith("Graph input is missing:")) && (!!error.evaluatorInputNodeNames) && ( -
-
Evaluator inputs do not match
-
Evaluator inputs
-
    - {error.evaluatorInputNodeNames.map((name) => ( -
  • {name}
  • - ))} -
-
Incoming inputs
-
    - {Object.keys(datapoint.target).map((name) => ( -
  • {name} (target)
  • - ))} - {!datapoint.executorTrace ? ( - Object.keys(datapoint.data).map((name) => ( -
  • {name} (data)
  • - )) - ) : ( - Object.keys(datapoint.executorOutput!).map((name) => ( -
  • {name} (executor output)
  • - )) - )} -
-
- )} - - {error.errorType === "InvalidSchemasError" && ( -
-
BAML schemas are invalid
-
{error.error}
-
- )} - - {error.errorType === "MissingEnvVarsError" && ( -
-
{error.error}
- -
- )} -
-
- ) : ( - <> - ) - ) -} \ No newline at end of file diff --git a/frontend/components/evaluation/evaluation-panel.tsx b/frontend/components/evaluation/evaluation-panel.tsx deleted file mode 100644 index d4ab2622..00000000 --- a/frontend/components/evaluation/evaluation-panel.tsx +++ /dev/null @@ -1,120 +0,0 @@ -import { useProjectContext } from "@/contexts/project-context"; -import { EvaluationDatapoint, EvaluationDatapointPreview, EvaluationDatapointPreviewWithCompared } from "@/lib/evaluation/types"; -import { ChevronsRight } from "lucide-react"; -import { Skeleton } from "../ui/skeleton"; -import { Label } from "../ui/label"; -import { ScrollArea } from "../ui/scroll-area"; -import { Button } from "../ui/button"; -import Mono from "../ui/mono"; -import useSWR from "swr"; -import { swrFetcher } from "@/lib/utils"; -import EvaluationDatapointErr from "./evaluation-datapoint-error"; -import React, { useEffect } from "react"; -import { RunTrace } from "@/lib/traces/types"; -import { Separator } from "../ui/separator"; -import Formatter from "../ui/formatter"; - -interface EvaluationPanelProps { - datapointPreview: EvaluationDatapointPreviewWithCompared; - onClose: () => void; -} - -function EvalDatapointView({ datapoint, executorTrace, evaluatorTrace }: { datapoint: EvaluationDatapoint, executorTrace: RunTrace | null, evaluatorTrace: RunTrace | null }) { - - return ( - -
-
- - - - - - - - - { - !!datapoint.error && ( - - ) - } -
-
-
- ) -} - -export default function EvaluationPanel({ datapointPreview, onClose }: EvaluationPanelProps) { - const { projectId } = useProjectContext(); - // datapoint is EvaluationDatapoint, i.e. result of one execution on a data point - const { data: datapoint }: { data: EvaluationDatapoint } = useSWR(`/api/projects/${projectId}/evaluations/${datapointPreview.evaluationId}/datapoints/${datapointPreview.id}`, swrFetcher); - const { data: comparedDatapoint }: { data: EvaluationDatapoint } = useSWR(`/api/projects/${projectId}/evaluations/${datapointPreview.comparedEvaluationId}/datapoints/${datapointPreview.comparedId}`, swrFetcher); - - const [executorTrace, setExecutorTrace] = React.useState(null); - const [evaluatorTrace, setEvaluatorTrace] = React.useState(null); - const [comparedExecutorTrace, setComparedExecutorTrace] = React.useState(null); - const [comparedEvaluatorTrace, setComparedEvaluatorTrace] = React.useState(null); - - // useEffect(() => { - // if (datapoint) { - // fetch(`/api/projects/${projectId}/traces/trace/${datapoint.executorTrace?.runId}`).then((res) => res.json()).then((data) => { - // setExecutorTrace(data); - // }); - // fetch(`/api/projects/${projectId}/traces/trace/${datapoint.evaluatorTrace?.runId}`).then((res) => res.json()).then((data) => { - // setEvaluatorTrace(data); - // }); - // } - // }, [datapoint]); - - // useEffect(() => { - // if (comparedDatapoint) { - // fetch(`/api/projects/${projectId}/traces/trace/${comparedDatapoint.executorTrace?.runId}`).then((res) => res.json()).then((data) => { - // setComparedExecutorTrace(data); - // }); - // fetch(`/api/projects/${projectId}/traces/trace/${comparedDatapoint.evaluatorTrace?.runId}`).then((res) => res.json()).then((data) => { - // setComparedEvaluatorTrace(data); - // }); - // } - // }, [comparedDatapoint]); - - return (
-
- -
- Run -
- - {datapointPreview.id} - -
-
- {datapoint && - - } - - { - !datapoint && ( -
- - - -
- ) - } - - {comparedDatapoint && } - - {comparedDatapoint && ( - - )} - -
-
- ) -} \ No newline at end of file diff --git a/frontend/components/evaluation/evaluation-stats.tsx b/frontend/components/evaluation/evaluation-stats.tsx deleted file mode 100644 index 44de4b24..00000000 --- a/frontend/components/evaluation/evaluation-stats.tsx +++ /dev/null @@ -1,77 +0,0 @@ -import { useProjectContext } from "@/contexts/project-context"; -import { EvaluationStats as EvaluationStatsType } from "@/lib/evaluation/types"; -import { cn, swrFetcher } from "@/lib/utils"; -import useSWR from "swr"; -import { Button } from "../ui/button"; -import { ArrowRight, RefreshCw } from "lucide-react"; - -const Diff = ({ originalVal, comparedVal, decimalPlaces }: { originalVal: number, comparedVal: number, decimalPlaces: number }) => { - const diff = originalVal - comparedVal; - return ( -
= 0 ? "text-green-500" : "text-red-500"))}> - {diff >= 0 ? "+" : ""}{decimalPlaces !== 0 ? diff.toFixed(decimalPlaces) : diff} -
- ) -} - -interface EvaluationStatsProps { - evaluationId: string; - comparedEvaluationId?: string; -} - -export default function EvaluationStats({ - evaluationId, - comparedEvaluationId, -}: EvaluationStatsProps) { - const { projectId } = useProjectContext(); - - const { data, error, mutate, isLoading } = useSWR(`/api/projects/${projectId}/evaluations/${evaluationId}/stats`, swrFetcher); - const { data: comparedData, error: comparedError, mutate: comparedMutate, isLoading: comparedIsLoading } = useSWR(comparedEvaluationId ? `/api/projects/${projectId}/evaluations/${comparedEvaluationId}/stats` : null, swrFetcher); - const evaluationStats = data as EvaluationStatsType; - const comparedStats = comparedData as EvaluationStatsType; - - if (error || comparedError) return
Error fetching stats. Please try again
; - - return ( -
-
-

Mean scores

- -
-
- {Object.entries(evaluationStats ?? {}).map(([scoreName, score], index) => ( -
-
-
- {scoreName} -
- {comparedEvaluationId && evaluationStats && comparedStats && comparedStats[scoreName] !== undefined && - - - } -
-
- {comparedStats && ( - <> -
- {comparedStats[scoreName]?.toFixed(2)} -
- - - )} -
- {evaluationStats ? score.toFixed(2) : '-'} -
-
-
- ))} -
-
- ) -} diff --git a/frontend/components/evaluation/evaluation.tsx b/frontend/components/evaluation/evaluation.tsx index 5c2d353f..830ca70a 100644 --- a/frontend/components/evaluation/evaluation.tsx +++ b/frontend/components/evaluation/evaluation.tsx @@ -1,13 +1,9 @@ 'use client'; -import { Evaluation as EvaluationType, EvaluationDatapointPreview, EvaluationDatapointPreviewWithCompared, EvaluationResultsInfo } from "@/lib/evaluation/types"; +import { Evaluation as EvaluationType, EvaluationDatapointPreviewWithCompared, EvaluationResultsInfo } from "@/lib/evaluation/types"; import { ColumnDef } from "@tanstack/react-table"; -import { useEffect, useMemo, useState } from "react"; +import { useEffect, useState } from "react"; import { DataTable } from "../ui/datatable"; -import { useUserContext } from "@/contexts/user-context"; -import { createClient } from "@supabase/supabase-js"; -import { SUPABASE_ANON_KEY, SUPABASE_URL, USE_REALTIME } from "@/lib/const"; -import EvaluationStats from "./evaluation-stats"; import { useProjectContext } from "@/contexts/project-context"; import Header from "../ui/header"; import { usePathname, useRouter, useSearchParams } from "next/navigation"; @@ -15,10 +11,10 @@ import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from ". import { mergeOriginalWithComparedDatapoints } from "@/lib/evaluation/utils"; import { ArrowRight } from "lucide-react"; import { Button } from "../ui/button"; -import { cn } from "@/lib/utils"; -import ClientTimestampFormatter from "../client-timestamp-formatter"; import { Resizable } from "re-resizable"; import TraceView from "../traces/trace-view"; +import Chart from "./chart"; +import ScoreCard from "./score-card"; const URL_QUERY_PARAMS = { COMPARE_EVAL_ID: 'comparedEvaluationId', @@ -26,13 +22,11 @@ const URL_QUERY_PARAMS = { interface EvaluationProps { evaluationInfo: EvaluationResultsInfo; - comparedEvaluationInfo?: EvaluationResultsInfo; evaluations: EvaluationType[]; } export default function Evaluation({ evaluationInfo, - comparedEvaluationInfo, evaluations, }: EvaluationProps) { const router = useRouter(); @@ -41,252 +35,230 @@ export default function Evaluation({ const { projectId } = useProjectContext(); - const [evaluation, setEvaluation] = useState(evaluationInfo.evaluation); - const [comparedEvaluation, setComparedEvaluation] = useState(comparedEvaluationInfo?.evaluation); + const evaluation = evaluationInfo.evaluation; + const [comparedEvaluation, setComparedEvaluation] = useState(null); + + useEffect(() => { + const comparedEvaluationId = searchParams.get(URL_QUERY_PARAMS.COMPARE_EVAL_ID); + handleComparedEvaluationChange(comparedEvaluationId ?? null); + }, []) let defaultResults = evaluationInfo.results as EvaluationDatapointPreviewWithCompared[]; + const [results, setResults] = useState(defaultResults); let scoreColumns = new Set(); - for (const row of evaluationInfo.results) { + for (const row of defaultResults) { for (const key of Object.keys(row.scores ?? {})) { scoreColumns.add(key); } } - if (comparedEvaluationInfo) { - defaultResults = mergeOriginalWithComparedDatapoints(evaluationInfo.results, comparedEvaluationInfo.results); - for (const row of comparedEvaluationInfo?.results ?? []) { - for (const key of Object.keys(row.scores ?? 
{})) { - scoreColumns.add(key); - } - } - } - const [results, setResults] = useState(defaultResults); + // This is ok to search for selected datapoint among defaultResults before we have pagination const [selectedDatapoint, setSelectedDatapoint] = useState(defaultResults.find((result) => result.id === searchParams.get('datapointId')) ?? null); + + // Selected score name must usually not be undefined, as we expect to have at least one score, it's done just to not throw error if there are no scores + const [selectedScoreName, setSelectedScoreName] = useState(scoreColumns.size > 0 ? Array.from(scoreColumns)[0] : undefined); - let columns: ColumnDef[] = [] - - - if (comparedEvaluation) { - columns = [ - { - accessorKey: "status", - header: "Status", - }, - { - accessorFn: (row) => JSON.stringify(row.data), - header: "Data", - }, - { - accessorFn: (row) => row.target ? JSON.stringify(row.target) : "-", - header: "Target", - }, - ]; - columns = columns.concat(Array.from(scoreColumns).map((scoreColumn) => ({ - header: scoreColumn, - cell: (row) => { - return
-
{row.row.original.comparedScores?.[scoreColumn] ?? "-"}
- -
{row.row.original.scores?.[scoreColumn] ?? "-"}
-
- }, - }))); - - } else { - columns = [ - { - accessorKey: "status", - header: "Status", - }, - { - accessorFn: (row) => JSON.stringify(row.data), - header: "Data", - }, - { - accessorFn: (row) => row.target ? JSON.stringify(row.target) : "-", - header: "Target", - }, - { - accessorFn: (row) => row.executorOutput ? JSON.stringify(row.executorOutput) : "-", - header: "Output", - }, - ]; - columns = columns.concat(Array.from(scoreColumns).map((scoreColumn) => ({ + // Columns used when there is no compared evaluation + let defaultColumns: ColumnDef[] = [ + { + accessorFn: (row) => JSON.stringify(row.data), + header: "Data", + }, + { + accessorFn: (row) => row.target ? JSON.stringify(row.target) : "-", + header: "Target", + }, + { + accessorFn: (row) => row.executorOutput ? JSON.stringify(row.executorOutput) : "-", + header: "Output", + }, + ]; + defaultColumns = defaultColumns.concat(Array.from(scoreColumns).map((scoreColumn) => ({ header: scoreColumn, accessorFn: (row) => row.scores?.[scoreColumn] ?? "-", size: 150, }))); - } - const { supabaseAccessToken } = useUserContext() - const supabase = useMemo(() => { - return USE_REALTIME - ? createClient( - SUPABASE_URL, - SUPABASE_ANON_KEY, - { - global: { - headers: { - Authorization: `Bearer ${supabaseAccessToken}`, - }, - }, - } - ) - : null - }, []) - - supabase?.realtime.setAuth(supabaseAccessToken) - - useEffect(() => { - if (evaluation.status !== 'Finished') { - supabase - ?.channel('table-db-changes') - .on( - 'postgres_changes', - { - event: 'INSERT', - schema: 'public', - table: 'evaluation_results', - filter: `evaluation_id=eq.${evaluation.id}` - }, - (payload) => { - if (payload.eventType === 'INSERT') { - const camelCasePayload = Object.keys(payload.new).reduce((acc: Record, key) => { - const camelCaseKey = key.replace(/_([a-z])/g, (_match, letter) => letter.toUpperCase()); - acc[camelCaseKey] = payload.new[key]; - return acc; - }, {}); - - // mutate(key => key === `/api/projects/${projectId}/evaluations/${evaluation.id}/stats`); - setResults((prev) => [...prev, camelCasePayload as EvaluationDatapointPreview]); - } - } - ) - .on( - 'postgres_changes', - { - event: 'UPDATE', - schema: 'public', - table: 'evaluations', - filter: `id=eq.${evaluation.id}` - }, - (payload) => { - if (payload.eventType === 'UPDATE') { - setEvaluation((prev) => ({ ...prev, status: payload.new.status })); - } - } - ) - .subscribe() - } - - // remove all channels on unmount - return () => { - supabase?.removeAllChannels() - } - }, []) + const [columns, setColumns] = useState(defaultColumns); const handleRowClick = (row: EvaluationDatapointPreviewWithCompared) => { setSelectedDatapoint(row); searchParams.set('datapointId', row.id); + router.push(`${pathName}?${searchParams.toString()}`); } - const handleComparedEvaluationChange = (comparedEvaluationId: string) => { + const handleComparedEvaluationChange = (comparedEvaluationId: string | null) => { + if (comparedEvaluationId === undefined) { + console.warn('comparedEvaluationId is undefined'); + return; + } + + if (comparedEvaluationId === null) { + setComparedEvaluation(null); + setResults(evaluationInfo.results); + setColumns(defaultColumns); + searchParams.delete(URL_QUERY_PARAMS.COMPARE_EVAL_ID); + router.push(`${pathName}?${searchParams.toString()}`); + return; + } + fetch(`/api/projects/${projectId}/evaluations/${comparedEvaluationId}`) .then(res => res.json()) .then((comparedEvaluation) => { setComparedEvaluation(comparedEvaluation.evaluation); // evaluationInfo.results are always fixed, but the compared 
results (comparedEvaluation.results) change
setResults(mergeOriginalWithComparedDatapoints(evaluationInfo.results, comparedEvaluation.results)); + let columnsWithCompared: ColumnDef[] = [ + { + accessorFn: (row) => JSON.stringify(row.data), + header: "Data", + }, + { + accessorFn: (row) => row.target ? JSON.stringify(row.target) : "-", + header: "Target", + }, + ]; + columnsWithCompared = columnsWithCompared.concat(Array.from(scoreColumns).map((scoreColumn) => ({ + header: scoreColumn, + cell: (row) => { + return
+
{row.row.original.comparedScores?.[scoreColumn] ?? "-"}
+ +
{row.row.original.scores?.[scoreColumn] ?? "-"}
+
+ }, + }))); + setColumns(columnsWithCompared); }) - searchParams.set(URL_QUERY_PARAMS.COMPARE_EVAL_ID, comparedEvaluationId); router.push(`${pathName}?${searchParams.toString()}`); } + // It will reload the page + const handleEvaluationChange = (evaluationId: string) => { + // change last part of pathname + const currentPathName = pathName.endsWith('/') ? pathName.slice(0, -1) : pathName; + const pathParts = currentPathName.split('/'); + pathParts[pathParts.length - 1] = evaluationId; + router.push(`${pathParts.join('/')}?${searchParams.toString()}`); + } + return ( -
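// Aside on evaluation.tsx above: getEvaluationIdFromPathname (also present in
// chart.tsx and score-card.tsx) and handleEvaluationChange both treat the
// evaluation id as the last path segment. A sketch of that shared idea; the
// helper names are illustrative, not the project's exports:
const lastPathSegment = (pathName: string): string => {
  const trimmed = pathName.endsWith('/') ? pathName.slice(0, -1) : pathName;
  const parts = trimmed.split('/');
  return parts[parts.length - 1];
};
const withLastPathSegment = (pathName: string, segment: string): string => {
  const trimmed = pathName.endsWith('/') ? pathName.slice(0, -1) : pathName;
  const parts = trimmed.split('/');
  parts[parts.length - 1] = segment; // swap the trailing id, keep the rest
  return parts.join('/');
};
// lastPathSegment('/project/p1/evaluations/e1')           -> 'e1'
// withLastPathSegment('/project/p1/evaluations/e1', 'e2') -> '/project/p1/evaluations/e2'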
+
+
+
+ +
+
+
+ +
+
+ {!!comparedEvaluation && ( + + )} +
+
+ +
+
-
-
- -
-
-

- {evaluation.name} -

-
- {!!comparedEvaluation && ( - - )} -
-
-
-
- row.id} - focusedRowId={selectedDatapoint?.id} - paginated - onRowClick={(row) => handleRowClick(row.original)} - /> -
-
- {!selectedDatapoint && - - } + {selectedScoreName && ( +
+
+ +
+
+ { + + } +
+ )} +
+ row.id} + focusedRowId={selectedDatapoint?.id} + paginated + onRowClick={(row) => handleRowClick(row.original)} + />
- {selectedDatapoint &&
); -} +} \ No newline at end of file diff --git a/frontend/components/evaluation/score-card.tsx b/frontend/components/evaluation/score-card.tsx new file mode 100644 index 00000000..390534a1 --- /dev/null +++ b/frontend/components/evaluation/score-card.tsx @@ -0,0 +1,79 @@ +import { useProjectContext } from "@/contexts/project-context"; +import { swrFetcher } from "@/lib/utils"; +import { usePathname, useSearchParams } from "next/navigation"; +import { useEffect, useState } from "react"; +import useSWR from "swr"; +import { Skeleton } from "../ui/skeleton"; +import { ArrowRight } from "lucide-react"; + +const URL_QUERY_PARAMS = { + COMPARE_EVAL_ID: 'comparedEvaluationId', +} + +const getEvaluationIdFromPathname = (pathName: string) => { + if (pathName.endsWith('/')) { + pathName = pathName.slice(0, -1); + } + const pathParts = pathName.split('/'); + return pathParts[pathParts.length - 1]; +} + +interface ScoreCardProps { + scoreName: string; +} + +export default function ScoreCard({scoreName}: ScoreCardProps) { + const pathName = usePathname(); + const searchParams = new URLSearchParams(useSearchParams().toString()); + const { projectId } = useProjectContext(); + + const [evaluationId, setEvaluationId] = useState(getEvaluationIdFromPathname(pathName)); + const [comparedEvaluationId, setComparedEvaluationId] = useState(searchParams.get(URL_QUERY_PARAMS.COMPARE_EVAL_ID)); + + const { data, isLoading, error } = useSWR(`/api/projects/${projectId}/evaluation-score-stats?evaluationId=${evaluationId}&scoreName=${scoreName}`, swrFetcher); + const { data: comparedData, isLoading: isComparedLoading, error: isComparedError } = useSWR( + comparedEvaluationId + ? `/api/projects/${projectId}/evaluation-score-stats?evaluationId=${comparedEvaluationId}&scoreName=${scoreName}` + : null, + swrFetcher + ); + + useEffect(() => { + setEvaluationId(getEvaluationIdFromPathname(pathName)); + }, [pathName]); + + useEffect(() => { + setComparedEvaluationId(searchParams.get(URL_QUERY_PARAMS.COMPARE_EVAL_ID)); + }, [searchParams]); + + return ( +
+ {(isLoading || !data || error) ? : ( +
+

{scoreName}

+
+
Average
+
+ {!isComparedLoading && comparedData && !isComparedError && comparedData.averageValue != null && ( +
+
{comparedData.averageValue?.toFixed(2)}
+ +
+ )} +
{data.averageValue?.toFixed(2)}
+
+ {!isComparedLoading && comparedData && !isComparedError && comparedData.averageValue != null && ( +
= comparedData.averageValue ? 'text-green-400' : 'text-red-400')}`}> + {data.averageValue >= comparedData.averageValue ? 'â–²' : 'â–¼'} + {Math.abs(data.averageValue - comparedData.averageValue).toFixed(2)} + {comparedData.averageValue !== 0 && ( + ({((data.averageValue - comparedData.averageValue) / comparedData.averageValue * 100).toFixed(2)}%) + )} +
+ )} +
+
+ )} +
+ ) +} diff --git a/frontend/components/evaluations/evaluations.tsx b/frontend/components/evaluations/evaluations.tsx index 7fce1eee..a70bed6c 100644 --- a/frontend/components/evaluations/evaluations.tsx +++ b/frontend/components/evaluations/evaluations.tsx @@ -3,13 +3,13 @@ import { useProjectContext } from "@/contexts/project-context"; import { Evaluation } from "@/lib/evaluation/types"; import { ColumnDef } from "@tanstack/react-table"; -import CreateEvaluationDialog from "./create-evaluation-dialog"; import ClientTimestampFormatter from "../client-timestamp-formatter"; import { useRouter } from "next/navigation"; import { DataTable } from "../ui/datatable"; import Mono from "../ui/mono"; import Header from "../ui/header"; import EvalsPagePlaceholder from "./page-placeholder"; +import { useUserContext } from "@/contexts/user-context"; export interface EvaluationProps { evaluations: Evaluation[]; @@ -19,10 +19,12 @@ export default function Evaluations({ evaluations }: EvaluationProps) { const { projectId } = useProjectContext(); const router = useRouter(); + const { email } = useUserContext(); + const columns: ColumnDef[] = [ { - accessorKey: "status", - header: "Status", + accessorKey: "groupId", + header: "Group id", size: 120 }, { diff --git a/frontend/components/evaluations/page-placeholder.tsx b/frontend/components/evaluations/page-placeholder.tsx index b35e8ed3..36f5a775 100644 --- a/frontend/components/evaluations/page-placeholder.tsx +++ b/frontend/components/evaluations/page-placeholder.tsx @@ -1,11 +1,32 @@ -import { Card } from "../ui/card"; +import { useState } from "react"; import Code from "../ui/code"; - +import { Tabs, TabsContent, TabsList, TabsTrigger } from "../ui/tabs"; +import { PYTHON_INSTALL, TYPESCRIPT_INSTALL } from "@/lib/const"; +import { useProjectContext } from "@/contexts/project-context"; export default function EvalsPagePlaceholder() { - const tsString = `import { evaluate } from '@lmnr-ai/lmnr'; + const { projectId } = useProjectContext(); + const [tabValue, setTabValue] = useState('python'); + + const pythonEval = `from lmnr import evaluate + +evaluate( + data=[ + { + "data": {"country": "Canada", "capital": "Ottawa"}, + "target": {"capital": "Ottawa"} + } + ], + executor=lambda data: data["capital"], + evaluators={ + "is_correct": lambda output, target: int(output == target["capital"]) + }, + group_id="my_first_feature", + project_api_key='' +)` + const tsEval = `import { evaluate } from '@lmnr-ai/lmnr'; -evaluate( 'my-evaluation', { +evaluate({ data: [ { data: { country: 'Canada', capital: 'Ottawa' }, @@ -16,24 +37,73 @@ evaluate( 'my-evaluation', { evaluators: [ (output, target) => output === target.capital ], + groupId: 'my_first_feature', + config: { + projectApiKey: '' + } }) ` - return (
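// Aside on score-card.tsx above: the comparison row shows an absolute score
// delta plus a relative change, and skips the percentage when the compared
// average is 0 to avoid dividing by zero. A sketch of that formatting; the
// helper is illustrative, not the component's actual code:
const formatScoreDelta = (current: number, compared: number): string => {
  const arrow = current >= compared ? '▲' : '▼';
  const delta = Math.abs(current - compared).toFixed(2);
  const relative = compared !== 0
    ? ` (${(((current - compared) / compared) * 100).toFixed(2)}%)`
    : ''; // relative change is undefined against a zero baseline
  return `${arrow}${delta}${relative}`;
};
// formatScoreDelta(0.9, 0.75) -> '▲0.15 (20.00%)'
// formatScoreDelta(0.5, 0)    -> '▲0.50'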
-
-

+
+

Evaluations

+

You don{"'"}t have any evaluations in this project yet. To run an evaluation, you can start by following the example below. - Read the docs. -

-
- -
+

+

Install Laminar

+ + + Python + Typescript + +
+ + + + + + +
+
+

Generate API key

+

+ Go to + settings page + to generate an API key and use it in your code. +

+

Run your first evaluation

+ + + Python + Typescript + +
+ + + + + + +
+
+

+ Read the docs + to learn more. +

+

Run your app

+

+ Run your Python or Typescript app. Refresh the page to see evaluations. +

+

Cannot run evaluations?

+

+ Message us + and we{"'"}ll be happy to help. +

); -} \ No newline at end of file +} diff --git a/frontend/components/event/event-view.tsx b/frontend/components/event/event-view.tsx index 53366d7d..39b3525b 100644 --- a/frontend/components/event/event-view.tsx +++ b/frontend/components/event/event-view.tsx @@ -9,6 +9,8 @@ import { useProjectContext } from "@/contexts/project-context"; import useSWR from "swr"; import { convertToLocalTimeWithMillis, swrFetcher } from "@/lib/utils"; import { useRouter } from "next/navigation"; +import { Span } from "@/lib/traces/types"; +import { trace } from "console"; interface EventViewProps { onClose: () => void; @@ -20,10 +22,11 @@ export default function EventView({ event, }: EventViewProps) { const { projectId } = useProjectContext(); - const { data: traceId, isLoading } = useSWR( - `/api/projects/${projectId}/trace-id-for-span/${event.spanId}`, + const { data: span, isLoading } = useSWR( + `/api/projects/${projectId}/spans/${event.spanId}`, swrFetcher - ) as { data: string, isLoading: boolean }; + ); + const traceId = span?.traceId; const router = useRouter(); return ( @@ -58,7 +61,7 @@ export default function EventView({ const timestamp = new Date(event.timestamp); const startTime = new Date(timestamp.getTime() - 60_000).toISOString(); const endTime = new Date(timestamp.getTime() + 60_000).toISOString(); - router.push(`/project/${projectId}/traces?selectedId=${traceId}&startDate=${startTime}&endDate=${endTime}`); + router.push(`/project/${projectId}/traces?traceId=${traceId}&startDate=${startTime}&endDate=${endTime}&spanId=${event.spanId}`); }} disabled={!traceId || isLoading} diff --git a/frontend/components/onboarding/create-first-workspace-and-project.tsx b/frontend/components/onboarding/create-first-workspace-and-project.tsx new file mode 100644 index 00000000..f293aa1b --- /dev/null +++ b/frontend/components/onboarding/create-first-workspace-and-project.tsx @@ -0,0 +1,79 @@ +'use client'; + +import React, { useState } from 'react'; +import { Input } from '@/components/ui/input'; +import { Button } from '@/components/ui/button'; +import { useRouter } from 'next/navigation'; +import { Label } from '../ui/label'; +import { WorkspaceWithProjects } from '@/lib/workspaces/types'; +import { Loader } from 'lucide-react'; + +interface CreateFirstWorkspaceAndProjectProps { + name?: string | null; +} + +// TODO: Pass user's name, so that we can pre-fill the workspace name with "{user's name} workspace" +export default function CreateFirstWorkspaceAndProject({ name }: CreateFirstWorkspaceAndProjectProps) { + const [workspaceName, setWorkspaceName] = useState(name ? `${name}'s workspace` : ''); + const [projectName, setProjectName] = useState(''); + const [isLoading, setIsLoading] = useState(false); + + const router = useRouter(); + + const handleButtonClick = async () => { + setIsLoading(true); + + const res = await fetch('/api/workspaces', { + method: 'POST', + body: JSON.stringify({ + name: workspaceName, + projectName + }) + }); + + const newWorkspace = await res.json() as WorkspaceWithProjects; + + setIsLoading(false); + + // As we want user to start from traces page, redirect to it + // Expect the workspace to contain exactly one created project + router.push(`/project/${newWorkspace.projects[0].id}/traces`) + }; + + return ( +
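// Aside on the event-view.tsx hunk above: the trace button now reads the trace
// id from the span endpoint and opens the traces page scoped to a one-minute
// window on each side of the event. A sketch of the URL construction; the
// query parameter names follow the hunk, the helper itself is illustrative:
const traceUrlForEvent = (
  projectId: string,
  traceId: string,
  spanId: string,
  eventTimestamp: string,
): string => {
  const timestamp = new Date(eventTimestamp);
  const startTime = new Date(timestamp.getTime() - 60_000).toISOString(); // 1 min before
  const endTime = new Date(timestamp.getTime() + 60_000).toISOString();   // 1 min after
  return `/project/${projectId}/traces?traceId=${traceId}&startDate=${startTime}&endDate=${endTime}&spanId=${spanId}`;
};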
+
+

Create workspace and first project

+
+ + setWorkspaceName(e.target.value)} + /> +
+
+ + setProjectName(e.target.value)} + /> +
+
+ +
+
+
+ ); +} diff --git a/frontend/components/onboarding/onboarding-header.tsx b/frontend/components/onboarding/onboarding-header.tsx new file mode 100644 index 00000000..a9778694 --- /dev/null +++ b/frontend/components/onboarding/onboarding-header.tsx @@ -0,0 +1,20 @@ +import Link from 'next/link'; +import Image from 'next/image'; +import icon from '@/assets/logo/icon_light.svg'; +import { ChevronRight } from 'lucide-react'; + +interface OnboardingHeaderProps {} + +export default function OnboardingHeader({}: OnboardingHeaderProps) { + return ( +
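// Aside on create-first-workspace-and-project.tsx above: onboarding POSTs both
// names to /api/workspaces and expects the response (WorkspaceWithProjects) to
// contain exactly one project, whose traces page becomes the landing route. A
// condensed sketch of that flow; the local type stands in for the real import:
interface CreatedWorkspace {
  id: string;
  projects: { id: string }[];
}
const createWorkspaceAndRedirect = async (
  workspaceName: string,
  projectName: string,
  navigate: (href: string) => void, // e.g. router.push
): Promise<void> => {
  const res = await fetch('/api/workspaces', {
    method: 'POST',
    body: JSON.stringify({ name: workspaceName, projectName }),
  });
  const workspace = (await res.json()) as CreatedWorkspace;
  // Assumes the new workspace holds exactly one freshly created project.
  navigate(`/project/${workspace.projects[0].id}/traces`);
};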
+ + Laminar AI icon + + +
+ Create workspace and project +
+
+ ); +} diff --git a/frontend/components/pipeline/nodes/llm.tsx b/frontend/components/pipeline/nodes/llm.tsx index 7793d9c8..a840011d 100644 --- a/frontend/components/pipeline/nodes/llm.tsx +++ b/frontend/components/pipeline/nodes/llm.tsx @@ -42,9 +42,7 @@ export default function LLM({ className='w-full nowheel nodrag' value={data.prompt} defaultInputs={defaultInputs} - disabled={isPromptDisabled} onUpdate={(value, inputs, edgeIdsToRemove) => { - updateNodeData(data.id, { dynamicInputs: inputs, prompt: value @@ -101,7 +99,11 @@ export default function LLM({ } as LLMNode) setSelectedModelId(model.id); }} /> - + {isPromptDisabled && ( + + )} )} diff --git a/frontend/components/pipeline/pipeline-history.tsx b/frontend/components/pipeline/pipeline-history.tsx index 50141699..f6ac519d 100644 --- a/frontend/components/pipeline/pipeline-history.tsx +++ b/frontend/components/pipeline/pipeline-history.tsx @@ -7,7 +7,6 @@ import { useProjectContext } from "@/contexts/project-context"; import { swrFetcher } from "@/lib/utils"; import { PipelineVersion } from "@/lib/pipeline/types"; import { use, useEffect, useState } from "react"; -import SpanCards from "../traces/span-cards"; import { ChevronsRight } from "lucide-react"; import StatusLabel from "../ui/status-label"; import { ResizableHandle, ResizablePanel, ResizablePanelGroup } from "../ui/resizable"; diff --git a/frontend/components/profile/subscription-tier-card.tsx b/frontend/components/profile/subscription-tier-card.tsx deleted file mode 100644 index 1e3b87a5..00000000 --- a/frontend/components/profile/subscription-tier-card.tsx +++ /dev/null @@ -1,28 +0,0 @@ -import { Label } from "../ui/label"; -import { Button } from "../ui/button"; -import Link from "next/link"; -import { formatTimestamp } from "@/lib/utils"; - -export default function SubscriptionTierCard() { - return ( -
-
Subscription
- {/* */} - {/* */} - {/* {stats.planName.toLowerCase().trim() === 'free' && ( -
- - - -
- )} - {stats.planName.toLowerCase().trim() === 'pro' && ( -
- - - -
- )} */} -
- ) -} diff --git a/frontend/components/profile/usage.tsx b/frontend/components/profile/usage.tsx deleted file mode 100644 index d71519f7..00000000 --- a/frontend/components/profile/usage.tsx +++ /dev/null @@ -1,39 +0,0 @@ -'use client'; - -import { useUserContext } from "@/contexts/user-context"; -import { Label } from "../ui/label"; -import useSWR from "swr"; -import SubscriptionTierCard from "./subscription-tier-card"; -import { swrFetcher } from "@/lib/utils"; -import { Skeleton } from "../ui/skeleton"; -import UserUsage from "./user-usage"; -import WorkspaceCards from "./workspace-cards"; -import { Workspace } from "@/lib/workspaces/types"; - -export default function Usage() { - const user = useUserContext(); - - const { - data: ownedWorkspaces, - isLoading: isWorkspacesLoading, - error: workspacesError - } = useSWR('/api/workspaces?accessLevel=owner', swrFetcher); - - return ( - (isWorkspacesLoading || workspacesError || !ownedWorkspaces) - ? () - : ( -
-
-
- - -
- - {/* */} -
- -
- ) - ) -} diff --git a/frontend/components/profile/user-usage.tsx b/frontend/components/profile/user-usage.tsx deleted file mode 100644 index b6e25420..00000000 --- a/frontend/components/profile/user-usage.tsx +++ /dev/null @@ -1,71 +0,0 @@ -import { Progress } from "../ui/progress"; -import { swrFetcher } from "@/lib/utils"; -import useSWR from "swr"; - -// interface UserUsageProps { -// stats: UserStats; -// } - -export default function UserUsage({ }) { - // const { data: storageStats }: { data: StorageStats } = useSWR('/api/limits/user/storage', swrFetcher); - - // const storageMiB = storageStats?.storageMib ?? 0; - // const storageMiBLimit = stats?.storageLimit; - - // const spansThisMonth = stats.spansThisMonth; - // const spansLimit = stats.spansLimit; - // const eventsThisMonth = stats.eventsThisMonth; - // const eventsLimit = stats.eventsLimit; - - // return ( - //
- //
- //
Spans
- //
- //
{spansThisMonth} / {spansLimit}
- //
- // - //
- //
- //
Events
- //
- //
{eventsThisMonth} / {eventsLimit}
- //
- // - //
- //
- //
Storage
- //
- //
{storageMiB.toFixed(2)} MB / {storageMiBLimit} MB
- //
- // - //
- // {(stats.spansOverLimit > 0) && ( - //
- //
Additional spans usage
- //
- //
- // {stats.spansOverLimit} @ ${stats.spansOverLimitCost} - //
- //
- //
- // )} - // {(stats.eventsOverLimit > 0) && ( - //
- //
Additional events usage
- //
- //
- // {stats.eventsOverLimit} @ ${stats.eventsOverLimitCost} - //
- //
- //
- // )} - //
- // ) -} diff --git a/frontend/components/profile/workspace-cards.tsx b/frontend/components/profile/workspace-cards.tsx deleted file mode 100644 index b6c0ad7a..00000000 --- a/frontend/components/profile/workspace-cards.tsx +++ /dev/null @@ -1,15 +0,0 @@ -import { Workspace } from "@/lib/workspaces/types"; - -interface WorkspaceCardsProps { - workspaces: Workspace[]; -} - -export default function WorkspaceCards({ workspaces, }: WorkspaceCardsProps) { - return ( -
-
Workspaces Usage
-
-
-
- ) -} \ No newline at end of file diff --git a/frontend/components/project/project-navbar-collapsed.tsx b/frontend/components/project/project-navbar-collapsed.tsx index a133739c..e7e75b07 100644 --- a/frontend/components/project/project-navbar-collapsed.tsx +++ b/frontend/components/project/project-navbar-collapsed.tsx @@ -59,12 +59,6 @@ export default function ProjectNavbarCollapsed({ projectId }: ProjectNavBarProps icon: Database, current: false }, - { - name: 'evaluations', - href: `/project/${projectId}/evaluations`, - icon: Gauge, - current: false - }, // { // name: 'env variables', // href: `/project/${projectId}/env`, diff --git a/frontend/components/projects/workspace-create-dialog.tsx b/frontend/components/projects/workspace-create-dialog.tsx index f519fded..d86194df 100644 --- a/frontend/components/projects/workspace-create-dialog.tsx +++ b/frontend/components/projects/workspace-create-dialog.tsx @@ -1,4 +1,4 @@ -import { Workspace } from "@/lib/workspaces/types" +import { WorkspaceWithProjects } from "@/lib/workspaces/types" import { useRouter } from "next/navigation" import { useState } from "react" import { @@ -33,7 +33,7 @@ export default function WorkspaceCreateDialog({ onWorkspaceCreate }: WorkspaceCr }) }); - const newWorkspace = await res.json() as Workspace; + const newWorkspace = await res.json() as WorkspaceWithProjects; onWorkspaceCreate?.(); router.push(`/workspace/${newWorkspace.id}`); diff --git a/frontend/components/traces/add-label-popover.tsx b/frontend/components/traces/add-label-popover.tsx index 03155a77..854c1684 100644 --- a/frontend/components/traces/add-label-popover.tsx +++ b/frontend/components/traces/add-label-popover.tsx @@ -2,13 +2,14 @@ import { LabelClass, LabelType, SpanLabel } from "@/lib/traces/types"; import { cn, swrFetcher } from "@/lib/utils"; import { useState } from "react"; import useSWR from "swr"; -import { Plus, X } from "lucide-react"; +import { Plus, Tag, X } from "lucide-react"; import { Button } from "../ui/button"; import { Popover, PopoverContent, PopoverTrigger } from "../ui/popover"; import { useProjectContext } from "@/contexts/project-context"; import { Table, TableBody, TableCell, TableRow } from "../ui/table"; import { AddLabel } from "./add-label"; import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from "../ui/select"; +import { useUserContext } from "@/contexts/user-context"; interface AddLabelPopoverProps { spanId: string; @@ -24,8 +25,9 @@ export function AddLabelPopover({ const { data: labels, mutate: mutateLabels } = useSWR(`/api/projects/${projectId}/spans/${spanId}/labels`, swrFetcher); const [mode, setMode] = useState<'add' | 'list'>('list'); + const { email } = useUserContext(); + const addLabel = async (value: string, labelClass: LabelClass) => { - console.log(value, labelClass.valueMap) const response = await fetch(`/api/projects/${projectId}/spans/${spanId}/labels`, { method: 'POST', headers: { @@ -67,10 +69,13 @@ export function AddLabelPopover({ return labelClass.valueMap[index] } + const findLabel = (labelClassId: string): SpanLabel | undefined => + labels?.find(label => label.classId === labelClassId && label.userEmail === email) + return ( - +
@@ -93,7 +98,7 @@ export function AddLabelPopover({ {labelClasses?.map(labelClass => -
label.classId === labelClass.id) ? 'text-white' : '')}> +

{labelClass.name}

@@ -102,15 +107,14 @@ export function AddLabelPopover({ {labelClass.labelType === LabelType.BOOLEAN && label.classId === labelClass.id)?.value as number, labelClass) || undefined} + value={indexToValue(findLabel(labelClass.id)?.value as number, labelClass) || undefined} onChange={value => { - console.log(value) addLabel(value, labelClass) }} /> } {labelClass.labelType === LabelType.CATEGORICAL && label.classId === labelClass.id)?.value as number, labelClass) || undefined} + value={indexToValue(findLabel(labelClass.id)?.value as number, labelClass) || undefined} values={labelClass.valueMap} onChange={(value) => { addLabel(value, labelClass) @@ -125,7 +129,7 @@ export function AddLabelPopover({ variant="ghost" size="icon" onClick={() => { - removeLabel(labels?.find(label => label.classId === labelClass.id)?.id as string) + removeLabel(findLabel(labelClass.id)?.id as string) }}> @@ -154,7 +158,6 @@ export function AddLabelPopover({ function LabelBooleanInput({ value, onChange }: { value: string | undefined, onChange: (value: string) => void }) { - console.log(value) return (
diff --git a/frontend/components/traces/add-label.tsx b/frontend/components/traces/add-label.tsx index 06af91c5..c4d1f086 100644 --- a/frontend/components/traces/add-label.tsx +++ b/frontend/components/traces/add-label.tsx @@ -10,6 +10,7 @@ import { Button } from "../ui/button"; import { Popover, PopoverContent, PopoverTrigger } from "../ui/popover"; import { useProjectContext } from "@/contexts/project-context"; import { Table, TableBody, TableCell, TableRow } from "../ui/table"; +import DefaultTextarea from "../ui/default-textarea"; interface AddLabelProps { spanId: string; @@ -28,6 +29,7 @@ export function AddLabel({ const [value, setValue] = useState(''); const [isSaving, setIsSaving] = useState(false); const [valueMap, setValueMap] = useState(["", ""]); + const [description, setDescription] = useState(null); const saveLabel = async () => { setIsSaving(true); @@ -39,7 +41,8 @@ export function AddLabel({ body: JSON.stringify({ name: typeName, labelType: selectedType, - valueMap + valueMap, + description, }), }); newLabel = await res.json() as LabelClass; @@ -62,6 +65,15 @@ export function AddLabel({ setTypeName(e.target.value)} />
+
+ + setDescription(e.target.value)} + minRows={3} + /> +
{selectedType === LabelType.CATEGORICAL && - (
diff --git a/frontend/components/traces/export-spans-dialog.tsx b/frontend/components/traces/export-spans-dialog.tsx index e60d93b0..96b6f33e 100644 --- a/frontend/components/traces/export-spans-dialog.tsx +++ b/frontend/components/traces/export-spans-dialog.tsx @@ -1,57 +1,86 @@ import { useProjectContext } from "@/contexts/project-context"; import { useState } from "react"; -import { Dialog, DialogContent, DialogHeader, DialogTrigger, DialogTitle, DialogFooter } from "../ui/dialog"; +import { Dialog, DialogContent, DialogHeader, DialogTrigger, DialogTitle } from "../ui/dialog"; import { Button } from "../ui/button"; import DatasetSelect from "../ui/dataset-select"; -import { ExportableSpanColumns } from "@/lib/traces/types"; -import { Checkbox } from "../ui/checkbox"; +import { Span } from "@/lib/traces/types"; import { Label } from "../ui/label"; -import { Loader } from "lucide-react"; +import { Database, Loader } from "lucide-react"; import { cn } from "@/lib/utils"; import { useToast } from "@/lib/hooks/use-toast"; import { Dataset } from "@/lib/dataset/types"; - +import Formatter from "../ui/formatter"; interface ExportSpansDialogProps { - spanId: string; + span: Span; } - export default function ExportSpansDialog({ - spanId + span }: ExportSpansDialogProps) { const { projectId } = useProjectContext(); const [isDialogOpen, setIsDialogOpen] = useState(false); const [isLoading, setIsLoading] = useState(false); const [selectedDataset, setSelectedDataset] = useState(null); - const [selectedColumns, setSelectedColumns] = useState>(new Set([ - ExportableSpanColumns.Name, - ExportableSpanColumns.Input, - ExportableSpanColumns.Output, - ])); - const toggleSelectedColumn = (column: ExportableSpanColumns) => { - const newSelectedColumns = new Set(selectedColumns); - if (newSelectedColumns.has(column)) { - newSelectedColumns.delete(column); - } else { - newSelectedColumns.add(column); + const { toast } = useToast(); + + const [data, setData] = useState(span.input); + const [target, setTarget] = useState(span.output); + const [isDataValid, setIsDataValid] = useState(true); + const [isTargetValid, setIsTargetValid] = useState(true); + + const [metadata, setMetadata] = useState({}); + const [isMetadataValid, setIsMetadataValid] = useState(true); + + const isJsonValid = (json: string): boolean => { + try { + JSON.parse(json); + return true; + } catch (e) { + return false; } - setSelectedColumns(newSelectedColumns); }; - const { toast } = useToast(); + const handleDataChange = (value: string) => { + const isValid = isJsonValid(value); + setIsDataValid(isValid); + if (isValid) { + setData(JSON.parse(value)); + } + }; + + const handleTargetChange = (value: string) => { + const isValid = isJsonValid(value); + setIsTargetValid(isValid); + if (isValid) { + setTarget(JSON.parse(value)); + } + }; + + const handleMetadataChange = (value: string) => { + const isValid = isJsonValid(value); + setIsMetadataValid(isValid); + if (isValid) { + setMetadata(JSON.parse(value)); + } + }; const exportSpan = async () => { if (!selectedDataset) { return; }; setIsLoading(true); - const res = await fetch(`/api/projects/${projectId}/spans/${spanId}/export`, { + const res = await fetch(`/api/projects/${projectId}/datasets/${selectedDataset.id}/datapoints`, { method: 'POST', body: JSON.stringify({ - datasetId: selectedDataset?.id, - fields: Array.from(selectedColumns), + datapoints: [ + { + data: data, + target: target, + metadata: metadata, + } + ] }), }); setIsLoading(false); @@ -78,35 +107,71 @@ export default function 
ExportSpansDialog({ } }}> - + - - - Select dataset and columns + + +
+ Export span to dataset + +
- setSelectedDataset(dataset)} /> -
- {(Object.values(ExportableSpanColumns)).map((column) => ( -
- toggleSelectedColumn(column)} +
+
+
+ + setSelectedDataset(dataset)} /> +
+
+ + + {!isDataValid && ( +

Invalid JSON format

+ )} +
+
+ + + {!isTargetValid && ( +

Invalid JSON format

+ )} +
+
+ + - + {!isMetadataValid && ( +

Invalid JSON format

+ )}
- ))} +
- - - diff --git a/frontend/components/traces/page-placeholder.tsx b/frontend/components/traces/page-placeholder.tsx index 6276fcb8..62113e7f 100644 --- a/frontend/components/traces/page-placeholder.tsx +++ b/frontend/components/traces/page-placeholder.tsx @@ -1,8 +1,21 @@ +import { useState } from "react"; +import { useProjectContext } from "@/contexts/project-context"; import Code from "../ui/code"; import { Tabs, TabsContent, TabsList, TabsTrigger } from "../ui/tabs"; +import { PYTHON_INSTALL, TYPESCRIPT_INSTALL } from "@/lib/const"; export default function TracesPagePlaceholder() { + const { projectId } = useProjectContext(); + const [tabValue, setTabValue] = useState('python'); + + const pythonInitialization = `from lmnr import Laminar as L +L.initialize(project_api_key="")` + + const typescriptInitialization = `import { Laminar as L } from '@lmnr-ai/lmnr'; +L.initialize({projectApiKey: ""}); +` + const pythonString = `from lmnr import observe, Laminar as L L.initialize(project_api_key="") @@ -15,7 +28,7 @@ def function_to_trace(...): ... ` - const typescriptString = `import { Laminar as L } from '@lmnr-ai/lmnr'; + const typescriptString = `import { Laminar as L, observe } from '@lmnr-ai/lmnr'; L.initialize({ projectApiKey: "" }); // line above automatically instruments common @@ -31,13 +44,62 @@ const function_to_trace = return (
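// Aside on export-spans-dialog.tsx above: each editable field keeps a validity
// flag plus the last successfully parsed value, so malformed JSON never
// overwrites state and only flips the flag. A sketch of the pattern as one
// reusable handler; illustrative, not the component's actual code:
const handleJsonFieldChange = <T,>(
  value: string,
  setValid: (valid: boolean) => void,
  setParsed: (parsed: T) => void,
): void => {
  try {
    const parsed = JSON.parse(value) as T;
    setValid(true);
    setParsed(parsed); // commit only when the JSON parses
  } catch {
    setValid(false);   // flag the field; the previous parsed value is kept
  }
};
// Usage, mirroring handleDataChange in the hunk:
//   handleJsonFieldChange<Record<string, unknown>>(value, setIsDataValid, setData);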
-
-

- You don{"'"}t have any traces in this project yet. - To start sending traces, instrument your code like this. - Read the docs. -

- +
+

Quickstart

+

+ You don{"'"}t have any traces in this project yet. + Let{"'"}s send your first few traces. +

+

Install Laminar

+ + + Python + Typescript + +
+ + + + + + +
+
+

Generate API key

+

+ Go to + settings page + to generate an API key and use it in your code. +

+

Kickstart with just 2 lines

+

+ If you already have Python or Typescript code that uses LLM provider libraries, add 2 lines to auto-instrument your app. + This will automatically instrument all major LLM providers (e.g. OpenAI, Anthropic), LLM frameworks including LangChain and LlamaIndex, and even vector DB calls. +

+ + + Python + Typescript + +
+ + + + + + +
+
+

+ Read the docs + to learn more. + You can also see simple app examples in the docs. +

+

Adding manual instrumentation (Optional)

+

+ If you want to trace your own functions to see their durations, inputs and outputs, or want to group LLM calls or other spans into one trace, you can use the @observe decorator in Python or the async observe function in JavaScript/TypeScript. +

+ Python Typescript @@ -51,6 +113,17 @@ const function_to_trace =
+

Run your app

+

+ Run your Python or Typescript app. Refresh the page to see traces. +

+

Cannot send traces?

+

+ Check troubleshooting guide + to learn more or + message us + and we{"'"}ll be happy to help. +

diff --git a/frontend/components/traces/span-cards.tsx b/frontend/components/traces/span-cards.tsx deleted file mode 100644 index ad68a7bf..00000000 --- a/frontend/components/traces/span-cards.tsx +++ /dev/null @@ -1,172 +0,0 @@ -import React, { useEffect, useRef, useState } from 'react' -import { SpanCard } from './span-card' -import { getDurationString } from '@/lib/flow/utils' -import { ScrollArea, ScrollBar } from '../ui/scroll-area' -import { Label } from '../ui/label' -import { Span, TraceWithSpans } from '@/lib/traces/types' -import { CircleDollarSign, Clock3, Coins } from 'lucide-react' -import { SpanView } from './span-view' -import Timeline from './timeline' -import { cn } from '@/lib/utils' -import { usePathname, useRouter, useSearchParams } from 'next/navigation' - -interface SpanCardsProps { - trace: TraceWithSpans -} - - -export default function SpanCards({ trace }: SpanCardsProps) { - const spans = trace.spans - - const childSpans = {} as { [key: string]: Span[] } - - const topLevelSpans = spans.filter(span => !span.parentSpanId) - - for (const span of spans) { - if (span.parentSpanId) { - if (!childSpans[span.parentSpanId]) { - childSpans[span.parentSpanId] = [] - } - childSpans[span.parentSpanId].push(span) - } - } - const searchParams = new URLSearchParams(useSearchParams().toString()); - const [selectedSpan, setSelectedSpan] = useState(searchParams.get('spanId') ? spans.find(span => span.spanId === searchParams.get('spanId')) || null : null); - const router = useRouter(); - const pathName = usePathname(); - const ref = useRef(null) - const container = useRef(null) - const traceTreePanel = useRef(null) - const [containerHeight, setContainerHeight] = useState(0) - const [containerWidth, setContainerWidth] = useState(0) - // here timelineWidth refers to the width of the trace tree panel and waterfall timeline - const [timelineWidth, setTimelineWidth] = useState(0) - - useEffect(() => { - if (!container.current) { - return - } - - const resizeObserver = new ResizeObserver((entries) => { - for (let entry of entries) { - const { width, height } = entry.contentRect; - setContainerHeight(height); - setContainerWidth(width) - } - }); - resizeObserver.observe(container.current); - - return () => { - resizeObserver.disconnect(); - } - - }, [container.current]) - - useEffect(() => { - - // if no span is selected, timeline should take full width - if (!selectedSpan) { - setTimelineWidth(containerWidth) - } else { - // if a span is selected, waterfall is hidden, so timeline should take the width of the trace tree panel - setTimelineWidth(traceTreePanel.current!.getBoundingClientRect().width + 1) - } - - }, [containerWidth, selectedSpan]) - - return ( -
-
-
- - - - - - {!selectedSpan && ( - - )} - - -
-
-
-
- - -
-
- - -
-
- - -
-
-
- { - topLevelSpans.map((span, index) => ( -
- { - setSelectedSpan(span) - setTimelineWidth(traceTreePanel.current!.getBoundingClientRect().width + 1) - searchParams.set('spanId', span.spanId) - router.push(`${pathName}?${searchParams.toString()}`); - }} - /> -
- )) - - } -
-
-
- -
- -
-
-
- {selectedSpan && ( -
- { - setSelectedSpan(null) - searchParams.delete('spanId') - router.push(`${pathName}?${searchParams.toString()}`); - setTimelineWidth(container.current!.getBoundingClientRect().width) - }} /> -
- )} -
- ) -} diff --git a/frontend/components/traces/span-labels.tsx b/frontend/components/traces/span-labels.tsx index b9e51b2c..01de9e56 100644 --- a/frontend/components/traces/span-labels.tsx +++ b/frontend/components/traces/span-labels.tsx @@ -4,17 +4,10 @@ import { swrFetcher } from "@/lib/utils"; import useSWR from "swr"; import { DataTable } from "../ui/datatable"; import { useEffect } from "react"; -import { Row } from "@tanstack/react-table"; +import { ColumnDef } from "@tanstack/react-table"; -import { - DropdownMenu, - DropdownMenuContent, - DropdownMenuItem, - DropdownMenuTrigger, -} from "@/components/ui/dropdown-menu"; -import { MoreVertical } from "lucide-react"; -import { Button } from "@/components/ui/button"; import { eventEmitter } from "@/lib/event-emitter"; +import ClientTimestampFormatter from "../client-timestamp-formatter"; interface SpanLabelsProps { spanId: string; @@ -43,7 +36,7 @@ export default function SpanLabels({ }; }, [mutate]); - const columns = [ + const columns: ColumnDef[] = [ { accessorKey: 'className', header: 'Name', @@ -57,7 +50,19 @@ export default function SpanLabels({ return row.valueMap?.[row.value] ?? ''; }, header: 'Value', - } + }, + { + accessorFn: (row: SpanLabel) => { + return row.userEmail ?? (row.labelSource === 'Auto' ? 'Auto-labeled' : '-'); + }, + header: 'User', + }, + { + accessorKey: 'updatedAt', + header: 'Updated At', + cell: row => + + }, ]; return ( diff --git a/frontend/components/traces/span-view.tsx b/frontend/components/traces/span-view.tsx index b9a7b2c0..62e7ddab 100644 --- a/frontend/components/traces/span-view.tsx +++ b/frontend/components/traces/span-view.tsx @@ -1,15 +1,12 @@ -import { getDurationString, isChatMessageList, renderNodeInput } from "@/lib/flow/utils"; -import { GraphMessage } from "@/lib/pipeline/types"; -import { useEffect, useState } from "react"; +import { getDurationString, isChatMessageList } from "@/lib/flow/utils"; import useSWR from "swr"; import { useProjectContext } from "@/contexts/project-context"; -import { formatTimestamp, swrFetcher } from "@/lib/utils"; +import { swrFetcher } from "@/lib/utils"; import { Skeleton } from "../ui/skeleton"; import { Tabs, TabsContent, TabsList, TabsTrigger } from '../ui/tabs' import { ScrollArea } from "../ui/scroll-area"; import Formatter from "../ui/formatter"; import { Span, SpanType } from "@/lib/traces/types"; -import { Button } from "../ui/button"; import { Activity, ArrowRight, Braces, CircleDollarSign, Clock3, Coins, Gauge, MessageCircleMore, X } from "lucide-react"; import SpanEvents from "./span-events"; import ChatMessageListTab from "./chat-message-list-tab"; @@ -19,71 +16,68 @@ import { AddLabelPopover } from "./add-label-popover"; import ExportSpansDialog from "./export-spans-dialog"; interface SpanViewProps { - spanPreview: Span; - onCloseClick?: () => void; + spanId: string; } -type TabName = 'span' | 'events' | 'attributes' | 'labels'; - -export function SpanView({ spanPreview, onCloseClick }: SpanViewProps) { +export function SpanView({ spanId }: SpanViewProps) { const { projectId } = useProjectContext(); - const [selectedTab, setSelectedTab] = useState('span') + const { data: span }: { data: Span } = useSWR(`/api/projects/${projectId}/spans/${spanId}`, swrFetcher) - const { data: span }: { data: Span } = useSWR(`/api/projects/${projectId}/spans/${spanPreview.spanId}`, swrFetcher) + if (!span) { + return ( +
+ + + +
+ ) + } return ( <> setSelectedTab(value as TabName)} >
- {spanPreview.spanType === SpanType.DEFAULT && } - {spanPreview.spanType === SpanType.LLM && } - {spanPreview.spanType === SpanType.EXECUTOR && } - {spanPreview.spanType === SpanType.EVALUATOR && } - {spanPreview.spanType === SpanType.EVALUATION && } + {span.spanType === SpanType.DEFAULT && } + {span.spanType === SpanType.LLM && } + {span.spanType === SpanType.EXECUTOR && } + {span.spanType === SpanType.EVALUATOR && } + {span.spanType === SpanType.EVALUATION && }
-
{spanPreview.name}
+
{span.name}
-
- {/* */} +
- {span ? ( -
-
- - -
-
- - -
-
- - -
+
+
+ + +
+
+ +
- ) : ( - - )} +
+ + +
+
@@ -163,7 +157,7 @@ export function SpanView({ spanPreview, onCloseClick }: SpanViewProps) { className='w-full h-full mt-0' >
- +
diff --git a/frontend/components/traces/trace-view.tsx b/frontend/components/traces/trace-view.tsx index 59cec1cf..5339eb00 100644 --- a/frontend/components/traces/trace-view.tsx +++ b/frontend/components/traces/trace-view.tsx @@ -1,49 +1,122 @@ -import { TraceWithSpans } from '@/lib/traces/types'; -import { ChevronsRight } from 'lucide-react'; -import SpanCards from './span-cards'; -import useSWR from 'swr'; -import { swrFetcher } from '@/lib/utils'; -import { useProjectContext } from '@/contexts/project-context'; -import { Skeleton } from '../ui/skeleton'; -import { Button } from '../ui/button'; -import Mono from '../ui/mono'; +import React, { useEffect, useRef, useState } from 'react' +import { SpanCard } from './span-card' +import { getDurationString } from '@/lib/flow/utils' +import { ScrollArea, ScrollBar } from '../ui/scroll-area' +import { Label } from '../ui/label' +import { Span, TraceWithSpans } from '@/lib/traces/types' +import { ChevronsRight, CircleDollarSign, Clock3, Coins } from 'lucide-react' +import { SpanView } from './span-view' +import Timeline from './timeline' +import { cn, swrFetcher } from '@/lib/utils' +import { usePathname, useRouter, useSearchParams } from 'next/navigation' +import { Button } from '../ui/button' +import Mono from '../ui/mono' +import useSWR from 'swr' +import { useProjectContext } from '@/contexts/project-context' +import { Skeleton } from '../ui/skeleton' interface TraceViewProps { - onClose: () => void; - traceId: string; + traceId: string + onClose: () => void } -export default function TraceView({ onClose, traceId }: TraceViewProps) { + +export default function TraceView({ traceId, onClose }: TraceViewProps) { + + const searchParams = new URLSearchParams(useSearchParams().toString()); + const [selectedSpan, setSelectedSpan] = useState(null); + const router = useRouter(); + const pathName = usePathname(); + const ref = useRef(null) + const container = useRef(null) + const traceTreePanel = useRef(null) + const [containerHeight, setContainerHeight] = useState(0) + const [containerWidth, setContainerWidth] = useState(0) + // here timelineWidth refers to the width of the trace tree panel and waterfall timeline + const [timelineWidth, setTimelineWidth] = useState(0) const { projectId } = useProjectContext(); - const { data: rowInfo, isLoading, error } = useSWR(`/api/projects/${projectId}/traces/${traceId}`, swrFetcher); - - const renderTrace = () => { - if (isLoading) return ( -
- - - -
- ); - if (error) return
Error fetching trace. Please try again
; - if (!rowInfo) return
No trace found for this run id
; + const { data: trace, isLoading } = useSWR(`/api/projects/${projectId}/traces/${traceId}`, swrFetcher); + + const [childSpans, setChildSpans] = useState<{ [key: string]: Span[] }>({}) + const [topLevelSpans, setTopLevelSpans] = useState([]) + const [spans, setSpans] = useState([]) + + useEffect(() => { + + if (!trace) { + return + } + + const spans = trace.spans + + const childSpans = {} as { [key: string]: Span[] } + + const topLevelSpans = spans.filter((span: Span) => !span.parentSpanId) + + for (const span of spans) { + if (span.parentSpanId) { + if (!childSpans[span.parentSpanId]) { + childSpans[span.parentSpanId] = [] + } + childSpans[span.parentSpanId].push(span) + } + } + + setChildSpans(childSpans) + setTopLevelSpans(topLevelSpans) + setSpans(spans) + setSelectedSpan(searchParams.get('spanId') ? spans.find((span: Span) => span.spanId === searchParams.get('spanId')) || null : null) + }, [trace]) + + useEffect(() => { + if (!container.current) { + return + } + const resizeObserver = new ResizeObserver((entries) => { + for (let entry of entries) { + const { width, height } = entry.contentRect; + setContainerHeight(height); + setContainerWidth(width) + } + }); + resizeObserver.observe(container.current); - return ( - - ) - } + return () => { + resizeObserver.disconnect(); + } + + }, [container.current]) + + useEffect(() => { + + if (!traceTreePanel.current) { + return + } + + // if no span is selected, timeline should take full width + if (!selectedSpan) { + setTimelineWidth(containerWidth) + } else { + // if a span is selected, waterfall is hidden, so timeline should take the width of the trace tree panel + setTimelineWidth(traceTreePanel.current!.getBoundingClientRect().width + 1) + } + + }, [containerWidth, selectedSpan, traceTreePanel.current]) return ( +
-
+
@@ -53,12 +126,119 @@ export default function TraceView({ onClose, traceId }: TraceViewProps) { {traceId} +
+
+ {selectedSpan && ( + + )} +
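Note on the hunk above: the new effect rebuilds the span tree on every trace load — spans with no `parentSpanId` become top-level rows, and the rest are bucketed under their parent in a single pass. A minimal standalone sketch of that grouping (the `Span` shape is trimmed to the two fields involved, which is an assumption — the real type carries much more):

```ts
// Reduced span shape for the sketch; the real Span type has many more fields.
interface SpanLite {
  spanId: string;
  parentSpanId: string | null;
}

function groupSpans(spans: SpanLite[]) {
  const childSpans: { [parentId: string]: SpanLite[] } = {};
  // Spans without a parent are the roots of the trace tree.
  const topLevelSpans = spans.filter((span) => !span.parentSpanId);
  for (const span of spans) {
    if (span.parentSpanId) {
      // Create the bucket on first sight of a parent, then append.
      (childSpans[span.parentSpanId] ??= []).push(span);
    }
  }
  return { childSpans, topLevelSpans };
}

// Example: one root with two children.
const { childSpans, topLevelSpans } = groupSpans([
  { spanId: 'a', parentSpanId: null },
  { spanId: 'b', parentSpanId: 'a' },
  { spanId: 'c', parentSpanId: 'a' },
]);
console.log(topLevelSpans.length, childSpans['a'].length); // 1 2
```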
- { - renderTrace() - } + {isLoading &&
+ + + +
} + {trace && ( + +
+
+
+ + + + + + {!selectedSpan && ( + + )} + + +
+
+
+
+ + +
+
+ + +
+
+ + +
+
+
+ { + topLevelSpans.map((span, index) => ( +
+ { + setSelectedSpan(span) + setTimelineWidth(traceTreePanel.current!.getBoundingClientRect().width + 1) + searchParams.set('spanId', span.spanId) + router.push(`${pathName}?${searchParams.toString()}`); + }} + /> +
+ )) + + } +
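The onClick handler above also writes the selection into the query string, so reloads and shared links land on the same span. A hedged sketch of that pattern, detached from the component (`navigate` stands in for Next.js's `router.push` — an assumption of this sketch):

```ts
// URL-synced selection: the spanId lives in the query string.
function selectSpan(
  spanId: string,
  pathName: string,
  search: string,
  navigate: (url: string) => void
): void {
  const params = new URLSearchParams(search);
  params.set('spanId', spanId); // overwrites any previous selection
  navigate(`${pathName}?${params.toString()}`);
}

// Mirrors the effect above that reads the param back after the trace loads.
function restoreSelection<T extends { spanId: string }>(
  spans: T[],
  search: string
): T | null {
  const spanId = new URLSearchParams(search).get('spanId');
  return spanId ? spans.find((s) => s.spanId === spanId) ?? null : null;
}

selectSpan('abc123', '/project/traces', '', (url) => console.log(url));
// -> /project/traces?spanId=abc123
```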
+
+
+ +
+ +
+
+
+ {selectedSpan && ( +
+ +
+ )} +
+ + )}
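On the sizing logic earlier in this file: a ResizeObserver mirrors the container's content-box size into state, and `timelineWidth` collapses from the full container width to the tree panel's width (plus one pixel for the separator) whenever a span view is open. A plain-DOM, browser-only sketch of that pattern — the selector and panel width are made up for the example:

```ts
// '#trace-container' is a hypothetical selector; the component uses a ref.
const container = document.querySelector<HTMLDivElement>('#trace-container');

let containerWidth = 0;
let spanViewOpen = false;     // stands in for `selectedSpan !== null`
let timelineWidth = 0;
const TREE_PANEL_WIDTH = 384; // assumption; the component measures the real panel

const observer = new ResizeObserver((entries) => {
  for (const entry of entries) {
    // contentRect is the content box, so borders and scrollbars don't count.
    containerWidth = entry.contentRect.width;
    // Full width while nothing is selected; tree-panel width (+1 for the
    // separator) once the span view takes over the right-hand side.
    timelineWidth = spanViewOpen ? TREE_PANEL_WIDTH + 1 : containerWidth;
  }
});

if (container) observer.observe(container);
// React's effect cleanup maps to: observer.disconnect();
```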
- ); + ) } diff --git a/frontend/components/traces/traces-table-sessions-view.tsx b/frontend/components/traces/traces-table-sessions-view.tsx index 45c00032..74521b00 100644 --- a/frontend/components/traces/traces-table-sessions-view.tsx +++ b/frontend/components/traces/traces-table-sessions-view.tsx @@ -139,7 +139,6 @@ export default function SessionsTable({ onRowClick }: SessionsTableProps) { }, { accessorFn: row => { - console.log(row.data) if (row.type === 'trace') { return getDurationString(row.data.startTime, row.data.endTime) } diff --git a/frontend/components/traces/traces-table-traces-view.tsx b/frontend/components/traces/traces-table-traces-view.tsx index f384902a..b8d73479 100644 --- a/frontend/components/traces/traces-table-traces-view.tsx +++ b/frontend/components/traces/traces-table-traces-view.tsx @@ -1,7 +1,7 @@ import { useProjectContext } from "@/contexts/project-context"; import { useUserContext } from "@/contexts/user-context"; import { SUPABASE_URL, SUPABASE_ANON_KEY, USE_REALTIME } from "@/lib/const"; -import { Trace } from "@/lib/traces/types"; +import { LabelClass, Trace } from "@/lib/traces/types"; import { createClient } from "@supabase/supabase-js"; import { ColumnDef } from "@tanstack/react-table"; import { usePathname, useRouter, useSearchParams } from "next/navigation"; @@ -9,7 +9,7 @@ import { useState, useEffect, useMemo } from "react"; import ClientTimestampFormatter from "../client-timestamp-formatter"; import StatusLabel from "../ui/status-label"; import TracesPagePlaceholder from "./page-placeholder"; -import { Event } from '@/lib/events/types'; +import { Event, EventTemplate } from '@/lib/events/types'; import DateRangeFilter from "../ui/date-range-filter"; import { DataTable } from "../ui/datatable"; import DataTableFilter from "../ui/datatable-filter"; @@ -17,6 +17,8 @@ import TextSearchFilter from "../ui/text-search-filter"; import { Button } from "../ui/button"; import { RefreshCcw } from "lucide-react"; import { PaginatedResponse } from "@/lib/types"; +import useSWR from "swr"; +import { swrFetcher } from "@/lib/utils"; interface TracesTableProps { onRowClick?: (rowId: string) => void; @@ -41,6 +43,8 @@ export default function TracesTable({ onRowClick }: TracesTableProps) { const pageCount = Math.ceil(totalCount / pageSize); const [traceId, setTraceId] = useState(searchParams.get('traceId') ?? null); + const isCurrentTimestampIncluded = (!!pastHours) || ((!!endDate) && new Date(endDate) >= new Date()); + const getTraces = async () => { setTraces(undefined); @@ -104,7 +108,7 @@ export default function TracesTable({ onRowClick }: TracesTableProps) { router.push(`${pathName}?${searchParams.toString()}`); }; - const staticColumns: ColumnDef[] = [ + const columns: ColumnDef[] = [ { accessorFn: (row) => row.success ? 'Success' : 'Failed', header: 'Status', @@ -177,17 +181,35 @@ export default function TracesTable({ onRowClick }: TracesTableProps) { ] - const eventFilterCol = { - header: "events", - id: `jsonb::events::event`, - }; + const extraFilterCols = [ + { + header: "events", + id: `event`, + }, + { + header: "labels", + id: `label`, + } + ] - const columns = staticColumns + const { data: events } = useSWR( + `/api/projects/${projectId}/event-templates`, + swrFetcher + ); + const { data: labels } = useSWR( + `/api/projects/${projectId}/label-classes`, + swrFetcher + ); + + const customFilterColumns = { + 'event': events?.map(event => event.name) ?? [], + 'label': labels?.map(label => label.name) ?? 
[], + } const { supabaseAccessToken } = useUserContext() const supabase = useMemo(() => { return USE_REALTIME - ? createClient( + ? createClient( SUPABASE_URL, SUPABASE_ANON_KEY, { @@ -219,7 +241,7 @@ export default function TracesTable({ onRowClick }: TracesTableProps) { }, (payload) => { if (payload.eventType === 'INSERT') { - setCanRefresh(true); + setCanRefresh(isCurrentTimestampIncluded); } } ) @@ -233,7 +255,7 @@ export default function TracesTable({ onRowClick }: TracesTableProps) { }, (payload) => { if (payload.eventType === 'UPDATE') { - setCanRefresh(true); + setCanRefresh(isCurrentTimestampIncluded); } } ) @@ -245,11 +267,13 @@ export default function TracesTable({ onRowClick }: TracesTableProps) { } }, []) - if (traces != null && totalCount === 0 && !anyInProject) { + if (traces != undefined && totalCount === 0 && !anyInProject) { return } - const filterColumns = columns.filter(column => !['actions', 'events', 'start_time'].includes(column.id!)).concat([eventFilterCol]); + const filterColumns = columns + .filter(column => !['actions', 'start_time', 'events'].includes(column.id!)) + .concat(extraFilterCols); return ( - + - - - ) + const isFilterFilled = (filter: DatatableFilter): boolean => { + if (filter.column && Object.keys(customFilterColumns ?? {}).includes(filter.column?.split('.')[0])) { + return filter.column.split('.')[1].length > 0 && !!filter.operator && !!filter.value; + } + return !!filter.column && !!filter.operator && !!filter.value; } return ( @@ -168,27 +58,33 @@ export default function DataTableFilter({ columns, className }: DataTable -
- {filters.length > 0 ? ( - +
+ {filters.length > 0 + ?
{filters.map((filter, i) => ( - filterTableRow(filter, i) + ))} -
) : - ( -
- -
- )} + + :
+ +
+ }
+ + + ) +} diff --git a/frontend/components/ui/datatable.tsx b/frontend/components/ui/datatable.tsx index 3f251fe5..c9f2738f 100644 --- a/frontend/components/ui/datatable.tsx +++ b/frontend/components/ui/datatable.tsx @@ -256,18 +256,6 @@ export function DataTable({ >
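Before the datatable.tsx hunk proceeds, a note on the DataTableFilter change just above: filters on virtual columns are encoded as `event.NAME` / `label.NAME`, so completeness validation has to split the column id before checking it. A sketch of that check, with the filter shape reduced to what the function reads (an assumption — the real `DatatableFilter` type has more fields) and example lookup names that are invented for illustration:

```ts
interface FilterLite {
  column?: string;   // plain column id, or a prefixed virtual one like 'event.myEvent'
  operator?: string;
  value?: string;
}

// Virtual columns whose allowed values come from lookups (names are example data).
const customFilterColumns: Record<string, string[]> = {
  event: ['user_signed_up'],
  label: ['hallucination'],
};

function isFilterFilled(filter: FilterLite): boolean {
  const [prefix, name] = filter.column?.split('.') ?? [];
  if (prefix && prefix in customFilterColumns) {
    // For 'event.X' / 'label.X' the part after the dot must be non-empty too.
    // (`name ?? ''` guards the missing-dot case, which the original leaves unchecked.)
    return (name ?? '').length > 0 && !!filter.operator && !!filter.value;
  }
  return !!filter.column && !!filter.operator && !!filter.value;
}

console.log(isFilterFilled({ column: 'event.', operator: 'eq', value: 'x' }));              // false
console.log(isFilterFilled({ column: 'event.user_signed_up', operator: 'eq', value: 'x' })); // true
```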
- {/*
- {flexRender( - header.column.columnDef.header, - header.getContext() - )} -
header.column.resetSize()} - > -
-
*/} ))} @@ -307,27 +295,30 @@ export function DataTable({ ) const showSelection = Object.keys(rowSelection).length > 0 || allRowsAcrossAllPagesSelected || enableRowSelection; - const hasChildren = !!children; return ( -
- {(showSelection || hasChildren) && -
+
+ {(showSelection && Object.keys(rowSelection).length > 0) && +
{(showSelection) && <> {Object.keys(rowSelection).length > 0 && <> - + } @@ -347,9 +338,11 @@ export function DataTable({ } } - {children}
} + {children &&
+ {children} +
}
{content} diff --git a/frontend/components/user/avatar-menu.tsx b/frontend/components/user/avatar-menu.tsx index 9f803c36..0ca40070 100644 --- a/frontend/components/user/avatar-menu.tsx +++ b/frontend/components/user/avatar-menu.tsx @@ -14,7 +14,7 @@ export default function AvatarMenu() {
- avatar +
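Stepping back to the traces-table realtime hunks further above: the Supabase INSERT/UPDATE callbacks now raise the refresh banner only when the active time filter can actually contain new rows — true for relative windows (`pastHours`) or when an absolute range's end bound is still in the future. A small sketch of that predicate; the channel wiring itself is elided and the handler below is a simplified stand-in:

```ts
// True when the active time filter can contain rows created right now.
function isCurrentTimestampIncluded(
  pastHours: string | null,
  endDate: string | null
): boolean {
  if (pastHours) return true;                          // relative window ends at "now"
  return !!endDate && new Date(endDate) >= new Date(); // absolute range still open
}

let canRefresh = false;

// Simplified stand-in for the Supabase INSERT/UPDATE callbacks above.
function onRealtimeEvent(pastHours: string | null, endDate: string | null) {
  canRefresh = isCurrentTimestampIncluded(pastHours, endDate);
}

onRealtimeEvent('24', null);
console.log(canRefresh); // true — "past 24 hours" always includes the present
onRealtimeEvent(null, '2020-01-01');
console.log(canRefresh); // false — a closed historical range can't gain rows
```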
diff --git a/frontend/lib/auth.ts b/frontend/lib/auth.ts index b41cce78..5faecef2 100644 --- a/frontend/lib/auth.ts +++ b/frontend/lib/auth.ts @@ -9,6 +9,7 @@ declare module 'next-auth' { user: { id: string apiKey: string + isNewUserCreated: boolean } & DefaultSession['user'] } @@ -20,6 +21,7 @@ declare module 'next-auth' { declare module 'next-auth/jwt' { interface JWT { apiKey: string + isNewUserCreated: boolean } } @@ -67,7 +69,9 @@ export const authOptions: NextAuthOptions = { throw err } - token.apiKey = (await res.json()).apiKey + const resJson = await res.json() + token.apiKey = resJson.apiKey; + token.isNewUserCreated = resJson.isNewUserCreated; } return token @@ -75,7 +79,7 @@ export const authOptions: NextAuthOptions = { session({ session, token }) { session.user.apiKey = token.apiKey session.user.email = token.email! - // session.user.email = token.email + session.user.isNewUserCreated = token.isNewUserCreated return session }, diff --git a/frontend/lib/const.ts b/frontend/lib/const.ts index 2d243bd3..5937ff83 100644 --- a/frontend/lib/const.ts +++ b/frontend/lib/const.ts @@ -4,4 +4,7 @@ export const SUPABASE_ANON_KEY = "" // by default, if SUPABASE_URL and SUPABASE_ANON_KEY are not set, we disable realtime export const USE_REALTIME = SUPABASE_URL != null - && SUPABASE_ANON_KEY != null && SUPABASE_ANON_KEY.length > 0 && SUPABASE_URL.length > 0; \ No newline at end of file + && SUPABASE_ANON_KEY != null && SUPABASE_ANON_KEY.length > 0 && SUPABASE_URL.length > 0; + +export const PYTHON_INSTALL = 'pip install lmnr'; +export const TYPESCRIPT_INSTALL = 'npm add @lmnr-ai/lmnr'; diff --git a/frontend/lib/dataset/types.ts b/frontend/lib/dataset/types.ts index ca86c07c..87e87c43 100644 --- a/frontend/lib/dataset/types.ts +++ b/frontend/lib/dataset/types.ts @@ -10,5 +10,6 @@ export interface Datapoint { createdAt: string; data: Record; target: Record; + metadata: Record | null; indexedOn: string | null; -} +} \ No newline at end of file diff --git a/frontend/lib/evaluation/types.ts b/frontend/lib/evaluation/types.ts index cae29c55..a8c47949 100644 --- a/frontend/lib/evaluation/types.ts +++ b/frontend/lib/evaluation/types.ts @@ -4,34 +4,30 @@ import { TracePreview } from "../traces/types"; export type Evaluation = { id: string, createdAt: string, + groupId: string, name: string, - status: 'Started' | 'Finished' | 'Error', projectId: string, } export type EvaluationDatapoint = { id: string; evaluationId: string; - status: string; scores: Record; data: Record; target: Record; executorOutput: Record | null; executorTrace: TracePreview | null; evaluatorTrace: TracePreview | null; - error: EvaluationDatapointError | null; } export type EvaluationDatapointPreview = { id: string; evaluationId: string; createdAt: string; - status: string; scores?: Record; data: Record; target: Record; executorOutput: Record; - error?: any; traceId: string; } @@ -41,16 +37,11 @@ export type EvaluationDatapointPreviewWithCompared = { comparedScores?: Record; } & EvaluationDatapointPreview -export type EvaluationStats = Record; +export type EvaluationStats = { + averageScores: Record; +}; export type EvaluationResultsInfo = { evaluation: Evaluation; results: EvaluationDatapointPreview[]; } - -export type EvaluationDatapointError = { - errorType: string; - error: string; - executorInputNodeNames: string[] | null; - evaluatorInputNodeNames: string[] | null; -} diff --git a/frontend/lib/evaluation/utils.ts b/frontend/lib/evaluation/utils.ts index 2cc2506e..f90cc037 100644 --- a/frontend/lib/evaluation/utils.ts +++ 
b/frontend/lib/evaluation/utils.ts @@ -9,12 +9,6 @@ export function mergeOriginalWithComparedDatapoints(results: EvaluationDatapoint for (let i = 0; i < minLen; i++) { const original = results[i]; const compared = comparedResults[i]; - - if (original.status === 'Error' || compared.status === 'Error') { - mergedResults.push(original as EvaluationDatapointPreviewWithCompared); - continue; - } - const merged: EvaluationDatapointPreviewWithCompared = { ...original, comparedId: compared.id, diff --git a/frontend/lib/flow/utils.ts b/frontend/lib/flow/utils.ts index 3aa5b9bb..b8b9fb39 100644 --- a/frontend/lib/flow/utils.ts +++ b/frontend/lib/flow/utils.ts @@ -582,9 +582,10 @@ export function isStringList(input: NodeInput): input is string[] { export function isChatMessageList(input: NodeInput): input is ChatMessage[] { if (input === undefined) return false; if (!Array.isArray(input)) return false; - return input.every((item) => { - return typeof item === 'object' && 'role' in item && 'content' in item - }); + return input.every((item) => + // Check for !== null first, because typeof null is 'object' + item !== null && typeof item === 'object' && 'role' in item && 'content' in item + ); } export const renderChatMessageContentParts = (parts: ChatMessageContentPart[]): string => { diff --git a/frontend/lib/traces/types.ts b/frontend/lib/traces/types.ts index a4a3fcd3..d0800425 100644 --- a/frontend/lib/traces/types.ts +++ b/frontend/lib/traces/types.ts @@ -15,6 +15,7 @@ export type LabelClass = { createdAt: string; labelType: LabelType; valueMap: string[]; + description: string | null; } export type SpanLabel = { @@ -25,6 +26,10 @@ export type SpanLabel = { value: number; valueMap: string[]; className: string; + labelSource: 'Auto' | 'Manual'; + userEmail: string | null; + description: string | null; + updatedAt: string; } export enum SpanType { @@ -74,6 +79,10 @@ export type Trace = { totalTokenCount: number; cost: number | null; metadata: Record | null; + parentSpanInput: any | null; + parentSpanOutput: any | null; + parentSpanName: string | null; + parentSpanType: SpanType | null; events: TraceEvent[] } diff --git a/frontend/package.json b/frontend/package.json index 6c5bcf53..9ecae10f 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -65,7 +65,7 @@ "js-tiktoken": "^1.0.14", "jsonwebtoken": "^9.0.2", "lucide-react": "^0.323.0", - "next": "14.2.3", + "next": "14.2.14", "next-auth": "4.24.5", "next-themes": "^0.2.1", "re-resizable": "^6.9.18", diff --git a/frontend/pnpm-lock.yaml b/frontend/pnpm-lock.yaml index 7b7f7b2a..1195aa72 100644 --- a/frontend/pnpm-lock.yaml +++ b/frontend/pnpm-lock.yaml @@ -162,14 +162,14 @@ importers: specifier: ^0.323.0 version: 0.323.0(react@18.3.1) next: - specifier: 14.2.3 - version: 14.2.3(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + specifier: 14.2.14 + version: 14.2.14(react-dom@18.3.1(react@18.3.1))(react@18.3.1) next-auth: specifier: 4.24.5 - version: 4.24.5(next@14.2.3(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 4.24.5(next@14.2.14(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1) next-themes: specifier: ^0.2.1 - version: 0.2.1(next@14.2.3(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 0.2.1(next@14.2.14(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1) re-resizable: specifier: ^6.9.18 version: 
6.9.18(react-dom@18.3.1(react@18.3.1))(react@18.3.1) @@ -408,6 +408,9 @@ packages: '@codemirror/view@6.33.0': resolution: {integrity: sha512-AroaR3BvnjRW8fiZBalAaK+ZzB5usGgI014YKElYZvQdNH5ZIidHlO+cyf/2rWzyBFRkvG6VhiXeAEbC53P2YQ==} + '@codemirror/view@6.34.1': + resolution: {integrity: sha512-t1zK/l9UiRqwUNPm+pdIT0qzJlzuVckbTEMVNFhfWkGiBQClstzg+78vedCvLSX0xJEZ6lwZbPpnljL7L6iwMQ==} + '@emnapi/runtime@1.2.0': resolution: {integrity: sha512-bV21/9LQmcQeCPEg3BDFtvwL6cwiTMksYNWQQ4KOxCZikEGalWtenoZ0wCiukJINlGCIi2KXx01g4FoH/LxpzQ==} @@ -693,62 +696,62 @@ packages: '@mux/playback-core@0.25.2': resolution: {integrity: sha512-vrBbCgLHwmPpVxF0QGj+sXHUVXSxgDJJhVm8pxPXEkbw0vjPNHTXgAd/Ty6JA0vZ0ZjoQuAa17AxJ+c02JYeWQ==} - '@next/env@14.2.3': - resolution: {integrity: sha512-W7fd7IbkfmeeY2gXrzJYDx8D2lWKbVoTIj1o1ScPHNzvp30s1AuoEFSdr39bC5sjxJaxTtq3OTCZboNp0lNWHA==} + '@next/env@14.2.14': + resolution: {integrity: sha512-/0hWQfiaD5//LvGNgc8PjvyqV50vGK0cADYzaoOOGN8fxzBn3iAiaq3S0tCRnFBldq0LVveLcxCTi41ZoYgAgg==} '@next/eslint-plugin-next@14.1.0': resolution: {integrity: sha512-x4FavbNEeXx/baD/zC/SdrvkjSby8nBn8KcCREqk6UuwvwoAPZmaV8TFCAuo/cpovBRTIY67mHhe86MQQm/68Q==} - '@next/swc-darwin-arm64@14.2.3': - resolution: {integrity: sha512-3pEYo/RaGqPP0YzwnlmPN2puaF2WMLM3apt5jLW2fFdXD9+pqcoTzRk+iZsf8ta7+quAe4Q6Ms0nR0SFGFdS1A==} + '@next/swc-darwin-arm64@14.2.14': + resolution: {integrity: sha512-bsxbSAUodM1cjYeA4o6y7sp9wslvwjSkWw57t8DtC8Zig8aG8V6r+Yc05/9mDzLKcybb6EN85k1rJDnMKBd9Gw==} engines: {node: '>= 10'} cpu: [arm64] os: [darwin] - '@next/swc-darwin-x64@14.2.3': - resolution: {integrity: sha512-6adp7waE6P1TYFSXpY366xwsOnEXM+y1kgRpjSRVI2CBDOcbRjsJ67Z6EgKIqWIue52d2q/Mx8g9MszARj8IEA==} + '@next/swc-darwin-x64@14.2.14': + resolution: {integrity: sha512-cC9/I+0+SK5L1k9J8CInahduTVWGMXhQoXFeNvF0uNs3Bt1Ub0Azb8JzTU9vNCr0hnaMqiWu/Z0S1hfKc3+dww==} engines: {node: '>= 10'} cpu: [x64] os: [darwin] - '@next/swc-linux-arm64-gnu@14.2.3': - resolution: {integrity: sha512-cuzCE/1G0ZSnTAHJPUT1rPgQx1w5tzSX7POXSLaS7w2nIUJUD+e25QoXD/hMfxbsT9rslEXugWypJMILBj/QsA==} + '@next/swc-linux-arm64-gnu@14.2.14': + resolution: {integrity: sha512-RMLOdA2NU4O7w1PQ3Z9ft3PxD6Htl4uB2TJpocm+4jcllHySPkFaUIFacQ3Jekcg6w+LBaFvjSPthZHiPmiAUg==} engines: {node: '>= 10'} cpu: [arm64] os: [linux] - '@next/swc-linux-arm64-musl@14.2.3': - resolution: {integrity: sha512-0D4/oMM2Y9Ta3nGuCcQN8jjJjmDPYpHX9OJzqk42NZGJocU2MqhBq5tWkJrUQOQY9N+In9xOdymzapM09GeiZw==} + '@next/swc-linux-arm64-musl@14.2.14': + resolution: {integrity: sha512-WgLOA4hT9EIP7jhlkPnvz49iSOMdZgDJVvbpb8WWzJv5wBD07M2wdJXLkDYIpZmCFfo/wPqFsFR4JS4V9KkQ2A==} engines: {node: '>= 10'} cpu: [arm64] os: [linux] - '@next/swc-linux-x64-gnu@14.2.3': - resolution: {integrity: sha512-ENPiNnBNDInBLyUU5ii8PMQh+4XLr4pG51tOp6aJ9xqFQ2iRI6IH0Ds2yJkAzNV1CfyagcyzPfROMViS2wOZ9w==} + '@next/swc-linux-x64-gnu@14.2.14': + resolution: {integrity: sha512-lbn7svjUps1kmCettV/R9oAvEW+eUI0lo0LJNFOXoQM5NGNxloAyFRNByYeZKL3+1bF5YE0h0irIJfzXBq9Y6w==} engines: {node: '>= 10'} cpu: [x64] os: [linux] - '@next/swc-linux-x64-musl@14.2.3': - resolution: {integrity: sha512-BTAbq0LnCbF5MtoM7I/9UeUu/8ZBY0i8SFjUMCbPDOLv+un67e2JgyN4pmgfXBwy/I+RHu8q+k+MCkDN6P9ViQ==} + '@next/swc-linux-x64-musl@14.2.14': + resolution: {integrity: sha512-7TcQCvLQ/hKfQRgjxMN4TZ2BRB0P7HwrGAYL+p+m3u3XcKTraUFerVbV3jkNZNwDeQDa8zdxkKkw2els/S5onQ==} engines: {node: '>= 10'} cpu: [x64] os: [linux] - '@next/swc-win32-arm64-msvc@14.2.3': - resolution: {integrity: sha512-AEHIw/dhAMLNFJFJIJIyOFDzrzI5bAjI9J26gbO5xhAKHYTZ9Or04BesFPXiAYXDNdrwTP2dQceYA4dL1geu8A==} + 
'@next/swc-win32-arm64-msvc@14.2.14': + resolution: {integrity: sha512-8i0Ou5XjTLEje0oj0JiI0Xo9L/93ghFtAUYZ24jARSeTMXLUx8yFIdhS55mTExq5Tj4/dC2fJuaT4e3ySvXU1A==} engines: {node: '>= 10'} cpu: [arm64] os: [win32] - '@next/swc-win32-ia32-msvc@14.2.3': - resolution: {integrity: sha512-vga40n1q6aYb0CLrM+eEmisfKCR45ixQYXuBXxOOmmoV8sYST9k7E3US32FsY+CkkF7NtzdcebiFT4CHuMSyZw==} + '@next/swc-win32-ia32-msvc@14.2.14': + resolution: {integrity: sha512-2u2XcSaDEOj+96eXpyjHjtVPLhkAFw2nlaz83EPeuK4obF+HmtDJHqgR1dZB7Gb6V/d55FL26/lYVd0TwMgcOQ==} engines: {node: '>= 10'} cpu: [ia32] os: [win32] - '@next/swc-win32-x64-msvc@14.2.3': - resolution: {integrity: sha512-Q1/zm43RWynxrO7lW4ehciQVj+5ePBhOK+/K2P7pLFX3JaJ/IZVC69SHidrmZSOkqz7ECIOhhy7XhAFG4JYyHA==} + '@next/swc-win32-x64-msvc@14.2.14': + resolution: {integrity: sha512-MZom+OvZ1NZxuRovKt1ApevjiUJTcU2PmdJKL66xUPaJeRywnbGGRWUlaAOwunD6dX+pm83vj979NTC8QXjGWg==} engines: {node: '>= 10'} cpu: [x64] os: [win32] @@ -3567,8 +3570,8 @@ packages: react: '*' react-dom: '*' - next@14.2.3: - resolution: {integrity: sha512-dowFkFTR8v79NPJO4QsBUtxv0g9BrS/phluVpMAt2ku7H+cbcBJlopXjkWlwxrk/xGqMemr7JkGPGemPrLLX7A==} + next@14.2.14: + resolution: {integrity: sha512-Q1coZG17MW0Ly5x76shJ4dkC23woLAhhnDnw+DfTc7EpZSGuWrlsZ3bZaO8t6u1Yu8FVfhkqJE+U8GC7E0GLPQ==} engines: {node: '>=18.17.0'} hasBin: true peerDependencies: @@ -4637,6 +4640,13 @@ snapshots: '@codemirror/view': 6.33.0 '@lezer/common': 1.2.1 + '@codemirror/autocomplete@6.18.1(@codemirror/language@6.10.3)(@codemirror/state@6.4.1)(@codemirror/view@6.34.1)(@lezer/common@1.2.1)': + dependencies: + '@codemirror/language': 6.10.3 + '@codemirror/state': 6.4.1 + '@codemirror/view': 6.34.1 + '@lezer/common': 1.2.1 + '@codemirror/commands@6.6.2': dependencies: '@codemirror/language': 6.10.3 @@ -4672,13 +4682,13 @@ snapshots: '@codemirror/lint@6.8.2': dependencies: '@codemirror/state': 6.4.1 - '@codemirror/view': 6.33.0 + '@codemirror/view': 6.34.1 crelt: 1.0.6 '@codemirror/search@6.5.6': dependencies: '@codemirror/state': 6.4.1 - '@codemirror/view': 6.33.0 + '@codemirror/view': 6.34.1 crelt: 1.0.6 '@codemirror/state@6.4.1': {} @@ -4687,7 +4697,7 @@ snapshots: dependencies: '@codemirror/language': 6.10.3 '@codemirror/state': 6.4.1 - '@codemirror/view': 6.33.0 + '@codemirror/view': 6.34.1 '@lezer/highlight': 1.2.1 '@codemirror/view@6.33.0': @@ -4696,6 +4706,12 @@ snapshots: style-mod: 4.1.2 w3c-keyname: 2.2.8 + '@codemirror/view@6.34.1': + dependencies: + '@codemirror/state': 6.4.1 + style-mod: 4.1.2 + w3c-keyname: 2.2.8 + '@emnapi/runtime@1.2.0': dependencies: tslib: 2.7.0 @@ -5006,37 +5022,37 @@ snapshots: hls.js: 1.5.15 mux-embed: 5.2.1 - '@next/env@14.2.3': {} + '@next/env@14.2.14': {} '@next/eslint-plugin-next@14.1.0': dependencies: glob: 10.3.10 - '@next/swc-darwin-arm64@14.2.3': + '@next/swc-darwin-arm64@14.2.14': optional: true - '@next/swc-darwin-x64@14.2.3': + '@next/swc-darwin-x64@14.2.14': optional: true - '@next/swc-linux-arm64-gnu@14.2.3': + '@next/swc-linux-arm64-gnu@14.2.14': optional: true - '@next/swc-linux-arm64-musl@14.2.3': + '@next/swc-linux-arm64-musl@14.2.14': optional: true - '@next/swc-linux-x64-gnu@14.2.3': + '@next/swc-linux-x64-gnu@14.2.14': optional: true - '@next/swc-linux-x64-musl@14.2.3': + '@next/swc-linux-x64-musl@14.2.14': optional: true - '@next/swc-win32-arm64-msvc@14.2.3': + '@next/swc-win32-arm64-msvc@14.2.14': optional: true - '@next/swc-win32-ia32-msvc@14.2.3': + '@next/swc-win32-ia32-msvc@14.2.14': optional: true - '@next/swc-win32-x64-msvc@14.2.3': + '@next/swc-win32-x64-msvc@14.2.14': 
optional: true '@nodelib/fs.scandir@2.1.5': @@ -6721,13 +6737,13 @@ snapshots: codemirror@6.0.1(@lezer/common@1.2.1): dependencies: - '@codemirror/autocomplete': 6.18.1(@codemirror/language@6.10.3)(@codemirror/state@6.4.1)(@codemirror/view@6.33.0)(@lezer/common@1.2.1) + '@codemirror/autocomplete': 6.18.1(@codemirror/language@6.10.3)(@codemirror/state@6.4.1)(@codemirror/view@6.34.1)(@lezer/common@1.2.1) '@codemirror/commands': 6.6.2 '@codemirror/language': 6.10.3 '@codemirror/lint': 6.8.2 '@codemirror/search': 6.5.6 '@codemirror/state': 6.4.1 - '@codemirror/view': 6.33.0 + '@codemirror/view': 6.34.1 transitivePeerDependencies: - '@lezer/common' @@ -8458,13 +8474,13 @@ snapshots: natural-compare@1.4.0: {} - next-auth@4.24.5(next@14.2.3(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1): + next-auth@4.24.5(next@14.2.14(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1): dependencies: '@babel/runtime': 7.25.6 '@panva/hkdf': 1.2.1 cookie: 0.5.0 jose: 4.15.9 - next: 14.2.3(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + next: 14.2.14(react-dom@18.3.1(react@18.3.1))(react@18.3.1) oauth: 0.9.15 openid-client: 5.7.0 preact: 10.23.2 @@ -8473,15 +8489,15 @@ snapshots: react-dom: 18.3.1(react@18.3.1) uuid: 8.3.2 - next-themes@0.2.1(next@14.2.3(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1): + next-themes@0.2.1(next@14.2.14(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1): dependencies: - next: 14.2.3(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + next: 14.2.14(react-dom@18.3.1(react@18.3.1))(react@18.3.1) react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - next@14.2.3(react-dom@18.3.1(react@18.3.1))(react@18.3.1): + next@14.2.14(react-dom@18.3.1(react@18.3.1))(react@18.3.1): dependencies: - '@next/env': 14.2.3 + '@next/env': 14.2.14 '@swc/helpers': 0.5.5 busboy: 1.6.0 caniuse-lite: 1.0.30001660 @@ -8491,15 +8507,15 @@ snapshots: react-dom: 18.3.1(react@18.3.1) styled-jsx: 5.1.1(react@18.3.1) optionalDependencies: - '@next/swc-darwin-arm64': 14.2.3 - '@next/swc-darwin-x64': 14.2.3 - '@next/swc-linux-arm64-gnu': 14.2.3 - '@next/swc-linux-arm64-musl': 14.2.3 - '@next/swc-linux-x64-gnu': 14.2.3 - '@next/swc-linux-x64-musl': 14.2.3 - '@next/swc-win32-arm64-msvc': 14.2.3 - '@next/swc-win32-ia32-msvc': 14.2.3 - '@next/swc-win32-x64-msvc': 14.2.3 + '@next/swc-darwin-arm64': 14.2.14 + '@next/swc-darwin-x64': 14.2.14 + '@next/swc-linux-arm64-gnu': 14.2.14 + '@next/swc-linux-arm64-musl': 14.2.14 + '@next/swc-linux-x64-gnu': 14.2.14 + '@next/swc-linux-x64-musl': 14.2.14 + '@next/swc-win32-arm64-msvc': 14.2.14 + '@next/swc-win32-ia32-msvc': 14.2.14 + '@next/swc-win32-x64-msvc': 14.2.14 transitivePeerDependencies: - '@babel/core' - babel-plugin-macros diff --git a/postgres/supabase/migrations/20241005015704_remote_schema.sql b/postgres/supabase/migrations/20241005015704_remote_schema.sql new file mode 100644 index 00000000..6264e036 --- /dev/null +++ b/postgres/supabase/migrations/20241005015704_remote_schema.sql @@ -0,0 +1,31 @@ +create type "public"."label_source" as enum ('MANUAL', 'AUTO'); + +alter table "public"."labels" drop constraint "tags_unique_per_span"; + +drop index if exists "public"."tags_unique_per_span"; + +alter table "public"."dataset_datapoints" add column "metadata" jsonb; + +alter table "public"."evaluation_results" alter column "status" drop not null; + +alter table "public"."evaluations" add column "group_id" text not null 
default 'default'::text; + +alter table "public"."evaluations" alter column "status" drop not null; + +alter table "public"."label_classes" add column "description" text; + +alter table "public"."label_classes" add column "pipeline_version_id" uuid; + +alter table "public"."labels" drop column "last_updated_by"; + +alter table "public"."labels" add column "label_source" label_source not null default 'MANUAL'::label_source; + +alter table "public"."labels" add column "user_id" uuid default gen_random_uuid(); + +alter table "public"."spans" alter column "attributes" drop not null; + +CREATE UNIQUE INDEX labels_span_id_class_id_user_id_key ON public.labels USING btree (span_id, class_id, user_id); + +CREATE INDEX spans_expr_idx ON public.spans USING btree (((attributes -> 'lmnr.span.path'::text))); + +alter table "public"."labels" add constraint "labels_span_id_class_id_user_id_key" UNIQUE using index "labels_span_id_class_id_user_id_key"; diff --git a/semantic-search-service/Cargo.lock b/semantic-search-service/Cargo.lock index a4b38b6f..4c2e68db 100644 --- a/semantic-search-service/Cargo.lock +++ b/semantic-search-service/Cargo.lock @@ -51,7 +51,7 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn", ] [[package]] @@ -62,7 +62,7 @@ checksum = "a27b8a3a6e1a44fa4c8baf1f653e4172e81486d4941f2237e20dc2d0cf4ddff1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn", ] [[package]] @@ -77,34 +77,6 @@ version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" -[[package]] -name = "axum" -version = "0.6.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b829e4e32b91e643de6eafe82b1d90675f5874230191a4ffbc1b336dec4d6bf" -dependencies = [ - "async-trait", - "axum-core 0.3.4", - "bitflags 1.3.2", - "bytes", - "futures-util", - "http 0.2.12", - "http-body 0.4.6", - "hyper 0.14.30", - "itoa", - "matchit", - "memchr", - "mime", - "percent-encoding", - "pin-project-lite", - "rustversion", - "serde", - "sync_wrapper 0.1.2", - "tower", - "tower-layer", - "tower-service", -] - [[package]] name = "axum" version = "0.7.5" @@ -112,11 +84,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3a6c9af12842a67734c9a2e355436e5d03b22383ed60cf13cd0c18fbfe3dcbcf" dependencies = [ "async-trait", - "axum-core 0.4.3", + "axum-core", "bytes", "futures-util", - "http 1.1.0", - "http-body 1.0.1", + "http", + "http-body", "http-body-util", "itoa", "matchit", @@ -132,23 +104,6 @@ dependencies = [ "tower-service", ] -[[package]] -name = "axum-core" -version = "0.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "759fa577a247914fd3f7f76d62972792636412fbfd634cd452f6a385a74d2d2c" -dependencies = [ - "async-trait", - "bytes", - "futures-util", - "http 0.2.12", - "http-body 0.4.6", - "mime", - "rustversion", - "tower-layer", - "tower-service", -] - [[package]] name = "axum-core" version = "0.4.3" @@ -158,8 +113,8 @@ dependencies = [ "async-trait", "bytes", "futures-util", - "http 1.1.0", - "http-body 1.0.1", + "http", + "http-body", "http-body-util", "mime", "pin-project-lite", @@ -184,24 +139,12 @@ dependencies = [ "windows-targets", ] -[[package]] -name = "base64" -version = "0.21.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" - 
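One more frontend note before the Rust lockfile churn continues — the `frontend/lib/flow/utils.ts` hunk above fixes a classic pitfall: `typeof null === 'object'`, so a `null` element used to pass the type check and then make `'role' in item` throw a `TypeError`. The fixed predicate, self-contained:

```ts
interface ChatMessage {
  role: string;
  content: unknown;
}

// Check `item !== null` before typeof, because typeof null === 'object'
// and `'role' in null` throws a TypeError at runtime.
function isChatMessageList(input: unknown): input is ChatMessage[] {
  if (input === undefined) return false;
  if (!Array.isArray(input)) return false;
  return input.every(
    (item) =>
      item !== null && typeof item === 'object' && 'role' in item && 'content' in item
  );
}

console.log(isChatMessageList([{ role: 'user', content: 'hi' }])); // true
console.log(isChatMessageList([null]));                            // false (previously threw)
```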
[[package]] name = "base64" version = "0.22.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" -[[package]] -name = "bitflags" -version = "1.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" - [[package]] name = "bitflags" version = "2.6.0" @@ -312,7 +255,7 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn 2.0.77", + "syn", ] [[package]] @@ -323,7 +266,7 @@ checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" dependencies = [ "darling_core", "quote", - "syn 2.0.77", + "syn", ] [[package]] @@ -344,7 +287,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.77", + "syn", ] [[package]] @@ -354,7 +297,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4abae7035bf79b9877b779505d8cf3749285b80c43941eda66604841889451dc" dependencies = [ "derive_builder_core", - "syn 2.0.77", + "syn", ] [[package]] @@ -378,7 +321,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.77", + "syn", ] [[package]] @@ -503,7 +446,7 @@ checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn", ] [[package]] @@ -553,25 +496,6 @@ version = "0.31.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32085ea23f3234fc7846555e85283ba4de91e21016dc0455a16286d87a292d64" -[[package]] -name = "h2" -version = "0.3.26" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81fe527a889e1532da5c525686d96d4c2e74cdd345badf8dfef9f6b39dd5f5e8" -dependencies = [ - "bytes", - "fnv", - "futures-core", - "futures-sink", - "futures-util", - "http 0.2.12", - "indexmap 2.5.0", - "slab", - "tokio", - "tokio-util", - "tracing", -] - [[package]] name = "h2" version = "0.4.6" @@ -583,7 +507,7 @@ dependencies = [ "fnv", "futures-core", "futures-sink", - "http 1.1.0", + "http", "indexmap 2.5.0", "slab", "tokio", @@ -621,26 +545,6 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fbf6a919d6cf397374f7dfeeea91d974c7c0a7221d0d0f4f20d859d329e53fcc" -[[package]] -name = "home" -version = "0.5.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5" -dependencies = [ - "windows-sys 0.52.0", -] - -[[package]] -name = "http" -version = "0.2.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" -dependencies = [ - "bytes", - "fnv", - "itoa", -] - [[package]] name = "http" version = "1.1.0" @@ -652,17 +556,6 @@ dependencies = [ "itoa", ] -[[package]] -name = "http-body" -version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" -dependencies = [ - "bytes", - "http 0.2.12", - "pin-project-lite", -] - [[package]] name = "http-body" version = "1.0.1" @@ -670,7 +563,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" dependencies = [ "bytes", - "http 1.1.0", + "http", ] [[package]] @@ -681,8 +574,8 @@ checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" dependencies = [ "bytes", "futures-util", - 
"http 1.1.0", - "http-body 1.0.1", + "http", + "http-body", "pin-project-lite", ] @@ -704,30 +597,6 @@ version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" -[[package]] -name = "hyper" -version = "0.14.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a152ddd61dfaec7273fe8419ab357f33aee0d914c5f4efbf0d96fa749eea5ec9" -dependencies = [ - "bytes", - "futures-channel", - "futures-core", - "futures-util", - "h2 0.3.26", - "http 0.2.12", - "http-body 0.4.6", - "httparse", - "httpdate", - "itoa", - "pin-project-lite", - "socket2", - "tokio", - "tower-service", - "tracing", - "want", -] - [[package]] name = "hyper" version = "1.4.1" @@ -737,9 +606,9 @@ dependencies = [ "bytes", "futures-channel", "futures-util", - "h2 0.4.6", - "http 1.1.0", - "http-body 1.0.1", + "h2", + "http", + "http-body", "httparse", "httpdate", "itoa", @@ -756,8 +625,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08afdbb5c31130e3034af566421053ab03787c640246a446327f550d11bcb333" dependencies = [ "futures-util", - "http 1.1.0", - "hyper 1.4.1", + "http", + "hyper", "hyper-util", "rustls", "rustls-pki-types", @@ -767,25 +636,13 @@ dependencies = [ "webpki-roots", ] -[[package]] -name = "hyper-timeout" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1" -dependencies = [ - "hyper 0.14.30", - "pin-project-lite", - "tokio", - "tokio-io-timeout", -] - [[package]] name = "hyper-timeout" version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3203a961e5c83b6f5498933e78b6b263e208c197b63e9c6c53cc82ffd3f63793" dependencies = [ - "hyper 1.4.1", + "hyper", "hyper-util", "pin-project-lite", "tokio", @@ -801,9 +658,9 @@ dependencies = [ "bytes", "futures-channel", "futures-util", - "http 1.1.0", - "http-body 1.0.1", - "hyper 1.4.1", + "http", + "http-body", + "hyper", "pin-project-lite", "socket2", "tokio", @@ -865,15 +722,6 @@ dependencies = [ "windows-sys 0.52.0", ] -[[package]] -name = "itertools" -version = "0.10.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" -dependencies = [ - "either", -] - [[package]] name = "itertools" version = "0.13.0" @@ -898,12 +746,6 @@ dependencies = [ "wasm-bindgen", ] -[[package]] -name = "lazy_static" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" - [[package]] name = "libc" version = "0.2.158" @@ -1021,7 +863,7 @@ checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn", ] [[package]] @@ -1047,12 +889,12 @@ dependencies = [ [[package]] name = "prettyplease" -version = "0.1.25" +version = "0.2.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c8646e95016a7a6c4adea95bafa8a16baab64b583356217f2c85db4a39d9a86" +checksum = "479cf940fbbb3426c32c5d5176f62ad57549a0bb84773423ba8be9d089f5faba" dependencies = [ "proc-macro2", - "syn 1.0.109", + "syn", ] [[package]] @@ -1064,16 +906,6 @@ dependencies = [ "unicode-ident", ] -[[package]] -name = "prost" -version = "0.11.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"0b82eaa1d779e9a4bc1c3217db8ffbeabaae1dca241bf70183242128d48681cd" -dependencies = [ - "bytes", - "prost-derive 0.11.9", -] - [[package]] name = "prost" version = "0.13.2" @@ -1081,42 +913,28 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3b2ecbe40f08db5c006b5764a2645f7f3f141ce756412ac9e1dd6087e6d32995" dependencies = [ "bytes", - "prost-derive 0.13.2", + "prost-derive", ] [[package]] name = "prost-build" -version = "0.11.9" +version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "119533552c9a7ffacc21e099c24a0ac8bb19c2a2a3f363de84cd9b844feab270" +checksum = "f8650aabb6c35b860610e9cff5dc1af886c9e25073b7b1712a68972af4281302" dependencies = [ "bytes", "heck", - "itertools 0.10.5", - "lazy_static", + "itertools", "log", "multimap", + "once_cell", "petgraph", "prettyplease", - "prost 0.11.9", - "prost-types 0.11.9", + "prost", + "prost-types", "regex", - "syn 1.0.109", + "syn", "tempfile", - "which", -] - -[[package]] -name = "prost-derive" -version = "0.11.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5d2d8d10f3c6ded6da8b05b5fb3b8a5082514344d56c9f871412d29b4e075b4" -dependencies = [ - "anyhow", - "itertools 0.10.5", - "proc-macro2", - "quote", - "syn 1.0.109", ] [[package]] @@ -1126,19 +944,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "acf0c195eebb4af52c752bec4f52f645da98b6e92077a04110c7f349477ae5ac" dependencies = [ "anyhow", - "itertools 0.13.0", + "itertools", "proc-macro2", "quote", - "syn 2.0.77", -] - -[[package]] -name = "prost-types" -version = "0.11.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "213622a1460818959ac1181aaeb2dc9c7f63df720db7d788b3e24eacd1983e13" -dependencies = [ - "prost 0.11.9", + "syn", ] [[package]] @@ -1147,7 +956,7 @@ version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "60caa6738c7369b940c3d49246a8d1749323674c65cb13010134f5c9bad5b519" dependencies = [ - "prost 0.13.2", + "prost", ] [[package]] @@ -1159,13 +968,13 @@ dependencies = [ "anyhow", "derive_builder", "futures-util", - "prost 0.13.2", - "prost-types 0.13.2", + "prost", + "prost-types", "reqwest", "serde", "serde_json", "thiserror", - "tonic 0.12.2", + "tonic", ] [[package]] @@ -1310,15 +1119,15 @@ version = "0.12.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f8f4955649ef5c38cc7f9e8aa41761d48fb9677197daea9984dc54f56aad5e63" dependencies = [ - "base64 0.22.1", + "base64", "bytes", "futures-core", "futures-util", - "h2 0.4.6", - "http 1.1.0", - "http-body 1.0.1", + "h2", + "http", + "http-body", "http-body-util", - "hyper 1.4.1", + "hyper", "hyper-rustls", "hyper-util", "ipnet", @@ -1382,7 +1191,7 @@ version = "0.38.37" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8acb788b847c24f28525660c4d7758620a7210875711f79e7f663cc152726811" dependencies = [ - "bitflags 2.6.0", + "bitflags", "errno", "libc", "linux-raw-sys", @@ -1406,9 +1215,9 @@ dependencies = [ [[package]] name = "rustls-native-certs" -version = "0.7.3" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5bfb394eeed242e909609f56089eecfe5fda225042e8b171791b9c95f5931e5" +checksum = "fcaf18a4f2be7326cd874a5fa579fae794320a0f388d365dca7e480e55f83f8a" dependencies = [ "openssl-probe", "rustls-pemfile", @@ -1423,7 +1232,7 @@ version = "2.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"196fe16b00e106300d3e45ecfcb764fa292a535d7326a29a5875c579c7417425" dependencies = [ - "base64 0.22.1", + "base64", "rustls-pki-types", ] @@ -1471,7 +1280,7 @@ version = "2.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" dependencies = [ - "bitflags 2.6.0", + "bitflags", "core-foundation", "core-foundation-sys", "libc", @@ -1498,7 +1307,7 @@ dependencies = [ "env_logger", "futures", "log", - "prost 0.11.9", + "prost", "qdrant-client", "rayon", "reqwest", @@ -1507,7 +1316,7 @@ dependencies = [ "simsimd", "tokio", "tokio-stream", - "tonic 0.9.2", + "tonic", "tonic-build", "uuid", ] @@ -1529,7 +1338,7 @@ checksum = "243902eda00fad750862fc144cea25caca5e20d615af0a81bee94ca738f1df1f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn", ] [[package]] @@ -1614,17 +1423,6 @@ version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" -[[package]] -name = "syn" -version = "1.0.109" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] - [[package]] name = "syn" version = "2.0.77" @@ -1690,7 +1488,7 @@ checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn", ] [[package]] @@ -1724,16 +1522,6 @@ dependencies = [ "windows-sys 0.52.0", ] -[[package]] -name = "tokio-io-timeout" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30b74022ada614a1b4834de765f9bb43877f910cc8ce4be40e89042c9223a8bf" -dependencies = [ - "pin-project-lite", - "tokio", -] - [[package]] name = "tokio-macros" version = "2.4.0" @@ -1742,7 +1530,7 @@ checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn", ] [[package]] @@ -1782,54 +1570,26 @@ dependencies = [ [[package]] name = "tonic" -version = "0.9.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3082666a3a6433f7f511c7192923fa1fe07c69332d3c6a2e6bb040b569199d5a" -dependencies = [ - "async-trait", - "axum 0.6.20", - "base64 0.21.7", - "bytes", - "futures-core", - "futures-util", - "h2 0.3.26", - "http 0.2.12", - "http-body 0.4.6", - "hyper 0.14.30", - "hyper-timeout 0.4.1", - "percent-encoding", - "pin-project", - "prost 0.11.9", - "tokio", - "tokio-stream", - "tower", - "tower-layer", - "tower-service", - "tracing", -] - -[[package]] -name = "tonic" -version = "0.12.2" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6f6ba989e4b2c58ae83d862d3a3e27690b6e3ae630d0deb59f3697f32aa88ad" +checksum = "877c5b330756d856ffcc4553ab34a5684481ade925ecc54bcd1bf02b1d0d4d52" dependencies = [ "async-stream", "async-trait", - "axum 0.7.5", - "base64 0.22.1", + "axum", + "base64", "bytes", "flate2", - "h2 0.4.6", - "http 1.1.0", - "http-body 1.0.1", + "h2", + "http", + "http-body", "http-body-util", - "hyper 1.4.1", - "hyper-timeout 0.5.1", + "hyper", + "hyper-timeout", "hyper-util", "percent-encoding", "pin-project", - "prost 0.13.2", + "prost", "rustls-native-certs", "rustls-pemfile", "socket2", @@ -1844,15 +1604,16 @@ dependencies = [ [[package]] name = "tonic-build" -version = "0.8.4" +version = "0.12.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "5bf5e9b9c0f7e0a7c027dcfaba7b2c60816c7049171f679d99ee2ff65d0de8c4" +checksum = "9557ce109ea773b399c9b9e5dca39294110b74f1f342cb347a80d1fce8c26a11" dependencies = [ "prettyplease", "proc-macro2", "prost-build", + "prost-types", "quote", - "syn 1.0.109", + "syn", ] [[package]] @@ -1906,7 +1667,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn", ] [[package]] @@ -1981,7 +1742,7 @@ checksum = "ee1cd046f83ea2c4e920d6ee9f7c3537ef928d75dce5d84a87c2c5d6b3999a3a" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn", ] [[package]] @@ -2021,7 +1782,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.77", + "syn", "wasm-bindgen-shared", ] @@ -2055,7 +1816,7 @@ checksum = "afc340c74d9005395cf9dd098506f7f44e38f2b4a21c6aaacf9a105ea5e1e836" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -2098,18 +1859,6 @@ dependencies = [ "rustls-pki-types", ] -[[package]] -name = "which" -version = "4.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7" -dependencies = [ - "either", - "home", - "once_cell", - "rustix", -] - [[package]] name = "winapi-util" version = "0.1.9" @@ -2249,7 +1998,7 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn", ] [[package]] diff --git a/semantic-search-service/Cargo.toml b/semantic-search-service/Cargo.toml index 9a0e45ab..90565edf 100644 --- a/semantic-search-service/Cargo.toml +++ b/semantic-search-service/Cargo.toml @@ -7,8 +7,8 @@ edition = "2021" anyhow = "1" enum_dispatch = "0.3.12" env_logger = "0.10.0" -tonic = "0.9" -prost = "0.11" +tonic = "0.12.3" +prost = "0.13" tokio = { version = "1.24", features = ["macros", "rt-multi-thread"] } tokio-stream = { version = "0.1", features = ["net"] } futures = "0.3" @@ -23,5 +23,4 @@ dotenv = "0.15.0" uuid = { version = "1.4.1", features = ["v4", "fast-rng", "macro-diagnostics"] } [build-dependencies] -tonic-build = "0.8" - +tonic-build = "0.12.3" diff --git a/semantic-search-service/build.rs b/semantic-search-service/build.rs index 3ab4355d..971fc9a1 100644 --- a/semantic-search-service/build.rs +++ b/semantic-search-service/build.rs @@ -1,11 +1,11 @@ fn main() -> Result<(), Box> { - let proto_file = "./proto/semantic_search_grpc.proto"; + let proto_file = "./proto/semantic_search_grpc.proto"; - tonic_build::configure() - .protoc_arg("--experimental_allow_proto3_optional") // for older systems - .build_server(true) - .out_dir("./src/semantic_search/") - .compile(&[proto_file], &["proto"])?; + tonic_build::configure() + .protoc_arg("--experimental_allow_proto3_optional") // for older systems + .build_server(true) + .out_dir("./src/semantic_search/") + .compile_protos(&[proto_file], &["proto"])?; - Ok(()) -} \ No newline at end of file + Ok(()) +} diff --git a/semantic-search-service/src/semantic_search/semantic_search_grpc.rs b/semantic-search-service/src/semantic_search/semantic_search_grpc.rs index 4f9e97f3..6b3020e8 100644 --- a/semantic-search-service/src/semantic_search/semantic_search_grpc.rs +++ b/semantic-search-service/src/semantic_search/semantic_search_grpc.rs @@ -1,4 +1,4 @@ -#[allow(clippy::derive_partial_eq_without_eq)] +// This file is @generated by prost-build. 
#[derive(Clone, PartialEq, ::prost::Message)] pub struct IndexRequest { #[prost(message, repeated, tag = "1")] @@ -10,7 +10,6 @@ pub struct IndexRequest { } /// Nested message and enum types in `IndexRequest`. pub mod index_request { - #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Datapoint { #[prost(string, tag = "1")] @@ -26,13 +25,11 @@ pub mod index_request { pub id: ::prost::alloc::string::String, } } -#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct IndexResponse { #[prost(string, tag = "1")] pub status: ::prost::alloc::string::String, } -#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct DeleteEmbeddingsRequest { #[prost(message, repeated, tag = "1")] @@ -42,13 +39,11 @@ pub struct DeleteEmbeddingsRequest { #[prost(enumeration = "Model", tag = "3")] pub model: i32, } -#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct DeleteEmbeddingsResponse { #[prost(string, tag = "1")] pub status: ::prost::alloc::string::String, } -#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct RequestPayload { #[prost(map = "string, string", tag = "1")] @@ -57,7 +52,6 @@ pub struct RequestPayload { ::prost::alloc::string::String, >, } -#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct QueryRequest { #[prost(string, tag = "1")] @@ -73,7 +67,6 @@ pub struct QueryRequest { #[prost(enumeration = "Model", tag = "6")] pub model: i32, } -#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct QueryResponse { #[prost(message, repeated, tag = "1")] @@ -81,7 +74,6 @@ pub struct QueryResponse { } /// Nested message and enum types in `QueryResponse`. pub mod query_response { - #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct QueryPoint { #[prost(float, tag = "1")] @@ -97,7 +89,6 @@ pub mod query_response { >, } } -#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct GenerateEmbeddingsRequest { #[prost(string, repeated, tag = "1")] @@ -105,7 +96,6 @@ pub struct GenerateEmbeddingsRequest { #[prost(enumeration = "Model", tag = "2")] pub model: i32, } -#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct GenerateEmbeddingsResponse { #[prost(message, repeated, tag = "1")] @@ -113,14 +103,12 @@ pub struct GenerateEmbeddingsResponse { } /// Nested message and enum types in `GenerateEmbeddingsResponse`. pub mod generate_embeddings_response { - #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Embeddings { #[prost(float, repeated, tag = "1")] pub values: ::prost::alloc::vec::Vec, } } -#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct CalculateSimilarityScoresRequest { #[prost(message, repeated, tag = "1")] @@ -132,7 +120,6 @@ pub struct CalculateSimilarityScoresRequest { } /// Nested message and enum types in `CalculateSimilarityScoresRequest`. 
pub mod calculate_similarity_scores_request { - #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ComparedContents { #[prost(string, tag = "1")] @@ -141,13 +128,11 @@ pub mod calculate_similarity_scores_request { pub second: ::prost::alloc::string::String, } } -#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct CalculateSimilarityScoresResponse { #[prost(float, repeated, tag = "1")] pub scores: ::prost::alloc::vec::Vec, } -#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct CreateCollectionRequest { #[prost(string, tag = "1")] @@ -155,19 +140,16 @@ pub struct CreateCollectionRequest { #[prost(enumeration = "Model", tag = "2")] pub model: i32, } -#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct CreateCollectionResponse { #[prost(string, tag = "1")] pub status: ::prost::alloc::string::String, } -#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct DeleteCollectionsRequest { #[prost(string, tag = "1")] pub collection_name: ::prost::alloc::string::String, } -#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct DeleteCollectionsResponse { #[prost(string, tag = "1")] @@ -202,7 +184,13 @@ impl Model { } /// Generated client implementations. pub mod semantic_search_client { - #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] + #![allow( + unused_variables, + dead_code, + missing_docs, + clippy::wildcard_imports, + clippy::let_unit_value, + )] use tonic::codegen::*; use tonic::codegen::http::Uri; #[derive(Debug, Clone)] @@ -213,7 +201,7 @@ pub mod semantic_search_client { /// Attempt to create a new client by connecting to a given endpoint. pub async fn connect(dst: D) -> Result where - D: std::convert::TryInto, + D: TryInto, D::Error: Into, { let conn = tonic::transport::Endpoint::new(dst)?.connect().await?; @@ -224,8 +212,8 @@ pub mod semantic_search_client { where T: tonic::client::GrpcService, T::Error: Into, - T::ResponseBody: Body + Send + 'static, - ::Error: Into + Send, + T::ResponseBody: Body + std::marker::Send + 'static, + ::Error: Into + std::marker::Send, { pub fn new(inner: T) -> Self { let inner = tonic::client::Grpc::new(inner); @@ -250,7 +238,7 @@ pub mod semantic_search_client { >, , - >>::Error: Into + Send + Sync, + >>::Error: Into + std::marker::Send + std::marker::Sync, { SemanticSearchClient::new(InterceptedService::new(inner, interceptor)) } @@ -269,17 +257,32 @@ pub mod semantic_search_client { self.inner = self.inner.accept_compressed(encoding); self } + /// Limits the maximum size of a decoded message. + /// + /// Default: `4MB` + #[must_use] + pub fn max_decoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_decoding_message_size(limit); + self + } + /// Limits the maximum size of an encoded message. + /// + /// Default: `usize::MAX` + #[must_use] + pub fn max_encoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_encoding_message_size(limit); + self + } /// Embeds datapoints and adds them to the storage. 
pub async fn index( &mut self, request: impl tonic::IntoRequest, - ) -> Result, tonic::Status> { + ) -> std::result::Result, tonic::Status> { self.inner .ready() .await .map_err(|e| { - tonic::Status::new( - tonic::Code::Unknown, + tonic::Status::unknown( format!("Service was not ready: {}", e.into()), ) })?; @@ -287,19 +290,24 @@ pub mod semantic_search_client { let path = http::uri::PathAndQuery::from_static( "/semantic_search_grpc.SemanticSearch/Index", ); - self.inner.unary(request.into_request(), path, codec).await + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("semantic_search_grpc.SemanticSearch", "Index")); + self.inner.unary(req, path, codec).await } /// Deletes the embeddings pub async fn delete_embeddings( &mut self, request: impl tonic::IntoRequest, - ) -> Result, tonic::Status> { + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { self.inner .ready() .await .map_err(|e| { - tonic::Status::new( - tonic::Code::Unknown, + tonic::Status::unknown( format!("Service was not ready: {}", e.into()), ) })?; @@ -307,19 +315,26 @@ pub mod semantic_search_client { let path = http::uri::PathAndQuery::from_static( "/semantic_search_grpc.SemanticSearch/DeleteEmbeddings", ); - self.inner.unary(request.into_request(), path, codec).await + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new( + "semantic_search_grpc.SemanticSearch", + "DeleteEmbeddings", + ), + ); + self.inner.unary(req, path, codec).await } /// Queries the index for similar text. pub async fn query( &mut self, request: impl tonic::IntoRequest, - ) -> Result, tonic::Status> { + ) -> std::result::Result, tonic::Status> { self.inner .ready() .await .map_err(|e| { - tonic::Status::new( - tonic::Code::Unknown, + tonic::Status::unknown( format!("Service was not ready: {}", e.into()), ) })?; @@ -327,19 +342,24 @@ pub mod semantic_search_client { let path = http::uri::PathAndQuery::from_static( "/semantic_search_grpc.SemanticSearch/Query", ); - self.inner.unary(request.into_request(), path, codec).await + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("semantic_search_grpc.SemanticSearch", "Query")); + self.inner.unary(req, path, codec).await } /// Creates a new collection. pub async fn create_collection( &mut self, request: impl tonic::IntoRequest, - ) -> Result, tonic::Status> { + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { self.inner .ready() .await .map_err(|e| { - tonic::Status::new( - tonic::Code::Unknown, + tonic::Status::unknown( format!("Service was not ready: {}", e.into()), ) })?; @@ -347,19 +367,29 @@ pub mod semantic_search_client { let path = http::uri::PathAndQuery::from_static( "/semantic_search_grpc.SemanticSearch/CreateCollection", ); - self.inner.unary(request.into_request(), path, codec).await + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new( + "semantic_search_grpc.SemanticSearch", + "CreateCollection", + ), + ); + self.inner.unary(req, path, codec).await } /// Delete collection. 
pub async fn delete_collections( &mut self, request: impl tonic::IntoRequest, - ) -> Result, tonic::Status> { + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { self.inner .ready() .await .map_err(|e| { - tonic::Status::new( - tonic::Code::Unknown, + tonic::Status::unknown( format!("Service was not ready: {}", e.into()), ) })?; @@ -367,19 +397,29 @@ pub mod semantic_search_client { let path = http::uri::PathAndQuery::from_static( "/semantic_search_grpc.SemanticSearch/DeleteCollections", ); - self.inner.unary(request.into_request(), path, codec).await + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new( + "semantic_search_grpc.SemanticSearch", + "DeleteCollections", + ), + ); + self.inner.unary(req, path, codec).await } /// Generates embeddings for provided texts pub async fn generate_embeddings( &mut self, request: impl tonic::IntoRequest, - ) -> Result, tonic::Status> { + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { self.inner .ready() .await .map_err(|e| { - tonic::Status::new( - tonic::Code::Unknown, + tonic::Status::unknown( format!("Service was not ready: {}", e.into()), ) })?; @@ -387,13 +427,21 @@ pub mod semantic_search_client { let path = http::uri::PathAndQuery::from_static( "/semantic_search_grpc.SemanticSearch/GenerateEmbeddings", ); - self.inner.unary(request.into_request(), path, codec).await + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new( + "semantic_search_grpc.SemanticSearch", + "GenerateEmbeddings", + ), + ); + self.inner.unary(req, path, codec).await } /// Calculate similarity score for pairs of texts pub async fn calculate_similarity_scores( &mut self, request: impl tonic::IntoRequest, - ) -> Result< + ) -> std::result::Result< tonic::Response, tonic::Status, > { @@ -401,8 +449,7 @@ pub mod semantic_search_client { .ready() .await .map_err(|e| { - tonic::Status::new( - tonic::Code::Unknown, + tonic::Status::unknown( format!("Service was not ready: {}", e.into()), ) })?; @@ -410,73 +457,101 @@ pub mod semantic_search_client { let path = http::uri::PathAndQuery::from_static( "/semantic_search_grpc.SemanticSearch/CalculateSimilarityScores", ); - self.inner.unary(request.into_request(), path, codec).await + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new( + "semantic_search_grpc.SemanticSearch", + "CalculateSimilarityScores", + ), + ); + self.inner.unary(req, path, codec).await } } } /// Generated server implementations. pub mod semantic_search_server { - #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] + #![allow( + unused_variables, + dead_code, + missing_docs, + clippy::wildcard_imports, + clippy::let_unit_value, + )] use tonic::codegen::*; /// Generated trait containing gRPC methods that should be implemented for use with SemanticSearchServer. #[async_trait] - pub trait SemanticSearch: Send + Sync + 'static { + pub trait SemanticSearch: std::marker::Send + std::marker::Sync + 'static { /// Embeds datapoints and adds them to the storage. async fn index( &self, request: tonic::Request, - ) -> Result, tonic::Status>; + ) -> std::result::Result, tonic::Status>; /// Deletes the embeddings async fn delete_embeddings( &self, request: tonic::Request, - ) -> Result, tonic::Status>; + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; /// Queries the index for similar text. 
         async fn query(
             &self,
             request: tonic::Request<super::QueryRequest>,
-        ) -> Result<tonic::Response<super::QueryResponse>, tonic::Status>;
+        ) -> std::result::Result<tonic::Response<super::QueryResponse>, tonic::Status>;
         /// Creates a new collection.
         async fn create_collection(
             &self,
             request: tonic::Request<super::CreateCollectionRequest>,
-        ) -> Result<tonic::Response<super::CreateCollectionResponse>, tonic::Status>;
+        ) -> std::result::Result<
+            tonic::Response<super::CreateCollectionResponse>,
+            tonic::Status,
+        >;
         /// Delete collection.
         async fn delete_collections(
             &self,
             request: tonic::Request<super::DeleteCollectionsRequest>,
-        ) -> Result<tonic::Response<super::DeleteCollectionsResponse>, tonic::Status>;
+        ) -> std::result::Result<
+            tonic::Response<super::DeleteCollectionsResponse>,
+            tonic::Status,
+        >;
         /// Generates embeddings for provided texts
         async fn generate_embeddings(
             &self,
             request: tonic::Request<super::GenerateEmbeddingsRequest>,
-        ) -> Result<tonic::Response<super::GenerateEmbeddingsResponse>, tonic::Status>;
+        ) -> std::result::Result<
            tonic::Response<super::GenerateEmbeddingsResponse>,
+            tonic::Status,
+        >;
         /// Calculate similarity score for pairs of texts
         async fn calculate_similarity_scores(
             &self,
             request: tonic::Request<super::CalculateSimilarityScoresRequest>,
-        ) -> Result<
+        ) -> std::result::Result<
             tonic::Response<super::CalculateSimilarityScoresResponse>,
             tonic::Status,
         >;
     }
     #[derive(Debug)]
-    pub struct SemanticSearchServer<T: SemanticSearch> {
-        inner: _Inner<T>,
+    pub struct SemanticSearchServer<T> {
+        inner: Arc<T>,
         accept_compression_encodings: EnabledCompressionEncodings,
         send_compression_encodings: EnabledCompressionEncodings,
+        max_decoding_message_size: Option<usize>,
+        max_encoding_message_size: Option<usize>,
     }
-    struct _Inner<T>(Arc<T>);
-    impl<T: SemanticSearch> SemanticSearchServer<T> {
+    impl<T> SemanticSearchServer<T> {
         pub fn new(inner: T) -> Self {
             Self::from_arc(Arc::new(inner))
         }
         pub fn from_arc(inner: Arc<T>) -> Self {
-            let inner = _Inner(inner);
             Self {
                 inner,
                 accept_compression_encodings: Default::default(),
                 send_compression_encodings: Default::default(),
+                max_decoding_message_size: None,
+                max_encoding_message_size: None,
             }
         }
         pub fn with_interceptor(
@@ -500,12 +575,28 @@ pub mod semantic_search_server {
             self.send_compression_encodings.enable(encoding);
             self
         }
+        /// Limits the maximum size of a decoded message.
+        ///
+        /// Default: `4MB`
+        #[must_use]
+        pub fn max_decoding_message_size(mut self, limit: usize) -> Self {
+            self.max_decoding_message_size = Some(limit);
+            self
+        }
+        /// Limits the maximum size of an encoded message.
+        ///
+        /// Default: `usize::MAX`
+        #[must_use]
+        pub fn max_encoding_message_size(mut self, limit: usize) -> Self {
+            self.max_encoding_message_size = Some(limit);
+            self
+        }
     }
     impl<T, B> tonic::codegen::Service<http::Request<B>> for SemanticSearchServer<T>
     where
         T: SemanticSearch,
-        B: Body + Send + 'static,
-        B::Error: Into<StdError> + Send + 'static,
+        B: Body + std::marker::Send + 'static,
+        B::Error: Into<StdError> + std::marker::Send + 'static,
     {
         type Response = http::Response<BoxBody>;
         type Error = std::convert::Infallible;
@@ -513,11 +604,10 @@ pub mod semantic_search_server {
         fn poll_ready(
             &mut self,
             _cx: &mut Context<'_>,
-        ) -> Poll<Result<(), Self::Error>> {
+        ) -> Poll<std::result::Result<(), Self::Error>> {
             Poll::Ready(Ok(()))
         }
         fn call(&mut self, req: http::Request<B>) -> Self::Future {
-            let inner = self.inner.clone();
             match req.uri().path() {
                 "/semantic_search_grpc.SemanticSearch/Index" => {
                     #[allow(non_camel_case_types)]
@@ -534,22 +624,29 @@ pub mod semantic_search_server {
                             &mut self,
                             request: tonic::Request<super::IndexRequest>,
                         ) -> Self::Future {
-                            let inner = self.0.clone();
-                            let fut = async move { (*inner).index(request).await };
+                            let inner = Arc::clone(&self.0);
+                            let fut = async move {
+                                <T as SemanticSearch>::index(&inner, request).await
+                            };
                             Box::pin(fut)
                         }
                     }
                     let accept_compression_encodings = self.accept_compression_encodings;
                     let send_compression_encodings = self.send_compression_encodings;
+                    let max_decoding_message_size = self.max_decoding_message_size;
+                    let max_encoding_message_size = self.max_encoding_message_size;
                     let inner = self.inner.clone();
                     let fut = async move {
-                        let inner = inner.0;
                         let method = IndexSvc(inner);
                         let codec = tonic::codec::ProstCodec::default();
                         let mut grpc = tonic::server::Grpc::new(codec)
                             .apply_compression_config(
                                 accept_compression_encodings,
                                 send_compression_encodings,
+                            )
+                            .apply_max_message_size_config(
+                                max_decoding_message_size,
+                                max_encoding_message_size,
                             );
                         let res = grpc.unary(method, req).await;
                         Ok(res)
@@ -572,24 +669,30 @@ pub mod semantic_search_server {
                             &mut self,
                             request: tonic::Request<super::DeleteEmbeddingsRequest>,
                         ) -> Self::Future {
-                            let inner = self.0.clone();
+                            let inner = Arc::clone(&self.0);
                             let fut = async move {
-                                (*inner).delete_embeddings(request).await
+                                <T as SemanticSearch>::delete_embeddings(&inner, request)
+                                    .await
                             };
                             Box::pin(fut)
                         }
                     }
                     let accept_compression_encodings = self.accept_compression_encodings;
                     let send_compression_encodings = self.send_compression_encodings;
+                    let max_decoding_message_size = self.max_decoding_message_size;
+                    let max_encoding_message_size = self.max_encoding_message_size;
                     let inner = self.inner.clone();
                     let fut = async move {
-                        let inner = inner.0;
                         let method = DeleteEmbeddingsSvc(inner);
                         let codec = tonic::codec::ProstCodec::default();
                         let mut grpc = tonic::server::Grpc::new(codec)
                             .apply_compression_config(
                                 accept_compression_encodings,
                                 send_compression_encodings,
+                            )
+                            .apply_max_message_size_config(
+                                max_decoding_message_size,
+                                max_encoding_message_size,
                             );
                         let res = grpc.unary(method, req).await;
                         Ok(res)
@@ -611,22 +714,29 @@ pub mod semantic_search_server {
                             &mut self,
                             request: tonic::Request<super::QueryRequest>,
                         ) -> Self::Future {
-                            let inner = self.0.clone();
-                            let fut = async move { (*inner).query(request).await };
+                            let inner = Arc::clone(&self.0);
+                            let fut = async move {
+                                <T as SemanticSearch>::query(&inner, request).await
+                            };
                             Box::pin(fut)
                         }
                     }
                     let accept_compression_encodings = self.accept_compression_encodings;
                     let send_compression_encodings = self.send_compression_encodings;
+                    let max_decoding_message_size = self.max_decoding_message_size;
+                    let max_encoding_message_size = self.max_encoding_message_size;
                     let inner = self.inner.clone();
                     let fut = async move {
-                        let inner = inner.0;
                         let method = QuerySvc(inner);
                         let codec = tonic::codec::ProstCodec::default();
                         let mut grpc = tonic::server::Grpc::new(codec)
                             .apply_compression_config(
                                 accept_compression_encodings,
                                 send_compression_encodings,
+                            )
+                            .apply_max_message_size_config(
+                                max_decoding_message_size,
+                                max_encoding_message_size,
                             );
                         let res = grpc.unary(method, req).await;
                         Ok(res)
@@ -649,24 +759,30 @@ pub mod semantic_search_server {
                             &mut self,
                             request: tonic::Request<super::CreateCollectionRequest>,
                         ) -> Self::Future {
-                            let inner = self.0.clone();
+                            let inner = Arc::clone(&self.0);
                             let fut = async move {
-                                (*inner).create_collection(request).await
+                                <T as SemanticSearch>::create_collection(&inner, request)
+                                    .await
                             };
                             Box::pin(fut)
                         }
                     }
                     let accept_compression_encodings = self.accept_compression_encodings;
                     let send_compression_encodings = self.send_compression_encodings;
+                    let max_decoding_message_size = self.max_decoding_message_size;
+                    let max_encoding_message_size = self.max_encoding_message_size;
                     let inner = self.inner.clone();
                     let fut = async move {
-                        let inner = inner.0;
                         let method = CreateCollectionSvc(inner);
                         let codec = tonic::codec::ProstCodec::default();
                         let mut grpc = tonic::server::Grpc::new(codec)
                             .apply_compression_config(
                                 accept_compression_encodings,
                                 send_compression_encodings,
+                            )
+                            .apply_max_message_size_config(
+                                max_decoding_message_size,
+                                max_encoding_message_size,
                             );
                         let res = grpc.unary(method, req).await;
                         Ok(res)
@@ -689,24 +805,30 @@ pub mod semantic_search_server {
                             &mut self,
                             request: tonic::Request<super::DeleteCollectionsRequest>,
                         ) -> Self::Future {
-                            let inner = self.0.clone();
+                            let inner = Arc::clone(&self.0);
                             let fut = async move {
-                                (*inner).delete_collections(request).await
+                                <T as SemanticSearch>::delete_collections(&inner, request)
+                                    .await
                             };
                             Box::pin(fut)
                         }
                     }
                     let accept_compression_encodings = self.accept_compression_encodings;
                     let send_compression_encodings = self.send_compression_encodings;
+                    let max_decoding_message_size = self.max_decoding_message_size;
+                    let max_encoding_message_size = self.max_encoding_message_size;
                     let inner = self.inner.clone();
                     let fut = async move {
-                        let inner = inner.0;
                         let method = DeleteCollectionsSvc(inner);
                         let codec = tonic::codec::ProstCodec::default();
                         let mut grpc = tonic::server::Grpc::new(codec)
                             .apply_compression_config(
                                 accept_compression_encodings,
                                 send_compression_encodings,
+                            )
+                            .apply_max_message_size_config(
+                                max_decoding_message_size,
+                                max_encoding_message_size,
                            );
                         let res = grpc.unary(method, req).await;
                         Ok(res)
@@ -729,24 +851,30 @@ pub mod semantic_search_server {
                             &mut self,
                             request: tonic::Request<super::GenerateEmbeddingsRequest>,
                         ) -> Self::Future {
-                            let inner = self.0.clone();
+                            let inner = Arc::clone(&self.0);
                             let fut = async move {
-                                (*inner).generate_embeddings(request).await
+                                <T as SemanticSearch>::generate_embeddings(&inner, request)
+                                    .await
                             };
                             Box::pin(fut)
                         }
                     }
                     let accept_compression_encodings = self.accept_compression_encodings;
                     let send_compression_encodings = self.send_compression_encodings;
+                    let max_decoding_message_size = self.max_decoding_message_size;
+                    let max_encoding_message_size = self.max_encoding_message_size;
                     let inner = self.inner.clone();
                     let fut = async move {
-                        let inner = inner.0;
                         let method = GenerateEmbeddingsSvc(inner);
                         let codec = tonic::codec::ProstCodec::default();
                         let mut grpc = tonic::server::Grpc::new(codec)
                             .apply_compression_config(
                                 accept_compression_encodings,
                                 send_compression_encodings,
+                            )
+                            .apply_max_message_size_config(
+                                max_decoding_message_size,
+                                max_encoding_message_size,
                             );
                         let res = grpc.unary(method, req).await;
                         Ok(res)
@@ -772,24 +900,33 @@ pub mod semantic_search_server {
                                 super::CalculateSimilarityScoresRequest,
                             >,
                         ) -> Self::Future {
-                            let inner = self.0.clone();
+                            let inner = Arc::clone(&self.0);
                             let fut = async move {
-                                (*inner).calculate_similarity_scores(request).await
+                                <T as SemanticSearch>::calculate_similarity_scores(
+                                    &inner,
+                                    request,
+                                )
+                                .await
                             };
                             Box::pin(fut)
                         }
                     }
                     let accept_compression_encodings = self.accept_compression_encodings;
                     let send_compression_encodings = self.send_compression_encodings;
+                    let max_decoding_message_size = self.max_decoding_message_size;
+                    let max_encoding_message_size = self.max_encoding_message_size;
                     let inner = self.inner.clone();
                     let fut = async move {
-                        let inner = inner.0;
                         let method = CalculateSimilarityScoresSvc(inner);
                         let codec = tonic::codec::ProstCodec::default();
                         let mut grpc = tonic::server::Grpc::new(codec)
                             .apply_compression_config(
                                 accept_compression_encodings,
                                 send_compression_encodings,
+                            )
+                            .apply_max_message_size_config(
+                                max_decoding_message_size,
+                                max_encoding_message_size,
                             );
                         let res = grpc.unary(method, req).await;
                         Ok(res)
@@ -798,40 +935,39 @@ pub mod semantic_search_server {
                 }
                 _ => {
                     Box::pin(async move {
-                        Ok(
-                            http::Response::builder()
-                                .status(200)
-                                .header("grpc-status", "12")
-                                .header("content-type", "application/grpc")
-                                .body(empty_body())
-                                .unwrap(),
-                        )
+                        let mut response = http::Response::new(empty_body());
+                        let headers = response.headers_mut();
+                        headers
+                            .insert(
+                                tonic::Status::GRPC_STATUS,
+                                (tonic::Code::Unimplemented as i32).into(),
+                            );
+                        headers
+                            .insert(
+                                http::header::CONTENT_TYPE,
+                                tonic::metadata::GRPC_CONTENT_TYPE,
+                            );
+                        Ok(response)
                     })
                 }
             }
         }
     }
-    impl<T: SemanticSearch> Clone for SemanticSearchServer<T> {
+    impl<T> Clone for SemanticSearchServer<T> {
         fn clone(&self) -> Self {
             let inner = self.inner.clone();
             Self {
                 inner,
                 accept_compression_encodings: self.accept_compression_encodings,
                 send_compression_encodings: self.send_compression_encodings,
+                max_decoding_message_size: self.max_decoding_message_size,
+                max_encoding_message_size: self.max_encoding_message_size,
             }
         }
     }
-    impl<T: SemanticSearch> Clone for _Inner<T> {
-        fn clone(&self) -> Self {
-            Self(self.0.clone())
-        }
-    }
-    impl<T: std::fmt::Debug> std::fmt::Debug for _Inner<T> {
-        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-            write!(f, "{:?}", self.0)
-        }
-    }
-    impl<T: SemanticSearch> tonic::server::NamedService for SemanticSearchServer<T> {
-        const NAME: &'static str = "semantic_search_grpc.SemanticSearch";
+    /// Generated gRPC service name
+    pub const SERVICE_NAME: &str = "semantic_search_grpc.SemanticSearch";
+    impl<T> tonic::server::NamedService for SemanticSearchServer<T> {
+        const NAME: &'static str = SERVICE_NAME;
     }
 }
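For reviewers unfamiliar with the newer tonic codegen shown above, here is a minimal usage sketch of the regenerated client; it is not part of the diff. The `semantic_search_grpc` module path, the endpoint address, and the default-constructed `QueryRequest` are illustrative assumptions, and `connect` is only generated when tonic's transport feature is enabled:

```rust
// Hedged sketch: exercising the regenerated client against a local server.
use semantic_search_grpc::semantic_search_client::SemanticSearchClient;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let mut client = SemanticSearchClient::connect("http://127.0.0.1:50051").await?;
    // Each regenerated method now also tags the outgoing request with a
    // `GrpcMethod` extension, which interceptor and tracing layers can inspect.
    let response = client
        .query(semantic_search_grpc::QueryRequest::default())
        .await?;
    println!("query response: {:?}", response.into_inner());
    Ok(())
}
```

On the server side, the new `max_decoding_message_size` / `max_encoding_message_size` builders on `SemanticSearchServer` cap per-message sizes (per the generated doc comments, 4MB decode and `usize::MAX` encode by default).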