From f0ae0c7e8a81b767a770714b2265f7971e614963 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Vladislav=20Vanc=C3=A1k?= Date: Tue, 23 Jul 2024 17:11:00 +0200 Subject: [PATCH 01/10] Allow different package and distribution names in rialto jobs (#7) --- poetry.lock | 531 ++++++++++++++------------- pyproject.toml | 3 +- rialto/jobs/decorators/decorators.py | 9 +- 3 files changed, 280 insertions(+), 263 deletions(-) diff --git a/poetry.lock b/poetry.lock index b516a14..0cb768b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -84,13 +84,13 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "certifi" -version = "2024.2.2" +version = "2024.7.4" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, - {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, + {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, + {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, ] [[package]] @@ -241,63 +241,63 @@ files = [ [[package]] name = "coverage" -version = "7.5.1" +version = "7.6.0" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.5.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c0884920835a033b78d1c73b6d3bbcda8161a900f38a488829a83982925f6c2e"}, - {file = "coverage-7.5.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:39afcd3d4339329c5f58de48a52f6e4e50f6578dd6099961cf22228feb25f38f"}, - {file = "coverage-7.5.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a7b0ceee8147444347da6a66be737c9d78f3353b0681715b668b72e79203e4a"}, - {file = "coverage-7.5.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a9ca3f2fae0088c3c71d743d85404cec8df9be818a005ea065495bedc33da35"}, - {file = "coverage-7.5.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fd215c0c7d7aab005221608a3c2b46f58c0285a819565887ee0b718c052aa4e"}, - {file = "coverage-7.5.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4bf0655ab60d754491004a5efd7f9cccefcc1081a74c9ef2da4735d6ee4a6223"}, - {file = "coverage-7.5.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:61c4bf1ba021817de12b813338c9be9f0ad5b1e781b9b340a6d29fc13e7c1b5e"}, - {file = "coverage-7.5.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:db66fc317a046556a96b453a58eced5024af4582a8dbdc0c23ca4dbc0d5b3146"}, - {file = "coverage-7.5.1-cp310-cp310-win32.whl", hash = "sha256:b016ea6b959d3b9556cb401c55a37547135a587db0115635a443b2ce8f1c7228"}, - {file = "coverage-7.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:df4e745a81c110e7446b1cc8131bf986157770fa405fe90e15e850aaf7619bc8"}, - {file = "coverage-7.5.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:796a79f63eca8814ca3317a1ea443645c9ff0d18b188de470ed7ccd45ae79428"}, - {file = "coverage-7.5.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4fc84a37bfd98db31beae3c2748811a3fa72bf2007ff7902f68746d9757f3746"}, - {file = "coverage-7.5.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6175d1a0559986c6ee3f7fccfc4a90ecd12ba0a383dcc2da30c2b9918d67d8a3"}, - {file = 
"coverage-7.5.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fc81d5878cd6274ce971e0a3a18a8803c3fe25457165314271cf78e3aae3aa2"}, - {file = "coverage-7.5.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:556cf1a7cbc8028cb60e1ff0be806be2eded2daf8129b8811c63e2b9a6c43bca"}, - {file = "coverage-7.5.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9981706d300c18d8b220995ad22627647be11a4276721c10911e0e9fa44c83e8"}, - {file = "coverage-7.5.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d7fed867ee50edf1a0b4a11e8e5d0895150e572af1cd6d315d557758bfa9c057"}, - {file = "coverage-7.5.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ef48e2707fb320c8f139424a596f5b69955a85b178f15af261bab871873bb987"}, - {file = "coverage-7.5.1-cp311-cp311-win32.whl", hash = "sha256:9314d5678dcc665330df5b69c1e726a0e49b27df0461c08ca12674bcc19ef136"}, - {file = "coverage-7.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:5fa567e99765fe98f4e7d7394ce623e794d7cabb170f2ca2ac5a4174437e90dd"}, - {file = "coverage-7.5.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b6cf3764c030e5338e7f61f95bd21147963cf6aa16e09d2f74f1fa52013c1206"}, - {file = "coverage-7.5.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ec92012fefebee89a6b9c79bc39051a6cb3891d562b9270ab10ecfdadbc0c34"}, - {file = "coverage-7.5.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16db7f26000a07efcf6aea00316f6ac57e7d9a96501e990a36f40c965ec7a95d"}, - {file = "coverage-7.5.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:beccf7b8a10b09c4ae543582c1319c6df47d78fd732f854ac68d518ee1fb97fa"}, - {file = "coverage-7.5.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8748731ad392d736cc9ccac03c9845b13bb07d020a33423fa5b3a36521ac6e4e"}, - {file = "coverage-7.5.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7352b9161b33fd0b643ccd1f21f3a3908daaddf414f1c6cb9d3a2fd618bf2572"}, - {file = "coverage-7.5.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:7a588d39e0925f6a2bff87154752481273cdb1736270642aeb3635cb9b4cad07"}, - {file = "coverage-7.5.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:68f962d9b72ce69ea8621f57551b2fa9c70509af757ee3b8105d4f51b92b41a7"}, - {file = "coverage-7.5.1-cp312-cp312-win32.whl", hash = "sha256:f152cbf5b88aaeb836127d920dd0f5e7edff5a66f10c079157306c4343d86c19"}, - {file = "coverage-7.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:5a5740d1fb60ddf268a3811bcd353de34eb56dc24e8f52a7f05ee513b2d4f596"}, - {file = "coverage-7.5.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e2213def81a50519d7cc56ed643c9e93e0247f5bbe0d1247d15fa520814a7cd7"}, - {file = "coverage-7.5.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5037f8fcc2a95b1f0e80585bd9d1ec31068a9bcb157d9750a172836e98bc7a90"}, - {file = "coverage-7.5.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3721c2c9e4c4953a41a26c14f4cef64330392a6d2d675c8b1db3b645e31f0e"}, - {file = "coverage-7.5.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca498687ca46a62ae590253fba634a1fe9836bc56f626852fb2720f334c9e4e5"}, - {file = "coverage-7.5.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:0cdcbc320b14c3e5877ee79e649677cb7d89ef588852e9583e6b24c2e5072661"}, - {file = "coverage-7.5.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:57e0204b5b745594e5bc14b9b50006da722827f0b8c776949f1135677e88d0b8"}, - {file = "coverage-7.5.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8fe7502616b67b234482c3ce276ff26f39ffe88adca2acf0261df4b8454668b4"}, - {file = "coverage-7.5.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:9e78295f4144f9dacfed4f92935fbe1780021247c2fabf73a819b17f0ccfff8d"}, - {file = "coverage-7.5.1-cp38-cp38-win32.whl", hash = "sha256:1434e088b41594baa71188a17533083eabf5609e8e72f16ce8c186001e6b8c41"}, - {file = "coverage-7.5.1-cp38-cp38-win_amd64.whl", hash = "sha256:0646599e9b139988b63704d704af8e8df7fa4cbc4a1f33df69d97f36cb0a38de"}, - {file = "coverage-7.5.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4cc37def103a2725bc672f84bd939a6fe4522310503207aae4d56351644682f1"}, - {file = "coverage-7.5.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fc0b4d8bfeabd25ea75e94632f5b6e047eef8adaed0c2161ada1e922e7f7cece"}, - {file = "coverage-7.5.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d0a0f5e06881ecedfe6f3dd2f56dcb057b6dbeb3327fd32d4b12854df36bf26"}, - {file = "coverage-7.5.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9735317685ba6ec7e3754798c8871c2f49aa5e687cc794a0b1d284b2389d1bd5"}, - {file = "coverage-7.5.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d21918e9ef11edf36764b93101e2ae8cc82aa5efdc7c5a4e9c6c35a48496d601"}, - {file = "coverage-7.5.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c3e757949f268364b96ca894b4c342b41dc6f8f8b66c37878aacef5930db61be"}, - {file = "coverage-7.5.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:79afb6197e2f7f60c4824dd4b2d4c2ec5801ceb6ba9ce5d2c3080e5660d51a4f"}, - {file = "coverage-7.5.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d1d0d98d95dd18fe29dc66808e1accf59f037d5716f86a501fc0256455219668"}, - {file = "coverage-7.5.1-cp39-cp39-win32.whl", hash = "sha256:1cc0fe9b0b3a8364093c53b0b4c0c2dd4bb23acbec4c9240b5f284095ccf7981"}, - {file = "coverage-7.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:dde0070c40ea8bb3641e811c1cfbf18e265d024deff6de52c5950677a8fb1e0f"}, - {file = "coverage-7.5.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:6537e7c10cc47c595828b8a8be04c72144725c383c4702703ff4e42e44577312"}, - {file = "coverage-7.5.1.tar.gz", hash = "sha256:54de9ef3a9da981f7af93eafde4ede199e0846cd819eb27c88e2b712aae9708c"}, + {file = "coverage-7.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dff044f661f59dace805eedb4a7404c573b6ff0cdba4a524141bc63d7be5c7fd"}, + {file = "coverage-7.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a8659fd33ee9e6ca03950cfdcdf271d645cf681609153f218826dd9805ab585c"}, + {file = "coverage-7.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7792f0ab20df8071d669d929c75c97fecfa6bcab82c10ee4adb91c7a54055463"}, + {file = "coverage-7.6.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4b3cd1ca7cd73d229487fa5caca9e4bc1f0bca96526b922d61053ea751fe791"}, + {file = "coverage-7.6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7e128f85c0b419907d1f38e616c4f1e9f1d1b37a7949f44df9a73d5da5cd53c"}, + {file = "coverage-7.6.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:a94925102c89247530ae1dab7dc02c690942566f22e189cbd53579b0693c0783"}, + {file = "coverage-7.6.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:dcd070b5b585b50e6617e8972f3fbbee786afca71b1936ac06257f7e178f00f6"}, + {file = "coverage-7.6.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d50a252b23b9b4dfeefc1f663c568a221092cbaded20a05a11665d0dbec9b8fb"}, + {file = "coverage-7.6.0-cp310-cp310-win32.whl", hash = "sha256:0e7b27d04131c46e6894f23a4ae186a6a2207209a05df5b6ad4caee6d54a222c"}, + {file = "coverage-7.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:54dece71673b3187c86226c3ca793c5f891f9fc3d8aa183f2e3653da18566169"}, + {file = "coverage-7.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7b525ab52ce18c57ae232ba6f7010297a87ced82a2383b1afd238849c1ff933"}, + {file = "coverage-7.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bea27c4269234e06f621f3fac3925f56ff34bc14521484b8f66a580aacc2e7d"}, + {file = "coverage-7.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed8d1d1821ba5fc88d4a4f45387b65de52382fa3ef1f0115a4f7a20cdfab0e94"}, + {file = "coverage-7.6.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01c322ef2bbe15057bc4bf132b525b7e3f7206f071799eb8aa6ad1940bcf5fb1"}, + {file = "coverage-7.6.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03cafe82c1b32b770a29fd6de923625ccac3185a54a5e66606da26d105f37dac"}, + {file = "coverage-7.6.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0d1b923fc4a40c5832be4f35a5dab0e5ff89cddf83bb4174499e02ea089daf57"}, + {file = "coverage-7.6.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4b03741e70fb811d1a9a1d75355cf391f274ed85847f4b78e35459899f57af4d"}, + {file = "coverage-7.6.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a73d18625f6a8a1cbb11eadc1d03929f9510f4131879288e3f7922097a429f63"}, + {file = "coverage-7.6.0-cp311-cp311-win32.whl", hash = "sha256:65fa405b837060db569a61ec368b74688f429b32fa47a8929a7a2f9b47183713"}, + {file = "coverage-7.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:6379688fb4cfa921ae349c76eb1a9ab26b65f32b03d46bb0eed841fd4cb6afb1"}, + {file = "coverage-7.6.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f7db0b6ae1f96ae41afe626095149ecd1b212b424626175a6633c2999eaad45b"}, + {file = "coverage-7.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bbdf9a72403110a3bdae77948b8011f644571311c2fb35ee15f0f10a8fc082e8"}, + {file = "coverage-7.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cc44bf0315268e253bf563f3560e6c004efe38f76db03a1558274a6e04bf5d5"}, + {file = "coverage-7.6.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da8549d17489cd52f85a9829d0e1d91059359b3c54a26f28bec2c5d369524807"}, + {file = "coverage-7.6.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0086cd4fc71b7d485ac93ca4239c8f75732c2ae3ba83f6be1c9be59d9e2c6382"}, + {file = "coverage-7.6.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1fad32ee9b27350687035cb5fdf9145bc9cf0a094a9577d43e909948ebcfa27b"}, + {file = "coverage-7.6.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:044a0985a4f25b335882b0966625270a8d9db3d3409ddc49a4eb00b0ef5e8cee"}, + {file = "coverage-7.6.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:76d5f82213aa78098b9b964ea89de4617e70e0d43e97900c2778a50856dac605"}, + {file = 
"coverage-7.6.0-cp312-cp312-win32.whl", hash = "sha256:3c59105f8d58ce500f348c5b56163a4113a440dad6daa2294b5052a10db866da"}, + {file = "coverage-7.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:ca5d79cfdae420a1d52bf177de4bc2289c321d6c961ae321503b2ca59c17ae67"}, + {file = "coverage-7.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d39bd10f0ae453554798b125d2f39884290c480f56e8a02ba7a6ed552005243b"}, + {file = "coverage-7.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:beb08e8508e53a568811016e59f3234d29c2583f6b6e28572f0954a6b4f7e03d"}, + {file = "coverage-7.6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2e16f4cd2bc4d88ba30ca2d3bbf2f21f00f382cf4e1ce3b1ddc96c634bc48ca"}, + {file = "coverage-7.6.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6616d1c9bf1e3faea78711ee42a8b972367d82ceae233ec0ac61cc7fec09fa6b"}, + {file = "coverage-7.6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad4567d6c334c46046d1c4c20024de2a1c3abc626817ae21ae3da600f5779b44"}, + {file = "coverage-7.6.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d17c6a415d68cfe1091d3296ba5749d3d8696e42c37fca5d4860c5bf7b729f03"}, + {file = "coverage-7.6.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9146579352d7b5f6412735d0f203bbd8d00113a680b66565e205bc605ef81bc6"}, + {file = "coverage-7.6.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:cdab02a0a941af190df8782aafc591ef3ad08824f97850b015c8c6a8b3877b0b"}, + {file = "coverage-7.6.0-cp38-cp38-win32.whl", hash = "sha256:df423f351b162a702c053d5dddc0fc0ef9a9e27ea3f449781ace5f906b664428"}, + {file = "coverage-7.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:f2501d60d7497fd55e391f423f965bbe9e650e9ffc3c627d5f0ac516026000b8"}, + {file = "coverage-7.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7221f9ac9dad9492cecab6f676b3eaf9185141539d5c9689d13fd6b0d7de840c"}, + {file = "coverage-7.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ddaaa91bfc4477d2871442bbf30a125e8fe6b05da8a0015507bfbf4718228ab2"}, + {file = "coverage-7.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4cbe651f3904e28f3a55d6f371203049034b4ddbce65a54527a3f189ca3b390"}, + {file = "coverage-7.6.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:831b476d79408ab6ccfadaaf199906c833f02fdb32c9ab907b1d4aa0713cfa3b"}, + {file = "coverage-7.6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46c3d091059ad0b9c59d1034de74a7f36dcfa7f6d3bde782c49deb42438f2450"}, + {file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4d5fae0a22dc86259dee66f2cc6c1d3e490c4a1214d7daa2a93d07491c5c04b6"}, + {file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:07ed352205574aad067482e53dd606926afebcb5590653121063fbf4e2175166"}, + {file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:49c76cdfa13015c4560702574bad67f0e15ca5a2872c6a125f6327ead2b731dd"}, + {file = "coverage-7.6.0-cp39-cp39-win32.whl", hash = "sha256:482855914928c8175735a2a59c8dc5806cf7d8f032e4820d52e845d1f731dca2"}, + {file = "coverage-7.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:543ef9179bc55edfd895154a51792b01c017c87af0ebaae092720152e19e42ca"}, + {file = "coverage-7.6.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:6fe885135c8a479d3e37a7aae61cbd3a0fb2deccb4dda3c25f92a49189f766d6"}, + {file = 
"coverage-7.6.0.tar.gz", hash = "sha256:289cc803fa1dc901f84701ac10c9ee873619320f2f9aff38794db4a4a0268d51"}, ] [package.dependencies] @@ -345,13 +345,13 @@ files = [ [[package]] name = "exceptiongroup" -version = "1.2.1" +version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"}, - {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"}, + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, ] [package.extras] @@ -359,18 +359,18 @@ test = ["pytest (>=6)"] [[package]] name = "filelock" -version = "3.14.0" +version = "3.15.4" description = "A platform independent file lock." optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.14.0-py3-none-any.whl", hash = "sha256:43339835842f110ca7ae60f1e1c160714c5a6afd15a2873419ab185334975c0f"}, - {file = "filelock-3.14.0.tar.gz", hash = "sha256:6ea72da3be9b8c82afd3edcf99f2fffbb5076335a5ae4d03248bb5b6c3eae78a"}, + {file = "filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7"}, + {file = "filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb"}, ] [package.extras] docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"] typing = ["typing-extensions (>=4.8)"] [[package]] @@ -420,13 +420,13 @@ pydocstyle = ">=2.1" [[package]] name = "identify" -version = "2.5.36" +version = "2.6.0" description = "File identification library for Python" optional = false python-versions = ">=3.8" files = [ - {file = "identify-2.5.36-py2.py3-none-any.whl", hash = "sha256:37d93f380f4de590500d9dba7db359d0d3da95ffe7f9de1753faa159e71e7dfa"}, - {file = "identify-2.5.36.tar.gz", hash = "sha256:e5e00f54165f9047fbebeb4a560f9acfb8af4c88232be60a488e9b68d122745d"}, + {file = "identify-2.6.0-py2.py3-none-any.whl", hash = "sha256:e79ae4406387a9d300332b5fd366d8994f1525e8414984e1a59e058b2eda2dd0"}, + {file = "identify-2.6.0.tar.gz", hash = "sha256:cb171c685bdc31bcc4c1734698736a7d5b6c8bf2e0c15117f4d469c8640ae5cf"}, ] [package.extras] @@ -456,22 +456,22 @@ files = [ [[package]] name = "importlib-metadata" -version = "7.1.0" +version = "7.2.1" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-7.1.0-py3-none-any.whl", hash = "sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570"}, - {file = "importlib_metadata-7.1.0.tar.gz", hash = "sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2"}, + {file = "importlib_metadata-7.2.1-py3-none-any.whl", hash = "sha256:ffef94b0b66046dd8ea2d619b701fe978d9264d38f3998bc4c27ec3b146a87c8"}, + {file = 
"importlib_metadata-7.2.1.tar.gz", hash = "sha256:509ecb2ab77071db5137c655e24ceb3eee66e7bbc6574165d0d114d9fc4bbe68"}, ] [package.dependencies] zipp = ">=0.5" [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] +test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] [[package]] name = "iniconfig" @@ -637,72 +637,78 @@ files = [ [[package]] name = "nodeenv" -version = "1.8.0" +version = "1.9.1" description = "Node.js virtual environment builder" optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ - {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"}, - {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"}, + {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, + {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, ] -[package.dependencies] -setuptools = "*" - [[package]] name = "numpy" -version = "1.26.4" +version = "2.0.1" description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.9" files = [ - {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, - {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, - {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, - {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, - {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, - {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, - {file = 
"numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, - {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, - {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, - {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, - {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, - {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, - {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, - {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, - {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, - {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, - {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, - {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, - {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, - {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, - {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, - {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, - {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, + {file = "numpy-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0fbb536eac80e27a2793ffd787895242b7f18ef792563d742c2d673bfcb75134"}, + {file = "numpy-2.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:69ff563d43c69b1baba77af455dd0a839df8d25e8590e79c90fcbe1499ebde42"}, + {file = "numpy-2.0.1-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:1b902ce0e0a5bb7704556a217c4f63a7974f8f43e090aff03fcf262e0b135e02"}, + {file = "numpy-2.0.1-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:f1659887361a7151f89e79b276ed8dff3d75877df906328f14d8bb40bb4f5101"}, + {file = "numpy-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4658c398d65d1b25e1760de3157011a80375da861709abd7cef3bad65d6543f9"}, + {file = "numpy-2.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4127d4303b9ac9f94ca0441138acead39928938660ca58329fe156f84b9f3015"}, + {file = "numpy-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e5eeca8067ad04bc8a2a8731183d51d7cbaac66d86085d5f4766ee6bf19c7f87"}, + {file = "numpy-2.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9adbd9bb520c866e1bfd7e10e1880a1f7749f1f6e5017686a5fbb9b72cf69f82"}, + {file = "numpy-2.0.1-cp310-cp310-win32.whl", hash = "sha256:7b9853803278db3bdcc6cd5beca37815b133e9e77ff3d4733c247414e78eb8d1"}, + {file = "numpy-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:81b0893a39bc5b865b8bf89e9ad7807e16717f19868e9d234bdaf9b1f1393868"}, + {file = "numpy-2.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:75b4e316c5902d8163ef9d423b1c3f2f6252226d1aa5cd8a0a03a7d01ffc6268"}, + {file = "numpy-2.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6e4eeb6eb2fced786e32e6d8df9e755ce5be920d17f7ce00bc38fcde8ccdbf9e"}, + {file = "numpy-2.0.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:a1e01dcaab205fbece13c1410253a9eea1b1c9b61d237b6fa59bcc46e8e89343"}, + {file = "numpy-2.0.1-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:a8fc2de81ad835d999113ddf87d1ea2b0f4704cbd947c948d2f5513deafe5a7b"}, + {file = "numpy-2.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a3d94942c331dd4e0e1147f7a8699a4aa47dffc11bf8a1523c12af8b2e91bbe"}, + {file = "numpy-2.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15eb4eca47d36ec3f78cde0a3a2ee24cf05ca7396ef808dda2c0ddad7c2bde67"}, + {file = "numpy-2.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b83e16a5511d1b1f8a88cbabb1a6f6a499f82c062a4251892d9ad5d609863fb7"}, + {file = "numpy-2.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f87fec1f9bc1efd23f4227becff04bd0e979e23ca50cc92ec88b38489db3b55"}, + {file = "numpy-2.0.1-cp311-cp311-win32.whl", hash = "sha256:36d3a9405fd7c511804dc56fc32974fa5533bdeb3cd1604d6b8ff1d292b819c4"}, + {file = "numpy-2.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:08458fbf403bff5e2b45f08eda195d4b0c9b35682311da5a5a0a0925b11b9bd8"}, + {file = "numpy-2.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6bf4e6f4a2a2e26655717a1983ef6324f2664d7011f6ef7482e8c0b3d51e82ac"}, + {file = "numpy-2.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7d6fddc5fe258d3328cd8e3d7d3e02234c5d70e01ebe377a6ab92adb14039cb4"}, + {file = 
"numpy-2.0.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:5daab361be6ddeb299a918a7c0864fa8618af66019138263247af405018b04e1"}, + {file = "numpy-2.0.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:ea2326a4dca88e4a274ba3a4405eb6c6467d3ffbd8c7d38632502eaae3820587"}, + {file = "numpy-2.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:529af13c5f4b7a932fb0e1911d3a75da204eff023ee5e0e79c1751564221a5c8"}, + {file = "numpy-2.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6790654cb13eab303d8402354fabd47472b24635700f631f041bd0b65e37298a"}, + {file = "numpy-2.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cbab9fc9c391700e3e1287666dfd82d8666d10e69a6c4a09ab97574c0b7ee0a7"}, + {file = "numpy-2.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:99d0d92a5e3613c33a5f01db206a33f8fdf3d71f2912b0de1739894668b7a93b"}, + {file = "numpy-2.0.1-cp312-cp312-win32.whl", hash = "sha256:173a00b9995f73b79eb0191129f2455f1e34c203f559dd118636858cc452a1bf"}, + {file = "numpy-2.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:bb2124fdc6e62baae159ebcfa368708867eb56806804d005860b6007388df171"}, + {file = "numpy-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bfc085b28d62ff4009364e7ca34b80a9a080cbd97c2c0630bb5f7f770dae9414"}, + {file = "numpy-2.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8fae4ebbf95a179c1156fab0b142b74e4ba4204c87bde8d3d8b6f9c34c5825ef"}, + {file = "numpy-2.0.1-cp39-cp39-macosx_14_0_arm64.whl", hash = "sha256:72dc22e9ec8f6eaa206deb1b1355eb2e253899d7347f5e2fae5f0af613741d06"}, + {file = "numpy-2.0.1-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:ec87f5f8aca726117a1c9b7083e7656a9d0d606eec7299cc067bb83d26f16e0c"}, + {file = "numpy-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f682ea61a88479d9498bf2091fdcd722b090724b08b31d63e022adc063bad59"}, + {file = "numpy-2.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8efc84f01c1cd7e34b3fb310183e72fcdf55293ee736d679b6d35b35d80bba26"}, + {file = "numpy-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3fdabe3e2a52bc4eff8dc7a5044342f8bd9f11ef0934fcd3289a788c0eb10018"}, + {file = "numpy-2.0.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:24a0e1befbfa14615b49ba9659d3d8818a0f4d8a1c5822af8696706fbda7310c"}, + {file = "numpy-2.0.1-cp39-cp39-win32.whl", hash = "sha256:f9cf5ea551aec449206954b075db819f52adc1638d46a6738253a712d553c7b4"}, + {file = "numpy-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:e9e81fa9017eaa416c056e5d9e71be93d05e2c3c2ab308d23307a8bc4443c368"}, + {file = "numpy-2.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:61728fba1e464f789b11deb78a57805c70b2ed02343560456190d0501ba37b0f"}, + {file = "numpy-2.0.1-pp39-pypy39_pp73-macosx_14_0_x86_64.whl", hash = "sha256:12f5d865d60fb9734e60a60f1d5afa6d962d8d4467c120a1c0cda6eb2964437d"}, + {file = "numpy-2.0.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eacf3291e263d5a67d8c1a581a8ebbcfd6447204ef58828caf69a5e3e8c75990"}, + {file = "numpy-2.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2c3a346ae20cfd80b6cfd3e60dc179963ef2ea58da5ec074fd3d9e7a1e7ba97f"}, + {file = "numpy-2.0.1.tar.gz", hash = "sha256:485b87235796410c3519a699cfe1faab097e509e90ebb05dcd098db2ae87e7b3"}, ] [[package]] name = "packaging" -version = "24.0" +version = "24.1" description = "Core utilities for Python packages" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = 
"packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, - {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, ] [[package]] @@ -745,9 +751,9 @@ files = [ [package.dependencies] numpy = [ + {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, {version = ">=1.22.4", markers = "python_version < \"3.11\""}, {version = ">=1.23.2", markers = "python_version == \"3.11\""}, - {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, ] python-dateutil = ">=2.8.2" pytz = ">=2020.1" @@ -887,109 +893,122 @@ files = [ [[package]] name = "pydantic" -version = "2.7.1" +version = "2.8.2" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.7.1-py3-none-any.whl", hash = "sha256:e029badca45266732a9a79898a15ae2e8b14840b1eabbb25844be28f0b33f3d5"}, - {file = "pydantic-2.7.1.tar.gz", hash = "sha256:e9dbb5eada8abe4d9ae5f46b9939aead650cd2b68f249bb3a8139dbe125803cc"}, + {file = "pydantic-2.8.2-py3-none-any.whl", hash = "sha256:73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8"}, + {file = "pydantic-2.8.2.tar.gz", hash = "sha256:6f62c13d067b0755ad1c21a34bdd06c0c12625a22b0fc09c6b149816604f7c2a"}, ] [package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.18.2" -typing-extensions = ">=4.6.1" +pydantic-core = "2.20.1" +typing-extensions = [ + {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, + {version = ">=4.6.1", markers = "python_version < \"3.13\""}, +] [package.extras] email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-core" -version = "2.18.2" +version = "2.20.1" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.18.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:9e08e867b306f525802df7cd16c44ff5ebbe747ff0ca6cf3fde7f36c05a59a81"}, - {file = "pydantic_core-2.18.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f0a21cbaa69900cbe1a2e7cad2aa74ac3cf21b10c3efb0fa0b80305274c0e8a2"}, - {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0680b1f1f11fda801397de52c36ce38ef1c1dc841a0927a94f226dea29c3ae3d"}, - {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:95b9d5e72481d3780ba3442eac863eae92ae43a5f3adb5b4d0a1de89d42bb250"}, - {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fcf5cd9c4b655ad666ca332b9a081112cd7a58a8b5a6ca7a3104bc950f2038"}, - {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b5155ff768083cb1d62f3e143b49a8a3432e6789a3abee8acd005c3c7af1c74"}, - {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:553ef617b6836fc7e4df130bb851e32fe357ce36336d897fd6646d6058d980af"}, - {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b89ed9eb7d616ef5714e5590e6cf7f23b02d0d539767d33561e3675d6f9e3857"}, - {file = "pydantic_core-2.18.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:75f7e9488238e920ab6204399ded280dc4c307d034f3924cd7f90a38b1829563"}, - {file = "pydantic_core-2.18.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ef26c9e94a8c04a1b2924149a9cb081836913818e55681722d7f29af88fe7b38"}, - {file = "pydantic_core-2.18.2-cp310-none-win32.whl", hash = "sha256:182245ff6b0039e82b6bb585ed55a64d7c81c560715d1bad0cbad6dfa07b4027"}, - {file = "pydantic_core-2.18.2-cp310-none-win_amd64.whl", hash = "sha256:e23ec367a948b6d812301afc1b13f8094ab7b2c280af66ef450efc357d2ae543"}, - {file = "pydantic_core-2.18.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:219da3f096d50a157f33645a1cf31c0ad1fe829a92181dd1311022f986e5fbe3"}, - {file = "pydantic_core-2.18.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cc1cfd88a64e012b74e94cd00bbe0f9c6df57049c97f02bb07d39e9c852e19a4"}, - {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05b7133a6e6aeb8df37d6f413f7705a37ab4031597f64ab56384c94d98fa0e90"}, - {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:224c421235f6102e8737032483f43c1a8cfb1d2f45740c44166219599358c2cd"}, - {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b14d82cdb934e99dda6d9d60dc84a24379820176cc4a0d123f88df319ae9c150"}, - {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2728b01246a3bba6de144f9e3115b532ee44bd6cf39795194fb75491824a1413"}, - {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:470b94480bb5ee929f5acba6995251ada5e059a5ef3e0dfc63cca287283ebfa6"}, - {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:997abc4df705d1295a42f95b4eec4950a37ad8ae46d913caeee117b6b198811c"}, - {file = "pydantic_core-2.18.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:75250dbc5290e3f1a0f4618db35e51a165186f9034eff158f3d490b3fed9f8a0"}, - {file = "pydantic_core-2.18.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4456f2dca97c425231d7315737d45239b2b51a50dc2b6f0c2bb181fce6207664"}, - {file = "pydantic_core-2.18.2-cp311-none-win32.whl", hash = "sha256:269322dcc3d8bdb69f054681edff86276b2ff972447863cf34c8b860f5188e2e"}, - {file = "pydantic_core-2.18.2-cp311-none-win_amd64.whl", hash = "sha256:800d60565aec896f25bc3cfa56d2277d52d5182af08162f7954f938c06dc4ee3"}, - {file = "pydantic_core-2.18.2-cp311-none-win_arm64.whl", hash = "sha256:1404c69d6a676245199767ba4f633cce5f4ad4181f9d0ccb0577e1f66cf4c46d"}, - {file = "pydantic_core-2.18.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:fb2bd7be70c0fe4dfd32c951bc813d9fe6ebcbfdd15a07527796c8204bd36242"}, - {file = "pydantic_core-2.18.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6132dd3bd52838acddca05a72aafb6eab6536aa145e923bb50f45e78b7251043"}, - {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d904828195733c183d20a54230c0df0eb46ec746ea1a666730787353e87182"}, - {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c9bd70772c720142be1020eac55f8143a34ec9f82d75a8e7a07852023e46617f"}, - {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2b8ed04b3582771764538f7ee7001b02e1170223cf9b75dff0bc698fadb00cf3"}, - {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:e6dac87ddb34aaec85f873d737e9d06a3555a1cc1a8e0c44b7f8d5daeb89d86f"}, - {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ca4ae5a27ad7a4ee5170aebce1574b375de390bc01284f87b18d43a3984df72"}, - {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:886eec03591b7cf058467a70a87733b35f44707bd86cf64a615584fd72488b7c"}, - {file = "pydantic_core-2.18.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ca7b0c1f1c983e064caa85f3792dd2fe3526b3505378874afa84baf662e12241"}, - {file = "pydantic_core-2.18.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b4356d3538c3649337df4074e81b85f0616b79731fe22dd11b99499b2ebbdf3"}, - {file = "pydantic_core-2.18.2-cp312-none-win32.whl", hash = "sha256:8b172601454f2d7701121bbec3425dd71efcb787a027edf49724c9cefc14c038"}, - {file = "pydantic_core-2.18.2-cp312-none-win_amd64.whl", hash = "sha256:b1bd7e47b1558ea872bd16c8502c414f9e90dcf12f1395129d7bb42a09a95438"}, - {file = "pydantic_core-2.18.2-cp312-none-win_arm64.whl", hash = "sha256:98758d627ff397e752bc339272c14c98199c613f922d4a384ddc07526c86a2ec"}, - {file = "pydantic_core-2.18.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:9fdad8e35f278b2c3eb77cbdc5c0a49dada440657bf738d6905ce106dc1de439"}, - {file = "pydantic_core-2.18.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1d90c3265ae107f91a4f279f4d6f6f1d4907ac76c6868b27dc7fb33688cfb347"}, - {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:390193c770399861d8df9670fb0d1874f330c79caaca4642332df7c682bf6b91"}, - {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:82d5d4d78e4448683cb467897fe24e2b74bb7b973a541ea1dcfec1d3cbce39fb"}, - {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4774f3184d2ef3e14e8693194f661dea5a4d6ca4e3dc8e39786d33a94865cefd"}, - {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d4d938ec0adf5167cb335acb25a4ee69a8107e4984f8fbd2e897021d9e4ca21b"}, - {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0e8b1be28239fc64a88a8189d1df7fad8be8c1ae47fcc33e43d4be15f99cc70"}, - {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:868649da93e5a3d5eacc2b5b3b9235c98ccdbfd443832f31e075f54419e1b96b"}, - {file = "pydantic_core-2.18.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:78363590ef93d5d226ba21a90a03ea89a20738ee5b7da83d771d283fd8a56761"}, - {file = "pydantic_core-2.18.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:852e966fbd035a6468fc0a3496589b45e2208ec7ca95c26470a54daed82a0788"}, - {file = "pydantic_core-2.18.2-cp38-none-win32.whl", hash = "sha256:6a46e22a707e7ad4484ac9ee9f290f9d501df45954184e23fc29408dfad61350"}, - {file = "pydantic_core-2.18.2-cp38-none-win_amd64.whl", hash = "sha256:d91cb5ea8b11607cc757675051f61b3d93f15eca3cefb3e6c704a5d6e8440f4e"}, - {file = "pydantic_core-2.18.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:ae0a8a797a5e56c053610fa7be147993fe50960fa43609ff2a9552b0e07013e8"}, - {file = "pydantic_core-2.18.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:042473b6280246b1dbf530559246f6842b56119c2926d1e52b631bdc46075f2a"}, - {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a388a77e629b9ec814c1b1e6b3b595fe521d2cdc625fcca26fbc2d44c816804"}, - 
{file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e25add29b8f3b233ae90ccef2d902d0ae0432eb0d45370fe315d1a5cf231004b"}, - {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f459a5ce8434614dfd39bbebf1041952ae01da6bed9855008cb33b875cb024c0"}, - {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eff2de745698eb46eeb51193a9f41d67d834d50e424aef27df2fcdee1b153845"}, - {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8309f67285bdfe65c372ea3722b7a5642680f3dba538566340a9d36e920b5f0"}, - {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f93a8a2e3938ff656a7c1bc57193b1319960ac015b6e87d76c76bf14fe0244b4"}, - {file = "pydantic_core-2.18.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:22057013c8c1e272eb8d0eebc796701167d8377441ec894a8fed1af64a0bf399"}, - {file = "pydantic_core-2.18.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cfeecd1ac6cc1fb2692c3d5110781c965aabd4ec5d32799773ca7b1456ac636b"}, - {file = "pydantic_core-2.18.2-cp39-none-win32.whl", hash = "sha256:0d69b4c2f6bb3e130dba60d34c0845ba31b69babdd3f78f7c0c8fae5021a253e"}, - {file = "pydantic_core-2.18.2-cp39-none-win_amd64.whl", hash = "sha256:d9319e499827271b09b4e411905b24a426b8fb69464dfa1696258f53a3334641"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a1874c6dd4113308bd0eb568418e6114b252afe44319ead2b4081e9b9521fe75"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:ccdd111c03bfd3666bd2472b674c6899550e09e9f298954cfc896ab92b5b0e6d"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e18609ceaa6eed63753037fc06ebb16041d17d28199ae5aba0052c51449650a9"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e5c584d357c4e2baf0ff7baf44f4994be121e16a2c88918a5817331fc7599d7"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43f0f463cf89ace478de71a318b1b4f05ebc456a9b9300d027b4b57c1a2064fb"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e1b395e58b10b73b07b7cf740d728dd4ff9365ac46c18751bf8b3d8cca8f625a"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0098300eebb1c837271d3d1a2cd2911e7c11b396eac9661655ee524a7f10587b"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:36789b70d613fbac0a25bb07ab3d9dba4d2e38af609c020cf4d888d165ee0bf3"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3f9a801e7c8f1ef8718da265bba008fa121243dfe37c1cea17840b0944dfd72c"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:3a6515ebc6e69d85502b4951d89131ca4e036078ea35533bb76327f8424531ce"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20aca1e2298c56ececfd8ed159ae4dde2df0781988c97ef77d5c16ff4bd5b400"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:223ee893d77a310a0391dca6df00f70bbc2f36a71a895cecd9a0e762dc37b349"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:2334ce8c673ee93a1d6a65bd90327588387ba073c17e61bf19b4fd97d688d63c"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:cbca948f2d14b09d20268cda7b0367723d79063f26c4ffc523af9042cad95592"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b3ef08e20ec49e02d5c6717a91bb5af9b20f1805583cb0adfe9ba2c6b505b5ae"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c6fdc8627910eed0c01aed6a390a252fe3ea6d472ee70fdde56273f198938374"}, - {file = "pydantic_core-2.18.2.tar.gz", hash = "sha256:2e29d20810dfc3043ee13ac7d9e25105799817683348823f305ab3f349b9386e"}, + {file = "pydantic_core-2.20.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3acae97ffd19bf091c72df4d726d552c473f3576409b2a7ca36b2f535ffff4a3"}, + {file = "pydantic_core-2.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41f4c96227a67a013e7de5ff8f20fb496ce573893b7f4f2707d065907bffdbd6"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f239eb799a2081495ea659d8d4a43a8f42cd1fe9ff2e7e436295c38a10c286a"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53e431da3fc53360db73eedf6f7124d1076e1b4ee4276b36fb25514544ceb4a3"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1f62b2413c3a0e846c3b838b2ecd6c7a19ec6793b2a522745b0869e37ab5bc1"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d41e6daee2813ecceea8eda38062d69e280b39df793f5a942fa515b8ed67953"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d482efec8b7dc6bfaedc0f166b2ce349df0011f5d2f1f25537ced4cfc34fd98"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e93e1a4b4b33daed65d781a57a522ff153dcf748dee70b40c7258c5861e1768a"}, + {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7c4ea22b6739b162c9ecaaa41d718dfad48a244909fe7ef4b54c0b530effc5a"}, + {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4f2790949cf385d985a31984907fecb3896999329103df4e4983a4a41e13e840"}, + {file = "pydantic_core-2.20.1-cp310-none-win32.whl", hash = "sha256:5e999ba8dd90e93d57410c5e67ebb67ffcaadcea0ad973240fdfd3a135506250"}, + {file = "pydantic_core-2.20.1-cp310-none-win_amd64.whl", hash = "sha256:512ecfbefef6dac7bc5eaaf46177b2de58cdf7acac8793fe033b24ece0b9566c"}, + {file = "pydantic_core-2.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d2a8fa9d6d6f891f3deec72f5cc668e6f66b188ab14bb1ab52422fe8e644f312"}, + {file = "pydantic_core-2.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:175873691124f3d0da55aeea1d90660a6ea7a3cfea137c38afa0a5ffabe37b88"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37eee5b638f0e0dcd18d21f59b679686bbd18917b87db0193ae36f9c23c355fc"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25e9185e2d06c16ee438ed39bf62935ec436474a6ac4f9358524220f1b236e43"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:150906b40ff188a3260cbee25380e7494ee85048584998c1e66df0c7a11c17a6"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:8ad4aeb3e9a97286573c03df758fc7627aecdd02f1da04516a86dc159bf70121"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3f3ed29cd9f978c604708511a1f9c2fdcb6c38b9aae36a51905b8811ee5cbf1"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0dae11d8f5ded51699c74d9548dcc5938e0804cc8298ec0aa0da95c21fff57b"}, + {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:faa6b09ee09433b87992fb5a2859efd1c264ddc37280d2dd5db502126d0e7f27"}, + {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9dc1b507c12eb0481d071f3c1808f0529ad41dc415d0ca11f7ebfc666e66a18b"}, + {file = "pydantic_core-2.20.1-cp311-none-win32.whl", hash = "sha256:fa2fddcb7107e0d1808086ca306dcade7df60a13a6c347a7acf1ec139aa6789a"}, + {file = "pydantic_core-2.20.1-cp311-none-win_amd64.whl", hash = "sha256:40a783fb7ee353c50bd3853e626f15677ea527ae556429453685ae32280c19c2"}, + {file = "pydantic_core-2.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:595ba5be69b35777474fa07f80fc260ea71255656191adb22a8c53aba4479231"}, + {file = "pydantic_core-2.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a4f55095ad087474999ee28d3398bae183a66be4823f753cd7d67dd0153427c9"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9aa05d09ecf4c75157197f27cdc9cfaeb7c5f15021c6373932bf3e124af029f"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e97fdf088d4b31ff4ba35db26d9cc472ac7ef4a2ff2badeabf8d727b3377fc52"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc633a9fe1eb87e250b5c57d389cf28998e4292336926b0b6cdaee353f89a237"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d573faf8eb7e6b1cbbcb4f5b247c60ca8be39fe2c674495df0eb4318303137fe"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26dc97754b57d2fd00ac2b24dfa341abffc380b823211994c4efac7f13b9e90e"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:33499e85e739a4b60c9dac710c20a08dc73cb3240c9a0e22325e671b27b70d24"}, + {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bebb4d6715c814597f85297c332297c6ce81e29436125ca59d1159b07f423eb1"}, + {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:516d9227919612425c8ef1c9b869bbbee249bc91912c8aaffb66116c0b447ebd"}, + {file = "pydantic_core-2.20.1-cp312-none-win32.whl", hash = "sha256:469f29f9093c9d834432034d33f5fe45699e664f12a13bf38c04967ce233d688"}, + {file = "pydantic_core-2.20.1-cp312-none-win_amd64.whl", hash = "sha256:035ede2e16da7281041f0e626459bcae33ed998cca6a0a007a5ebb73414ac72d"}, + {file = "pydantic_core-2.20.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0827505a5c87e8aa285dc31e9ec7f4a17c81a813d45f70b1d9164e03a813a686"}, + {file = "pydantic_core-2.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:19c0fa39fa154e7e0b7f82f88ef85faa2a4c23cc65aae2f5aea625e3c13c735a"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa223cd1e36b642092c326d694d8bf59b71ddddc94cdb752bbbb1c5c91d833b"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:c336a6d235522a62fef872c6295a42ecb0c4e1d0f1a3e500fe949415761b8a19"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7eb6a0587eded33aeefea9f916899d42b1799b7b14b8f8ff2753c0ac1741edac"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70c8daf4faca8da5a6d655f9af86faf6ec2e1768f4b8b9d0226c02f3d6209703"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9fa4c9bf273ca41f940bceb86922a7667cd5bf90e95dbb157cbb8441008482c"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:11b71d67b4725e7e2a9f6e9c0ac1239bbc0c48cce3dc59f98635efc57d6dac83"}, + {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:270755f15174fb983890c49881e93f8f1b80f0b5e3a3cc1394a255706cabd203"}, + {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c81131869240e3e568916ef4c307f8b99583efaa60a8112ef27a366eefba8ef0"}, + {file = "pydantic_core-2.20.1-cp313-none-win32.whl", hash = "sha256:b91ced227c41aa29c672814f50dbb05ec93536abf8f43cd14ec9521ea09afe4e"}, + {file = "pydantic_core-2.20.1-cp313-none-win_amd64.whl", hash = "sha256:65db0f2eefcaad1a3950f498aabb4875c8890438bc80b19362cf633b87a8ab20"}, + {file = "pydantic_core-2.20.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4745f4ac52cc6686390c40eaa01d48b18997cb130833154801a442323cc78f91"}, + {file = "pydantic_core-2.20.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a8ad4c766d3f33ba8fd692f9aa297c9058970530a32c728a2c4bfd2616d3358b"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41e81317dd6a0127cabce83c0c9c3fbecceae981c8391e6f1dec88a77c8a569a"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04024d270cf63f586ad41fff13fde4311c4fc13ea74676962c876d9577bcc78f"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eaad4ff2de1c3823fddf82f41121bdf453d922e9a238642b1dedb33c4e4f98ad"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26ab812fa0c845df815e506be30337e2df27e88399b985d0bb4e3ecfe72df31c"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c5ebac750d9d5f2706654c638c041635c385596caf68f81342011ddfa1e5598"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2aafc5a503855ea5885559eae883978c9b6d8c8993d67766ee73d82e841300dd"}, + {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4868f6bd7c9d98904b748a2653031fc9c2f85b6237009d475b1008bfaeb0a5aa"}, + {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa2f457b4af386254372dfa78a2eda2563680d982422641a85f271c859df1987"}, + {file = "pydantic_core-2.20.1-cp38-none-win32.whl", hash = "sha256:225b67a1f6d602de0ce7f6c1c3ae89a4aa25d3de9be857999e9124f15dab486a"}, + {file = "pydantic_core-2.20.1-cp38-none-win_amd64.whl", hash = "sha256:6b507132dcfc0dea440cce23ee2182c0ce7aba7054576efc65634f080dbe9434"}, + {file = "pydantic_core-2.20.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b03f7941783b4c4a26051846dea594628b38f6940a2fdc0df00b221aed39314c"}, + {file = "pydantic_core-2.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:1eedfeb6089ed3fad42e81a67755846ad4dcc14d73698c120a82e4ccf0f1f9f6"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:635fee4e041ab9c479e31edda27fcf966ea9614fff1317e280d99eb3e5ab6fe2"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:77bf3ac639c1ff567ae3b47f8d4cc3dc20f9966a2a6dd2311dcc055d3d04fb8a"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ed1b0132f24beeec5a78b67d9388656d03e6a7c837394f99257e2d55b461611"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6514f963b023aeee506678a1cf821fe31159b925c4b76fe2afa94cc70b3222b"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10d4204d8ca33146e761c79f83cc861df20e7ae9f6487ca290a97702daf56006"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d036c7187b9422ae5b262badb87a20a49eb6c5238b2004e96d4da1231badef1"}, + {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9ebfef07dbe1d93efb94b4700f2d278494e9162565a54f124c404a5656d7ff09"}, + {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6b9d9bb600328a1ce523ab4f454859e9d439150abb0906c5a1983c146580ebab"}, + {file = "pydantic_core-2.20.1-cp39-none-win32.whl", hash = "sha256:784c1214cb6dd1e3b15dd8b91b9a53852aed16671cc3fbe4786f4f1db07089e2"}, + {file = "pydantic_core-2.20.1-cp39-none-win_amd64.whl", hash = "sha256:d2fe69c5434391727efa54b47a1e7986bb0186e72a41b203df8f5b0a19a4f669"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a45f84b09ac9c3d35dfcf6a27fd0634d30d183205230a0ebe8373a0e8cfa0906"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d02a72df14dfdbaf228424573a07af10637bd490f0901cee872c4f434a735b94"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2b27e6af28f07e2f195552b37d7d66b150adbaa39a6d327766ffd695799780f"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:084659fac3c83fd674596612aeff6041a18402f1e1bc19ca39e417d554468482"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:242b8feb3c493ab78be289c034a1f659e8826e2233786e36f2893a950a719bb6"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1"}, + {file = 
"pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7"}, + {file = "pydantic_core-2.20.1.tar.gz", hash = "sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4"}, ] [package.dependencies] @@ -1151,6 +1170,7 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -1158,8 +1178,16 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -1176,6 +1204,7 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -1183,6 +1212,7 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -1190,13 +1220,13 @@ files = [ [[package]] name = "requests" -version = "2.32.2" +version = "2.32.3" description = "Python HTTP for Humans." 
optional = false python-versions = ">=3.8" files = [ - {file = "requests-2.32.2-py3-none-any.whl", hash = "sha256:fc06670dd0ed212426dfeb94fc1b983d917c4f9847c863f313c9dfaaffb7c23c"}, - {file = "requests-2.32.2.tar.gz", hash = "sha256:dd951ff5ecf3e3b3aa26b40703ba77495dab41da839ae72ef3c8e5d8e2433289"}, + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, ] [package.dependencies] @@ -1209,21 +1239,6 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] -[[package]] -name = "setuptools" -version = "70.0.0" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "setuptools-70.0.0-py3-none-any.whl", hash = "sha256:54faa7f2e8d2d11bcd2c07bed282eef1046b5c080d1c32add737d7b5817b1ad4"}, - {file = "setuptools-70.0.0.tar.gz", hash = "sha256:f211a66637b8fa059bb28183da127d4e86396c991a942b028c6650d4319c3fd0"}, -] - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] - [[package]] name = "six" version = "1.16.0" @@ -1248,26 +1263,26 @@ files = [ [[package]] name = "sphinx" -version = "7.3.7" +version = "7.4.7" description = "Python documentation generator" optional = false python-versions = ">=3.9" files = [ - {file = "sphinx-7.3.7-py3-none-any.whl", hash = "sha256:413f75440be4cacf328f580b4274ada4565fb2187d696a84970c23f77b64d8c3"}, - {file = "sphinx-7.3.7.tar.gz", hash = "sha256:a4a7db75ed37531c05002d56ed6948d4c42f473a36f46e1382b0bd76ca9627bc"}, + {file = "sphinx-7.4.7-py3-none-any.whl", hash = "sha256:c2419e2135d11f1951cd994d6eb18a1835bd8fdd8429f9ca375dc1f3281bd239"}, + {file = "sphinx-7.4.7.tar.gz", hash = "sha256:242f92a7ea7e6c5b406fdc2615413890ba9f699114a9c09192d7dfead2ee9cfe"}, ] [package.dependencies] alabaster = ">=0.7.14,<0.8.0" -babel = ">=2.9" -colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} -docutils = ">=0.18.1,<0.22" +babel = ">=2.13" +colorama = {version = ">=0.4.6", markers = "sys_platform == \"win32\""} +docutils = ">=0.20,<0.22" imagesize = ">=1.3" -Jinja2 = ">=3.0" -packaging = ">=21.0" -Pygments = ">=2.14" -requests = ">=2.25.0" -snowballstemmer = ">=2.0" +Jinja2 = ">=3.1" +packaging = ">=23.0" +Pygments = ">=2.17" +requests = ">=2.30.0" +snowballstemmer = ">=2.2" sphinxcontrib-applehelp = "*" sphinxcontrib-devhelp = "*" sphinxcontrib-htmlhelp = ">=2.0.0" @@ -1278,8 +1293,8 @@ tomli = {version = ">=2", markers = "python_version < \"3.11\""} [package.extras] docs = ["sphinxcontrib-websupport"] 
-lint = ["flake8 (>=3.5.0)", "importlib_metadata", "mypy (==1.9.0)", "pytest (>=6.0)", "ruff (==0.3.7)", "sphinx-lint", "tomli", "types-docutils", "types-requests"] -test = ["cython (>=3.0)", "defusedxml (>=0.7.1)", "pytest (>=6.0)", "setuptools (>=67.0)"] +lint = ["flake8 (>=6.0)", "importlib-metadata (>=6.0)", "mypy (==1.10.1)", "pytest (>=6.0)", "ruff (==0.5.2)", "sphinx-lint (>=0.9)", "tomli (>=2)", "types-docutils (==0.21.0.20240711)", "types-requests (>=2.30.0)"] +test = ["cython (>=3.0)", "defusedxml (>=0.7.1)", "pytest (>=8.0)", "setuptools (>=70.0)", "typing_extensions (>=4.9)"] [[package]] name = "sphinx-mdinclude" @@ -1353,13 +1368,13 @@ test = ["pytest"] [[package]] name = "sphinxcontrib-htmlhelp" -version = "2.0.5" +version = "2.0.6" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" optional = false python-versions = ">=3.9" files = [ - {file = "sphinxcontrib_htmlhelp-2.0.5-py3-none-any.whl", hash = "sha256:393f04f112b4d2f53d93448d4bce35842f62b307ccdc549ec1585e950bc35e04"}, - {file = "sphinxcontrib_htmlhelp-2.0.5.tar.gz", hash = "sha256:0dc87637d5de53dd5eec3a6a01753b1ccf99494bd756aafecd74b4fa9e729015"}, + {file = "sphinxcontrib_htmlhelp-2.0.6-py3-none-any.whl", hash = "sha256:1b9af5a2671a61410a868fce050cab7ca393c218e6205cbc7f590136f207395c"}, + {file = "sphinxcontrib_htmlhelp-2.0.6.tar.gz", hash = "sha256:c6597da06185f0e3b4dc952777a04200611ef563882e0c244d27a15ee22afa73"}, ] [package.extras] @@ -1397,19 +1412,19 @@ test = ["flake8", "mypy", "pytest"] [[package]] name = "sphinxcontrib-qthelp" -version = "1.0.7" +version = "1.0.8" description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" optional = false python-versions = ">=3.9" files = [ - {file = "sphinxcontrib_qthelp-1.0.7-py3-none-any.whl", hash = "sha256:e2ae3b5c492d58fcbd73281fbd27e34b8393ec34a073c792642cd8e529288182"}, - {file = "sphinxcontrib_qthelp-1.0.7.tar.gz", hash = "sha256:053dedc38823a80a7209a80860b16b722e9e0209e32fea98c90e4e6624588ed6"}, + {file = "sphinxcontrib_qthelp-1.0.8-py3-none-any.whl", hash = "sha256:323d6acc4189af76dfe94edd2a27d458902319b60fcca2aeef3b2180c106a75f"}, + {file = "sphinxcontrib_qthelp-1.0.8.tar.gz", hash = "sha256:db3f8fa10789c7a8e76d173c23364bdf0ebcd9449969a9e6a3dd31b8b7469f03"}, ] [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] standalone = ["Sphinx (>=5)"] -test = ["pytest"] +test = ["defusedxml (>=0.7.1)", "pytest"] [[package]] name = "sphinxcontrib-serializinghtml" @@ -1440,13 +1455,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.12.0" +version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.12.0-py3-none-any.whl", hash = "sha256:b349c66bea9016ac22978d800cfff206d5f9816951f12a7d0ec5578b0a819594"}, - {file = "typing_extensions-4.12.0.tar.gz", hash = "sha256:8cbcdc8606ebcb0d95453ad7dc5065e6237b6aa230a31e81d0f440c30fed5fd8"}, + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] [[package]] @@ -1462,13 +1477,13 @@ files = [ [[package]] name = "urllib3" -version = "2.2.1" +version = "2.2.2" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, - {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, + {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, + {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, ] [package.extras] @@ -1479,13 +1494,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "virtualenv" -version = "20.26.2" +version = "20.26.3" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.26.2-py3-none-any.whl", hash = "sha256:a624db5e94f01ad993d476b9ee5346fdf7b9de43ccaee0e0197012dc838a0e9b"}, - {file = "virtualenv-20.26.2.tar.gz", hash = "sha256:82bf0f4eebbb78d36ddaee0283d43fe5736b53880b8a8cdcd37390a07ac3741c"}, + {file = "virtualenv-20.26.3-py3-none-any.whl", hash = "sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589"}, + {file = "virtualenv-20.26.3.tar.gz", hash = "sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a"}, ] [package.dependencies] @@ -1513,20 +1528,20 @@ dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] [[package]] name = "zipp" -version = "3.18.2" +version = "3.19.2" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.18.2-py3-none-any.whl", hash = "sha256:dce197b859eb796242b0622af1b8beb0a722d52aa2f57133ead08edd5bf5374e"}, - {file = "zipp-3.18.2.tar.gz", hash = "sha256:6278d9ddbcfb1f1089a88fde84481528b07b0e10474e09dcfe53dad4069fa059"}, + {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"}, + {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] [metadata] lock-version = "2.0" python-versions = ">=3.10,<4.0" -content-hash = "33525bd539e6e5417fe95d87a461413b0f4310a1388a808a16d3aab4885b97e0" +content-hash = "243b1919c3e881039c2cd7b4e786f455b15a78872278050e7850e6a21c706c8e" diff --git a/pyproject.toml b/pyproject.toml index f419e16..8255885 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,7 +1,7 @@ [tool.poetry] name = "rialto" -version = "1.3.1" +version = "1.3.2" packages = [ { include = "rialto" }, @@ -30,6 +30,7 @@ pytest-mock = "^3.11.1" pandas = "^2.1.0" flake8-broken-line = "^1.0.0" loguru = "^0.7.2" +importlib-metadata = "^7.2.1" [tool.poetry.dev-dependencies] pyspark = "^3.4.1" diff --git 
a/rialto/jobs/decorators/decorators.py b/rialto/jobs/decorators/decorators.py index 894f682..5f2d100 100644 --- a/rialto/jobs/decorators/decorators.py +++ b/rialto/jobs/decorators/decorators.py @@ -14,7 +14,7 @@ __all__ = ["datasource", "job"] -import importlib.metadata +import importlib_metadata import inspect import typing @@ -46,11 +46,12 @@ def _get_module(stack: typing.List) -> typing.Any: def _get_version(module: typing.Any) -> str: try: - parent_name, _, _ = module.__name__.partition(".") - return importlib.metadata.version(parent_name) + package_name, _, _ = module.__name__.partition(".") + dist_name = importlib_metadata.packages_distributions()[package_name][0] + return importlib_metadata.version(dist_name) except Exception: - logger.warning(f"Failed to get library {module.__name__} version!") + logger.warning(f"Failed to get version of {module.__name__}! Will use N/A") return "N/A" From d2ab2bb587e60fd49dc9e82e8ef8db6e35ad66c8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Vladislav=20Vanc=C3=A1k?= Date: Tue, 23 Jul 2024 19:13:43 +0200 Subject: [PATCH 02/10] Switchable Version Column in job output (#8) --- rialto/jobs/decorators/decorators.py | 42 +++++++++++++++++++--------- rialto/jobs/decorators/job_base.py | 6 +++- rialto/jobs/decorators/test_utils.py | 7 ++--- tests/jobs/resources.py | 5 ++++ tests/jobs/test_decorators.py | 11 ++++++++ tests/jobs/test_job/test_job.py | 7 ++++- tests/jobs/test_job_base.py | 10 +++++++ 7 files changed, 69 insertions(+), 19 deletions(-) diff --git a/rialto/jobs/decorators/decorators.py b/rialto/jobs/decorators/decorators.py index 5f2d100..f900726 100644 --- a/rialto/jobs/decorators/decorators.py +++ b/rialto/jobs/decorators/decorators.py @@ -14,10 +14,10 @@ __all__ = ["datasource", "job"] -import importlib_metadata import inspect import typing +import importlib_metadata from loguru import logger from rialto.jobs.decorators.job_base import JobBase @@ -47,7 +47,7 @@ def _get_module(stack: typing.List) -> typing.Any: def _get_version(module: typing.Any) -> str: try: package_name, _, _ = module.__name__.partition(".") - dist_name = importlib_metadata.packages_distributions()[package_name][0] + dist_name = importlib_metadata.packages_distributions()[package_name][0] return importlib_metadata.version(dist_name) except Exception: @@ -73,15 +73,19 @@ def _generate_rialto_job(callable: typing.Callable, module: object, class_name: return generated_class -def job(name_or_callable: typing.Union[str, typing.Callable]) -> typing.Union[typing.Callable, typing.Type]: +def job(*args, custom_name=None, disable_version=False): """ Rialto jobs decorator. Transforms a python function into a rialto transformation, which can be imported and run by Rialto Runner. - Allows a custom name, via @job("custom_name_here") or can be just used as @job and the function's name is used. + It is mainly used as plain @job, in which case the function's name is used and the output columns are added automatically. + To override this behavior, use @job(custom_name=XXX, disable_version=True). + - :param name_or_callable: str for custom job name. Otherwise, run function. - :return: One more job wrapper for run function (if custom name specified). + :param *args: list of positional arguments. Empty in case custom_name or disable_version is specified. + :param custom_name: str for custom job name. + :param disable_version: bool for disabling autofilling the VERSION column in the job's outputs. + :return: One more job wrapper for run function (if custom name or version override specified).
Otherwise, generates Rialto Transformation Type and returns it for in-module registration. """ stack = inspect.stack() @@ -89,13 +93,25 @@ def job(name_or_callable: typing.Union[str, typing.Callable]) -> typing.Union[ty module = _get_module(stack) version = _get_version(module) - if type(name_or_callable) is str: + # Use case where it's just raw @f. Otherwise we get [] here. + if len(args) == 1 and callable(args[0]): + f = args[0] + return _generate_rialto_job(callable=f, module=module, class_name=f.__name__, version=version) + + # If custom args are specified, we need to return one more wrapper + def inner_wrapper(f): + # Setting default custom name, in case user only disables version + class_name = f.__name__ + nullable_version = version + + # User - Specified custom name + if custom_name is not None: + class_name = custom_name - def inner_wrapper(callable): - return _generate_rialto_job(callable, module, name_or_callable, version) + # Setting version to None causes JobBase to not fill it + if disable_version: + nullable_version = None - return inner_wrapper + return _generate_rialto_job(callable=f, module=module, class_name=class_name, version=nullable_version) - else: - name = name_or_callable.__name__ - return _generate_rialto_job(name_or_callable, module, name, version) + return inner_wrapper diff --git a/rialto/jobs/decorators/job_base.py b/rialto/jobs/decorators/job_base.py index c55e09c..9e3ecc8 100644 --- a/rialto/jobs/decorators/job_base.py +++ b/rialto/jobs/decorators/job_base.py @@ -96,7 +96,11 @@ def _get_timestamp_holder_result(self) -> DataFrame: def _add_job_version(self, df: DataFrame) -> DataFrame: version = self.get_job_version() - return df.withColumn("VERSION", F.lit(version)) + + if version is not None: + return df.withColumn("VERSION", F.lit(version)) + + return df def _run_main_callable(self, run_date: datetime.date) -> DataFrame: with self._setup_resolver(run_date): diff --git a/rialto/jobs/decorators/test_utils.py b/rialto/jobs/decorators/test_utils.py index bd21dba..d5cf810 100644 --- a/rialto/jobs/decorators/test_utils.py +++ b/rialto/jobs/decorators/test_utils.py @@ -20,12 +20,11 @@ from unittest.mock import patch -def _passthrough_decorator(x: typing.Callable) -> typing.Callable: - if type(x) is str: +def _passthrough_decorator(*args, **kwargs) -> typing.Callable: + if len(args) == 0: return _passthrough_decorator - else: - return x + return args[0] @contextmanager diff --git a/tests/jobs/resources.py b/tests/jobs/resources.py index 4d33fad..60fda7b 100644 --- a/tests/jobs/resources.py +++ b/tests/jobs/resources.py @@ -41,3 +41,8 @@ def f(spark): return spark.createDataFrame(df) return f + + +class CustomJobNoVersion(CustomJobNoReturnVal): + def get_job_version(self) -> str: + return None diff --git a/tests/jobs/test_decorators.py b/tests/jobs/test_decorators.py index e896cec..c6d05e6 100644 --- a/tests/jobs/test_decorators.py +++ b/tests/jobs/test_decorators.py @@ -57,6 +57,17 @@ def test_custom_name_function(): custom_callable = result_class.get_custom_callable() assert custom_callable() == "custom_job_name_return" + job_name = result_class.get_job_name() + assert job_name == "custom_job_name" + + +def test_job_disabling_version(): + result_class = _rialto_import_stub("tests.jobs.test_job.test_job", "disable_version_job_function") + assert issubclass(type(result_class), JobBase) + + job_version = result_class.get_job_version() + assert job_version is None + def test_job_dependencies_registered(spark): ConfigHolder.set_custom_config(value=123) diff --git 
a/tests/jobs/test_job/test_job.py b/tests/jobs/test_job/test_job.py index 12baec9..460490a 100644 --- a/tests/jobs/test_job/test_job.py +++ b/tests/jobs/test_job/test_job.py @@ -26,11 +26,16 @@ def job_function(): return "job_function_return" -@job("custom_job_name") +@job(custom_name="custom_job_name") def custom_name_job_function(): return "custom_job_name_return" +@job(disable_version=True) +def disable_version_job_function(): + return "disabled_version_job_return" + + @job def job_asking_for_all_deps(spark, run_date, config, dependencies, table_reader): assert spark is not None diff --git a/tests/jobs/test_job_base.py b/tests/jobs/test_job_base.py index 2cdc741..ab8284a 100644 --- a/tests/jobs/test_job_base.py +++ b/tests/jobs/test_job_base.py @@ -91,3 +91,13 @@ def test_return_dataframe_forwarded_with_version(spark): assert result.columns == ["FIRST", "SECOND", "VERSION"] assert result.first()["VERSION"] == "job_version" assert result.count() == 2 + + +def test_none_job_version_wont_fill_job_column(spark): + table_reader = MagicMock() + date = datetime.date(2023, 1, 1) + + result = resources.CustomJobNoVersion().run(reader=table_reader, run_date=date, spark=spark, metadata_manager=None) + + assert type(result) is pyspark.sql.DataFrame + assert "VERSION" not in result.columns From 2924719e7a2d58bbfa7022261330e3edabe75028 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Vladislav=20Vanc=C3=A1k?= Date: Thu, 25 Jul 2024 17:51:13 +0200 Subject: [PATCH 03/10] Resolver Resolution Test Utils (#9) --- rialto/jobs/decorators/test_utils.py | 46 ++++++++++++++++++- tests/jobs/test_job/dependency_tests_job.py | 51 +++++++++++++++++++++ tests/jobs/test_test_utils.py | 29 +++++++++++- 3 files changed, 123 insertions(+), 3 deletions(-) create mode 100644 tests/jobs/test_job/dependency_tests_job.py diff --git a/rialto/jobs/decorators/test_utils.py b/rialto/jobs/decorators/test_utils.py index d5cf810..5465d6e 100644 --- a/rialto/jobs/decorators/test_utils.py +++ b/rialto/jobs/decorators/test_utils.py @@ -17,7 +17,9 @@ import importlib import typing from contextlib import contextmanager -from unittest.mock import patch +from unittest.mock import patch, create_autospec, MagicMock +from rialto.jobs.decorators.resolver import Resolver, ResolverException +from rialto.jobs.decorators.job_base import JobBase @@ -58,3 +60,45 @@ def disable_job_decorators(module) -> None: yield importlib.reload(module) + + +def resolver_resolves(spark, job: JobBase) -> bool: + """ + Checker method for your dependency resolution. + + If your job's dependencies are all defined and resolvable, returns True. + Otherwise, throws an exception. + + :param spark: SparkSession object. + :param job: Job to try and resolve.
+ + :return: bool, True if job can be resolved + """ + + class SmartStorage: + def __init__(self): + self._storage = Resolver._storage.copy() + self._call_stack = [] + + def __setitem__(self, key, value): + self._storage[key] = value + + def keys(self): + return self._storage.keys() + + def __getitem__(self, func_name): + if func_name in self._call_stack: + raise ResolverException(f"Circular Dependence on {func_name}!") + + self._call_stack.append(func_name) + + real_method = self._storage[func_name] + fake_method = create_autospec(real_method) + fake_method.side_effect = lambda *args, **kwargs: self._call_stack.remove(func_name) + + return fake_method + + with patch("rialto.jobs.decorators.resolver.Resolver._storage", SmartStorage()): + job().run(reader=MagicMock(), run_date=MagicMock(), spark=spark) + + return True diff --git a/tests/jobs/test_job/dependency_tests_job.py b/tests/jobs/test_job/dependency_tests_job.py new file mode 100644 index 0000000..3029b33 --- /dev/null +++ b/tests/jobs/test_job/dependency_tests_job.py @@ -0,0 +1,51 @@ +from rialto.jobs.decorators import job, datasource + + +@datasource +def a(): + return 1 + + +@datasource +def b(a): + return a + 1 + + +@datasource +def c(a, b): + return a + b + + +@job +def ok_dependency_job(c): + return c + 1 + + +@datasource +def d(a, circle_1): + return circle_1 + a + + +@datasource +def circle_1(circle_2): + return circle_2 + 1 + + +@datasource +def circle_2(circle_1): + return circle_1 + 1 + + +@job +def circular_dependency_job(d): + return d + 1 + + +@job +def missing_dependency_job(a, x): + return x + a + + +@job +def default_dependency_job(run_date, spark, config, dependencies, table_reader, feature_loader): + return 1 diff --git a/tests/jobs/test_test_utils.py b/tests/jobs/test_test_utils.py index a6b31b2..63884b4 100644 --- a/tests/jobs/test_test_utils.py +++ b/tests/jobs/test_test_utils.py @@ -11,12 +11,13 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - +import pytest import rialto.jobs.decorators as decorators import tests.jobs.test_job.test_job as test_job +import tests.jobs.test_job.dependency_tests_job as dependency_tests_job from rialto.jobs.decorators.resolver import Resolver -from rialto.jobs.decorators.test_utils import disable_job_decorators +from rialto.jobs.decorators.test_utils import disable_job_decorators, resolver_resolves def test_raw_dataset_patch(mocker): @@ -46,3 +47,27 @@ def test_custom_name_job_function_patch(mocker): assert test_job.custom_name_job_function() == "custom_job_name_return" spy_dec.assert_not_called() + + +def test_resolver_resolves_ok_job(spark): + assert resolver_resolves(spark, dependency_tests_job.ok_dependency_job) + + +def test_resolver_resolves_default_dependency(spark): + assert resolver_resolves(spark, dependency_tests_job.default_dependency_job) + + +def test_resolver_resolves_fails_circular_dependency(spark): + with pytest.raises(Exception) as exc_info: + assert resolver_resolves(spark, dependency_tests_job.circular_dependency_job) + + assert exc_info is not None + assert str(exc_info.value) == "Circular Dependence on circle_1!" + + +def test_resolver_resolves_fails_missing_dependency(spark): + with pytest.raises(Exception) as exc_info: + assert resolver_resolves(spark, dependency_tests_job.missing_dependency_job) + + assert exc_info is not None + assert str(exc_info.value) == "x declaration not found!" 
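The `resolver_resolves` helper above walks a job's entire dependency tree with autospec'd stand-ins instead of real data, so it fails fast on missing or circular declarations. A minimal sketch of how a consuming project's test suite might call it; the `my_project.jobs` module and its job names are hypothetical, not part of this patch series:

```python
import pytest

from rialto.jobs.decorators.test_utils import resolver_resolves

import my_project.jobs as jobs  # hypothetical module with @job / @datasource definitions


def test_my_job_resolves(spark):
    # True only if every dependency, direct and transitive, is registered
    assert resolver_resolves(spark, jobs.my_job)


def test_broken_job_reports_missing_dependency(spark):
    # resolver_resolves raises instead of returning False, so the error
    # message names the offending dependency
    with pytest.raises(Exception, match="declaration not found"):
        resolver_resolves(spark, jobs.broken_job)
```

The error-message formats asserted here ("x declaration not found!", "Circular Dependence on ...") come directly from the tests added in this patch.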
From e2186cd136e2f325449f850a9543cfa89ba7c61d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Vladislav=20Vanc=C3=A1k?= Date: Fri, 26 Jul 2024 15:11:06 +0200 Subject: [PATCH 04/10] Jobs Changes Docs (#10) --- README.md | 86 ++++++++++++++++++++++++++++++++++++++++++++++++------- 1 file changed, 75 insertions(+), 11 deletions(-) diff --git a/README.md b/README.md index 142d23e..4f52d50 100644 --- a/README.md +++ b/README.md @@ -288,7 +288,7 @@ Rialto jobs simplify creation of runner transformations. Instead of having to in As the names might suggest, * *datasource* registers the function below as a valid datasource, which can be used as a dependency -* *job* registers the decorated function as a Rialo transformation. +* *job* registers the decorated function as a Rialto transformation. The output / return value of both functions **should/has to be** a python dataframe *(or nothing for jobs, more on that later)*. @@ -321,34 +321,98 @@ def my_datasource(run_date: datetime.date, table_reader: TableReader) -> DataFra def my_job(my_datasource: DataFrame) -> DataFrame: return my_datasource.withColumn("HelloWorld", F.lit(1)) ``` -This piece of code creates a rialto transformation called *my_job*, which is then callable by the rialto runner. It first sources the *my_datasource* and then runs *my_job* on top of that datasource. - -### job naming / outputs -The rialto runner creates a final table according to the job's name. Therefore, we do support 2 ways of creating jobs: +This piece of code +1. creates a rialto transformation called *my_job*, which is then callable by the rialto runner. +2. It sources the *my_datasource* and then runs *my_job* on top of that datasource. +3. Rialto adds VERSION (of your package) and INFORMATION_DATE (as per config) columns automatically. +4. The rialto runner stores the final dataframe in a catalog, in a table named according to the job's name. + +### Custom job names Note that, by default, the rialto job name is your function name. To allow more flexibility, we allow renaming of the job: ```python -@job("my_custom_name") +@job(custom_name="my_custom_name") def f(...): ... +``` +Just note that any *WeirdCaseNames* will be transformed to *lower_case_with_underscores*. -@job -def my_custom_name(...): +### Disabling Versioning +If you want to disable versioning of your job (adding package VERSION column to your output): + +```python3 +@job(disable_version=True) +def my_job(...): ... ``` -Up to you, both work. Just note that any *WeirdCaseNames* will be transformed to *lower_case_with_underscores*. -### notes / rules +These parameters can be used separately or combined. + +### Notes & Rules The rules for the dependencies are fairly straightforward. Both **jobs** and **datasources** can only depend on *pre-defined* dependencies and other *datasources*. Meaning: * *datasource -> datasource -> job* is perfectly fine, * *datasource -> job -> datasource* will result in an error. Secondly, the jobs can, but **don't necessarily have to output** a dataframe. In case your job doesn't output a dataframe, your job will return an artificially-created, one-row dataframe, which will ensure that rialto notices that the job ran successfully. This can be useful in **model training**. Finally, remember that your jobs are still just *Rialto Transformations* internally.
Meaning that at the end of the day, you should always read some data, do some operations on it and either return a pyspark DataFrame, or not return anything and let the framework return the placeholder one. +### Testing +One of the main advantages of the jobs module is simplification of unit tests for your transformations. Rialto provides the following tools: + +#### 1. Disabling Decorators + +Assuming we have a my_package.test_job_module.py module: +```python3 +@datasource +def datasource_a(...) + ... code ... + +@job +def my_job(datasource_a, ...) + ... code ... +``` +The *disable_job_decorators* context manager, as the name suggests, disables all decorator functionality and lets you access your functions as raw functions - making it super simple to unit-test: +```python3 +from rialto.jobs.decorators.test_utils import disable_job_decorators +import my_package.test_job_module as tjm + +# Datasource Testing +def test_datasource_a(): + ... mocks here ... + + with disable_job_decorators(tjm): + datasource_a_output = tjm.datasource_a(... mocks ...) + + ... asserts ... + +# Job Testing +def test_my_job(): + datasource_a_mock = ... + ... other mocks... + + with disable_job_decorators(tjm): + job_output = tjm.my_job(datasource_a_mock, ... mocks ...) + + ... asserts ... +``` + +#### 2. Testing the @job Dependency Tree In complex use cases, it may happen that the dependencies of a job become quite complex. Or you simply want to be sure that you didn't accidentally misspell your dependency name: + +```python3 +from rialto.jobs.decorators.test_utils import resolver_resolves +import my_package.test_job_module as tjm + +def test_my_job_resolves(spark): + assert resolver_resolves(spark, tjm.my_job) +``` + +The code above fails if *my_job* depends on an undefined datasource (even indirectly), and detects cases where there's a circular dependency. + ## 2.4 - loader This module is used to load features from feature store into your models and scripts. Loader provides options to load singular features, whole feature groups, as well as a selection of features from multiple groups defined in a config file, and served as a singular dataframe. It also provides an interface to access feature metadata.
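The jobs documentation above shows *custom_name* and *disable_version* only in isolation; below is a minimal sketch of the combined form for a training-style job that returns no dataframe. The function and table names are illustrative, not taken from the patch:

```python
from pyspark.sql import DataFrame

from rialto.jobs.decorators import datasource, job


@datasource
def training_data(table_reader, run_date) -> DataFrame:
    # illustrative table name; get_latest picks the most recent partition up to run_date
    return table_reader.get_latest("my_catalog.my_schema.training_table", until=run_date)


# stored as train_my_model (WeirdCaseNames become lower_case_with_underscores),
# with no VERSION column added to the output
@job(custom_name="TrainMyModel", disable_version=True)
def train_my_model(training_data: DataFrame):
    # train and persist a model here; returning nothing is fine -
    # rialto substitutes a one-row placeholder dataframe to mark success
    ...
```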
From f2be7cbbc73d9df762c7a2807d920bddd1906b62 Mon Sep 17 00:00:00 2001 From: Marek Dobransky Date: Fri, 23 Aug 2024 19:09:14 +0200 Subject: [PATCH 05/10] aggregated dependency printout (#11) --- rialto/runner/runner.py | 12 +++++++++--- rialto/runner/transformation.py | 1 + 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/rialto/runner/runner.py b/rialto/runner/runner.py index ade89ff..343d2fe 100644 --- a/rialto/runner/runner.py +++ b/rialto/runner/runner.py @@ -217,6 +217,9 @@ def check_dependencies(self, pipeline: PipelineConfig, run_date: date) -> bool: :return: bool """ logger.info(f"{pipeline.name} checking dependencies for {run_date}") + + error = "" + for dependency in pipeline.dependencies: dep_from = DateManager.date_subtract(run_date, dependency.interval.units, dependency.interval.value) logger.info(f"Looking for {dependency.table} from {dep_from} until {run_date}") @@ -237,12 +240,15 @@ def check_dependencies(self, pipeline: PipelineConfig, run_date: date) -> bool: source = Table(table_path=dependency.table, partition=date_col) if True in self.check_dates_have_partition(source, possible_dep_dates): logger.info(f"Dependency for {dependency.table} from {dep_from} until {run_date} is fulfilled") - continue else: msg = f"Missing dependency for {dependency.table} from {dep_from} until {run_date}" logger.info(msg) - self.tracker.last_error = msg - return False + error = error + msg + "\n" + + if error != "": + self.tracker.last_error = error + return False + return True def get_possible_run_dates(self, schedule: ScheduleConfig) -> List[date]: diff --git a/rialto/runner/transformation.py b/rialto/runner/transformation.py index 210cb0b..4399ce0 100644 --- a/rialto/runner/transformation.py +++ b/rialto/runner/transformation.py @@ -43,6 +43,7 @@ def run( :param run_date: date :param spark: spark session :param metadata_manager: metadata api object + :param dependencies: dictionary of dependencies :return: dataframe """ raise NotImplementedError From 93815c23fa21a191f53df61408210926f3117068 Mon Sep 17 00:00:00 2001 From: Marek Dobransky Date: Tue, 3 Sep 2024 11:35:05 +0200 Subject: [PATCH 06/10] RLG-3595 rialto v2 (#12) * v2 changes --- CHANGELOG.md | 27 +++ README.md | 176 +++++++++------ poetry.lock | 31 +-- pyproject.toml | 5 +- rialto/common/__init__.py | 2 +- rialto/common/table_reader.py | 70 ++---- rialto/common/utils.py | 39 +--- rialto/jobs/__init__.py | 2 + rialto/jobs/configuration/config_holder.py | 130 ----------- rialto/jobs/decorators/__init__.py | 2 +- rialto/jobs/decorators/decorators.py | 22 +- rialto/jobs/decorators/job_base.py | 46 ++-- rialto/jobs/decorators/resolver.py | 4 +- rialto/jobs/decorators/test_utils.py | 7 +- rialto/loader/__init__.py | 1 - rialto/loader/data_loader.py | 45 ---- rialto/loader/interfaces.py | 20 +- rialto/loader/pyspark_feature_loader.py | 43 ++-- rialto/runner/config_loader.py | 59 +++-- rialto/runner/config_overrides.py | 76 +++++++ rialto/runner/runner.py | 215 +++++-------------- rialto/runner/tracker.py | 13 +- rialto/runner/transformation.py | 15 +- rialto/runner/utils.py | 74 +++++++ tests/jobs/test_config_holder.py | 100 --------- tests/jobs/test_decorators.py | 11 +- tests/jobs/test_job/dependency_tests_job.py | 4 +- tests/jobs/test_job/test_job.py | 12 +- tests/jobs/test_job_base.py | 34 ++- tests/loader/pyspark/dummy_loaders.py | 24 --- tests/loader/pyspark/test_from_cfg.py | 11 +- tests/runner/conftest.py | 4 +- tests/runner/overrider.yaml | 86 ++++++++ tests/runner/test_date_manager.py | 4 +- 
tests/runner/test_overrides.py | 137 ++++++++++++ tests/runner/test_runner.py | 164 ++++---------- tests/runner/transformations/config.yaml | 23 +- tests/runner/transformations/config2.yaml | 8 +- tests/runner/transformations/simple_group.py | 6 +- 39 files changed, 844 insertions(+), 908 deletions(-) delete mode 100644 rialto/jobs/configuration/config_holder.py delete mode 100644 rialto/loader/data_loader.py create mode 100644 rialto/runner/config_overrides.py create mode 100644 rialto/runner/utils.py delete mode 100644 tests/jobs/test_config_holder.py delete mode 100644 tests/loader/pyspark/dummy_loaders.py create mode 100644 tests/runner/overrider.yaml create mode 100644 tests/runner/test_overrides.py diff --git a/CHANGELOG.md b/CHANGELOG.md index cfd48eb..63e9791 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,33 @@ # Change Log All notable changes to this project will be documented in this file. +## 2.0.0 - 2024-mm-dd + #### Runner + - runner config now accepts environment variables + - restructured runner config + - added metadata and feature loader sections + - target moved to pipeline + - dependency date_col is now mandatory + - custom extras config is available in each pipeline and will be passed as a dictionary available under pipeline_config.extras + - general section is renamed to runner + - info_date_shift is always a list + - transformation header changed + - added argument to skip dependency checking + - added overrides parameter to allow for dynamic overriding of config values + - removed date_from and date_to from arguments, use overrides instead + #### Jobs + - jobs are now the main way to create all pipelines + - config holder removed from jobs + - metadata_manager and feature_loader are now available arguments, depending on configuration + - added @config decorator, similar use case to @datasource, for parsing configuration + #### TableReader + - function signatures changed + - until -> date_until + - info_date_from -> date_from, info_date_to -> date_to + - date_column is now mandatory + - removed TableReader's ability to infer schema from partitions or properties + #### Loader + - removed DataLoader class, now only PysparkFeatureLoader is needed with additional parameters ## 1.3.0 - 2024-06-07 diff --git a/README.md b/README.md index 4f52d50..2ac915f 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ - +from pydantic import BaseModelfrom rialto.runner.config_loader import PipelineConfigfrom rialto.jobs import config # Rialto @@ -53,31 +53,21 @@ runner() A runner by default executes all the jobs provided in the configuration file, for all the viable execution dates according to the configuration file for which the job has not yet run successfully (i.e. the date partition doesn't exist on the storage). This behavior can be modified by various parameters and switches available.
-* **feature_metadata_schema** - path to schema where feature metadata are read and stored, needed for [maker](#maker) jobs and jobs that utilized feature [loader](#loader) * **run_date** - date at which the runner is triggered (defaults to day of running) -* **date_from** - starting date (defaults to rundate - config watch period) -* **date_until** - end date (defaults to rundate) -* **feature_store_schema** - location of features, needed for jobs utilizing feature [loader](#loader) -* **custom_job_config** - dictionary with key-value pairs that will be accessible under the "config" variable in your rialto jobs * **rerun** - rerun all jobs even if they already succeeded in the past runs * **op** - run only selected operation / pipeline - +* **skip_dependencies** - ignore dependency checks and run all jobs +* **overrides** - dictionary of overrides for the configuration Transformations are not included in the runner itself; it imports them dynamically according to the configuration, therefore it's necessary to have them locally installed. -A runner created table has will have automatically created **rialto_date_column** table property set according to target partition set in the configuration. - ### Configuration ```yaml -general: - target_schema: catalog.schema # schema where tables will be created, must exist - target_partition_column: INFORMATION_DATE # date to partition new tables on - source_date_column_property: rialto_date_column # name of the date property on source tables +runner: watched_period_units: "months" # unit of default run period watched_period_value: 2 # value of default run period - job: "run" # run for running the pipelines, check for only checking dependencies mail: to: # a list of email addresses - name@host.domain @@ -100,7 +90,7 @@ pipelines: # a list of pipelines to run dependencies: # list of dependent tables - table: catalog.schema.table1 name: "table1" # Optional table name, used to recall dependency details in transformation - date_col: generation_date # Optional date column name, takes priority + date_col: generation_date # Mandatory date column name interval: # mandatory availability interval, subtracted from scheduled day units: "days" value: 1 @@ -109,6 +99,18 @@ pipelines: # a list of pipelines to run interval: units: "months" value: 1 + target: + target_schema: catalog.schema # schema where tables will be created, must exist + target_partition_column: INFORMATION_DATE # date to partition new tables on + metadata_manager: # optional + metadata_schema: catalog.metadata # schema where metadata is stored + feature_loader: # optional + config_path: model_features_config.yaml # path to the feature loader configuration file + feature_schema: catalog.feature_tables # schema where feature tables are stored + metadata_schema: catalog.metadata # schema where metadata is stored + extras: #optional arguments processed as dictionary + some_value: 3 + some_other_value: giraffe - name: PipelineTable1 # will be written as pipeline_table1 module: @@ -127,8 +129,67 @@ pipelines: # a list of pipelines to run interval: units: "days" value: 6 + target: + target_schema: catalog.schema # schema where tables will be created, must exist + target_partition_column: INFORMATION_DATE # date to partition new tables on +``` + +The configuration can be dynamically overridden by providing a dictionary of overrides to the runner. All overrides must adhere to the configuration schema, with the pipeline.extras section available for a custom schema.
+Here are a few examples of overrides: + +#### Simple override of a single value +Specify the path to the value in the configuration file as a dot-separated string + +```python +Runner( + spark, + config_path="tests/overrider.yaml", + run_date="2023-03-31", + overrides={"runner.watch_period_value": 4}, + ) +``` + +#### Override list element +You can refer to list elements by their index (starting with 0) +```python +overrides={"runner.mail.to[1]": "a@b.c"} +``` + +#### Append to list +You can append to a list by using index -1 +```python +overrides={"runner.mail.to[-1]": "test@test.com"} +``` + +#### Lookup by attribute value in a list +You can use the following syntax to find a specific element in a list by its attribute value +```python +overrides={"pipelines[name=SimpleGroup].target.target_schema": "new_schema"}, +``` + +#### Injecting/Replacing whole sections +You can directly replace a bigger section of the configuration by providing a dictionary. +When the whole section doesn't exist, it will be added to the configuration; however, it needs to be added as a whole. +i.e. if the yaml file doesn't specify feature_loader, you can't just add feature_loader.config_path; you need to add the whole section. +```python +overrides={"pipelines[name=SimpleGroup].feature_loader": + {"config_path": "features_cfg.yaml", + "feature_schema": "catalog.features", + "metadata_schema": "catalog.metadata"}} +``` +#### Multiple overrides +You can provide multiple overrides at once; the order of execution is not guaranteed +```python +overrides={"runner.watch_period_value": 4, + "runner.watch_period_units": "weeks", + "pipelines[name=SimpleGroup].target.target_schema": "new_schema", + "pipelines[name=SimpleGroup].feature_loader": + {"config_path": "features_cfg.yaml", + "feature_schema": "catalog.features", + "metadata_schema": "catalog.metadata"} + } +``` ## 2.2 - maker @@ -302,6 +363,7 @@ We have a set of pre-defined dependencies: * **dependencies** returns a dictionary containing the job dependencies config * **table_reader** returns *TableReader* * **feature_loader** provides *PysparkFeatureLoader* +* **metadata_manager** provides *MetadataManager* Apart from that, each **datasource** also becomes a fully usable dependency. Note that this means that datasources can also be dependent on other datasources - just beware of any circular dependencies! With that sorted out, we can now provide a quick example of the *rialto.jobs* module: ```python from pyspark.sql import DataFrame from rialto.common import TableReader -from rialto.jobs.decorators import job, datasource +from rialto.jobs.decorators import config, job, datasource +from rialto.runner.config_loader import PipelineConfig +from pydantic import BaseModel + + +class ConfigModel(BaseModel): + some_value: int + some_other_value: str + +@config +def my_config(config: PipelineConfig): + return ConfigModel(**config.extras) @datasource def my_datasource(run_date: datetime.date, table_reader: TableReader) -> DataFrame: - return table_reader.get_latest("my_catalog.my_schema.my_table", until=run_date) + return table_reader.get_latest("my_catalog.my_schema.my_table", date_until=run_date) @job -def my_job(my_datasource: DataFrame) -> DataFrame: - return my_datasource.withColumn("HelloWorld", F.lit(1)) +def my_job(my_datasource: DataFrame, my_config: ConfigModel) -> DataFrame: + return my_datasource.withColumn("HelloWorld", F.lit(my_config.some_value)) ``` -This piece of code 
creates a rialto transformation called *my_job*, which is then callable by the rialto runner. +This piece of code +1. creates a rialto transformation called *my_job*, which is then callable by the rialto runner. 2. It sources the *my_datasource* and then runs *my_job* on top of that datasource. 3. Rialto adds VERSION (of your package) and INFORMATION_DATE (as per config) columns automatically. 4. The rialto runner stores the final dataframe in a catalog, in a table named according to the job's name. @@ -383,20 +456,20 @@ import my_package.test_job_module as tjm # Datasource Testing def test_datasource_a(): ... mocks here ... - + with disable_job_decorators(tjm): datasource_a_output = tjm.datasource_a(... mocks ...) - + ... asserts ... - + # Job Testing def test_my_job(): datasource_a_mock = ... ... other mocks... - + with disable_job_decorators(tjm): job_output = tjm.my_job(datasource_a_mock, ... mocks ...) - + ... asserts ... ``` @@ -418,19 +491,6 @@ This module is used to load features from feature store into your models and scr Two public classes are exposed from this module. **DatabricksLoader**(DataLoader), **PysparkFeatureLoader**(FeatureLoaderInterface). -### DatabricksLoader -This is a support class for feature loader and provides the data reading capability from the feature store. -This class needs to be instantiated with an active spark session and a path to the feature store schema (in the format of "catalog_name.schema_name"). -Optionally a date_column information can be passed, otherwise it defaults to use INFORMATION_DATE -```python -from rialto.loader import DatabricksLoader -data_loader = DatabricksLoader(spark= spark_instance, schema= "catalog.schema", date_column= "INFORMATION_DATE") -``` -This class provides one method, read_group(...), which returns a whole feature group for selected date. This is mostly used inside feature loader. - ### PysparkFeatureLoader This class needs to be instantiated with an active spark session, data loader and a path to the metadata schema (in the format of "catalog_name.schema_name"). ```python from rialto.loader import PysparkFeatureLoader -feature_loader = PysparkFeatureLoader(spark= spark_instance, data_loader= data_loader_instance, metadata_schema= "catalog.schema") +feature_loader = PysparkFeatureLoader(spark= spark_instance, feature_schema="catalog.schema", metadata_schema= "catalog.schema2", date_column="information_date") ``` #### Single feature ```python -from rialto.loader import DatabricksLoader, PysparkFeatureLoader +from rialto.loader import PysparkFeatureLoader from datetime import datetime -data_loader = DatabricksLoader(spark, "feature_catalog.feature_schema") -feature_loader = PysparkFeatureLoader(spark, data_loader, "metadata_catalog.metadata_schema") +feature_loader = PysparkFeatureLoader(spark, "feature_catalog.feature_schema", "metadata_catalog.metadata_schema") my_date = datetime.strptime("2020-01-01", "%Y-%m-%d").date() feature = feature_loader.get_feature(group_name="CustomerFeatures", feature_name="AGE", information_date=my_date) metadata = feature_loader.get_feature_metadata(group_name="CustomerFeatures", feature_name="AGE") ``` #### Feature group This method of data access is only recommended for experimentation, as the group schema can evolve over time. 
```python -from rialto.loader import DatabricksLoader, PysparkFeatureLoader +from rialto.loader import PysparkFeatureLoader from datetime import datetime -data_loader = DatabricksLoader(spark, "feature_catalog.feature_schema") -feature_loader = PysparkFeatureLoader(spark, data_loader, "metadata_catalog.metadata_schema") +feature_loader = PysparkFeatureLoader(spark, "feature_catalog.feature_schema", "metadata_catalog.metadata_schema") my_date = datetime.strptime("2020-01-01", "%Y-%m-%d").date() features = feature_loader.get_group(group_name="CustomerFeatures", information_date=my_date) metadata = feature_loader.get_group_metadata(group_name="CustomerFeatures") #### Configuration ```python -from rialto.loader import DatabricksLoader, PysparkFeatureLoader +from rialto.loader import PysparkFeatureLoader from datetime import datetime -data_loader = DatabricksLoader(spark, "feature_catalog.feature_schema") -feature_loader = PysparkFeatureLoader(spark, data_loader, "metadata_catalog.metadata_schema") +feature_loader = PysparkFeatureLoader(spark, "feature_catalog.feature_schema", "metadata_catalog.metadata_schema") my_date = datetime.strptime("2020-01-01", "%Y-%m-%d").date() features = feature_loader.get_features_from_cfg(path="local/configuration/file.yaml", information_date=my_date) @@ -563,6 +620,7 @@ reader = TableReader(spark=spark_instance) ``` usage of _get_table_: + ```python # get whole table df = reader.get_table(table="catalog.schema.table", date_column="information_date") # get table for specific date interval from datetime import datetime start = datetime.strptime("2020-01-01", "%Y-%m-%d").date() end = datetime.strptime("2024-01-01", "%Y-%m-%d").date() -df = reader.get_table(table="catalog.schema.table", info_date_from=start, info_date_to=end) +df = reader.get_table(table="catalog.schema.table", date_from=start, date_to=end, date_column="information_date") ``` usage of _get_latest_: + ```python # most recent partition df = reader.get_latest(table="catalog.schema.table", date_column="information_date") # most recent partition until until = datetime.strptime("2020-01-01", "%Y-%m-%d").date() -df = reader.get_latest(table="catalog.schema.table", until=until, date_column="information_date") +df = reader.get_latest(table="catalog.schema.table", date_until=until, date_column="information_date") ``` For full information on parameters and their optionality see technical documentation. _TableReader_ needs an active spark session and information about which column is the **date column**. There are three options for how to pass that information on. -In order of priority from highest: -* Explicit _date_column_ parameter in _get_table_ and _get_latest_ -```python -reader.get_latest(table="catalog.schema.table", date_column="information_date") -``` -* Inferred from delta metadata, triggered by init parameter, only works on delta tables (e.g. doesn't work on views) -```python -reader = TableReader(spark=spark_instance, infer_partition=True) -reader.get_latest(table="catalog.schema.table") -``` -* A custom sql property defined on the table containing the date column name, defaults to _rialto_date_column_ -```python -reader = TableReader(spark=spark_instance, date_property="rialto_date_column") -reader.get_latest(table="catalog.schema.table") -``` # 3. 
Contributing Contributing: diff --git a/poetry.lock b/poetry.lock index 0cb768b..66ca41b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -343,6 +343,20 @@ files = [ {file = "docutils-0.20.1.tar.gz", hash = "sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b"}, ] +[[package]] +name = "env-yaml" +version = "0.0.3" +description = "Provides a yaml loader which substitutes environment variables and supports defaults" +optional = false +python-versions = "*" +files = [ + {file = "env-yaml-0.0.3.tar.gz", hash = "sha256:b6b55b18c28fb623793137a8e55bd666d6483af7fd0162a41a62325ce662fda6"}, + {file = "env_yaml-0.0.3-py3-none-any.whl", hash = "sha256:f56723c8997bea1240bf634b9e29832714dd9745a42cbc2649f1238a6a576244"}, +] + +[package.dependencies] +pyyaml = ">=6.0" + [[package]] name = "exceptiongroup" version = "1.2.2" @@ -751,9 +765,9 @@ files = [ [package.dependencies] numpy = [ - {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, {version = ">=1.22.4", markers = "python_version < \"3.11\""}, {version = ">=1.23.2", markers = "python_version == \"3.11\""}, + {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, ] python-dateutil = ">=2.8.2" pytz = ">=2020.1" @@ -906,8 +920,8 @@ files = [ annotated-types = ">=0.4.0" pydantic-core = "2.20.1" typing-extensions = [ - {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, {version = ">=4.6.1", markers = "python_version < \"3.13\""}, + {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, ] [package.extras] @@ -1170,7 +1184,6 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -1178,16 +1191,8 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = 
"PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -1204,7 +1209,6 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -1212,7 +1216,6 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = 
"sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -1544,4 +1547,4 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", [metadata] lock-version = "2.0" python-versions = ">=3.10,<4.0" -content-hash = "243b1919c3e881039c2cd7b4e786f455b15a78872278050e7850e6a21c706c8e" +content-hash = "6e87c6539147b57b03fb983b28d15396c2eccfe95661805eda7d9f77602d1f58" diff --git a/pyproject.toml b/pyproject.toml index 8255885..5812612 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,7 +1,7 @@ [tool.poetry] -name = "rialto" +name = "rialto-dev" -version = "1.3.2" +version = "2.0.0" packages = [ { include = "rialto" }, @@ -31,6 +31,7 @@ pandas = "^2.1.0" flake8-broken-line = "^1.0.0" loguru = "^0.7.2" importlib-metadata = "^7.2.1" +env_yaml = "^0.0.3" [tool.poetry.dev-dependencies] pyspark = "^3.4.1" diff --git a/rialto/common/__init__.py b/rialto/common/__init__.py index 93e8922..1bd5055 100644 --- a/rialto/common/__init__.py +++ b/rialto/common/__init__.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -from rialto.common.table_reader import TableReader +from rialto.common.table_reader import DataReader, TableReader diff --git a/rialto/common/table_reader.py b/rialto/common/table_reader.py index 1aef614..d3926f2 100644 --- a/rialto/common/table_reader.py +++ b/rialto/common/table_reader.py @@ -21,8 +21,6 @@ import pyspark.sql.functions as F from pyspark.sql import DataFrame, SparkSession -from rialto.common.utils import get_date_col_property, get_delta_partition - class DataReader(metaclass=abc.ABCMeta): """ @@ -36,16 +34,15 @@ class DataReader(metaclass=abc.ABCMeta): def get_latest( self, table: str, - until: Optional[datetime.date] = None, - date_column: str = None, + date_column: str, + date_until: Optional[datetime.date] = None, uppercase_columns: bool = False, ) -> DataFrame: """ Get latest available date partition of the table until specified date :param table: input table path - :param until: Optional until date (inclusive) - :param date_column: column to filter dates on, takes highest priority + :param date_until: Optional until date (inclusive) :param uppercase_columns: Option to refactor all column names to uppercase :return: Dataframe """ @@ -55,18 +52,17 @@ def get_latest( def get_table( self, table: str, - info_date_from: Optional[datetime.date] = None, - info_date_to: Optional[datetime.date] = None, - date_column: str = None, + date_column: str, + date_from: Optional[datetime.date] = None, + date_to: Optional[datetime.date] = None, uppercase_columns: bool = False, ) -> DataFrame: """ Get a whole table or a slice by selected dates :param table: input table path - :param info_date_from: Optional date from (inclusive) - :param info_date_to: Optional date to (inclusive) - :param date_column: column to filter dates on, takes highest priority + :param date_from: Optional date from (inclusive) + :param date_to: Optional date to (inclusive) :param uppercase_columns: Option to refactor all column names to uppercase :return: Dataframe """ @@ -76,17 +72,13 @@ def get_table( class TableReader(DataReader): """An implementation of data reader for databricks tables""" - def __init__(self, spark: SparkSession, date_property: str = "rialto_date_column", infer_partition: bool = False): + def __init__(self, spark: SparkSession): """ Init :param spark: - :param date_property: Databricks table property specifying date column, take priority over inference - :param infer_partition: 
infer date column as tables partition from delta metadata """ self.spark = spark - self.date_property = date_property - self.infer_partition = infer_partition super().__init__() def _uppercase_column_names(self, df: DataFrame) -> DataFrame: @@ -106,41 +98,26 @@ def _get_latest_available_date(self, df: DataFrame, date_col: str, until: Option df = df.select(F.max(date_col)).alias("latest") return df.head()[0] - def _get_date_col(self, table: str, date_column: str): - """ - Get tables date column - - column specified at get_table/get_latest takes priority, if inference is enabled it - takes 2nd place, last resort is table property - """ - if date_column: - return date_column - elif self.infer_partition: - return get_delta_partition(self.spark, table) - else: - return get_date_col_property(self.spark, table, self.date_property) - def get_latest( self, table: str, - until: Optional[datetime.date] = None, - date_column: str = None, + date_column: str, + date_until: Optional[datetime.date] = None, uppercase_columns: bool = False, ) -> DataFrame: """ Get latest available date partition of the table until specified date :param table: input table path - :param until: Optional until date (inclusive) + :param date_until: Optional until date (inclusive) :param date_column: column to filter dates on, takes highest priority :param uppercase_columns: Option to refactor all column names to uppercase :return: Dataframe """ - date_col = self._get_date_col(table, date_column) df = self.spark.read.table(table) - selected_date = self._get_latest_available_date(df, date_col, until) - df = df.filter(F.col(date_col) == selected_date) + selected_date = self._get_latest_available_date(df, date_column, date_until) + df = df.filter(F.col(date_column) == selected_date) if uppercase_columns: df = self._uppercase_column_names(df) @@ -149,28 +126,27 @@ def get_latest( def get_table( self, table: str, - info_date_from: Optional[datetime.date] = None, - info_date_to: Optional[datetime.date] = None, - date_column: str = None, + date_column: str, + date_from: Optional[datetime.date] = None, + date_to: Optional[datetime.date] = None, uppercase_columns: bool = False, ) -> DataFrame: """ Get a whole table or a slice by selected dates :param table: input table path - :param info_date_from: Optional date from (inclusive) - :param info_date_to: Optional date to (inclusive) + :param date_from: Optional date from (inclusive) + :param date_to: Optional date to (inclusive) :param date_column: column to filter dates on, takes highest priority :param uppercase_columns: Option to refactor all column names to uppercase :return: Dataframe """ - date_col = self._get_date_col(table, date_column) df = self.spark.read.table(table) - if info_date_from: - df = df.filter(F.col(date_col) >= info_date_from) - if info_date_to: - df = df.filter(F.col(date_col) <= info_date_to) + if date_from: + df = df.filter(F.col(date_column) >= date_from) + if date_to: + df = df.filter(F.col(date_column) <= date_to) if uppercase_columns: df = self._uppercase_column_names(df) return df diff --git a/rialto/common/utils.py b/rialto/common/utils.py index c5527a8..b2e19b4 100644 --- a/rialto/common/utils.py +++ b/rialto/common/utils.py @@ -12,13 +12,14 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__all__ = ["load_yaml", "get_date_col_property", "get_delta_partition"] +__all__ = ["load_yaml"] import os from typing import Any import pyspark.sql.functions as F import yaml +from env_yaml import EnvLoader from pyspark.sql import DataFrame from pyspark.sql.types import FloatType @@ -34,46 +35,14 @@ def load_yaml(path: str) -> Any: raise FileNotFoundError(f"Can't find {path}.") with open(path, "r") as stream: - return yaml.safe_load(stream) - - -def get_date_col_property(spark, table: str, property: str) -> str: - """ - Retrieve a data column name from a given table property - - :param spark: spark session - :param table: path to table - :param property: name of the property - :return: data column name - """ - props = spark.sql(f"show tblproperties {table}") - date_col = props.filter(F.col("key") == property).select("value").collect() - if len(date_col): - return date_col[0].value - else: - raise RuntimeError(f"Table {table} has no property {property}.") - - -def get_delta_partition(spark, table: str) -> str: - """ - Select first partition column of the delta table - - :param table: full table name - :return: partition column name - """ - columns = spark.catalog.listColumns(table) - partition_columns = list(filter(lambda c: c.isPartition, columns)) - if len(partition_columns): - return partition_columns[0].name - else: - raise RuntimeError(f"Delta table has no partitions: {table}.") + return yaml.load(stream, EnvLoader) def cast_decimals_to_floats(df: DataFrame) -> DataFrame: """ Find all decimal types in the table and cast them to floats. Fixes errors in .toPandas() conversions. - :param df: pyspark DataFrame + :param df: input df :return: pyspark DataFrame with fixed types """ decimal_cols = [col_name for col_name, data_type in df.dtypes if "decimal" in data_type] diff --git a/rialto/jobs/__init__.py b/rialto/jobs/__init__.py index 79c3773..a6ee6cb 100644 --- a/rialto/jobs/__init__.py +++ b/rialto/jobs/__init__.py @@ -11,3 +11,5 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. + +from rialto.jobs.decorators import config, datasource, job diff --git a/rialto/jobs/configuration/config_holder.py b/rialto/jobs/configuration/config_holder.py deleted file mode 100644 index 161c61a..0000000 --- a/rialto/jobs/configuration/config_holder.py +++ /dev/null @@ -1,130 +0,0 @@ -# Copyright 2022 ABSA Group Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -__all__ = ["ConfigException", "FeatureStoreConfig", "ConfigHolder"] - -import datetime -import typing - -from pydantic import BaseModel - - -class ConfigException(Exception): - """Wrong Configuration Exception""" - - pass - - -class FeatureStoreConfig(BaseModel): - """Configuration of Feature Store Paths""" - - feature_store_schema: str = None - feature_metadata_schema: str = None - - -class ConfigHolder: - """ - Main Rialto Jobs config holder. 
- - Configured via job_runner and then called from job_base / job decorators. - """ - - _config = {} - _dependencies = {} - _run_date = None - _feature_store_config: FeatureStoreConfig = None - - @classmethod - def set_run_date(cls, run_date: datetime.date) -> None: - """ - Inicialize run Date - - :param run_date: datetime.date, run date - :return: None - """ - cls._run_date = run_date - - @classmethod - def get_run_date(cls) -> datetime.date: - """ - Run date - - :return: datetime.date, Run date - """ - if cls._run_date is None: - raise ConfigException("Run Date not Set !") - return cls._run_date - - @classmethod - def set_feature_store_config(cls, feature_store_schema: str, feature_metadata_schema: str) -> None: - """ - Inicialize feature store config - - :param feature_store_schema: str, schema name - :param feature_metadata_schema: str, metadata schema name - :return: None - """ - cls._feature_store_config = FeatureStoreConfig( - feature_store_schema=feature_store_schema, feature_metadata_schema=feature_metadata_schema - ) - - @classmethod - def get_feature_store_config(cls) -> FeatureStoreConfig: - """ - Feature Store Config - - :return: FeatureStoreConfig - """ - if cls._feature_store_config is None: - raise ConfigException("Feature Store Config not Set !") - - return cls._feature_store_config - - @classmethod - def get_config(cls) -> typing.Dict: - """ - Get config dictionary - - :return: dictionary of key-value pairs - """ - return cls._config.copy() - - @classmethod - def set_custom_config(cls, **kwargs) -> None: - """ - Set custom key-value pairs for custom config - - :param kwargs: key-value pairs to setup - :return: None - """ - cls._config.update(kwargs) - - @classmethod - def get_dependency_config(cls) -> typing.Dict: - """ - Get rialto job dependency config - - :return: dictionary with dependency config - """ - return cls._dependencies - - @classmethod - def set_dependency_config(cls, dependencies: typing.Dict) -> None: - """ - Get rialto job dependency config - - :param dependencies: dictionary with the config - :return: None - """ - cls._dependencies = dependencies diff --git a/rialto/jobs/decorators/__init__.py b/rialto/jobs/decorators/__init__.py index ba62141..6f2713a 100644 --- a/rialto/jobs/decorators/__init__.py +++ b/rialto/jobs/decorators/__init__.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -from .decorators import datasource, job +from .decorators import config, datasource, job diff --git a/rialto/jobs/decorators/decorators.py b/rialto/jobs/decorators/decorators.py index f900726..d288b7b 100644 --- a/rialto/jobs/decorators/decorators.py +++ b/rialto/jobs/decorators/decorators.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -__all__ = ["datasource", "job"] +__all__ = ["datasource", "job", "config"] import inspect import typing @@ -24,6 +24,20 @@ from rialto.jobs.decorators.resolver import Resolver +def config(ds_getter: typing.Callable) -> typing.Callable: + """ + Config parser functions decorator. + + Registers a config parsing function as a rialto job prerequisite. + You can then request the parsed config via the job function's arguments. + + :param ds_getter: config parser function + :return: raw parser function, unchanged + """ + Resolver.register_callable(ds_getter) + return ds_getter + + def datasource(ds_getter: typing.Callable) -> typing.Callable: """ Dataset reader functions decorator.
@@ -77,14 +91,14 @@ def job(*args, custom_name=None, disable_version=False): """ Rialto jobs decorator. - Transforms a python function into a rialto transormation, which can be imported and ran by Rialto Runner. + Transforms a python function into a rialto transformation, which can be imported and run by Rialto Runner. It is mainly used as plain @job, in which case the function's name is used and the outputs are handled automatically. To override this behavior, use @job(custom_name=XXX, disable_version=True). :param *args: list of positional arguments. Empty in case custom_name or disable_version is specified. :param custom_name: str for custom job name. - :param disable_version: bool for disabling autofilling the VERSION column in the job's outputs. + :param disable_version: bool for disabling automatically filling the VERSION column in the job's outputs. :return: One more job wrapper for the run function (if custom name or version override specified). Otherwise, generates Rialto Transformation Type and returns it for in-module registration. """ @@ -93,7 +107,7 @@ def job(*args, custom_name=None, disable_version=False): module = _get_module(stack) version = _get_version(module) - # Use case where it's just raw @f. Otherwise we get [] here. + # Use case where it's just raw @f. Otherwise, we get [] here. if len(args) == 1 and callable(args[0]): f = args[0] return _generate_rialto_job(callable=f, module=module, class_name=f.__name__, version=version) diff --git a/rialto/jobs/decorators/job_base.py b/rialto/jobs/decorators/job_base.py index 9e3ecc8..d91537f 100644 --- a/rialto/jobs/decorators/job_base.py +++ b/rialto/jobs/decorators/job_base.py @@ -24,11 +24,11 @@ from pyspark.sql import DataFrame, SparkSession from rialto.common import TableReader -from rialto.jobs.configuration.config_holder import ConfigHolder from rialto.jobs.decorators.resolver import Resolver -from rialto.loader import DatabricksLoader, PysparkFeatureLoader +from rialto.loader import PysparkFeatureLoader from rialto.metadata import MetadataManager from rialto.runner import Transformation +from rialto.runner.config_loader import PipelineConfig class JobBase(Transformation): @@ -53,12 +53,14 @@ def get_job_name(self) -> str: def _setup_resolver(self, run_date: datetime.date) -> None: Resolver.register_callable(lambda: run_date, "run_date") - Resolver.register_callable(ConfigHolder.get_config, "config") - Resolver.register_callable(ConfigHolder.get_dependency_config, "dependencies") - - Resolver.register_callable(self._get_spark, "spark") Resolver.register_callable(self._get_table_reader, "table_reader") - Resolver.register_callable(self._get_feature_loader, "feature_loader") + Resolver.register_callable(self._get_config, "config") + + if self._get_feature_loader() is not None: + Resolver.register_callable(self._get_feature_loader, "feature_loader") + if self._get_metadata_manager() is not None: + Resolver.register_callable(self._get_metadata_manager, "metadata_manager") try: yield @@ -66,13 +68,18 @@ def _setup_resolver(self, run_date: datetime.date) -> None: Resolver.cache_clear() def _setup( - self, spark: SparkSession, run_date: datetime.date, table_reader: TableReader, dependencies: typing.Dict = None + self, + spark: SparkSession, + table_reader: TableReader, + config: PipelineConfig = None, + metadata_manager: MetadataManager = None, + feature_loader: PysparkFeatureLoader = None, ) -> None: self._spark = spark self._table_rader = table_reader - - ConfigHolder.set_dependency_config(dependencies) - ConfigHolder.set_run_date(run_date) + self._config = 
config + self._metadata = metadata_manager + self._feature_loader = feature_loader def _get_spark(self) -> SparkSession: return self._spark @@ -80,13 +87,14 @@ def _get_spark(self) -> SparkSession: def _get_table_reader(self) -> TableReader: return self._table_rader - def _get_feature_loader(self) -> PysparkFeatureLoader: - config = ConfigHolder.get_feature_store_config() + def _get_config(self) -> PipelineConfig: + return self._config - databricks_loader = DatabricksLoader(self._spark, config.feature_store_schema) - feature_loader = PysparkFeatureLoader(self._spark, databricks_loader, config.feature_metadata_schema) + def _get_feature_loader(self) -> PysparkFeatureLoader: + return self._feature_loader - return feature_loader + def _get_metadata_manager(self) -> MetadataManager: + return self._metadata def _get_timestamp_holder_result(self) -> DataFrame: spark = self._get_spark() @@ -118,8 +126,9 @@ def run( reader: TableReader, run_date: datetime.date, spark: SparkSession = None, + config: PipelineConfig = None, metadata_manager: MetadataManager = None, - dependencies: typing.Dict = None, + feature_loader: PysparkFeatureLoader = None, ) -> DataFrame: """ Rialto transformation run @@ -127,12 +136,11 @@ def run( :param reader: data store api object :param info_date: date :param spark: spark session - :param metadata_manager: metadata api object - :param dependencies: rialto job dependencies + :param config: pipeline config :return: dataframe """ try: - self._setup(spark, run_date, reader, dependencies) + self._setup(spark, reader, config, metadata_manager, feature_loader) return self._run_main_callable(run_date) except Exception as e: logger.exception(e) diff --git a/rialto/jobs/decorators/resolver.py b/rialto/jobs/decorators/resolver.py index 9f90e5a..26856d1 100644 --- a/rialto/jobs/decorators/resolver.py +++ b/rialto/jobs/decorators/resolver.py @@ -30,7 +30,7 @@ class Resolver: Resolver handles dependency management between datasets and jobs. We register different callables, which can depend on other callables. - Calling resolve() we attempts to resolve these dependencies. + Calling resolve() we attempt to resolve these dependencies. """ _storage = {} @@ -101,7 +101,7 @@ def cache_clear(cls) -> None: """ Clear resolver cache. - The resolve mehtod caches its results to avoid duplication of resolutions. + The resolve method caches its results to avoid duplication of resolutions. However, in case we re-register some callables, we need to clear cache in order to ensure re-execution of all resolutions. 
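Taken together, the decorator, resolver, and job-base changes above let a job declare its inputs purely by argument name. The following sketch is illustrative only and is not part of the patch: the module layout, table name, column names, and the "target_currency" extras key are hypothetical, while the decorators and the injected names (run_date, table_reader, config) follow the code above.

```python
from pyspark.sql import DataFrame

from rialto.jobs import config, datasource, job


@config
def target_currency(config):
    # "config" resolves to the pipeline's PipelineConfig; this parser exposes
    # one value from its free-form "extras" section (hypothetical key).
    return config.extras["target_currency"]


@datasource
def accounts(table_reader, run_date) -> DataFrame:
    # "table_reader" and "run_date" are registered by JobBase._setup_resolver
    # and injected here by the Resolver.
    return table_reader.get_latest(
        "catalog.schema.accounts", date_column="INFORMATION_DATE", date_until=run_date
    )


@job
def account_features(accounts: DataFrame, target_currency: str) -> DataFrame:
    # Both arguments are resolved by name: the datasource above and the
    # parsed config value are injected automatically.
    return accounts.filter(accounts.CURRENCY == target_currency)
```

A module written this way can then be unit-tested with disable_job_decorators, as in the testing example earlier in the README.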
diff --git a/rialto/jobs/decorators/test_utils.py b/rialto/jobs/decorators/test_utils.py index 5465d6e..39d76ce 100644 --- a/rialto/jobs/decorators/test_utils.py +++ b/rialto/jobs/decorators/test_utils.py @@ -17,9 +17,10 @@ import importlib import typing from contextlib import contextmanager -from unittest.mock import patch, create_autospec, MagicMock -from rialto.jobs.decorators.resolver import Resolver, ResolverException +from unittest.mock import MagicMock, create_autospec, patch + from rialto.jobs.decorators.job_base import JobBase +from rialto.jobs.decorators.resolver import Resolver, ResolverException def _passthrough_decorator(*args, **kwargs) -> typing.Callable: @@ -34,6 +35,8 @@ def _disable_job_decorators() -> None: patches = [ patch("rialto.jobs.decorators.datasource", _passthrough_decorator), patch("rialto.jobs.decorators.decorators.datasource", _passthrough_decorator), + patch("rialto.jobs.decorators.config", _passthrough_decorator), + patch("rialto.jobs.decorators.decorators.config", _passthrough_decorator), patch("rialto.jobs.decorators.job", _passthrough_decorator), patch("rialto.jobs.decorators.decorators.job", _passthrough_decorator), ] diff --git a/rialto/loader/__init__.py b/rialto/loader/__init__.py index 7adc52d..7e1e936 100644 --- a/rialto/loader/__init__.py +++ b/rialto/loader/__init__.py @@ -12,5 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -from rialto.loader.data_loader import DatabricksLoader from rialto.loader.pyspark_feature_loader import PysparkFeatureLoader diff --git a/rialto/loader/data_loader.py b/rialto/loader/data_loader.py deleted file mode 100644 index 930c2b0..0000000 --- a/rialto/loader/data_loader.py +++ /dev/null @@ -1,45 +0,0 @@ -# Copyright 2022 ABSA Group Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -__all__ = ["DatabricksLoader"] - -from datetime import date - -from pyspark.sql import DataFrame, SparkSession - -from rialto.common.table_reader import TableReader -from rialto.loader.interfaces import DataLoader - - -class DatabricksLoader(DataLoader): - """Implementation of DataLoader using TableReader to access feature tables""" - - def __init__(self, spark: SparkSession, schema: str, date_column: str = "INFORMATION_DATE"): - super().__init__() - - self.reader = TableReader(spark) - self.schema = schema - self.date_col = date_column - - def read_group(self, group: str, information_date: date) -> DataFrame: - """ - Read a feature group by getting the latest partition by date - - :param group: group name - :param information_date: partition date - :return: dataframe - """ - return self.reader.get_latest( - f"{self.schema}.{group}", until=information_date, date_column=self.date_col, uppercase_columns=True - ) diff --git a/rialto/loader/interfaces.py b/rialto/loader/interfaces.py index dad08e6..9089f40 100644 --- a/rialto/loader/interfaces.py +++ b/rialto/loader/interfaces.py @@ -12,31 +12,13 @@ # See the License for the specific language governing permissions and # limitations under the License. -__all__ = ["DataLoader", "FeatureLoaderInterface"] +__all__ = ["FeatureLoaderInterface"] import abc from datetime import date from typing import Dict -class DataLoader(metaclass=abc.ABCMeta): - """ - An interface to read feature groups from storage - - Requires read_group function. - """ - - @abc.abstractmethod - def read_group(self, group: str, information_date: date): - """ - Read one feature group - - :param group: Group name - :param information_date: date - """ - raise NotImplementedError - - class FeatureLoaderInterface(metaclass=abc.ABCMeta): """ A definition of feature loading interface diff --git a/rialto/loader/pyspark_feature_loader.py b/rialto/loader/pyspark_feature_loader.py index d0eef20..7ee78fc 100644 --- a/rialto/loader/pyspark_feature_loader.py +++ b/rialto/loader/pyspark_feature_loader.py @@ -20,9 +20,9 @@ from pyspark.sql import DataFrame, SparkSession +from rialto.common import TableReader from rialto.common.utils import cast_decimals_to_floats from rialto.loader.config_loader import FeatureConfig, GroupConfig, get_feature_config -from rialto.loader.data_loader import DataLoader from rialto.loader.interfaces import FeatureLoaderInterface from rialto.metadata.metadata_manager import ( FeatureMetadata, @@ -34,7 +34,13 @@ class PysparkFeatureLoader(FeatureLoaderInterface): """Implementation of feature loader for pyspark environment""" - def __init__(self, spark: SparkSession, data_loader: DataLoader, metadata_schema: str): + def __init__( + self, + spark: SparkSession, + feature_schema: str, + metadata_schema: str, + date_column: str = "INFORMATION_DATE", + ): """ Init @@ -44,11 +50,28 @@ def __init__(self, spark: SparkSession, data_loader: DataLoader, metadata_schema """ super().__init__() self.spark = spark - self.data_loader = data_loader + self.reader = TableReader(spark) + self.feature_schema = feature_schema + self.date_col = date_column self.metadata = MetadataManager(spark, metadata_schema) KeyMap = namedtuple("KeyMap", ["df", "key"]) + def read_group(self, group: str, information_date: date) -> DataFrame: + """ + Read a feature group by getting the latest partition by date + + :param group: group name + :param information_date: partition date + :return: dataframe + """ + return self.reader.get_latest( + f"{self.feature_schema}.{group}", + date_until=information_date, 
+ date_column=self.date_col, + uppercase_columns=True, + ) + def get_feature(self, group_name: str, feature_name: str, information_date: date) -> DataFrame: """ Get single feature @@ -60,9 +83,7 @@ def get_feature(self, group_name: str, feature_name: str, information_date: date """ print("This function is untested, use with caution!") key = self.get_group_metadata(group_name).key - return self.data_loader.read_group(self.get_group_fs_name(group_name), information_date).select( - *key, feature_name - ) + return self.read_group(self.get_group_fs_name(group_name), information_date).select(*key, feature_name) def get_feature_metadata(self, group_name: str, feature_name: str) -> FeatureMetadata: """ @@ -83,7 +104,7 @@ def get_group(self, group_name: str, information_date: date) -> DataFrame: :return: A dataframe containing feature group key """ print("This function is untested, use with caution!") - return self.data_loader.read_group(self.get_group_fs_name(group_name), information_date) + return self.read_group(self.get_group_fs_name(group_name), information_date) def get_group_metadata(self, group_name: str) -> GroupMetadata: """ @@ -144,7 +165,7 @@ def _get_keymaps(self, config: FeatureConfig, information_date: date) -> List[Ke """ key_maps = [] for mapping in config.maps: - df = self.data_loader.read_group(self.get_group_fs_name(mapping), information_date).drop("INFORMATION_DATE") + df = self.read_group(self.get_group_fs_name(mapping), information_date).drop("INFORMATION_DATE") key = self.metadata.get_group(mapping).key key_maps.append(PysparkFeatureLoader.KeyMap(df, key)) return key_maps @@ -174,9 +195,7 @@ def get_features_from_cfg(self, path: str, information_date: date) -> DataFrame: """ config = get_feature_config(path) # 1 select keys from base - base = self.data_loader.read_group(self.get_group_fs_name(config.base.group), information_date).select( - config.base.keys - ) + base = self.read_group(self.get_group_fs_name(config.base.group), information_date).select(config.base.keys) # 2 join maps onto base (resolve keys) if config.maps: key_maps = self._get_keymaps(config, information_date) @@ -184,7 +203,7 @@ def get_features_from_cfg(self, path: str, information_date: date) -> DataFrame: # 3 read, select and join other tables for group_cfg in config.selection: - df = self.data_loader.read_group(self.get_group_fs_name(group_cfg.group), information_date) + df = self.read_group(self.get_group_fs_name(group_cfg.group), information_date) base = self._add_feature_group(base, df, group_cfg) # 4 fix dtypes for pandas conversion diff --git a/rialto/runner/config_loader.py b/rialto/runner/config_loader.py index af6640b..86c142d 100644 --- a/rialto/runner/config_loader.py +++ b/rialto/runner/config_loader.py @@ -12,13 +12,16 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__all__ = ["get_pipelines_config", "transform_dependencies"] +__all__ = [ + "get_pipelines_config", +] -from typing import Dict, List, Optional, Union +from typing import Dict, List, Optional from pydantic import BaseModel from rialto.common.utils import load_yaml +from rialto.runner.config_overrides import override_config class IntervalConfig(BaseModel): @@ -29,13 +32,13 @@ class IntervalConfig(BaseModel): class ScheduleConfig(BaseModel): frequency: str day: Optional[int] = 0 - info_date_shift: Union[Optional[IntervalConfig], List[IntervalConfig]] = IntervalConfig(units="days", value=0) + info_date_shift: Optional[List[IntervalConfig]] = IntervalConfig(units="days", value=0) class DependencyConfig(BaseModel): table: str name: Optional[str] = None - date_col: Optional[str] = None + date_col: str interval: IntervalConfig @@ -52,37 +55,47 @@ class MailConfig(BaseModel): sent_empty: Optional[bool] = False -class GeneralConfig(BaseModel): - target_schema: str - target_partition_column: str - source_date_column_property: Optional[str] = None +class RunnerConfig(BaseModel): watched_period_units: str watched_period_value: int - job: str mail: MailConfig +class TargetConfig(BaseModel): + target_schema: str + target_partition_column: str + + +class MetadataManagerConfig(BaseModel): + metadata_schema: str + + +class FeatureLoaderConfig(BaseModel): + feature_schema: str + metadata_schema: str + + class PipelineConfig(BaseModel): name: str - module: Optional[ModuleConfig] = None + module: ModuleConfig schedule: ScheduleConfig - dependencies: List[DependencyConfig] = [] + dependencies: Optional[List[DependencyConfig]] = [] + target: TargetConfig = None + metadata_manager: Optional[MetadataManagerConfig] = None + feature_loader: Optional[FeatureLoaderConfig] = None + extras: Optional[Dict] = {} class PipelinesConfig(BaseModel): - general: GeneralConfig + runner: RunnerConfig pipelines: list[PipelineConfig] -def get_pipelines_config(path) -> PipelinesConfig: +def get_pipelines_config(path: str, overrides: Dict) -> PipelinesConfig: """Load and parse yaml config""" - return PipelinesConfig(**load_yaml(path)) - - -def transform_dependencies(dependencies: List[DependencyConfig]) -> Dict: - """Transform dependency config list into a dictionary""" - res = {} - for dep in dependencies: - if dep.name: - res[dep.name] = dep - return res + raw_config = load_yaml(path) + if overrides: + cfg = override_config(raw_config, overrides) + return PipelinesConfig(**cfg) + else: + return PipelinesConfig(**raw_config) diff --git a/rialto/runner/config_overrides.py b/rialto/runner/config_overrides.py new file mode 100644 index 0000000..a525525 --- /dev/null +++ b/rialto/runner/config_overrides.py @@ -0,0 +1,76 @@ +# Copyright 2022 ABSA Group Limited +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +__all__ = ["override_config"] + +from typing import Dict, List, Tuple + +from loguru import logger + + +def _split_index_key(key: str) -> Tuple[str, str]: + name = key.split("[")[0] + index = key.split("[")[1].replace("]", "") + return name, index + + +def _find_first_match(config: List, index: str) -> int: + index_key, index_value = index.split("=") + return next(i for i, x in enumerate(config) if x.get(index_key) == index_value) + + +def _override(config, path, value) -> Dict: + key = path[0] + if "[" in key: + name, index = _split_index_key(key) + if name not in config: + raise ValueError(f"Invalid key {name}") + if "=" in index: + index = _find_first_match(config[name], index) + else: + index = int(index) + if index >= 0 and index < len(config[name]): + if len(path) == 1: + config[name][index] = value + else: + config[name][index] = _override(config[name][index], path[1:], value) + elif index == -1: + if len(path) == 1: + config[name].append(value) + else: + raise ValueError(f"Invalid index {index} for key {name} in path {path}") + else: + raise IndexError(f"Index {index} out of bounds for key {key}") + else: + if len(path) == 1: + config[key] = value + else: + if key not in config: + raise ValueError(f"Invalid key {key}") + config[key] = _override(config[key], path[1:], value) + return config + + +def override_config(config: Dict, overrides: Dict) -> Dict: + """Override config with user input + + :param config: config dictionary + :param overrides: dictionary of overrides + :return: Overridden config + """ + for path, value in overrides.items(): + logger.info("Applying override: ", path, value) + config = _override(config, path.split("."), value) + + return config diff --git a/rialto/runner/runner.py b/rialto/runner/runner.py index 343d2fe..ac9d6bc 100644 --- a/rialto/runner/runner.py +++ b/rialto/runner/runner.py @@ -16,25 +16,15 @@ import datetime from datetime import date -from importlib import import_module -from typing import List, Tuple +from typing import Dict, List, Tuple import pyspark.sql.functions as F from loguru import logger from pyspark.sql import DataFrame, SparkSession +import rialto.runner.utils as utils from rialto.common import TableReader -from rialto.common.utils import get_date_col_property, get_delta_partition -from rialto.jobs.configuration.config_holder import ConfigHolder -from rialto.metadata import MetadataManager -from rialto.runner.config_loader import ( - DependencyConfig, - ModuleConfig, - PipelineConfig, - ScheduleConfig, - get_pipelines_config, - transform_dependencies, -) +from rialto.runner.config_loader import PipelineConfig, get_pipelines_config from rialto.runner.date_manager import DateManager from rialto.runner.table import Table from rialto.runner.tracker import Record, Tracker @@ -48,100 +38,60 @@ def __init__( self, spark: SparkSession, config_path: str, - feature_metadata_schema: str = None, run_date: str = None, - date_from: str = None, - date_until: str = None, - feature_store_schema: str = None, - custom_job_config: dict = None, rerun: bool = False, op: str = None, + skip_dependencies: bool = False, + overrides: Dict = None, ): self.spark = spark - self.config = get_pipelines_config(config_path) - self.reader = TableReader( - spark, date_property=self.config.general.source_date_column_property, infer_partition=False - ) - if feature_metadata_schema: - self.metadata = MetadataManager(spark, feature_metadata_schema) - else: - self.metadata = None - self.date_from = date_from - self.date_until = date_until + self.config = 
get_pipelines_config(config_path, overrides) + self.reader = TableReader(spark) self.rerun = rerun + self.skip_dependencies = skip_dependencies self.op = op - self.tracker = Tracker(self.config.general.target_schema) - - if (feature_store_schema is not None) and (feature_metadata_schema is not None): - ConfigHolder.set_feature_store_config(feature_store_schema, feature_metadata_schema) - - if custom_job_config is not None: - ConfigHolder.set_custom_config(**custom_job_config) + self.tracker = Tracker() if run_date: run_date = DateManager.str_to_date(run_date) else: run_date = date.today() - if self.date_from: - self.date_from = DateManager.str_to_date(date_from) - if self.date_until: - self.date_until = DateManager.str_to_date(date_until) - - if not self.date_from: - self.date_from = DateManager.date_subtract( - run_date=run_date, - units=self.config.general.watched_period_units, - value=self.config.general.watched_period_value, - ) - if not self.date_until: - self.date_until = run_date + + self.date_from = DateManager.date_subtract( + run_date=run_date, + units=self.config.runner.watched_period_units, + value=self.config.runner.watched_period_value, + ) + + self.date_until = run_date + if self.date_from > self.date_until: raise ValueError(f"Invalid date range from {self.date_from} until {self.date_until}") logger.info(f"Running period from {self.date_from} until {self.date_until}") - def _load_module(self, cfg: ModuleConfig) -> Transformation: - """ - Load feature group - - :param cfg: Feature configuration - :return: Transformation object - """ - module = import_module(cfg.python_module) - class_obj = getattr(module, cfg.python_class) - return class_obj() - - def _generate( - self, instance: Transformation, run_date: date, dependencies: List[DependencyConfig] = None - ) -> DataFrame: + def _execute(self, instance: Transformation, run_date: date, pipeline: PipelineConfig) -> DataFrame: """ - Run feature group + Run the job :param instance: Instance of Transformation :param run_date: date to run for + :param pipeline: pipeline configuration :return: Dataframe """ - if dependencies is not None: - dependencies = transform_dependencies(dependencies) + metadata_manager, feature_loader = utils.init_tools(self.spark, pipeline) + df = instance.run( - reader=self.reader, - run_date=run_date, spark=self.spark, - metadata_manager=self.metadata, - dependencies=dependencies, + run_date=run_date, + config=pipeline, + reader=self.reader, + metadata_manager=metadata_manager, + feature_loader=feature_loader, ) logger.info(f"Generated {df.count()} records") return df - def _table_exists(self, table: str) -> bool: - """ - Check table exists in spark catalog - - :param table: full table path - :return: bool - """ - return self.spark.catalog.tableExists(table) - def _write(self, df: DataFrame, info_date: date, table: Table) -> None: """ Write dataframe to storage @@ -155,44 +105,6 @@ def _write(self, df: DataFrame, info_date: date, table: Table) -> None: df.write.partitionBy(table.partition).mode("overwrite").saveAsTable(table.get_table_path()) logger.info(f"Results writen to {table.get_table_path()}") - try: - get_date_col_property(self.spark, table.get_table_path(), "rialto_date_column") - except RuntimeError: - sql_query = ( - f"ALTER TABLE {table.get_table_path()} SET TBLPROPERTIES ('rialto_date_column' = '{table.partition}')" - ) - self.spark.sql(sql_query) - logger.info(f"Set table property rialto_date_column to {table.partition}") - - def _delta_partition(self, table: str) -> str: - """ - Select first 
partition column, should be only one - - :param table: full table name - :return: partition column name - """ - columns = self.spark.catalog.listColumns(table) - partition_columns = list(filter(lambda c: c.isPartition, columns)) - if len(partition_columns): - return partition_columns[0].name - else: - raise RuntimeError(f"Delta table has no partitions: {table}.") - - def _get_partitions(self, table: Table) -> List[date]: - """ - Get partition values - - :param table: Table object - :return: List of partition values - """ - rows = ( - self.reader.get_table(table.get_table_path(), date_column=table.partition) - .select(table.partition) - .distinct() - .collect() - ) - return [r[table.partition] for r in rows] - def check_dates_have_partition(self, table: Table, dates: List[date]) -> List[bool]: """ For given list of dates, check if there is a matching partition for each @@ -201,8 +113,8 @@ def check_dates_have_partition(self, table: Table, dates: List[date]) -> List[bo :param dates: list of dates to check :return: list of bool """ - if self._table_exists(table.get_table_path()): - partitions = self._get_partitions(table) + if utils.table_exists(self.spark, table.get_table_path()): + partitions = utils.get_partitions(self.reader, table) return [(date in partitions) for date in dates] else: logger.info(f"Table {table.get_table_path()} doesn't exist!") @@ -226,18 +138,9 @@ def check_dependencies(self, pipeline: PipelineConfig, run_date: date) -> bool: possible_dep_dates = DateManager.all_dates(dep_from, run_date) - # date column options prioritization (manual column, table property, inferred from delta) - if dependency.date_col: - date_col = dependency.date_col - elif self.config.general.source_date_column_property: - date_col = get_date_col_property( - self.spark, dependency.table, self.config.general.source_date_column_property - ) - else: - date_col = get_delta_partition(self.spark, dependency.table) - logger.debug(f"Date column for {dependency.table} is {date_col}") + logger.debug(f"Date column for {dependency.table} is {dependency.date_col}") - source = Table(table_path=dependency.table, partition=date_col) + source = Table(table_path=dependency.table, partition=dependency.date_col) if True in self.check_dates_have_partition(source, possible_dep_dates): logger.info(f"Dependency for {dependency.table} from {dep_from} until {run_date} is fulfilled") else: @@ -251,25 +154,6 @@ def check_dependencies(self, pipeline: PipelineConfig, run_date: date) -> bool: return True - def get_possible_run_dates(self, schedule: ScheduleConfig) -> List[date]: - """ - List possible run dates according to parameters and config - - :param schedule: schedule config - :return: List of dates - """ - return DateManager.run_dates(self.date_from, self.date_until, schedule) - - def get_info_dates(self, schedule: ScheduleConfig, run_dates: List[date]) -> List[date]: - """ - Transform given dates into info dates according to the config - - :param schedule: schedule config - :param run_dates: date list - :return: list of modified dates - """ - return [DateManager.to_info_date(x, schedule) for x in run_dates] - def _get_completion(self, target: Table, info_dates: List[date]) -> List[bool]: """ Check if model has run for given dates @@ -291,8 +175,8 @@ def _select_run_dates(self, pipeline: PipelineConfig, table: Table) -> Tuple[Lis :param table: table path :return: list of run dates and list of info dates """ - possible_run_dates = self.get_possible_run_dates(pipeline.schedule) - possible_info_dates = 
self.get_info_dates(pipeline.schedule, possible_run_dates) + possible_run_dates = DateManager.run_dates(self.date_from, self.date_until, pipeline.schedule) + possible_info_dates = [DateManager.to_info_date(x, pipeline.schedule) for x in possible_run_dates] current_state = self._get_completion(table, possible_info_dates) selection = [ @@ -318,18 +202,17 @@ def _run_one_date(self, pipeline: PipelineConfig, run_date: date, info_date: dat :param target: target Table :return: success bool """ - if self.check_dependencies(pipeline, run_date): + if self.skip_dependencies or self.check_dependencies(pipeline, run_date): logger.info(f"Running {pipeline.name} for {run_date}") - if self.config.general.job == "run": - feature_group = self._load_module(pipeline.module) - df = self._generate(feature_group, run_date, pipeline.dependencies) - records = df.count() - if records > 0: - self._write(df, info_date, target) - return records - else: - raise RuntimeError("No records generated") + feature_group = utils.load_module(pipeline.module) + df = self._execute(feature_group, run_date, pipeline) + records = df.count() + if records > 0: + self._write(df, info_date, target) + return records + else: + raise RuntimeError("No records generated") return 0 def _run_pipeline(self, pipeline: PipelineConfig): @@ -340,9 +223,9 @@ def _run_pipeline(self, pipeline: PipelineConfig): :return: success bool """ target = Table( - schema_path=self.config.general.target_schema, + schema_path=pipeline.target.target_schema, class_name=pipeline.module.python_class, - partition=self.config.general.target_partition_column, + partition=pipeline.target.target_partition_column, ) logger.info(f"Loaded pipeline {pipeline.name}") @@ -371,8 +254,8 @@ def _run_pipeline(self, pipeline: PipelineConfig): ) ) except Exception as error: - print(f"An exception occurred in pipeline {pipeline.name}") - print(error) + logger.error(f"An exception occurred in pipeline {pipeline.name}") + logger.error(error) self.tracker.add( Record( job=pipeline.name, @@ -386,7 +269,7 @@ def _run_pipeline(self, pipeline: PipelineConfig): ) ) except KeyboardInterrupt: - print(f"Pipeline {pipeline.name} interrupted") + logger.error(f"Pipeline {pipeline.name} interrupted") self.tracker.add( Record( job=pipeline.name, @@ -413,4 +296,4 @@ def __call__(self): self._run_pipeline(pipeline) finally: print(self.tracker.records) - self.tracker.report(self.config.general.mail) + self.tracker.report(self.config.runner.mail) diff --git a/rialto/runner/tracker.py b/rialto/runner/tracker.py index de97fb0..57a24e6 100644 --- a/rialto/runner/tracker.py +++ b/rialto/runner/tracker.py @@ -41,8 +41,7 @@ class Record: class Tracker: """Collect information about runs and sent them out via email""" - def __init__(self, target_schema: str): - self.target_schema = target_schema + def __init__(self): self.records = [] self.last_error = None self.pipeline_start = datetime.now() @@ -55,7 +54,7 @@ def add(self, record: Record) -> None: def report(self, mail_cfg: MailConfig): """Create and send html report""" if len(self.records) or mail_cfg.sent_empty: - report = HTMLMessage.make_report(self.target_schema, self.pipeline_start, self.records) + report = HTMLMessage.make_report(self.pipeline_start, self.records) for receiver in mail_cfg.to: message = Mailer.create_message( subject=mail_cfg.subject, sender=mail_cfg.sender, receiver=receiver, body=report @@ -118,7 +117,7 @@ def _make_overview_header(): """ @staticmethod - def _make_header(target: str, start: datetime): + def _make_header(start: 
datetime):
        return f"""
@@ -127,7 +126,7 @@ def _make_header(target: str, start: datetime):
- Jobs started {str(start).split('.')[0]}, targeting {target}
+ Jobs started {str(start).split('.')[0]}
@@ -228,14 +227,14 @@ def _make_insights(records: List[Record]): """ @staticmethod - def make_report(target: str, start: datetime, records: List[Record]) -> str: + def make_report(start: datetime, records: List[Record]) -> str: """Create html email report""" html = [ """ """, HTMLMessage._head(), HTMLMessage._body_open(), - HTMLMessage._make_header(target, start), + HTMLMessage._make_header(start), HTMLMessage._make_overview(records), HTMLMessage._make_insights(records), HTMLMessage._body_close(), diff --git a/rialto/runner/transformation.py b/rialto/runner/transformation.py index 4399ce0..5b6f2eb 100644 --- a/rialto/runner/transformation.py +++ b/rialto/runner/transformation.py @@ -16,12 +16,13 @@ import abc import datetime -from typing import Dict from pyspark.sql import DataFrame, SparkSession -from rialto.common import TableReader +from rialto.common import DataReader +from rialto.loader import PysparkFeatureLoader from rialto.metadata import MetadataManager +from rialto.runner.config_loader import PipelineConfig class Transformation(metaclass=abc.ABCMeta): @@ -30,11 +31,12 @@ class Transformation(metaclass=abc.ABCMeta): @abc.abstractmethod def run( self, - reader: TableReader, + reader: DataReader, run_date: datetime.date, spark: SparkSession = None, + config: PipelineConfig = None, metadata_manager: MetadataManager = None, - dependencies: Dict = None, + feature_loader: PysparkFeatureLoader = None, ) -> DataFrame: """ Run the transformation @@ -42,8 +44,9 @@ def run( :param reader: data store api object :param run_date: date :param spark: spark session - :param metadata_manager: metadata api object - :param dependencies: dictionary of dependencies + :param config: pipeline config + :param metadata_manager: metadata manager + :param feature_loader: feature loader :return: dataframe """ raise NotImplementedError diff --git a/rialto/runner/utils.py b/rialto/runner/utils.py new file mode 100644 index 0000000..b74ec1b --- /dev/null +++ b/rialto/runner/utils.py @@ -0,0 +1,74 @@ +from datetime import date +from importlib import import_module +from typing import List, Tuple + +from pyspark.sql import SparkSession + +from rialto.common import DataReader +from rialto.loader import PysparkFeatureLoader +from rialto.metadata import MetadataManager +from rialto.runner.config_loader import ModuleConfig, PipelineConfig +from rialto.runner.table import Table +from rialto.runner.transformation import Transformation + + +def load_module(cfg: ModuleConfig) -> Transformation: + """ + Load feature group + + :param cfg: Feature configuration + :return: Transformation object + """ + module = import_module(cfg.python_module) + class_obj = getattr(module, cfg.python_class) + return class_obj() + + +def table_exists(spark: SparkSession, table: str) -> bool: + """ + Check table exists in spark catalog + + :param table: full table path + :return: bool + """ + return spark.catalog.tableExists(table) + + +def get_partitions(reader: DataReader, table: Table) -> List[date]: + """ + Get partition values + + :param table: Table object + :return: List of partition values + """ + rows = ( + reader.get_table(table.get_table_path(), date_column=table.partition) + .select(table.partition) + .distinct() + .collect() + ) + return [r[table.partition] for r in rows] + + +def init_tools(spark: SparkSession, pipeline: PipelineConfig) -> Tuple[MetadataManager, PysparkFeatureLoader]: + """ + Initialize metadata manager and feature loader + + :param spark: Spark session + :param pipeline: Pipeline configuration + :return: 
MetadataManager and PysparkFeatureLoader + """ + if pipeline.metadata_manager is not None: + metadata_manager = MetadataManager(spark, pipeline.metadata_manager.metadata_schema) + else: + metadata_manager = None + + if pipeline.feature_loader is not None: + feature_loader = PysparkFeatureLoader( + spark, + feature_schema=pipeline.feature_loader.feature_schema, + metadata_schema=pipeline.feature_loader.metadata_schema, + ) + else: + feature_loader = None + return metadata_manager, feature_loader diff --git a/tests/jobs/test_config_holder.py b/tests/jobs/test_config_holder.py deleted file mode 100644 index 38fadb1..0000000 --- a/tests/jobs/test_config_holder.py +++ /dev/null @@ -1,100 +0,0 @@ -# Copyright 2022 ABSA Group Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -from datetime import date - -import pytest - -from rialto.jobs.configuration.config_holder import ( - ConfigException, - ConfigHolder, - FeatureStoreConfig, -) - - -def test_run_date_unset(): - with pytest.raises(ConfigException): - ConfigHolder.get_run_date() - - -def test_run_date(): - dt = date(2023, 1, 1) - - ConfigHolder.set_run_date(dt) - - assert ConfigHolder.get_run_date() == dt - - -def test_feature_store_config_unset(): - with pytest.raises(ConfigException): - ConfigHolder.get_feature_store_config() - - -def test_feature_store_config(): - ConfigHolder.set_feature_store_config("store_schema", "metadata_schema") - - fsc = ConfigHolder.get_feature_store_config() - - assert type(fsc) is FeatureStoreConfig - assert fsc.feature_store_schema == "store_schema" - assert fsc.feature_metadata_schema == "metadata_schema" - - -def test_config_unset(): - config = ConfigHolder.get_config() - - assert type(config) is type({}) - assert len(config.items()) == 0 - - -def test_config_dict_copied_not_ref(): - """Test that config holder config can't be set from outside""" - config = ConfigHolder.get_config() - - config["test"] = 123 - - assert "test" not in ConfigHolder.get_config() - - -def test_config(): - ConfigHolder.set_custom_config(hello=123) - ConfigHolder.set_custom_config(world="test") - - config = ConfigHolder.get_config() - - assert config["hello"] == 123 - assert config["world"] == "test" - - -def test_config_from_dict(): - ConfigHolder.set_custom_config(**{"dict_item_1": 123, "dict_item_2": 456}) - - config = ConfigHolder.get_config() - - assert config["dict_item_1"] == 123 - assert config["dict_item_2"] == 456 - - -def test_dependencies_unset(): - deps = ConfigHolder.get_dependency_config() - assert len(deps.keys()) == 0 - - -def test_dependencies(): - ConfigHolder.set_dependency_config({"hello": 123}) - - deps = ConfigHolder.get_dependency_config() - - assert deps["hello"] == 123 diff --git a/tests/jobs/test_decorators.py b/tests/jobs/test_decorators.py index c6d05e6..54cb4a4 100644 --- a/tests/jobs/test_decorators.py +++ b/tests/jobs/test_decorators.py @@ -14,7 +14,6 @@ from importlib import import_module -from rialto.jobs.configuration.config_holder import ConfigHolder from rialto.jobs.decorators.job_base import 
JobBase from rialto.jobs.decorators.resolver import Resolver @@ -26,6 +25,13 @@ def test_dataset_decorator(): assert test_dataset == "dataset_return" +def test_config_decorator(): + _ = import_module("tests.jobs.test_job.test_job") + test_dataset = Resolver.resolve("custom_config") + + assert test_dataset == "config_return" + + def _rialto_import_stub(module_name, class_name): module = import_module(module_name) class_obj = getattr(module, class_name) @@ -70,7 +76,6 @@ def test_job_disabling_version(): def test_job_dependencies_registered(spark): - ConfigHolder.set_custom_config(value=123) job_class = _rialto_import_stub("tests.jobs.test_job.test_job", "job_asking_for_all_deps") # asserts part of the run - job_class.run(spark=spark, run_date=456, reader=789, metadata_manager=None, dependencies=1011) + job_class.run(spark=spark, run_date=456, reader=789, config=123, metadata_manager=654, feature_loader=321) diff --git a/tests/jobs/test_job/dependency_tests_job.py b/tests/jobs/test_job/dependency_tests_job.py index 3029b33..38e10ba 100644 --- a/tests/jobs/test_job/dependency_tests_job.py +++ b/tests/jobs/test_job/dependency_tests_job.py @@ -1,4 +1,4 @@ -from rialto.jobs.decorators import job, datasource +from rialto.jobs.decorators import datasource, job @datasource @@ -47,5 +47,5 @@ def missing_dependency_job(a, x): @job -def default_dependency_job(run_date, spark, config, dependencies, table_reader, feature_loader): +def default_dependency_job(run_date, spark, config, table_reader, feature_loader): return 1 diff --git a/tests/jobs/test_job/test_job.py b/tests/jobs/test_job/test_job.py index 460490a..3d648b5 100644 --- a/tests/jobs/test_job/test_job.py +++ b/tests/jobs/test_job/test_job.py @@ -11,9 +11,12 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
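For illustration, the tests in this patch exercise the reworked `Transformation.run` interface, where `config`, `metadata_manager` and `feature_loader` replace the old `dependencies` argument. A minimal sketch of a transformation under the new signature (the table name is a placeholder, not part of this patch) could look like:

```python
import datetime

from pyspark.sql import DataFrame, SparkSession

from rialto.common import DataReader
from rialto.loader import PysparkFeatureLoader
from rialto.metadata import MetadataManager
from rialto.runner import Transformation
from rialto.runner.config_loader import PipelineConfig


class MyGroup(Transformation):
    """Sketch only: reads the latest snapshot of one table as of run_date."""

    def run(
        self,
        reader: DataReader,
        run_date: datetime.date,
        spark: SparkSession = None,
        config: PipelineConfig = None,
        metadata_manager: MetadataManager = None,
        feature_loader: PysparkFeatureLoader = None,
    ) -> DataFrame:
        # "my_catalog.my_schema.my_table" is a hypothetical source table
        return reader.get_latest("my_catalog.my_schema.my_table", date_until=run_date)
```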
+from rialto.jobs.decorators import config, datasource, job -from rialto.jobs.decorators import datasource, job +@config +def custom_config(): + return "config_return" @datasource @@ -37,9 +40,10 @@ def disable_version_job_function(): @job -def job_asking_for_all_deps(spark, run_date, config, dependencies, table_reader): +def job_asking_for_all_deps(spark, run_date, config, table_reader, metadata_manager, feature_loader): assert spark is not None assert run_date == 456 - assert config["value"] == 123 + assert config == 123 assert table_reader == 789 - assert dependencies == 1011 + assert metadata_manager == 654 + assert feature_loader == 321 diff --git a/tests/jobs/test_job_base.py b/tests/jobs/test_job_base.py index ab8284a..55fced1 100644 --- a/tests/jobs/test_job_base.py +++ b/tests/jobs/test_job_base.py @@ -14,42 +14,36 @@ import datetime -from unittest.mock import MagicMock, patch +from unittest.mock import MagicMock import pyspark.sql.types import tests.jobs.resources as resources -from rialto.jobs.configuration.config_holder import ConfigHolder, FeatureStoreConfig from rialto.jobs.decorators.resolver import Resolver from rialto.loader import PysparkFeatureLoader def test_setup_except_feature_loader(spark): table_reader = MagicMock() + config = MagicMock() date = datetime.date(2023, 1, 1) - ConfigHolder.set_custom_config(hello=1, world=2) - - resources.CustomJobNoReturnVal().run( - reader=table_reader, run_date=date, spark=spark, metadata_manager=None, dependencies={1: 1} - ) + resources.CustomJobNoReturnVal().run(reader=table_reader, run_date=date, spark=spark, config=config) assert Resolver.resolve("run_date") == date - assert Resolver.resolve("config") == ConfigHolder.get_config() - assert Resolver.resolve("dependencies") == ConfigHolder.get_dependency_config() + assert Resolver.resolve("config") == config assert Resolver.resolve("spark") == spark assert Resolver.resolve("table_reader") == table_reader -@patch( - "rialto.jobs.configuration.config_holder.ConfigHolder.get_feature_store_config", - return_value=FeatureStoreConfig(feature_store_schema="schema", feature_metadata_schema="metadata_schema"), -) def test_setup_feature_loader(spark): table_reader = MagicMock() date = datetime.date(2023, 1, 1) + feature_loader = PysparkFeatureLoader(spark, "", "", "") - resources.CustomJobNoReturnVal().run(reader=table_reader, run_date=date, spark=spark, metadata_manager=None) + resources.CustomJobNoReturnVal().run( + reader=table_reader, run_date=date, spark=spark, config=None, feature_loader=feature_loader + ) assert type(Resolver.resolve("feature_loader")) == PysparkFeatureLoader @@ -60,7 +54,7 @@ def test_custom_callable_called(spark, mocker): table_reader = MagicMock() date = datetime.date(2023, 1, 1) - resources.CustomJobNoReturnVal().run(reader=table_reader, run_date=date, spark=spark, metadata_manager=None) + resources.CustomJobNoReturnVal().run(reader=table_reader, run_date=date, spark=spark, config=None) spy_cc.assert_called_once() @@ -69,9 +63,7 @@ def test_no_return_vaue_adds_version_timestamp_dataframe(spark): table_reader = MagicMock() date = datetime.date(2023, 1, 1) - result = resources.CustomJobNoReturnVal().run( - reader=table_reader, run_date=date, spark=spark, metadata_manager=None - ) + result = resources.CustomJobNoReturnVal().run(reader=table_reader, run_date=date, spark=spark, config=None) assert type(result) is pyspark.sql.DataFrame assert result.columns == ["JOB_NAME", "CREATION_TIME", "VERSION"] @@ -83,9 +75,7 @@ def 
test_return_dataframe_forwarded_with_version(spark): table_reader = MagicMock() date = datetime.date(2023, 1, 1) - result = resources.CustomJobReturnsDataFrame().run( - reader=table_reader, run_date=date, spark=spark, metadata_manager=None - ) + result = resources.CustomJobReturnsDataFrame().run(reader=table_reader, run_date=date, spark=spark, config=None) assert type(result) is pyspark.sql.DataFrame assert result.columns == ["FIRST", "SECOND", "VERSION"] @@ -97,7 +87,7 @@ def test_none_job_version_wont_fill_job_colun(spark): table_reader = MagicMock() date = datetime.date(2023, 1, 1) - result = resources.CustomJobNoVersion().run(reader=table_reader, run_date=date, spark=spark, metadata_manager=None) + result = resources.CustomJobNoVersion().run(reader=table_reader, run_date=date, spark=spark, config=None) assert type(result) is pyspark.sql.DataFrame assert "VERSION" not in result.columns diff --git a/tests/loader/pyspark/dummy_loaders.py b/tests/loader/pyspark/dummy_loaders.py deleted file mode 100644 index a2b0cb8..0000000 --- a/tests/loader/pyspark/dummy_loaders.py +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright 2022 ABSA Group Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -from datetime import date - -from rialto.loader.data_loader import DataLoader - - -class DummyDataLoader(DataLoader): - def __init__(self): - super().__init__() - - def read_group(self, group: str, information_date: date): - return None diff --git a/tests/loader/pyspark/test_from_cfg.py b/tests/loader/pyspark/test_from_cfg.py index 3ad653e..dd2049f 100644 --- a/tests/loader/pyspark/test_from_cfg.py +++ b/tests/loader/pyspark/test_from_cfg.py @@ -21,7 +21,6 @@ from rialto.loader.config_loader import get_feature_config from rialto.loader.pyspark_feature_loader import PysparkFeatureLoader from tests.loader.pyspark.dataframe_builder import dataframe_builder as dfb -from tests.loader.pyspark.dummy_loaders import DummyDataLoader @pytest.fixture(scope="session") @@ -45,7 +44,7 @@ def spark(request): @pytest.fixture(scope="session") def loader(spark): - return PysparkFeatureLoader(spark, DummyDataLoader(), MagicMock()) + return PysparkFeatureLoader(spark, MagicMock(), MagicMock()) VALID_LIST = [(["a"], ["a"]), (["a"], ["a", "b", "c"]), (["c", "a"], ["a", "b", "c"])] @@ -90,7 +89,7 @@ def __call__(self, *args, **kwargs): metadata = MagicMock() monkeypatch.setattr(metadata, "get_group", GroupMd()) - loader = PysparkFeatureLoader(spark, DummyDataLoader(), "") + loader = PysparkFeatureLoader(spark, "", "") loader.metadata = metadata base = dfb(spark, data=r.base_frame_data, columns=r.base_frame_columns) @@ -105,7 +104,7 @@ def __call__(self, *args, **kwargs): def test_get_group_metadata(spark, mocker): mocker.patch("rialto.loader.pyspark_feature_loader.MetadataManager.get_group", return_value=7) - loader = PysparkFeatureLoader(spark, DummyDataLoader(), "") + loader = PysparkFeatureLoader(spark, "", "") ret_val = loader.get_group_metadata("group_name") assert ret_val == 7 @@ -115,7 +114,7 @@ def test_get_group_metadata(spark, 
mocker): def test_get_feature_metadata(spark, mocker): mocker.patch("rialto.loader.pyspark_feature_loader.MetadataManager.get_feature", return_value=8) - loader = PysparkFeatureLoader(spark, DummyDataLoader(), "") + loader = PysparkFeatureLoader(spark, "", "") ret_val = loader.get_feature_metadata("group_name", "feature") assert ret_val == 8 @@ -129,7 +128,7 @@ def test_get_metadata_from_cfg(spark, mocker): ) mocker.patch("rialto.loader.pyspark_feature_loader.MetadataManager.get_group", side_effect=lambda g: {"B": 10}[g]) - loader = PysparkFeatureLoader(spark, DummyDataLoader(), "") + loader = PysparkFeatureLoader(spark, "", "") metadata = loader.get_metadata_from_cfg("tests/loader/pyspark/example_cfg.yaml") assert metadata["B_F1"] == 1 diff --git a/tests/runner/conftest.py b/tests/runner/conftest.py index 44f0c09..4e527be 100644 --- a/tests/runner/conftest.py +++ b/tests/runner/conftest.py @@ -39,6 +39,4 @@ def spark(request): @pytest.fixture(scope="function") def basic_runner(spark): - return Runner( - spark, config_path="tests/runner/transformations/config.yaml", feature_metadata_schema="", run_date="2023-03-31" - ) + return Runner(spark, config_path="tests/runner/transformations/config.yaml", run_date="2023-03-31") diff --git a/tests/runner/overrider.yaml b/tests/runner/overrider.yaml new file mode 100644 index 0000000..3029730 --- /dev/null +++ b/tests/runner/overrider.yaml @@ -0,0 +1,86 @@ +# Copyright 2022 ABSA Group Limited +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +runner: + watched_period_units: "months" + watched_period_value: 2 + mail: + sender: test@testing.org + smtp: server.test + to: + - developer@testing.org + - developer2@testing.org + subject: test report +pipelines: + - name: SimpleGroup + module: + python_module: tests.runner.transformations + python_class: SimpleGroup + schedule: + frequency: weekly + day: 7 + info_date_shift: + - value: 3 + units: days + - value: 2 + units: weeks + dependencies: + - table: source.schema.dep1 + interval: + units: "days" + value: 1 + date_col: "DATE" + - table: source.schema.dep2 + interval: + units: "months" + value: 3 + date_col: "DATE" + target: + target_schema: catalog.schema + target_partition_column: "INFORMATION_DATE" + feature_loader: + config_path: path/to/config.yaml + feature_schema: catalog.feature_tables + metadata_schema: catalog.metadata + metadata_manager: + metadata_schema: catalog.metadata + - name: OtherGroup + module: + python_module: tests.runner.transformations + python_class: SimpleGroup + schedule: + frequency: weekly + day: 7 + info_date_shift: + - value: 3 + units: days + dependencies: + - table: source.schema.dep1 + name: source1 + interval: + units: "days" + value: 1 + date_col: "DATE" + - table: source.schema.dep2 + name: source2 + interval: + units: "months" + value: 3 + date_col: "batch" + target: + target_schema: catalog.schema + target_partition_column: "INFORMATION_DATE" + extras: + some_value: 3 + some_other_value: cat diff --git a/tests/runner/test_date_manager.py b/tests/runner/test_date_manager.py index 9088e0c..73b61b8 100644 --- a/tests/runner/test_date_manager.py +++ b/tests/runner/test_date_manager.py @@ -144,7 +144,7 @@ def test_run_dates_invalid(): [(7, "2023-02-26"), (3, "2023-03-02"), (-5, "2023-03-10"), (0, "2023-03-05")], ) def test_to_info_date(shift, res): - cfg = ScheduleConfig(frequency="daily", info_date_shift=IntervalConfig(units="days", value=shift)) + cfg = ScheduleConfig(frequency="daily", info_date_shift=[IntervalConfig(units="days", value=shift)]) base = DateManager.str_to_date("2023-03-05") info = DateManager.to_info_date(base, cfg) assert DateManager.str_to_date(res) == info @@ -155,7 +155,7 @@ def test_to_info_date(shift, res): [("days", "2023-03-02"), ("weeks", "2023-02-12"), ("months", "2022-12-05"), ("years", "2020-03-05")], ) def test_info_date_shift_units(unit, result): - cfg = ScheduleConfig(frequency="daily", info_date_shift=IntervalConfig(units=unit, value=3)) + cfg = ScheduleConfig(frequency="daily", info_date_shift=[IntervalConfig(units=unit, value=3)]) base = DateManager.str_to_date("2023-03-05") info = DateManager.to_info_date(base, cfg) assert DateManager.str_to_date(result) == info diff --git a/tests/runner/test_overrides.py b/tests/runner/test_overrides.py new file mode 100644 index 0000000..17fcdbe --- /dev/null +++ b/tests/runner/test_overrides.py @@ -0,0 +1,137 @@ +# Copyright 2022 ABSA Group Limited +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
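For orientation before the new test file: these tests pin down the runner's `overrides` semantics, including dotted paths, list indices (`to[1]`), appends (`to[-1]`) and attribute lookups (`pipelines[name=SimpleGroup]`). A rough sketch of such an override resolver over plain dicts and lists (an illustration of the expected behaviour, not the actual Runner implementation) might be:

```python
import re
from typing import Any

_INDEX = re.compile(r"^(\w+)\[(.+)\]$")  # e.g. "to[1]", "pipelines[name=SimpleGroup]"


def apply_override(node: dict, key: str, value: Any) -> None:
    """Apply a single dotted-path override to a nested dict/list config tree."""
    parts = key.split(".")
    for i, part in enumerate(parts):
        last = i == len(parts) - 1
        match = _INDEX.match(part)
        if match:
            name, selector = match.groups()
            if name not in node:
                raise ValueError(f"Invalid key {name}")
            items = node[name]
            if "=" in selector:
                # [field=value] lookup, e.g. pipelines[name=SimpleGroup]
                field, wanted = selector.split("=", 1)
                node = next(item for item in items if item[field] == wanted)
            else:
                index = int(selector)
                if index == -1 and last:
                    items.append(value)  # [-1] appends a new element
                    return
                if index >= len(items):
                    raise IndexError(f"Index {index} out of bounds for key {part}")
                if last:
                    items[index] = value
                    return
                node = items[index]
        elif last:
            node[part] = value
        elif part not in node:
            raise ValueError(f"Invalid key {part}")
        else:
            node = node[part]


cfg = {"runner": {"mail": {"to": ["developer@testing.org", "developer2@testing.org"]}}}
apply_override(cfg, "runner.mail.to[1]", "a@b.c")
assert cfg["runner"]["mail"]["to"] == ["developer@testing.org", "a@b.c"]
```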
+import pytest + +from rialto.runner import Runner + + +def test_overrides_simple(spark): + runner = Runner( + spark, + config_path="tests/runner/overrider.yaml", + run_date="2023-03-31", + overrides={"runner.mail.to": ["x@b.c", "y@b.c", "z@b.c"]}, + ) + assert runner.config.runner.mail.to == ["x@b.c", "y@b.c", "z@b.c"] + + +def test_overrides_array_index(spark): + runner = Runner( + spark, + config_path="tests/runner/overrider.yaml", + run_date="2023-03-31", + overrides={"runner.mail.to[1]": "a@b.c"}, + ) + assert runner.config.runner.mail.to == ["developer@testing.org", "a@b.c"] + + +def test_overrides_array_append(spark): + runner = Runner( + spark, + config_path="tests/runner/overrider.yaml", + run_date="2023-03-31", + overrides={"runner.mail.to[-1]": "test"}, + ) + assert runner.config.runner.mail.to == ["developer@testing.org", "developer2@testing.org", "test"] + + +def test_overrides_array_lookup(spark): + runner = Runner( + spark, + config_path="tests/runner/overrider.yaml", + run_date="2023-03-31", + overrides={"pipelines[name=SimpleGroup].target.target_schema": "new_schema"}, + ) + assert runner.config.pipelines[0].target.target_schema == "new_schema" + + +def test_overrides_combined(spark): + runner = Runner( + spark, + config_path="tests/runner/overrider.yaml", + run_date="2023-03-31", + overrides={ + "runner.mail.to": ["x@b.c", "y@b.c", "z@b.c"], + "pipelines[name=SimpleGroup].target.target_schema": "new_schema", + "pipelines[name=SimpleGroup].schedule.info_date_shift[0].value": 1, + }, + ) + assert runner.config.runner.mail.to == ["x@b.c", "y@b.c", "z@b.c"] + assert runner.config.pipelines[0].target.target_schema == "new_schema" + assert runner.config.pipelines[0].schedule.info_date_shift[0].value == 1 + + +def test_index_out_of_range(spark): + with pytest.raises(IndexError) as error: + Runner( + spark, + config_path="tests/runner/overrider.yaml", + run_date="2023-03-31", + overrides={"runner.mail.to[8]": "test"}, + ) + assert error.value.args[0] == "Index 8 out of bounds for key to[8]" + + +def test_invalid_index_key(spark): + with pytest.raises(ValueError) as error: + Runner( + spark, + config_path="tests/runner/overrider.yaml", + run_date="2023-03-31", + overrides={"runner.mail.test[8]": "test"}, + ) + assert error.value.args[0] == "Invalid key test" + + +def test_invalid_key(spark): + with pytest.raises(ValueError) as error: + Runner( + spark, + config_path="tests/runner/overrider.yaml", + run_date="2023-03-31", + overrides={"runner.mail.test.param": "test"}, + ) + assert error.value.args[0] == "Invalid key test" + + +def test_replace_section(spark): + runner = Runner( + spark, + config_path="tests/runner/overrider.yaml", + run_date="2023-03-31", + overrides={ + "pipelines[name=SimpleGroup].feature_loader": { + "config_path": "features_cfg.yaml", + "feature_schema": "catalog.features", + "metadata_schema": "catalog.metadata", + } + }, + ) + assert runner.config.pipelines[0].feature_loader.feature_schema == "catalog.features" + + +def test_add_section(spark): + runner = Runner( + spark, + config_path="tests/runner/overrider.yaml", + run_date="2023-03-31", + overrides={ + "pipelines[name=OtherGroup].feature_loader": { + "config_path": "features_cfg.yaml", + "feature_schema": "catalog.features", + "metadata_schema": "catalog.metadata", + } + }, + ) + assert runner.config.pipelines[1].feature_loader.feature_schema == "catalog.features" diff --git a/tests/runner/test_runner.py b/tests/runner/test_runner.py index 0459411..e23eee8 100644 --- a/tests/runner/test_runner.py +++ 
b/tests/runner/test_runner.py @@ -11,15 +11,14 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from collections import namedtuple from datetime import datetime from typing import Optional import pytest from pyspark.sql import DataFrame +import rialto.runner.utils as utils from rialto.common.table_reader import DataReader -from rialto.jobs.configuration.config_holder import ConfigHolder from rialto.runner.runner import DateManager, Runner from rialto.runner.table import Table from tests.runner.runner_resources import ( @@ -38,8 +37,8 @@ def __init__(self, spark): def get_table( self, table: str, - info_date_from: Optional[datetime.date] = None, - info_date_to: Optional[datetime.date] = None, + date_from: Optional[datetime.date] = None, + date_to: Optional[datetime.date] = None, date_column: str = None, uppercase_columns: bool = False, ) -> DataFrame: @@ -53,114 +52,79 @@ def get_table( def get_latest( self, table: str, - until: Optional[datetime.date] = None, + date_until: Optional[datetime.date] = None, date_column: str = None, uppercase_columns: bool = False, ) -> DataFrame: pass -def test_table_exists(spark, mocker, basic_runner): +def test_table_exists(spark, mocker): mock = mocker.patch("pyspark.sql.Catalog.tableExists", return_value=True) - basic_runner._table_exists("abc") + utils.table_exists(spark, "abc") mock.assert_called_once_with("abc") -def test_infer_column(spark, mocker, basic_runner): - column = namedtuple("catalog", ["name", "isPartition"]) - catalog = [column("a", True), column("b", False), column("c", False)] - - mock = mocker.patch("pyspark.sql.Catalog.listColumns", return_value=catalog) - partition = basic_runner._delta_partition("aaa") - assert partition == "a" - mock.assert_called_once_with("aaa") - - def test_load_module(spark, basic_runner): - module = basic_runner._load_module(basic_runner.config.pipelines[0].module) + module = utils.load_module(basic_runner.config.pipelines[0].module) assert isinstance(module, SimpleGroup) def test_generate(spark, mocker, basic_runner): run = mocker.patch("tests.runner.transformations.simple_group.SimpleGroup.run") group = SimpleGroup() - basic_runner._generate(group, DateManager.str_to_date("2023-01-31")) + config = basic_runner.config.pipelines[0] + basic_runner._execute(group, DateManager.str_to_date("2023-01-31"), config) + run.assert_called_once_with( reader=basic_runner.reader, run_date=DateManager.str_to_date("2023-01-31"), spark=spark, - metadata_manager=basic_runner.metadata, - dependencies=None, + config=config, + metadata_manager=None, + feature_loader=None, ) def test_generate_w_dep(spark, mocker, basic_runner): run = mocker.patch("tests.runner.transformations.simple_group.SimpleGroup.run") group = SimpleGroup() - basic_runner._generate(group, DateManager.str_to_date("2023-01-31"), basic_runner.config.pipelines[2].dependencies) + basic_runner._execute(group, DateManager.str_to_date("2023-01-31"), basic_runner.config.pipelines[2]) run.assert_called_once_with( reader=basic_runner.reader, run_date=DateManager.str_to_date("2023-01-31"), spark=spark, - metadata_manager=basic_runner.metadata, - dependencies={ - "source1": basic_runner.config.pipelines[2].dependencies[0], - "source2": basic_runner.config.pipelines[2].dependencies[1], - }, + config=basic_runner.config.pipelines[2], + metadata_manager=None, + feature_loader=None, ) def test_init_dates(spark): - runner = Runner( - spark, 
config_path="tests/runner/transformations/config.yaml", feature_metadata_schema="", run_date="2023-03-31" - ) + runner = Runner(spark, config_path="tests/runner/transformations/config.yaml", run_date="2023-03-31") assert runner.date_from == DateManager.str_to_date("2023-01-31") assert runner.date_until == DateManager.str_to_date("2023-03-31") runner = Runner( spark, config_path="tests/runner/transformations/config.yaml", - feature_metadata_schema="", - date_from="2023-03-01", - date_until="2023-03-31", + run_date="2023-03-31", + overrides={"runner.watched_period_units": "weeks", "runner.watched_period_value": 2}, ) - assert runner.date_from == DateManager.str_to_date("2023-03-01") + assert runner.date_from == DateManager.str_to_date("2023-03-17") assert runner.date_until == DateManager.str_to_date("2023-03-31") runner = Runner( spark, config_path="tests/runner/transformations/config2.yaml", - feature_metadata_schema="", run_date="2023-03-31", ) assert runner.date_from == DateManager.str_to_date("2023-02-24") assert runner.date_until == DateManager.str_to_date("2023-03-31") -def test_possible_run_dates(spark): - runner = Runner( - spark, - config_path="tests/runner/transformations/config.yaml", - feature_metadata_schema="", - date_from="2023-03-01", - date_until="2023-03-31", - ) - - dates = runner.get_possible_run_dates(runner.config.pipelines[0].schedule) - expected = ["2023-03-05", "2023-03-12", "2023-03-19", "2023-03-26"] - assert dates == [DateManager.str_to_date(d) for d in expected] - - -def test_info_dates(spark, basic_runner): - run = ["2023-02-05", "2023-02-12", "2023-02-19", "2023-02-26", "2023-03-05"] - run = [DateManager.str_to_date(d) for d in run] - info = basic_runner.get_info_dates(basic_runner.config.pipelines[0].schedule, run) - expected = ["2023-02-02", "2023-02-09", "2023-02-16", "2023-02-23", "2023-03-02"] - assert info == [DateManager.str_to_date(d) for d in expected] - - def test_completion(spark, mocker, basic_runner): - mocker.patch("rialto.runner.runner.Runner._table_exists", return_value=True) + mocker.patch("rialto.runner.utils.table_exists", return_value=True) basic_runner.reader = MockReader(spark) @@ -173,11 +137,9 @@ def test_completion(spark, mocker, basic_runner): def test_completion_rerun(spark, mocker, basic_runner): - mocker.patch("rialto.runner.runner.Runner._table_exists", return_value=True) + mocker.patch("rialto.runner.runner.utils.table_exists", return_value=True) - runner = Runner( - spark, config_path="tests/runner/transformations/config.yaml", feature_metadata_schema="", run_date="2023-03-31" - ) + runner = Runner(spark, config_path="tests/runner/transformations/config.yaml", run_date="2023-03-31") runner.reader = MockReader(spark) dates = ["2023-02-26", "2023-03-05", "2023-03-12", "2023-03-19", "2023-03-26"] @@ -189,14 +151,12 @@ def test_completion_rerun(spark, mocker, basic_runner): def test_check_dates_have_partition(spark, mocker): - mocker.patch("rialto.runner.runner.Runner._table_exists", return_value=True) + mocker.patch("rialto.runner.runner.utils.table_exists", return_value=True) runner = Runner( spark, config_path="tests/runner/transformations/config.yaml", - feature_metadata_schema="", - date_from="2023-03-01", - date_until="2023-03-31", + run_date="2023-03-31", ) runner.reader = MockReader(spark) dates = ["2023-03-04", "2023-03-05", "2023-03-06"] @@ -207,14 +167,12 @@ def test_check_dates_have_partition(spark, mocker): def test_check_dates_have_partition_no_table(spark, mocker): - 
mocker.patch("rialto.runner.runner.Runner._table_exists", return_value=False) + mocker.patch("rialto.runner.runner.utils.table_exists", return_value=False) runner = Runner( spark, config_path="tests/runner/transformations/config.yaml", - feature_metadata_schema="", - date_from="2023-03-01", - date_until="2023-03-31", + run_date="2023-03-31", ) dates = ["2023-03-04", "2023-03-05", "2023-03-06"] dates = [DateManager.str_to_date(d) for d in dates] @@ -228,14 +186,12 @@ def test_check_dates_have_partition_no_table(spark, mocker): [("2023-02-26", False), ("2023-03-05", True)], ) def test_check_dependencies(spark, mocker, r_date, expected): - mocker.patch("rialto.runner.runner.Runner._table_exists", return_value=True) + mocker.patch("rialto.runner.runner.utils.table_exists", return_value=True) runner = Runner( spark, config_path="tests/runner/transformations/config.yaml", - feature_metadata_schema="", - date_from="2023-03-01", - date_until="2023-03-31", + run_date="2023-03-31", ) runner.reader = MockReader(spark) res = runner.check_dependencies(runner.config.pipelines[0], DateManager.str_to_date(r_date)) @@ -243,14 +199,12 @@ def test_check_dependencies(spark, mocker, r_date, expected): def test_check_no_dependencies(spark, mocker): - mocker.patch("rialto.runner.runner.Runner._table_exists", return_value=True) + mocker.patch("rialto.runner.runner.utils.table_exists", return_value=True) runner = Runner( spark, config_path="tests/runner/transformations/config.yaml", - feature_metadata_schema="", - date_from="2023-03-01", - date_until="2023-03-31", + run_date="2023-03-31", ) runner.reader = MockReader(spark) res = runner.check_dependencies(runner.config.pipelines[1], DateManager.str_to_date("2023-03-05")) @@ -258,14 +212,13 @@ def test_check_no_dependencies(spark, mocker): def test_select_dates(spark, mocker): - mocker.patch("rialto.runner.runner.Runner._table_exists", return_value=True) + mocker.patch("rialto.runner.runner.utils.table_exists", return_value=True) runner = Runner( spark, config_path="tests/runner/transformations/config.yaml", - feature_metadata_schema="", - date_from="2023-03-01", - date_until="2023-03-31", + run_date="2023-03-31", + overrides={"runner.watched_period_units": "months", "runner.watched_period_value": 1}, ) runner.reader = MockReader(spark) @@ -281,14 +234,13 @@ def test_select_dates(spark, mocker): def test_select_dates_all_done(spark, mocker): - mocker.patch("rialto.runner.runner.Runner._table_exists", return_value=True) + mocker.patch("rialto.runner.runner.utils.table_exists", return_value=True) runner = Runner( spark, config_path="tests/runner/transformations/config.yaml", - feature_metadata_schema="", - date_from="2023-03-02", - date_until="2023-03-02", + run_date="2023-03-02", + overrides={"runner.watched_period_units": "months", "runner.watched_period_value": 0}, ) runner.reader = MockReader(spark) @@ -307,9 +259,7 @@ def test_op_selected(spark, mocker): mocker.patch("rialto.runner.tracker.Tracker.report") run = mocker.patch("rialto.runner.runner.Runner._run_pipeline") - runner = Runner( - spark, config_path="tests/runner/transformations/config.yaml", feature_metadata_schema="", op="SimpleGroup" - ) + runner = Runner(spark, config_path="tests/runner/transformations/config.yaml", op="SimpleGroup") runner() run.called_once() @@ -319,42 +269,8 @@ def test_op_bad(spark, mocker): mocker.patch("rialto.runner.tracker.Tracker.report") mocker.patch("rialto.runner.runner.Runner._run_pipeline") - runner = Runner( - spark, 
config_path="tests/runner/transformations/config.yaml", feature_metadata_schema="", op="BadOp" - ) + runner = Runner(spark, config_path="tests/runner/transformations/config.yaml", op="BadOp") with pytest.raises(ValueError) as exception: runner() assert str(exception.value) == "Unknown operation selected: BadOp" - - -def test_custom_config(spark, mocker): - cc_spy = mocker.spy(ConfigHolder, "set_custom_config") - custom_config = {"cc": 42} - - _ = Runner(spark, config_path="tests/runner/transformations/config.yaml", custom_job_config=custom_config) - - cc_spy.assert_called_once_with(cc=42) - - -def test_feature_store_config(spark, mocker): - fs_spy = mocker.spy(ConfigHolder, "set_feature_store_config") - - _ = Runner( - spark, - config_path="tests/runner/transformations/config.yaml", - feature_store_schema="schema", - feature_metadata_schema="metadata", - ) - - fs_spy.assert_called_once_with("schema", "metadata") - - -def test_no_configs(spark, mocker): - cc_spy = mocker.spy(ConfigHolder, "set_custom_config") - fs_spy = mocker.spy(ConfigHolder, "set_feature_store_config") - - _ = Runner(spark, config_path="tests/runner/transformations/config.yaml") - - cc_spy.assert_not_called() - fs_spy.assert_not_called() diff --git a/tests/runner/transformations/config.yaml b/tests/runner/transformations/config.yaml index 2bfeaf1..3b72107 100644 --- a/tests/runner/transformations/config.yaml +++ b/tests/runner/transformations/config.yaml @@ -12,12 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. -general: - target_schema: catalog.schema - target_partition_column: "INFORMATION_DATE" +runner: watched_period_units: "months" watched_period_value: 2 - job: "run" # run/check mail: sender: test@testing.org smtp: server.test @@ -34,8 +31,8 @@ pipelines: frequency: weekly day: 7 info_date_shift: - value: 3 - units: days + - value: 3 + units: days dependencies: - table: source.schema.dep1 interval: @@ -47,6 +44,9 @@ pipelines: units: "months" value: 3 date_col: "DATE" + target: + target_schema: catalog.schema + target_partition_column: "INFORMATION_DATE" - name: GroupNoDeps module: python_module: tests.runner.transformations @@ -55,8 +55,8 @@ pipelines: frequency: weekly day: 7 info_date_shift: - value: 3 - units: days + - value: 3 + units: days - name: NamedDeps module: python_module: tests.runner.transformations @@ -65,8 +65,8 @@ pipelines: frequency: weekly day: 7 info_date_shift: - value: 3 - units: days + - value: 3 + units: days dependencies: - table: source.schema.dep1 name: source1 @@ -80,3 +80,6 @@ pipelines: units: "months" value: 3 date_col: "batch" + target: + target_schema: catalog.schema + target_partition_column: "INFORMATION_DATE" diff --git a/tests/runner/transformations/config2.yaml b/tests/runner/transformations/config2.yaml index a91894b..f7b9604 100644 --- a/tests/runner/transformations/config2.yaml +++ b/tests/runner/transformations/config2.yaml @@ -12,12 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-general: - target_schema: catalog.schema - target_partition_column: "INFORMATION_DATE" +runner: watched_period_units: "weeks" watched_period_value: 5 - job: "run" # run/check mail: sender: test@testing.org smtp: server.test @@ -43,3 +40,6 @@ pipelines: units: "months" value: 1 date_col: "DATE" + target: + target_schema: catalog.schema + target_partition_column: "INFORMATION_DATE" diff --git a/tests/runner/transformations/simple_group.py b/tests/runner/transformations/simple_group.py index fcda5c7..ec2311c 100644 --- a/tests/runner/transformations/simple_group.py +++ b/tests/runner/transformations/simple_group.py @@ -18,6 +18,7 @@ from pyspark.sql.types import StructType from rialto.common import TableReader +from rialto.loader import PysparkFeatureLoader from rialto.metadata import MetadataManager from rialto.runner import Transformation @@ -28,7 +29,8 @@ def run( reader: TableReader, run_date: datetime.date, spark: SparkSession = None, - metadata_manager: MetadataManager = None, - dependencies: Dict = None, + config: Dict = None, + metadata: MetadataManager = None, + feature_loader: PysparkFeatureLoader = None, ) -> DataFrame: return spark.createDataFrame([], StructType([])) From 6b2831d81d57ed8c4c208dbfa8c0215a26e140f1 Mon Sep 17 00:00:00 2001 From: Marek Dobransky Date: Thu, 5 Sep 2024 10:15:16 +0200 Subject: [PATCH 07/10] renamed config, flatten structure (#13) * renamed config, flatten structure * readme --- README.md | 34 ++++++++++++--------- rialto/jobs/__init__.py | 2 +- rialto/jobs/{decorators => }/decorators.py | 16 +++++----- rialto/jobs/decorators/__init__.py | 15 --------- rialto/jobs/{decorators => }/job_base.py | 2 +- rialto/jobs/{decorators => }/resolver.py | 0 rialto/jobs/{decorators => }/test_utils.py | 14 ++++----- tests/jobs/resources.py | 2 +- tests/jobs/test_decorators.py | 4 +-- tests/jobs/test_job/dependency_tests_job.py | 4 +-- tests/jobs/test_job/test_job.py | 4 +-- tests/jobs/test_job_base.py | 2 +- tests/jobs/test_resolver.py | 6 ++-- tests/jobs/test_test_utils.py | 6 ++-- 14 files changed, 51 insertions(+), 60 deletions(-) rename rialto/jobs/{decorators => }/decorators.py (92%) delete mode 100644 rialto/jobs/decorators/__init__.py rename rialto/jobs/{decorators => }/job_base.py (98%) rename rialto/jobs/{decorators => }/resolver.py (100%) rename rialto/jobs/{decorators => }/test_utils.py (84%) diff --git a/README.md b/README.md index 2ac915f..f179dc6 100644 --- a/README.md +++ b/README.md @@ -85,8 +85,8 @@ pipelines: # a list of pipelines to run frequency: weekly # daily/weekly/monthly day: 7 # day of the week or month info_date_shift: #Optional shift in the written information date from the scheduled day - units: "days" # days/weeks/months/years - value: 5 # subtracted from scheduled day + - units: "days" # days/weeks/months/years + value: 5 # subtracted from scheduled day dependencies: # list of dependent tables - table: catalog.schema.table1 name: "table1" # Optional table name, used to recall dependency details in transformation @@ -372,7 +372,7 @@ With that sorted out, we can now provide a quick example of the *rialto.jobs* mo ```python from pyspark.sql import DataFrame from rialto.common import TableReader -from rialto.jobs.decorators import config, job, datasource +from rialto.jobs.decorators import config_parser, job, datasource from rialto.runner.config_loader import PipelineConfig from pydantic import BaseModel @@ -381,10 +381,12 @@ class ConfigModel(BaseModel): some_value: int some_other_value: str -@config + +@config_parser def my_config(config: 
PipelineConfig): return ConfigModel(**config.extras) + + @datasource def my_datasource(run_date: datetime.date, table_reader: TableReader) -> DataFrame: return table_reader.get_latest("my_catalog.my_schema.my_table", date_until=run_date) @@ -442,44 +444,48 @@ Assuming we have a my_package.test_job_module.py module: ```python3 @datasource def datasource_a(...) - ... code ... + ... code... @job def my_job(datasource_a, ...) - ... code ... + ... code... ``` The *disable_job_decorators* context manager, as the name suggests, disables all decorator functionality and lets you access your functions as raw functions - making it super simple to unit-test: + ```python3 -from rialto.jobs.decorators.test_utils import disable_job_decorators +from rialto.jobs.test_utils import disable_job_decorators import my_package.test_job_module as tjm + # Datasource Testing def test_datasource_a(): - ... mocks here ... + ... mocks here... with disable_job_decorators(tjm): - datasource_a_output = tjm.datasource_a(... mocks ...) + datasource_a_output = tjm.datasource_a(...mocks...) + + ...asserts... - ... asserts ... # Job Testing def test_my_job(): datasource_a_mock = ... - ... other mocks... + ...other mocks... with disable_job_decorators(tjm): - job_output = tjm.my_job(datasource_a_mock, ... mocks ...) + job_output = tjm.my_job(datasource_a_mock, ...mocks...) - ... asserts ... + ...asserts... ``` #### 2. Testing the @job Dependency Tree In complex use cases, it may happen that the dependencies of a job become quite complex. Or you simply want to be sure that you didn't accidentally misspell your dependency name: ```python3 -from rialto.jobs.decorators.test_utils import resolver_resolves +from rialto.jobs.test_utils import resolver_resolves import my_job.test_job_module as tjm + def test_my_job_resolves(spark): assert resolver_resolves(spark, tjm.my_job) ``` diff --git a/rialto/jobs/__init__.py b/rialto/jobs/__init__.py index a6ee6cb..0c3e01c 100644 --- a/rialto/jobs/__init__.py +++ b/rialto/jobs/__init__.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -from rialto.jobs.decorators import config, datasource, job +from rialto.jobs.decorators import config_parser, datasource, job diff --git a/rialto/jobs/decorators/decorators.py b/rialto/jobs/decorators.py similarity index 92% rename from rialto/jobs/decorators/decorators.py rename to rialto/jobs/decorators.py index d288b7b..dd79bdd 100644 --- a/rialto/jobs/decorators/decorators.py +++ b/rialto/jobs/decorators.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -__all__ = ["datasource", "job", "config"] +__all__ = ["datasource", "job", "config_parser"] import inspect import typing @@ -20,22 +20,22 @@ import importlib_metadata from loguru import logger -from rialto.jobs.decorators.job_base import JobBase -from rialto.jobs.decorators.resolver import Resolver +from rialto.jobs.job_base import JobBase +from rialto.jobs.resolver import Resolver -def config(ds_getter: typing.Callable) -> typing.Callable: +def config_parser(cf_getter: typing.Callable) -> typing.Callable: """ Config parser functions decorator. Registers a config parsing function into a rialto job prerequisite. You can then request the parsed config via job function arguments.
- :param ds_getter: dataset reader function - :return: raw reader function, unchanged + :param cf_getter: config parser function + :return: raw function, unchanged """ - Resolver.register_callable(ds_getter) - return ds_getter + Resolver.register_callable(cf_getter) + return cf_getter def datasource(ds_getter: typing.Callable) -> typing.Callable: diff --git a/rialto/jobs/decorators/__init__.py b/rialto/jobs/decorators/__init__.py deleted file mode 100644 index 6f2713a..0000000 --- a/rialto/jobs/decorators/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright 2022 ABSA Group Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from .decorators import config, datasource, job diff --git a/rialto/jobs/decorators/job_base.py b/rialto/jobs/job_base.py similarity index 98% rename from rialto/jobs/decorators/job_base.py rename to rialto/jobs/job_base.py index d91537f..c65341d 100644 --- a/rialto/jobs/decorators/job_base.py +++ b/rialto/jobs/job_base.py @@ -24,7 +24,7 @@ from pyspark.sql import DataFrame, SparkSession from rialto.common import TableReader -from rialto.jobs.decorators.resolver import Resolver +from rialto.jobs.resolver import Resolver from rialto.loader import PysparkFeatureLoader from rialto.metadata import MetadataManager from rialto.runner import Transformation diff --git a/rialto/jobs/decorators/resolver.py b/rialto/jobs/resolver.py similarity index 100% rename from rialto/jobs/decorators/resolver.py rename to rialto/jobs/resolver.py diff --git a/rialto/jobs/decorators/test_utils.py b/rialto/jobs/test_utils.py similarity index 84% rename from rialto/jobs/decorators/test_utils.py rename to rialto/jobs/test_utils.py index 39d76ce..3f6e3e2 100644 --- a/rialto/jobs/decorators/test_utils.py +++ b/rialto/jobs/test_utils.py @@ -19,8 +19,8 @@ from contextlib import contextmanager from unittest.mock import MagicMock, create_autospec, patch -from rialto.jobs.decorators.job_base import JobBase -from rialto.jobs.decorators.resolver import Resolver, ResolverException +from rialto.jobs.job_base import JobBase +from rialto.jobs.resolver import Resolver, ResolverException def _passthrough_decorator(*args, **kwargs) -> typing.Callable: @@ -33,12 +33,12 @@ def _disable_job_decorators() -> None: patches = [ + patch("rialto.jobs.datasource", _passthrough_decorator), patch("rialto.jobs.decorators.datasource", _passthrough_decorator), - patch("rialto.jobs.decorators.decorators.datasource", _passthrough_decorator), - patch("rialto.jobs.decorators.config", _passthrough_decorator), - patch("rialto.jobs.decorators.decorators.config", _passthrough_decorator), + patch("rialto.jobs.config_parser", _passthrough_decorator), + patch("rialto.jobs.decorators.config_parser", _passthrough_decorator), + patch("rialto.jobs.job", _passthrough_decorator), patch("rialto.jobs.decorators.job", _passthrough_decorator), - patch("rialto.jobs.decorators.decorators.job", _passthrough_decorator), ] for i in patches: @@ -101,7 +101,7 @@ def
__getitem__(self, func_name): return fake_method - with patch("rialto.jobs.decorators.resolver.Resolver._storage", SmartStorage()): + with patch("rialto.jobs.resolver.Resolver._storage", SmartStorage()): job().run(reader=MagicMock(), run_date=MagicMock(), spark=spark) return True diff --git a/tests/jobs/resources.py b/tests/jobs/resources.py index 60fda7b..273bf38 100644 --- a/tests/jobs/resources.py +++ b/tests/jobs/resources.py @@ -15,7 +15,7 @@ import pandas as pd -from rialto.jobs.decorators.job_base import JobBase +from rialto.jobs.job_base import JobBase def custom_callable(): diff --git a/tests/jobs/test_decorators.py b/tests/jobs/test_decorators.py index 54cb4a4..a09ee69 100644 --- a/tests/jobs/test_decorators.py +++ b/tests/jobs/test_decorators.py @@ -14,8 +14,8 @@ from importlib import import_module -from rialto.jobs.decorators.job_base import JobBase -from rialto.jobs.decorators.resolver import Resolver +from rialto.jobs.job_base import JobBase +from rialto.jobs.resolver import Resolver def test_dataset_decorator(): diff --git a/tests/jobs/test_job/dependency_tests_job.py b/tests/jobs/test_job/dependency_tests_job.py index 38e10ba..7452d02 100644 --- a/tests/jobs/test_job/dependency_tests_job.py +++ b/tests/jobs/test_job/dependency_tests_job.py @@ -1,4 +1,4 @@ -from rialto.jobs.decorators import datasource, job +from rialto.jobs import datasource, job @datasource @@ -47,5 +47,5 @@ def missing_dependency_job(a, x): @job -def default_dependency_job(run_date, spark, config, table_reader, feature_loader): +def default_dependency_job(run_date, spark, config): return 1 diff --git a/tests/jobs/test_job/test_job.py b/tests/jobs/test_job/test_job.py index 3d648b5..4e47364 100644 --- a/tests/jobs/test_job/test_job.py +++ b/tests/jobs/test_job/test_job.py @@ -11,10 +11,10 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from rialto.jobs.decorators import config, datasource, job +from rialto.jobs import config_parser, datasource, job -@config +@config_parser def custom_config(): return "config_return" diff --git a/tests/jobs/test_job_base.py b/tests/jobs/test_job_base.py index 55fced1..2fb01ea 100644 --- a/tests/jobs/test_job_base.py +++ b/tests/jobs/test_job_base.py @@ -19,7 +19,7 @@ import pyspark.sql.types import tests.jobs.resources as resources -from rialto.jobs.decorators.resolver import Resolver +from rialto.jobs.resolver import Resolver from rialto.loader import PysparkFeatureLoader diff --git a/tests/jobs/test_resolver.py b/tests/jobs/test_resolver.py index df56b72..c6ccdb0 100644 --- a/tests/jobs/test_resolver.py +++ b/tests/jobs/test_resolver.py @@ -13,7 +13,7 @@ # limitations under the License. 
import pytest -from rialto.jobs.decorators.resolver import Resolver, ResolverException +from rialto.jobs.resolver import Resolver, ResolverException def test_simple_resolve_custom_name(): @@ -56,8 +56,8 @@ def test_register_resolve(mocker): def f(): return 7 - mocker.patch("rialto.jobs.decorators.resolver.Resolver.register_callable", return_value="f") - mocker.patch("rialto.jobs.decorators.resolver.Resolver.resolve") + mocker.patch("rialto.jobs.resolver.Resolver.register_callable", return_value="f") + mocker.patch("rialto.jobs.resolver.Resolver.resolve") Resolver.register_resolve(f) diff --git a/tests/jobs/test_test_utils.py b/tests/jobs/test_test_utils.py index 63884b4..e6ef9da 100644 --- a/tests/jobs/test_test_utils.py +++ b/tests/jobs/test_test_utils.py @@ -14,10 +14,10 @@ import pytest import rialto.jobs.decorators as decorators -import tests.jobs.test_job.test_job as test_job import tests.jobs.test_job.dependency_tests_job as dependency_tests_job -from rialto.jobs.decorators.resolver import Resolver -from rialto.jobs.decorators.test_utils import disable_job_decorators, resolver_resolves +import tests.jobs.test_job.test_job as test_job +from rialto.jobs.resolver import Resolver +from rialto.jobs.test_utils import disable_job_decorators, resolver_resolves def test_raw_dataset_patch(mocker): From 0defd47090f8784d4478db8176019f5d9d23dcd4 Mon Sep 17 00:00:00 2001 From: Marek Dobransky Date: Thu, 5 Sep 2024 14:48:26 +0200 Subject: [PATCH 08/10] custom env loader (#14) * custom env loader * allow for env in the middle of string --- .flake8 | 1 + pyproject.toml | 1 - rialto/common/env_yaml.py | 35 +++++++++++++++++ rialto/common/utils.py | 3 +- tests/common/test_yaml.py | 81 +++++++++++++++++++++++++++++++++++++++ 5 files changed, 119 insertions(+), 2 deletions(-) create mode 100644 rialto/common/env_yaml.py create mode 100644 tests/common/test_yaml.py diff --git a/.flake8 b/.flake8 index 21099b7..c2cf6c9 100644 --- a/.flake8 +++ b/.flake8 @@ -14,3 +14,4 @@ extend-ignore = D100, D104, D107, + E203, diff --git a/pyproject.toml b/pyproject.toml index 5812612..23aa34e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -31,7 +31,6 @@ pandas = "^2.1.0" flake8-broken-line = "^1.0.0" loguru = "^0.7.2" importlib-metadata = "^7.2.1" -env_yaml = "^0.0.3" [tool.poetry.dev-dependencies] pyspark = "^3.4.1" diff --git a/rialto/common/env_yaml.py b/rialto/common/env_yaml.py new file mode 100644 index 0000000..e92bf67 --- /dev/null +++ b/rialto/common/env_yaml.py @@ -0,0 +1,35 @@ +import os +import re + +import yaml +from loguru import logger + +__all__ = ["EnvLoader"] + +# Regex pattern to capture variable and the rest of the string +_path_matcher = re.compile(r"(?P<before>.*)\$\{(?P<env_name>[^}^{:]+)(?::(?P<default_value>[^}^{]*))?\}(?P<after>.*)") + + +def _path_constructor(loader, node): + value = node.value + match = _path_matcher.search(value) + if match: + before = match.group("before") + after = match.group("after") + sub = os.getenv(match.group("env_name"), match.group("default_value")) + if sub is None: + raise ValueError(f"Environment variable {match.group('env_name')} has no assigned value") + new_value = before + sub + after + logger.info(f"Config: Replacing {value}, with {new_value}") + return new_value + return value + + +class EnvLoader(yaml.SafeLoader): + """Custom loader that replaces values with environment variables""" + + pass + + +EnvLoader.add_implicit_resolver("!env_substitute", _path_matcher, None) +EnvLoader.add_constructor("!env_substitute", _path_constructor) diff --git a/rialto/common/utils.py
b/rialto/common/utils.py index b2e19b4..6f5ed1f 100644 --- a/rialto/common/utils.py +++ b/rialto/common/utils.py @@ -19,10 +19,11 @@ import pyspark.sql.functions as F import yaml -from env_yaml import EnvLoader from pyspark.sql import DataFrame from pyspark.sql.types import FloatType +from rialto.common.env_yaml import EnvLoader + def load_yaml(path: str) -> Any: """ diff --git a/tests/common/test_yaml.py b/tests/common/test_yaml.py new file mode 100644 index 0000000..9d63b66 --- /dev/null +++ b/tests/common/test_yaml.py @@ -0,0 +1,81 @@ +import os + +import pytest +import yaml + +from rialto.common.env_yaml import EnvLoader + + +def test_plain(): + data = {"a": "string_value", "b": 2} + cfg = """ + a: string_value + b: 2 + """ + assert yaml.load(cfg, EnvLoader) == data + + +def test_full_sub_default(): + data = {"a": "default_value", "b": 2} + cfg = """ + a: ${EMPTY_VAR:default_value} + b: 2 + """ + assert yaml.load(cfg, EnvLoader) == data + + +def test_full_sub_env(): + os.environ["FILLED_VAR"] = "env_value" + data = {"a": "env_value", "b": 2} + cfg = """ + a: ${FILLED_VAR:default_value} + b: 2 + """ + assert yaml.load(cfg, EnvLoader) == data + + +def test_partial_sub_start(): + data = {"a": "start_string", "b": 2} + cfg = """ + a: ${START_VAR:start}_string + b: 2 + """ + assert yaml.load(cfg, EnvLoader) == data + + +def test_partial_sub_end(): + data = {"a": "string_end", "b": 2} + cfg = """ + a: string_${END_VAR:end} + b: 2 + """ + assert yaml.load(cfg, EnvLoader) == data + + +def test_partial_sub_mid(): + data = {"a": "string_mid_sub", "b": 2} + cfg = """ + a: string_${MID_VAR:mid}_sub + b: 2 + """ + assert yaml.load(cfg, EnvLoader) == data + + +def test_partial_sub_no_default_no_value(): + with pytest.raises(Exception) as e: + cfg = """ + a: string_${MANDATORY_VAL_MISSING}_sub + b: 2 + """ + assert yaml.load(cfg, EnvLoader) + assert str(e.value) == "Environment variable MANDATORY_VAL_MISSING has no assigned value" + + +def test_partial_sub_no_default(): + os.environ["MANDATORY_VAL"] = "mandatory_value" + data = {"a": "string_mandatory_value_sub", "b": 2} + cfg = """ + a: string_${MANDATORY_VAL}_sub + b: 2 + """ + assert yaml.load(cfg, EnvLoader) == data From b25a56242c9ab1b18c9b1d910bd06ebcfbc301c7 Mon Sep 17 00:00:00 2001 From: Marek Dobransky Date: Thu, 12 Sep 2024 15:46:21 +0200 Subject: [PATCH 09/10] added dependency finder util --- rialto/runner/utils.py | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/rialto/runner/utils.py b/rialto/runner/utils.py index b74ec1b..5af1723 100644 --- a/rialto/runner/utils.py +++ b/rialto/runner/utils.py @@ -1,3 +1,19 @@ +# Copyright 2022 ABSA Group Limited +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
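A quick usage sketch for the `EnvLoader` introduced in the patch above (the YAML snippet and variable name are illustrative): `${VAR:default}` placeholders are substituted at load time, and a placeholder without a default raises `ValueError` when the variable is unset.

```python
import os

import yaml

from rialto.common.env_yaml import EnvLoader

os.environ["RUN_ENV"] = "prod"

# Substitution also works in the middle of a string, per the tests above.
cfg = yaml.load("schema: catalog_${RUN_ENV:dev}.features", EnvLoader)
assert cfg == {"schema": "catalog_prod.features"}
```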
+ +__all__ = ["load_module", "table_exists", "get_partitions", "init_tools", "find_dependency"] + from datetime import date from importlib import import_module from typing import List, Tuple @@ -72,3 +88,17 @@ def init_tools(spark: SparkSession, pipeline: PipelineConfig) -> Tuple[MetadataM else: feature_loader = None return metadata_manager, feature_loader + + +def find_dependency(config: PipelineConfig, name: str): + """ + Get dependency from config + + :param config: Pipeline configuration + :param name: Dependency name + :return: Dependency object + """ + for dep in config.dependencies: + if dep.name == name: + return dep + return None From cf2a12cb2a914221788e8944f5bdf917392eebbe Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Vladislav=20Vanc=C3=A1k?= Date: Sat, 21 Sep 2024 21:01:51 +0200 Subject: [PATCH 10/10] Module Register rework of dependency registration (#15) * Module Register * Resolver Reworked * Overlapping function names prevention * Documentation * consistent arguments --------- Co-authored-by: Marek Dobransky --- CHANGELOG.md | 4 + README.md | 29 +- poetry.lock | 719 +++++++++--------- pyproject.toml | 3 +- rialto/common/utils.py | 24 +- rialto/jobs/__init__.py | 4 + rialto/jobs/decorators.py | 18 +- rialto/jobs/job_base.py | 79 +- rialto/jobs/module_register.py | 121 +++ rialto/jobs/resolver.py | 86 +-- rialto/jobs/test_utils.py | 62 +- .../complex_dependency_job.py | 26 + .../dependency_checks_job/datasources_a.py | 16 + .../dependency_checks_job/datasources_b.py | 9 + .../dependency_checks_job/datasources_c.py | 11 + .../dependency_checks_job.py | 32 + .../duplicate_dependency_job.py | 15 + .../dependency_checks_job/main_datasources.py | 37 + tests/jobs/resources.py | 15 +- tests/jobs/test_decorators.py | 10 +- tests/jobs/test_job/dependency_tests_job.py | 51 -- tests/jobs/test_job_base.py | 19 +- tests/jobs/test_resolver.py | 55 +- tests/jobs/test_test_utils.py | 63 +- 24 files changed, 904 insertions(+), 604 deletions(-) create mode 100644 rialto/jobs/module_register.py create mode 100644 tests/jobs/dependency_checks_job/complex_dependency_job.py create mode 100644 tests/jobs/dependency_checks_job/datasources_a.py create mode 100644 tests/jobs/dependency_checks_job/datasources_b.py create mode 100644 tests/jobs/dependency_checks_job/datasources_c.py create mode 100644 tests/jobs/dependency_checks_job/dependency_checks_job.py create mode 100644 tests/jobs/dependency_checks_job/duplicate_dependency_job.py create mode 100644 tests/jobs/dependency_checks_job/main_datasources.py delete mode 100644 tests/jobs/test_job/dependency_tests_job.py diff --git a/CHANGELOG.md b/CHANGELOG.md index 63e9791..e6eee74 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -20,6 +20,10 @@ All notable changes to this project will be documented in this file. - config holder removed from jobs - metadata_manager and feature_loader are now available arguments, depending on configuration - added @config decorator, similar use case to @datasource, for parsing configuration + - reworked Resolver + Added ModuleRegister + - datasources are no longer registered just by importing, thus are no longer available for all jobs + - register_dependency_callable and register_dependency_module added to register datasources + - together, it's now possible to have 2 datasources with the same name, but different implementations for 2 jobs.
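As a usage sketch for the `find_dependency` helper added in the patch above: inside a transformation, it can recall a named dependency's details from the pipeline config. This assumes the dependency entry exposes `table` and `date_col` attributes, as in the test configs; the dependency name below is illustrative.

```python
from pyspark.sql import DataFrame

from rialto.runner.utils import find_dependency


def load_source1(reader, config) -> DataFrame:
    # Look up the dependency declared under name "source1"; find_dependency
    # returns None when no dependency with that name is configured.
    dep = find_dependency(config, "source1")
    if dep is None:
        raise ValueError("Dependency 'source1' is not declared for this pipeline")
    return reader.get_table(dep.table, date_column=dep.date_col)
```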
 #### TableReader
 - function signatures changed
   - until -> date_until

diff --git a/README.md b/README.md
index f179dc6..6264534 100644
--- a/README.md
+++ b/README.md
@@ -372,7 +372,7 @@ With that sorted out, we can now provide a quick example of the *rialto.jobs* mo
 ```python
 from pyspark.sql import DataFrame
 from rialto.common import TableReader
-from rialto.jobs.decorators import config_parser, job, datasource
+from rialto.jobs import config_parser, job, datasource
 from rialto.runner.config_loader import PipelineConfig
 from pydantic import BaseModel
@@ -419,7 +419,6 @@ If you want to disable versioning of your job (adding package VERSION column to
 def my_job(...):
     ...
 ```
-
 These parameters can be used separately, or combined.

 ### Notes & Rules
@@ -435,6 +434,32 @@ This can be useful in **model training**.
 Finally, remember that your jobs are still just *Rialto Transformations* internally.
 Meaning that at the end of the day, you should always read some data, do some operations on it and either return a pyspark DataFrame, or not return anything and let the framework return the placeholder one.
+
+### Importing / Registering Datasources
+Datasources required for a job (or another datasource) can be defined in a different module.
+To register the datasources defined in another module, you can use the following functions:
+
+```python3
+from rialto.jobs import register_dependency_callable, register_dependency_module
+import my_package.my_datasources as md
+import my_package.my_datasources_big as big_md
+
+# Register an entire dependency module
+register_dependency_module(md)
+
+# Register a single datasource from a bigger module
+register_dependency_callable(big_md.sample_datasource)
+
+@job
+def my_job(my_datasource, sample_datasource: DataFrame, ...):
+    ...
+```
+
+Each job/datasource can only resolve datasources it has defined as dependencies.
+
+**NOTE**: While ```register_dependency_module``` only registers a module's datasources as available dependencies, ```register_dependency_callable``` actually brings the datasource into the target module - where it thus becomes available for export in the dependency chains.
+
+
 ### Testing
 One of the main advantages of the jobs module is the simplification of unit tests for your transformations. Rialto provides the following tools:

diff --git a/poetry.lock b/poetry.lock index 66ca41b..d077273 100644 --- a/poetry.lock +++ b/poetry.lock @@ -24,13 +24,13 @@ files = [ [[package]] name = "babel" -version = "2.15.0" +version = "2.16.0" description = "Internationalization utilities" optional = false python-versions = ">=3.8" files = [ - {file = "Babel-2.15.0-py3-none-any.whl", hash = "sha256:08706bdad8d0a3413266ab61bd6c34d0c28d6e1e7badf40a2cebe67644e2e1fb"}, - {file = "babel-2.15.0.tar.gz", hash = "sha256:8daf0e265d05768bc6c7a314cf1321e9a123afc328cc635c18622a2f30a04413"}, + {file = "babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b"}, + {file = "babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316"}, ] [package.extras] @@ -84,13 +84,13 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "certifi" -version = "2024.7.4" +version = "2024.8.30" description = "Python package for providing Mozilla's CA Bundle."
optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, - {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, + {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, + {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, ] [[package]] @@ -241,63 +241,83 @@ files = [ [[package]] name = "coverage" -version = "7.6.0" +version = "7.6.1" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dff044f661f59dace805eedb4a7404c573b6ff0cdba4a524141bc63d7be5c7fd"}, - {file = "coverage-7.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a8659fd33ee9e6ca03950cfdcdf271d645cf681609153f218826dd9805ab585c"}, - {file = "coverage-7.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7792f0ab20df8071d669d929c75c97fecfa6bcab82c10ee4adb91c7a54055463"}, - {file = "coverage-7.6.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4b3cd1ca7cd73d229487fa5caca9e4bc1f0bca96526b922d61053ea751fe791"}, - {file = "coverage-7.6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7e128f85c0b419907d1f38e616c4f1e9f1d1b37a7949f44df9a73d5da5cd53c"}, - {file = "coverage-7.6.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a94925102c89247530ae1dab7dc02c690942566f22e189cbd53579b0693c0783"}, - {file = "coverage-7.6.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:dcd070b5b585b50e6617e8972f3fbbee786afca71b1936ac06257f7e178f00f6"}, - {file = "coverage-7.6.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d50a252b23b9b4dfeefc1f663c568a221092cbaded20a05a11665d0dbec9b8fb"}, - {file = "coverage-7.6.0-cp310-cp310-win32.whl", hash = "sha256:0e7b27d04131c46e6894f23a4ae186a6a2207209a05df5b6ad4caee6d54a222c"}, - {file = "coverage-7.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:54dece71673b3187c86226c3ca793c5f891f9fc3d8aa183f2e3653da18566169"}, - {file = "coverage-7.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7b525ab52ce18c57ae232ba6f7010297a87ced82a2383b1afd238849c1ff933"}, - {file = "coverage-7.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bea27c4269234e06f621f3fac3925f56ff34bc14521484b8f66a580aacc2e7d"}, - {file = "coverage-7.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed8d1d1821ba5fc88d4a4f45387b65de52382fa3ef1f0115a4f7a20cdfab0e94"}, - {file = "coverage-7.6.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01c322ef2bbe15057bc4bf132b525b7e3f7206f071799eb8aa6ad1940bcf5fb1"}, - {file = "coverage-7.6.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03cafe82c1b32b770a29fd6de923625ccac3185a54a5e66606da26d105f37dac"}, - {file = "coverage-7.6.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0d1b923fc4a40c5832be4f35a5dab0e5ff89cddf83bb4174499e02ea089daf57"}, - {file = "coverage-7.6.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4b03741e70fb811d1a9a1d75355cf391f274ed85847f4b78e35459899f57af4d"}, - {file = 
"coverage-7.6.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a73d18625f6a8a1cbb11eadc1d03929f9510f4131879288e3f7922097a429f63"}, - {file = "coverage-7.6.0-cp311-cp311-win32.whl", hash = "sha256:65fa405b837060db569a61ec368b74688f429b32fa47a8929a7a2f9b47183713"}, - {file = "coverage-7.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:6379688fb4cfa921ae349c76eb1a9ab26b65f32b03d46bb0eed841fd4cb6afb1"}, - {file = "coverage-7.6.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f7db0b6ae1f96ae41afe626095149ecd1b212b424626175a6633c2999eaad45b"}, - {file = "coverage-7.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bbdf9a72403110a3bdae77948b8011f644571311c2fb35ee15f0f10a8fc082e8"}, - {file = "coverage-7.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cc44bf0315268e253bf563f3560e6c004efe38f76db03a1558274a6e04bf5d5"}, - {file = "coverage-7.6.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da8549d17489cd52f85a9829d0e1d91059359b3c54a26f28bec2c5d369524807"}, - {file = "coverage-7.6.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0086cd4fc71b7d485ac93ca4239c8f75732c2ae3ba83f6be1c9be59d9e2c6382"}, - {file = "coverage-7.6.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1fad32ee9b27350687035cb5fdf9145bc9cf0a094a9577d43e909948ebcfa27b"}, - {file = "coverage-7.6.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:044a0985a4f25b335882b0966625270a8d9db3d3409ddc49a4eb00b0ef5e8cee"}, - {file = "coverage-7.6.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:76d5f82213aa78098b9b964ea89de4617e70e0d43e97900c2778a50856dac605"}, - {file = "coverage-7.6.0-cp312-cp312-win32.whl", hash = "sha256:3c59105f8d58ce500f348c5b56163a4113a440dad6daa2294b5052a10db866da"}, - {file = "coverage-7.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:ca5d79cfdae420a1d52bf177de4bc2289c321d6c961ae321503b2ca59c17ae67"}, - {file = "coverage-7.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d39bd10f0ae453554798b125d2f39884290c480f56e8a02ba7a6ed552005243b"}, - {file = "coverage-7.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:beb08e8508e53a568811016e59f3234d29c2583f6b6e28572f0954a6b4f7e03d"}, - {file = "coverage-7.6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2e16f4cd2bc4d88ba30ca2d3bbf2f21f00f382cf4e1ce3b1ddc96c634bc48ca"}, - {file = "coverage-7.6.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6616d1c9bf1e3faea78711ee42a8b972367d82ceae233ec0ac61cc7fec09fa6b"}, - {file = "coverage-7.6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad4567d6c334c46046d1c4c20024de2a1c3abc626817ae21ae3da600f5779b44"}, - {file = "coverage-7.6.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d17c6a415d68cfe1091d3296ba5749d3d8696e42c37fca5d4860c5bf7b729f03"}, - {file = "coverage-7.6.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9146579352d7b5f6412735d0f203bbd8d00113a680b66565e205bc605ef81bc6"}, - {file = "coverage-7.6.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:cdab02a0a941af190df8782aafc591ef3ad08824f97850b015c8c6a8b3877b0b"}, - {file = "coverage-7.6.0-cp38-cp38-win32.whl", hash = "sha256:df423f351b162a702c053d5dddc0fc0ef9a9e27ea3f449781ace5f906b664428"}, - {file = "coverage-7.6.0-cp38-cp38-win_amd64.whl", hash = 
"sha256:f2501d60d7497fd55e391f423f965bbe9e650e9ffc3c627d5f0ac516026000b8"}, - {file = "coverage-7.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7221f9ac9dad9492cecab6f676b3eaf9185141539d5c9689d13fd6b0d7de840c"}, - {file = "coverage-7.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ddaaa91bfc4477d2871442bbf30a125e8fe6b05da8a0015507bfbf4718228ab2"}, - {file = "coverage-7.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4cbe651f3904e28f3a55d6f371203049034b4ddbce65a54527a3f189ca3b390"}, - {file = "coverage-7.6.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:831b476d79408ab6ccfadaaf199906c833f02fdb32c9ab907b1d4aa0713cfa3b"}, - {file = "coverage-7.6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46c3d091059ad0b9c59d1034de74a7f36dcfa7f6d3bde782c49deb42438f2450"}, - {file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4d5fae0a22dc86259dee66f2cc6c1d3e490c4a1214d7daa2a93d07491c5c04b6"}, - {file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:07ed352205574aad067482e53dd606926afebcb5590653121063fbf4e2175166"}, - {file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:49c76cdfa13015c4560702574bad67f0e15ca5a2872c6a125f6327ead2b731dd"}, - {file = "coverage-7.6.0-cp39-cp39-win32.whl", hash = "sha256:482855914928c8175735a2a59c8dc5806cf7d8f032e4820d52e845d1f731dca2"}, - {file = "coverage-7.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:543ef9179bc55edfd895154a51792b01c017c87af0ebaae092720152e19e42ca"}, - {file = "coverage-7.6.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:6fe885135c8a479d3e37a7aae61cbd3a0fb2deccb4dda3c25f92a49189f766d6"}, - {file = "coverage-7.6.0.tar.gz", hash = "sha256:289cc803fa1dc901f84701ac10c9ee873619320f2f9aff38794db4a4a0268d51"}, + {file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"}, + {file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"}, + {file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"}, + {file = "coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"}, + {file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash 
= "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"}, + {file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"}, + {file = "coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"}, + {file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"}, + {file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"}, + {file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"}, + {file = "coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106"}, + {file = "coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155"}, + {file = "coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a"}, + {file = "coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129"}, + {file = "coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e"}, + {file = "coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3"}, + {file = "coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f"}, + {file = "coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657"}, + {file = "coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0"}, + {file = "coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3"}, + {file = 
"coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989"}, + {file = "coverage-7.6.1-cp38-cp38-win32.whl", hash = "sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7"}, + {file = "coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8"}, + {file = "coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255"}, + {file = "coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36"}, + {file = "coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c"}, + {file = "coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca"}, + {file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"}, + {file = "coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"}, ] [package.dependencies] @@ -343,20 +363,6 @@ files = [ {file = "docutils-0.20.1.tar.gz", hash = "sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b"}, ] -[[package]] -name = "env-yaml" -version = "0.0.3" -description = "Provides a yaml loader which substitutes environment variables and supports defaults" -optional = false -python-versions = "*" -files = [ - {file = "env-yaml-0.0.3.tar.gz", hash = "sha256:b6b55b18c28fb623793137a8e55bd666d6483af7fd0162a41a62325ce662fda6"}, - {file = "env_yaml-0.0.3-py3-none-any.whl", hash = "sha256:f56723c8997bea1240bf634b9e29832714dd9745a42cbc2649f1238a6a576244"}, -] - -[package.dependencies] -pyyaml = ">=6.0" - [[package]] name = "exceptiongroup" version = "1.2.2" @@ -373,19 +379,19 @@ test = ["pytest (>=6)"] [[package]] name = "filelock" -version = "3.15.4" 
+version = "3.16.1" description = "A platform independent file lock." optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7"}, - {file = "filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb"}, + {file = "filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0"}, + {file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"] -typing = ["typing-extensions (>=4.8)"] +docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4.1)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"] +typing = ["typing-extensions (>=4.12.2)"] [[package]] name = "flake8" @@ -434,13 +440,13 @@ pydocstyle = ">=2.1" [[package]] name = "identify" -version = "2.6.0" +version = "2.6.1" description = "File identification library for Python" optional = false python-versions = ">=3.8" files = [ - {file = "identify-2.6.0-py2.py3-none-any.whl", hash = "sha256:e79ae4406387a9d300332b5fd366d8994f1525e8414984e1a59e058b2eda2dd0"}, - {file = "identify-2.6.0.tar.gz", hash = "sha256:cb171c685bdc31bcc4c1734698736a7d5b6c8bf2e0c15117f4d469c8640ae5cf"}, + {file = "identify-2.6.1-py2.py3-none-any.whl", hash = "sha256:53863bcac7caf8d2ed85bd20312ea5dcfc22226800f6d6881f232d861db5a8f0"}, + {file = "identify-2.6.1.tar.gz", hash = "sha256:91478c5fb7c3aac5ff7bf9b4344f803843dc586832d5f110d672b19aa1984c98"}, ] [package.extras] @@ -448,15 +454,18 @@ license = ["ukkonen"] [[package]] name = "idna" -version = "3.7" +version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" files = [ - {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, - {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, ] +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + [[package]] name = "imagesize" version = "1.4.1" @@ -662,56 +671,47 @@ files = [ [[package]] name = "numpy" -version = "2.0.1" +version = "1.26.4" description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.9" files = [ - {file = "numpy-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0fbb536eac80e27a2793ffd787895242b7f18ef792563d742c2d673bfcb75134"}, - {file = "numpy-2.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:69ff563d43c69b1baba77af455dd0a839df8d25e8590e79c90fcbe1499ebde42"}, - {file = 
"numpy-2.0.1-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:1b902ce0e0a5bb7704556a217c4f63a7974f8f43e090aff03fcf262e0b135e02"}, - {file = "numpy-2.0.1-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:f1659887361a7151f89e79b276ed8dff3d75877df906328f14d8bb40bb4f5101"}, - {file = "numpy-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4658c398d65d1b25e1760de3157011a80375da861709abd7cef3bad65d6543f9"}, - {file = "numpy-2.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4127d4303b9ac9f94ca0441138acead39928938660ca58329fe156f84b9f3015"}, - {file = "numpy-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e5eeca8067ad04bc8a2a8731183d51d7cbaac66d86085d5f4766ee6bf19c7f87"}, - {file = "numpy-2.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9adbd9bb520c866e1bfd7e10e1880a1f7749f1f6e5017686a5fbb9b72cf69f82"}, - {file = "numpy-2.0.1-cp310-cp310-win32.whl", hash = "sha256:7b9853803278db3bdcc6cd5beca37815b133e9e77ff3d4733c247414e78eb8d1"}, - {file = "numpy-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:81b0893a39bc5b865b8bf89e9ad7807e16717f19868e9d234bdaf9b1f1393868"}, - {file = "numpy-2.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:75b4e316c5902d8163ef9d423b1c3f2f6252226d1aa5cd8a0a03a7d01ffc6268"}, - {file = "numpy-2.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6e4eeb6eb2fced786e32e6d8df9e755ce5be920d17f7ce00bc38fcde8ccdbf9e"}, - {file = "numpy-2.0.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:a1e01dcaab205fbece13c1410253a9eea1b1c9b61d237b6fa59bcc46e8e89343"}, - {file = "numpy-2.0.1-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:a8fc2de81ad835d999113ddf87d1ea2b0f4704cbd947c948d2f5513deafe5a7b"}, - {file = "numpy-2.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a3d94942c331dd4e0e1147f7a8699a4aa47dffc11bf8a1523c12af8b2e91bbe"}, - {file = "numpy-2.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15eb4eca47d36ec3f78cde0a3a2ee24cf05ca7396ef808dda2c0ddad7c2bde67"}, - {file = "numpy-2.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b83e16a5511d1b1f8a88cbabb1a6f6a499f82c062a4251892d9ad5d609863fb7"}, - {file = "numpy-2.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f87fec1f9bc1efd23f4227becff04bd0e979e23ca50cc92ec88b38489db3b55"}, - {file = "numpy-2.0.1-cp311-cp311-win32.whl", hash = "sha256:36d3a9405fd7c511804dc56fc32974fa5533bdeb3cd1604d6b8ff1d292b819c4"}, - {file = "numpy-2.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:08458fbf403bff5e2b45f08eda195d4b0c9b35682311da5a5a0a0925b11b9bd8"}, - {file = "numpy-2.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6bf4e6f4a2a2e26655717a1983ef6324f2664d7011f6ef7482e8c0b3d51e82ac"}, - {file = "numpy-2.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7d6fddc5fe258d3328cd8e3d7d3e02234c5d70e01ebe377a6ab92adb14039cb4"}, - {file = "numpy-2.0.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:5daab361be6ddeb299a918a7c0864fa8618af66019138263247af405018b04e1"}, - {file = "numpy-2.0.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:ea2326a4dca88e4a274ba3a4405eb6c6467d3ffbd8c7d38632502eaae3820587"}, - {file = "numpy-2.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:529af13c5f4b7a932fb0e1911d3a75da204eff023ee5e0e79c1751564221a5c8"}, - {file = "numpy-2.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6790654cb13eab303d8402354fabd47472b24635700f631f041bd0b65e37298a"}, - {file = 
"numpy-2.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cbab9fc9c391700e3e1287666dfd82d8666d10e69a6c4a09ab97574c0b7ee0a7"}, - {file = "numpy-2.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:99d0d92a5e3613c33a5f01db206a33f8fdf3d71f2912b0de1739894668b7a93b"}, - {file = "numpy-2.0.1-cp312-cp312-win32.whl", hash = "sha256:173a00b9995f73b79eb0191129f2455f1e34c203f559dd118636858cc452a1bf"}, - {file = "numpy-2.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:bb2124fdc6e62baae159ebcfa368708867eb56806804d005860b6007388df171"}, - {file = "numpy-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bfc085b28d62ff4009364e7ca34b80a9a080cbd97c2c0630bb5f7f770dae9414"}, - {file = "numpy-2.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8fae4ebbf95a179c1156fab0b142b74e4ba4204c87bde8d3d8b6f9c34c5825ef"}, - {file = "numpy-2.0.1-cp39-cp39-macosx_14_0_arm64.whl", hash = "sha256:72dc22e9ec8f6eaa206deb1b1355eb2e253899d7347f5e2fae5f0af613741d06"}, - {file = "numpy-2.0.1-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:ec87f5f8aca726117a1c9b7083e7656a9d0d606eec7299cc067bb83d26f16e0c"}, - {file = "numpy-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f682ea61a88479d9498bf2091fdcd722b090724b08b31d63e022adc063bad59"}, - {file = "numpy-2.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8efc84f01c1cd7e34b3fb310183e72fcdf55293ee736d679b6d35b35d80bba26"}, - {file = "numpy-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3fdabe3e2a52bc4eff8dc7a5044342f8bd9f11ef0934fcd3289a788c0eb10018"}, - {file = "numpy-2.0.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:24a0e1befbfa14615b49ba9659d3d8818a0f4d8a1c5822af8696706fbda7310c"}, - {file = "numpy-2.0.1-cp39-cp39-win32.whl", hash = "sha256:f9cf5ea551aec449206954b075db819f52adc1638d46a6738253a712d553c7b4"}, - {file = "numpy-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:e9e81fa9017eaa416c056e5d9e71be93d05e2c3c2ab308d23307a8bc4443c368"}, - {file = "numpy-2.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:61728fba1e464f789b11deb78a57805c70b2ed02343560456190d0501ba37b0f"}, - {file = "numpy-2.0.1-pp39-pypy39_pp73-macosx_14_0_x86_64.whl", hash = "sha256:12f5d865d60fb9734e60a60f1d5afa6d962d8d4467c120a1c0cda6eb2964437d"}, - {file = "numpy-2.0.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eacf3291e263d5a67d8c1a581a8ebbcfd6447204ef58828caf69a5e3e8c75990"}, - {file = "numpy-2.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2c3a346ae20cfd80b6cfd3e60dc179963ef2ea58da5ec074fd3d9e7a1e7ba97f"}, - {file = "numpy-2.0.1.tar.gz", hash = "sha256:485b87235796410c3519a699cfe1faab097e509e90ebb05dcd098db2ae87e7b3"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, + {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, + {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, + {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, + {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, + {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, + {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, + {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, + {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, + {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, ] [[package]] @@ -727,47 +727,60 @@ files = [ [[package]] name = "pandas" -version = "2.2.2" +version = "2.2.3" description = "Powerful data structures for data analysis, time series, and statistics" optional = false python-versions = ">=3.9" files = [ - {file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"}, - {file = "pandas-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7adfc142dac335d8c1e0dcbd37eb8617eac386596eb9e1a1b77791cf2498238"}, - {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"}, - {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"}, - {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"}, - {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e5a0b00e1e56a842f922e7fae8ae4077aee4af0acb5ae3622bd4b4c30aedf99"}, - {file = "pandas-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ddf818e4e6c7c6f4f7c8a12709696d193976b591cc7dc50588d3d1a6b5dc8772"}, - {file = "pandas-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:696039430f7a562b74fa45f540aca068ea85fa34c244d0deee539cb6d70aa288"}, - {file = "pandas-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e90497254aacacbc4ea6ae5e7a8cd75629d6ad2b30025a4a8b09aa4faf55151"}, - {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58b84b91b0b9f4bafac2a0ac55002280c094dfc6402402332c0913a59654ab2b"}, - {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2123dc9ad6a814bcdea0f099885276b31b24f7edf40f6cdbc0912672e22eee"}, - {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2925720037f06e89af896c70bca73459d7e6a4be96f9de79e2d440bd499fe0db"}, - {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0cace394b6ea70c01ca1595f839cf193df35d1575986e484ad35c4aeae7266c1"}, - {file = "pandas-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:873d13d177501a28b2756375d59816c365e42ed8417b41665f346289adc68d24"}, - {file = "pandas-2.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9dfde2a0ddef507a631dc9dc4af6a9489d5e2e740e226ad426a05cabfbd7c8ef"}, - {file = "pandas-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:e9b79011ff7a0f4b1d6da6a61aa1aa604fb312d6647de5bad20013682d1429ce"}, - {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb51fe389360f3b5a4d57dbd2848a5f033350336ca3b340d1c53a1fad33bcad"}, - {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee3a87076c0756de40b05c5e9a6069c035ba43e8dd71c379e68cab2c20f16ad"}, - {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e374f59e440d4ab45ca2fffde54b81ac3834cf5ae2cdfa69c90bc03bde04d76"}, - {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"}, - {file = "pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"}, - {file = "pandas-2.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0ca6377b8fca51815f382bd0b697a0814c8bda55115678cbc94c30aacbb6eff2"}, - {file = "pandas-2.2.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9057e6aa78a584bc93a13f0a9bf7e753a5e9770a30b4d758b8d5f2a62a9433cd"}, - {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:001910ad31abc7bf06f49dcc903755d2f7f3a9186c0c040b827e522e9cef0863"}, - {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b479b0bd07204e37583c191535505410daa8df638fd8e75ae1b383851fe921"}, - {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a77e9d1c386196879aa5eb712e77461aaee433e54c68cf253053a73b7e49c33a"}, - {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92fd6b027924a7e178ac202cfbe25e53368db90d56872d20ffae94b96c7acc57"}, - {file = "pandas-2.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:640cef9aa381b60e296db324337a554aeeb883ead99dc8f6c18e81a93942f5f4"}, - {file = "pandas-2.2.2.tar.gz", hash = "sha256:9e79019aba43cb4fda9e4d983f8e88ca0373adbb697ae9c6c43093218de28b54"}, + {file = "pandas-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1948ddde24197a0f7add2bdc4ca83bf2b1ef84a1bc8ccffd95eda17fd836ecb5"}, + {file = "pandas-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:381175499d3802cde0eabbaf6324cce0c4f5d52ca6f8c377c29ad442f50f6348"}, + {file = "pandas-2.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d9c45366def9a3dd85a6454c0e7908f2b3b8e9c138f5dc38fed7ce720d8453ed"}, + {file = "pandas-2.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86976a1c5b25ae3f8ccae3a5306e443569ee3c3faf444dfd0f41cda24667ad57"}, + {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b8661b0238a69d7aafe156b7fa86c44b881387509653fdf857bebc5e4008ad42"}, + {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:37e0aced3e8f539eccf2e099f65cdb9c8aa85109b0be6e93e2baff94264bdc6f"}, + {file = "pandas-2.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:56534ce0746a58afaf7942ba4863e0ef81c9c50d3f0ae93e9497d6a41a057645"}, + {file = "pandas-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66108071e1b935240e74525006034333f98bcdb87ea116de573a6a0dccb6c039"}, + {file = "pandas-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c2875855b0ff77b2a64a0365e24455d9990730d6431b9e0ee18ad8acee13dbd"}, + {file = "pandas-2.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd8d0c3be0515c12fed0bdbae072551c8b54b7192c7b1fda0ba56059a0179698"}, + {file = 
"pandas-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c124333816c3a9b03fbeef3a9f230ba9a737e9e5bb4060aa2107a86cc0a497fc"}, + {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:63cc132e40a2e084cf01adf0775b15ac515ba905d7dcca47e9a251819c575ef3"}, + {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29401dbfa9ad77319367d36940cd8a0b3a11aba16063e39632d98b0e931ddf32"}, + {file = "pandas-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:3fc6873a41186404dad67245896a6e440baacc92f5b716ccd1bc9ed2995ab2c5"}, + {file = "pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9"}, + {file = "pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4"}, + {file = "pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3"}, + {file = "pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319"}, + {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8"}, + {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a"}, + {file = "pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13"}, + {file = "pandas-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f00d1345d84d8c86a63e476bb4955e46458b304b9575dcf71102b5c705320015"}, + {file = "pandas-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3508d914817e153ad359d7e069d752cdd736a247c322d932eb89e6bc84217f28"}, + {file = "pandas-2.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22a9d949bfc9a502d320aa04e5d02feab689d61da4e7764b62c30b991c42c5f0"}, + {file = "pandas-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a255b2c19987fbbe62a9dfd6cff7ff2aa9ccab3fc75218fd4b7530f01efa24"}, + {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:800250ecdadb6d9c78eae4990da62743b857b470883fa27f652db8bdde7f6659"}, + {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6374c452ff3ec675a8f46fd9ab25c4ad0ba590b71cf0656f8b6daa5202bca3fb"}, + {file = "pandas-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:61c5ad4043f791b61dd4752191d9f07f0ae412515d59ba8f005832a532f8736d"}, + {file = "pandas-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3b71f27954685ee685317063bf13c7709a7ba74fc996b84fc6821c59b0f06468"}, + {file = "pandas-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:38cf8125c40dae9d5acc10fa66af8ea6fdf760b2714ee482ca691fc66e6fcb18"}, + {file = "pandas-2.2.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ba96630bc17c875161df3818780af30e43be9b166ce51c9a18c1feae342906c2"}, + {file = "pandas-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db71525a1538b30142094edb9adc10be3f3e176748cd7acc2240c2f2e5aa3a4"}, + {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15c0e1e02e93116177d29ff83e8b1619c93ddc9c49083f237d4312337a61165d"}, + {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = 
"sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a"}, + {file = "pandas-2.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc6b93f9b966093cb0fd62ff1a7e4c09e6d546ad7c1de191767baffc57628f39"}, + {file = "pandas-2.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5dbca4c1acd72e8eeef4753eeca07de9b1db4f398669d5994086f788a5d7cc30"}, + {file = "pandas-2.2.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8cd6d7cc958a3910f934ea8dbdf17b2364827bb4dafc38ce6eef6bb3d65ff09c"}, + {file = "pandas-2.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99df71520d25fade9db7c1076ac94eb994f4d2673ef2aa2e86ee039b6746d20c"}, + {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:31d0ced62d4ea3e231a9f228366919a5ea0b07440d9d4dac345376fd8e1477ea"}, + {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7eee9e7cea6adf3e3d24e304ac6b8300646e2a5d1cd3a3c2abed9101b0846761"}, + {file = "pandas-2.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:4850ba03528b6dd51d6c5d273c46f183f39a9baf3f0143e566b89450965b105e"}, + {file = "pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667"}, ] [package.dependencies] numpy = [ + {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, {version = ">=1.22.4", markers = "python_version < \"3.11\""}, {version = ">=1.23.2", markers = "python_version == \"3.11\""}, - {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, ] python-dateutil = ">=2.8.2" pytz = ">=2020.1" @@ -836,19 +849,19 @@ flake8 = ">=5.0.0" [[package]] name = "platformdirs" -version = "4.2.2" +version = "4.3.6" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, - {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, + {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, + {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] -type = ["mypy (>=1.8)"] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.11.2)"] [[package]] name = "pluggy" @@ -867,13 +880,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pre-commit" -version = "3.7.1" +version = "3.8.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." 
optional = false python-versions = ">=3.9" files = [ - {file = "pre_commit-3.7.1-py2.py3-none-any.whl", hash = "sha256:fae36fd1d7ad7d6a5a1c0b0d5adb2ed1a3bda5a21bf6c3e5372073d7a11cd4c5"}, - {file = "pre_commit-3.7.1.tar.gz", hash = "sha256:8ca3ad567bc78a4972a3f1a477e94a79d4597e8140a6e0b651c5e33899c3654a"}, + {file = "pre_commit-3.8.0-py2.py3-none-any.whl", hash = "sha256:9a90a53bf82fdd8778d58085faf8d83df56e40dfe18f45b19446e26bf1b3a63f"}, + {file = "pre_commit-3.8.0.tar.gz", hash = "sha256:8bb6494d4a20423842e198980c9ecf9f96607a07ea29549e180eef9ae80fe7af"}, ] [package.dependencies] @@ -907,122 +920,123 @@ files = [ [[package]] name = "pydantic" -version = "2.8.2" +version = "2.9.2" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.8.2-py3-none-any.whl", hash = "sha256:73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8"}, - {file = "pydantic-2.8.2.tar.gz", hash = "sha256:6f62c13d067b0755ad1c21a34bdd06c0c12625a22b0fc09c6b149816604f7c2a"}, + {file = "pydantic-2.9.2-py3-none-any.whl", hash = "sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12"}, + {file = "pydantic-2.9.2.tar.gz", hash = "sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f"}, ] [package.dependencies] -annotated-types = ">=0.4.0" -pydantic-core = "2.20.1" +annotated-types = ">=0.6.0" +pydantic-core = "2.23.4" typing-extensions = [ - {version = ">=4.6.1", markers = "python_version < \"3.13\""}, {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, + {version = ">=4.6.1", markers = "python_version < \"3.13\""}, ] [package.extras] email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata"] [[package]] name = "pydantic-core" -version = "2.20.1" +version = "2.23.4" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.20.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3acae97ffd19bf091c72df4d726d552c473f3576409b2a7ca36b2f535ffff4a3"}, - {file = "pydantic_core-2.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41f4c96227a67a013e7de5ff8f20fb496ce573893b7f4f2707d065907bffdbd6"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f239eb799a2081495ea659d8d4a43a8f42cd1fe9ff2e7e436295c38a10c286a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53e431da3fc53360db73eedf6f7124d1076e1b4ee4276b36fb25514544ceb4a3"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1f62b2413c3a0e846c3b838b2ecd6c7a19ec6793b2a522745b0869e37ab5bc1"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d41e6daee2813ecceea8eda38062d69e280b39df793f5a942fa515b8ed67953"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d482efec8b7dc6bfaedc0f166b2ce349df0011f5d2f1f25537ced4cfc34fd98"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e93e1a4b4b33daed65d781a57a522ff153dcf748dee70b40c7258c5861e1768a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7c4ea22b6739b162c9ecaaa41d718dfad48a244909fe7ef4b54c0b530effc5a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:4f2790949cf385d985a31984907fecb3896999329103df4e4983a4a41e13e840"}, - {file = "pydantic_core-2.20.1-cp310-none-win32.whl", hash = "sha256:5e999ba8dd90e93d57410c5e67ebb67ffcaadcea0ad973240fdfd3a135506250"}, - {file = "pydantic_core-2.20.1-cp310-none-win_amd64.whl", hash = "sha256:512ecfbefef6dac7bc5eaaf46177b2de58cdf7acac8793fe033b24ece0b9566c"}, - {file = "pydantic_core-2.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d2a8fa9d6d6f891f3deec72f5cc668e6f66b188ab14bb1ab52422fe8e644f312"}, - {file = "pydantic_core-2.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:175873691124f3d0da55aeea1d90660a6ea7a3cfea137c38afa0a5ffabe37b88"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37eee5b638f0e0dcd18d21f59b679686bbd18917b87db0193ae36f9c23c355fc"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25e9185e2d06c16ee438ed39bf62935ec436474a6ac4f9358524220f1b236e43"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:150906b40ff188a3260cbee25380e7494ee85048584998c1e66df0c7a11c17a6"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ad4aeb3e9a97286573c03df758fc7627aecdd02f1da04516a86dc159bf70121"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3f3ed29cd9f978c604708511a1f9c2fdcb6c38b9aae36a51905b8811ee5cbf1"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0dae11d8f5ded51699c74d9548dcc5938e0804cc8298ec0aa0da95c21fff57b"}, - {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:faa6b09ee09433b87992fb5a2859efd1c264ddc37280d2dd5db502126d0e7f27"}, - {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9dc1b507c12eb0481d071f3c1808f0529ad41dc415d0ca11f7ebfc666e66a18b"}, - {file = "pydantic_core-2.20.1-cp311-none-win32.whl", hash = "sha256:fa2fddcb7107e0d1808086ca306dcade7df60a13a6c347a7acf1ec139aa6789a"}, - {file = "pydantic_core-2.20.1-cp311-none-win_amd64.whl", hash = "sha256:40a783fb7ee353c50bd3853e626f15677ea527ae556429453685ae32280c19c2"}, - {file = "pydantic_core-2.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:595ba5be69b35777474fa07f80fc260ea71255656191adb22a8c53aba4479231"}, - {file = "pydantic_core-2.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a4f55095ad087474999ee28d3398bae183a66be4823f753cd7d67dd0153427c9"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9aa05d09ecf4c75157197f27cdc9cfaeb7c5f15021c6373932bf3e124af029f"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e97fdf088d4b31ff4ba35db26d9cc472ac7ef4a2ff2badeabf8d727b3377fc52"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc633a9fe1eb87e250b5c57d389cf28998e4292336926b0b6cdaee353f89a237"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d573faf8eb7e6b1cbbcb4f5b247c60ca8be39fe2c674495df0eb4318303137fe"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26dc97754b57d2fd00ac2b24dfa341abffc380b823211994c4efac7f13b9e90e"}, - {file = 
"pydantic_core-2.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:33499e85e739a4b60c9dac710c20a08dc73cb3240c9a0e22325e671b27b70d24"}, - {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bebb4d6715c814597f85297c332297c6ce81e29436125ca59d1159b07f423eb1"}, - {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:516d9227919612425c8ef1c9b869bbbee249bc91912c8aaffb66116c0b447ebd"}, - {file = "pydantic_core-2.20.1-cp312-none-win32.whl", hash = "sha256:469f29f9093c9d834432034d33f5fe45699e664f12a13bf38c04967ce233d688"}, - {file = "pydantic_core-2.20.1-cp312-none-win_amd64.whl", hash = "sha256:035ede2e16da7281041f0e626459bcae33ed998cca6a0a007a5ebb73414ac72d"}, - {file = "pydantic_core-2.20.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0827505a5c87e8aa285dc31e9ec7f4a17c81a813d45f70b1d9164e03a813a686"}, - {file = "pydantic_core-2.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:19c0fa39fa154e7e0b7f82f88ef85faa2a4c23cc65aae2f5aea625e3c13c735a"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa223cd1e36b642092c326d694d8bf59b71ddddc94cdb752bbbb1c5c91d833b"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c336a6d235522a62fef872c6295a42ecb0c4e1d0f1a3e500fe949415761b8a19"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7eb6a0587eded33aeefea9f916899d42b1799b7b14b8f8ff2753c0ac1741edac"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70c8daf4faca8da5a6d655f9af86faf6ec2e1768f4b8b9d0226c02f3d6209703"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9fa4c9bf273ca41f940bceb86922a7667cd5bf90e95dbb157cbb8441008482c"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:11b71d67b4725e7e2a9f6e9c0ac1239bbc0c48cce3dc59f98635efc57d6dac83"}, - {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:270755f15174fb983890c49881e93f8f1b80f0b5e3a3cc1394a255706cabd203"}, - {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c81131869240e3e568916ef4c307f8b99583efaa60a8112ef27a366eefba8ef0"}, - {file = "pydantic_core-2.20.1-cp313-none-win32.whl", hash = "sha256:b91ced227c41aa29c672814f50dbb05ec93536abf8f43cd14ec9521ea09afe4e"}, - {file = "pydantic_core-2.20.1-cp313-none-win_amd64.whl", hash = "sha256:65db0f2eefcaad1a3950f498aabb4875c8890438bc80b19362cf633b87a8ab20"}, - {file = "pydantic_core-2.20.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4745f4ac52cc6686390c40eaa01d48b18997cb130833154801a442323cc78f91"}, - {file = "pydantic_core-2.20.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a8ad4c766d3f33ba8fd692f9aa297c9058970530a32c728a2c4bfd2616d3358b"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41e81317dd6a0127cabce83c0c9c3fbecceae981c8391e6f1dec88a77c8a569a"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04024d270cf63f586ad41fff13fde4311c4fc13ea74676962c876d9577bcc78f"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eaad4ff2de1c3823fddf82f41121bdf453d922e9a238642b1dedb33c4e4f98ad"}, - {file = 
"pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26ab812fa0c845df815e506be30337e2df27e88399b985d0bb4e3ecfe72df31c"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c5ebac750d9d5f2706654c638c041635c385596caf68f81342011ddfa1e5598"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2aafc5a503855ea5885559eae883978c9b6d8c8993d67766ee73d82e841300dd"}, - {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4868f6bd7c9d98904b748a2653031fc9c2f85b6237009d475b1008bfaeb0a5aa"}, - {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa2f457b4af386254372dfa78a2eda2563680d982422641a85f271c859df1987"}, - {file = "pydantic_core-2.20.1-cp38-none-win32.whl", hash = "sha256:225b67a1f6d602de0ce7f6c1c3ae89a4aa25d3de9be857999e9124f15dab486a"}, - {file = "pydantic_core-2.20.1-cp38-none-win_amd64.whl", hash = "sha256:6b507132dcfc0dea440cce23ee2182c0ce7aba7054576efc65634f080dbe9434"}, - {file = "pydantic_core-2.20.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b03f7941783b4c4a26051846dea594628b38f6940a2fdc0df00b221aed39314c"}, - {file = "pydantic_core-2.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1eedfeb6089ed3fad42e81a67755846ad4dcc14d73698c120a82e4ccf0f1f9f6"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:635fee4e041ab9c479e31edda27fcf966ea9614fff1317e280d99eb3e5ab6fe2"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:77bf3ac639c1ff567ae3b47f8d4cc3dc20f9966a2a6dd2311dcc055d3d04fb8a"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ed1b0132f24beeec5a78b67d9388656d03e6a7c837394f99257e2d55b461611"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6514f963b023aeee506678a1cf821fe31159b925c4b76fe2afa94cc70b3222b"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10d4204d8ca33146e761c79f83cc861df20e7ae9f6487ca290a97702daf56006"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d036c7187b9422ae5b262badb87a20a49eb6c5238b2004e96d4da1231badef1"}, - {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9ebfef07dbe1d93efb94b4700f2d278494e9162565a54f124c404a5656d7ff09"}, - {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6b9d9bb600328a1ce523ab4f454859e9d439150abb0906c5a1983c146580ebab"}, - {file = "pydantic_core-2.20.1-cp39-none-win32.whl", hash = "sha256:784c1214cb6dd1e3b15dd8b91b9a53852aed16671cc3fbe4786f4f1db07089e2"}, - {file = "pydantic_core-2.20.1-cp39-none-win_amd64.whl", hash = "sha256:d2fe69c5434391727efa54b47a1e7986bb0186e72a41b203df8f5b0a19a4f669"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a45f84b09ac9c3d35dfcf6a27fd0634d30d183205230a0ebe8373a0e8cfa0906"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d02a72df14dfdbaf228424573a07af10637bd490f0901cee872c4f434a735b94"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2b27e6af28f07e2f195552b37d7d66b150adbaa39a6d327766ffd695799780f"}, - {file = 
"pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:084659fac3c83fd674596612aeff6041a18402f1e1bc19ca39e417d554468482"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:242b8feb3c493ab78be289c034a1f659e8826e2233786e36f2893a950a719bb6"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7"}, - {file = "pydantic_core-2.20.1.tar.gz", hash = "sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4"}, + {file = "pydantic_core-2.23.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b"}, + {file = "pydantic_core-2.23.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63e46b3169866bd62849936de036f901a9356e36376079b05efa83caeaa02ceb"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed1a53de42fbe34853ba90513cea21673481cd81ed1be739f7f2efb931b24916"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cfdd16ab5e59fc31b5e906d1a3f666571abc367598e3e02c83403acabc092e07"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255a8ef062cbf6674450e668482456abac99a5583bbafb73f9ad469540a3a232"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a7cd62e831afe623fbb7aabbb4fe583212115b3ef38a9f6b71869ba644624a2"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:f09e2ff1f17c2b51f2bc76d1cc33da96298f0a036a137f5440ab3ec5360b624f"}, + {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e38e63e6f3d1cec5a27e0afe90a085af8b6806ee208b33030e65b6516353f1a3"}, + {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0dbd8dbed2085ed23b5c04afa29d8fd2771674223135dc9bc937f3c09284d071"}, + {file = "pydantic_core-2.23.4-cp310-none-win32.whl", hash = "sha256:6531b7ca5f951d663c339002e91aaebda765ec7d61b7d1e3991051906ddde119"}, + {file = "pydantic_core-2.23.4-cp310-none-win_amd64.whl", hash = "sha256:7c9129eb40958b3d4500fa2467e6a83356b3b61bfff1b414c7361d9220f9ae8f"}, + {file = "pydantic_core-2.23.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:77733e3892bb0a7fa797826361ce8a9184d25c8dffaec60b7ffe928153680ba8"}, + {file = "pydantic_core-2.23.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b84d168f6c48fabd1f2027a3d1bdfe62f92cade1fb273a5d68e621da0e44e6d"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df49e7a0861a8c36d089c1ed57d308623d60416dab2647a4a17fe050ba85de0e"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff02b6d461a6de369f07ec15e465a88895f3223eb75073ffea56b84d9331f607"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:996a38a83508c54c78a5f41456b0103c30508fed9abcad0a59b876d7398f25fd"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d97683ddee4723ae8c95d1eddac7c192e8c552da0c73a925a89fa8649bf13eea"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:216f9b2d7713eb98cb83c80b9c794de1f6b7e3145eef40400c62e86cee5f4e1e"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6f783e0ec4803c787bcea93e13e9932edab72068f68ecffdf86a99fd5918878b"}, + {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d0776dea117cf5272382634bd2a5c1b6eb16767c223c6a5317cd3e2a757c61a0"}, + {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5f7a395a8cf1621939692dba2a6b6a830efa6b3cee787d82c7de1ad2930de64"}, + {file = "pydantic_core-2.23.4-cp311-none-win32.whl", hash = "sha256:74b9127ffea03643e998e0c5ad9bd3811d3dac8c676e47db17b0ee7c3c3bf35f"}, + {file = "pydantic_core-2.23.4-cp311-none-win_amd64.whl", hash = "sha256:98d134c954828488b153d88ba1f34e14259284f256180ce659e8d83e9c05eaa3"}, + {file = "pydantic_core-2.23.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f3e0da4ebaef65158d4dfd7d3678aad692f7666877df0002b8a522cdf088f231"}, + {file = "pydantic_core-2.23.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f69a8e0b033b747bb3e36a44e7732f0c99f7edd5cea723d45bc0d6e95377ffee"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723314c1d51722ab28bfcd5240d858512ffd3116449c557a1336cbe3919beb87"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb2802e667b7051a1bebbfe93684841cc9351004e2badbd6411bf357ab8d5ac8"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d18ca8148bebe1b0a382a27a8ee60350091a6ddaf475fa05ef50dc35b5df6327"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:33e3d65a85a2a4a0dc3b092b938a4062b1a05f3a9abde65ea93b233bca0e03f2"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:128585782e5bfa515c590ccee4b727fb76925dd04a98864182b22e89a4e6ed36"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:68665f4c17edcceecc112dfed5dbe6f92261fb9d6054b47d01bf6371a6196126"}, + {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20152074317d9bed6b7a95ade3b7d6054845d70584216160860425f4fbd5ee9e"}, + {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9261d3ce84fa1d38ed649c3638feefeae23d32ba9182963e465d58d62203bd24"}, + {file = "pydantic_core-2.23.4-cp312-none-win32.whl", hash = "sha256:4ba762ed58e8d68657fc1281e9bb72e1c3e79cc5d464be146e260c541ec12d84"}, + {file = "pydantic_core-2.23.4-cp312-none-win_amd64.whl", hash = "sha256:97df63000f4fea395b2824da80e169731088656d1818a11b95f3b173747b6cd9"}, + {file = "pydantic_core-2.23.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7530e201d10d7d14abce4fb54cfe5b94a0aefc87da539d0346a484ead376c3cc"}, + {file = "pydantic_core-2.23.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:df933278128ea1cd77772673c73954e53a1c95a4fdf41eef97c2b779271bd0bd"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cb3da3fd1b6a5d0279a01877713dbda118a2a4fc6f0d821a57da2e464793f05"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c6dcb030aefb668a2b7009c85b27f90e51e6a3b4d5c9bc4c57631292015b0d"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:696dd8d674d6ce621ab9d45b205df149399e4bb9aa34102c970b721554828510"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2971bb5ffe72cc0f555c13e19b23c85b654dd2a8f7ab493c262071377bfce9f6"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8394d940e5d400d04cad4f75c0598665cbb81aecefaca82ca85bd28264af7f9b"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0dff76e0602ca7d4cdaacc1ac4c005e0ce0dcfe095d5b5259163a80d3a10d327"}, + {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7d32706badfe136888bdea71c0def994644e09fff0bfe47441deaed8e96fdbc6"}, + {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ed541d70698978a20eb63d8c5d72f2cc6d7079d9d90f6b50bad07826f1320f5f"}, + {file = "pydantic_core-2.23.4-cp313-none-win32.whl", hash = "sha256:3d5639516376dce1940ea36edf408c554475369f5da2abd45d44621cb616f769"}, + {file = "pydantic_core-2.23.4-cp313-none-win_amd64.whl", hash = "sha256:5a1504ad17ba4210df3a045132a7baeeba5a200e930f57512ee02909fc5c4cb5"}, + {file = "pydantic_core-2.23.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d4488a93b071c04dc20f5cecc3631fc78b9789dd72483ba15d423b5b3689b555"}, + {file = "pydantic_core-2.23.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:81965a16b675b35e1d09dd14df53f190f9129c0202356ed44ab2728b1c905658"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ffa2ebd4c8530079140dd2d7f794a9d9a73cbb8e9d59ffe24c63436efa8f271"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:61817945f2fe7d166e75fbfb28004034b48e44878177fc54d81688e7b85a3665"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29d2c342c4bc01b88402d60189f3df065fb0dda3654744d5a165a5288a657368"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e11661ce0fd30a6790e8bcdf263b9ec5988e95e63cf901972107efc49218b13"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d18368b137c6295db49ce7218b1a9ba15c5bc254c96d7c9f9e924a9bc7825ad"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec4e55f79b1c4ffb2eecd8a0cfba9955a2588497d96851f4c8f99aa4a1d39b12"}, + {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:374a5e5049eda9e0a44c696c7ade3ff355f06b1fe0bb945ea3cac2bc336478a2"}, + {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5c364564d17da23db1106787675fc7af45f2f7b58b4173bfdd105564e132e6fb"}, + {file = "pydantic_core-2.23.4-cp38-none-win32.whl", hash = "sha256:d7a80d21d613eec45e3d41eb22f8f94ddc758a6c4720842dc74c0581f54993d6"}, + {file = "pydantic_core-2.23.4-cp38-none-win_amd64.whl", hash = "sha256:5f5ff8d839f4566a474a969508fe1c5e59c31c80d9e140566f9a37bba7b8d556"}, + {file = "pydantic_core-2.23.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a4fa4fc04dff799089689f4fd502ce7d59de529fc2f40a2c8836886c03e0175a"}, + {file = "pydantic_core-2.23.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a7df63886be5e270da67e0966cf4afbae86069501d35c8c1b3b6c168f42cb36"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcedcd19a557e182628afa1d553c3895a9f825b936415d0dbd3cd0bbcfd29b4b"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f54b118ce5de9ac21c363d9b3caa6c800341e8c47a508787e5868c6b79c9323"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86d2f57d3e1379a9525c5ab067b27dbb8a0642fb5d454e17a9ac434f9ce523e3"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de6d1d1b9e5101508cb37ab0d972357cac5235f5c6533d1071964c47139257df"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1278e0d324f6908e872730c9102b0112477a7f7cf88b308e4fc36ce1bdb6d58c"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9a6b5099eeec78827553827f4c6b8615978bb4b6a88e5d9b93eddf8bb6790f55"}, + {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e55541f756f9b3ee346b840103f32779c695a19826a4c442b7954550a0972040"}, + {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a5c7ba8ffb6d6f8f2ab08743be203654bb1aaa8c9dcb09f82ddd34eadb695605"}, + {file = "pydantic_core-2.23.4-cp39-none-win32.whl", hash = "sha256:37b0fe330e4a58d3c58b24d91d1eb102aeec675a3db4c292ec3928ecd892a9a6"}, + {file = "pydantic_core-2.23.4-cp39-none-win_amd64.whl", hash = "sha256:1498bec4c05c9c787bde9125cfdcc63a41004ff167f495063191b863399b1a29"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f455ee30a9d61d3e1a15abd5068827773d6e4dc513e795f380cdd59932c782d5"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:1e90d2e3bd2c3863d48525d297cd143fe541be8bbf6f579504b9712cb6b643ec"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e203fdf807ac7e12ab59ca2bfcabb38c7cf0b33c41efeb00f8e5da1d86af480"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e08277a400de01bc72436a0ccd02bdf596631411f592ad985dcee21445bd0068"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f220b0eea5965dec25480b6333c788fb72ce5f9129e8759ef876a1d805d00801"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d06b0c8da4f16d1d1e352134427cb194a0a6e19ad5db9161bf32b2113409e728"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ba1a0996f6c2773bd83e63f18914c1de3c9dd26d55f4ac302a7efe93fb8e7433"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:9a5bce9d23aac8f0cf0836ecfc033896aa8443b501c58d0602dbfd5bd5b37753"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:78ddaaa81421a29574a682b3179d4cf9e6d405a09b99d93ddcf7e5239c742e21"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:883a91b5dd7d26492ff2f04f40fbb652de40fcc0afe07e8129e8ae779c2110eb"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88ad334a15b32a791ea935af224b9de1bf99bcd62fabf745d5f3442199d86d59"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:233710f069d251feb12a56da21e14cca67994eab08362207785cf8c598e74577"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:19442362866a753485ba5e4be408964644dd6a09123d9416c54cd49171f50744"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:624e278a7d29b6445e4e813af92af37820fafb6dcc55c012c834f9e26f9aaaef"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5ef8f42bec47f21d07668a043f077d507e5bf4e668d5c6dfe6aaba89de1a5b8"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:aea443fffa9fbe3af1a9ba721a87f926fe548d32cab71d188a6ede77d0ff244e"}, + {file = "pydantic_core-2.23.4.tar.gz", hash = "sha256:2584f7cf844ac4d970fba483a717dbe10c1c1c96a969bf65d61ffe94df1b2863"}, ] [package.dependencies] @@ -1163,62 +1177,75 @@ six = ">=1.5" [[package]] name = "pytz" -version = "2024.1" +version = "2024.2" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ - {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, - {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, + {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, + {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, ] [[package]] name = "pyyaml" -version = "6.0.1" +version = "6.0.2" description = "YAML parser and emitter for Python" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = 
"sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = 
"PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] [[package]] @@ -1339,49 +1366,49 @@ dev = ["bump2version", "sphinxcontrib-httpdomain", "transifex-client", "wheel"] [[package]] name = "sphinxcontrib-applehelp" -version = "1.0.8" +version = "2.0.0" description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" optional = false python-versions = ">=3.9" files 
= [ - {file = "sphinxcontrib_applehelp-1.0.8-py3-none-any.whl", hash = "sha256:cb61eb0ec1b61f349e5cc36b2028e9e7ca765be05e49641c97241274753067b4"}, - {file = "sphinxcontrib_applehelp-1.0.8.tar.gz", hash = "sha256:c40a4f96f3776c4393d933412053962fac2b84f4c99a7982ba42e09576a70619"}, + {file = "sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5"}, + {file = "sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1"}, ] [package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] name = "sphinxcontrib-devhelp" -version = "1.0.6" +version = "2.0.0" description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents" optional = false python-versions = ">=3.9" files = [ - {file = "sphinxcontrib_devhelp-1.0.6-py3-none-any.whl", hash = "sha256:6485d09629944511c893fa11355bda18b742b83a2b181f9a009f7e500595c90f"}, - {file = "sphinxcontrib_devhelp-1.0.6.tar.gz", hash = "sha256:9893fd3f90506bc4b97bdb977ceb8fbd823989f4316b28c3841ec128544372d3"}, + {file = "sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2"}, + {file = "sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad"}, ] [package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] name = "sphinxcontrib-htmlhelp" -version = "2.0.6" +version = "2.1.0" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" optional = false python-versions = ">=3.9" files = [ - {file = "sphinxcontrib_htmlhelp-2.0.6-py3-none-any.whl", hash = "sha256:1b9af5a2671a61410a868fce050cab7ca393c218e6205cbc7f590136f207395c"}, - {file = "sphinxcontrib_htmlhelp-2.0.6.tar.gz", hash = "sha256:c6597da06185f0e3b4dc952777a04200611ef563882e0c244d27a15ee22afa73"}, + {file = "sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8"}, + {file = "sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9"}, ] [package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] standalone = ["Sphinx (>=5)"] test = ["html5lib", "pytest"] @@ -1415,33 +1442,33 @@ test = ["flake8", "mypy", "pytest"] [[package]] name = "sphinxcontrib-qthelp" -version = "1.0.8" +version = "2.0.0" description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" optional = false python-versions = ">=3.9" files = [ - {file = "sphinxcontrib_qthelp-1.0.8-py3-none-any.whl", hash = "sha256:323d6acc4189af76dfe94edd2a27d458902319b60fcca2aeef3b2180c106a75f"}, - {file = "sphinxcontrib_qthelp-1.0.8.tar.gz", hash = "sha256:db3f8fa10789c7a8e76d173c23364bdf0ebcd9449969a9e6a3dd31b8b7469f03"}, + {file = "sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb"}, + {file = "sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab"}, ] [package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] 
standalone = ["Sphinx (>=5)"] test = ["defusedxml (>=0.7.1)", "pytest"] [[package]] name = "sphinxcontrib-serializinghtml" -version = "1.1.10" +version = "2.0.0" description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)" optional = false python-versions = ">=3.9" files = [ - {file = "sphinxcontrib_serializinghtml-1.1.10-py3-none-any.whl", hash = "sha256:326369b8df80a7d2d8d7f99aa5ac577f51ea51556ed974e7716cfd4fca3f6cb7"}, - {file = "sphinxcontrib_serializinghtml-1.1.10.tar.gz", hash = "sha256:93f3f5dc458b91b192fe10c397e324f262cf163d79f3282c158e8436a2c4511f"}, + {file = "sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331"}, + {file = "sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d"}, ] [package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] standalone = ["Sphinx (>=5)"] test = ["pytest"] @@ -1480,13 +1507,13 @@ files = [ [[package]] name = "urllib3" -version = "2.2.2" +version = "2.2.3" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, - {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, + {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, + {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, ] [package.extras] @@ -1497,13 +1524,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "virtualenv" -version = "20.26.3" +version = "20.26.5" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.26.3-py3-none-any.whl", hash = "sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589"}, - {file = "virtualenv-20.26.3.tar.gz", hash = "sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a"}, + {file = "virtualenv-20.26.5-py3-none-any.whl", hash = "sha256:4f3ac17b81fba3ce3bd6f4ead2749a72da5929c01774948e243db9ba41df4ff6"}, + {file = "virtualenv-20.26.5.tar.gz", hash = "sha256:ce489cac131aa58f4b25e321d6d186171f78e6cb13fafbf32a840cee67733ff4"}, ] [package.dependencies] @@ -1531,20 +1558,24 @@ dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] [[package]] name = "zipp" -version = "3.19.2" +version = "3.20.2" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"}, - {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"}, + {file = "zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350"}, + {file = "zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29"}, ] [package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx 
(>=3.5)", "sphinx-lint"] -test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = ">=3.10,<4.0" -content-hash = "6e87c6539147b57b03fb983b28d15396c2eccfe95661805eda7d9f77602d1f58" +content-hash = "2a29db2488b261a68d0deb2c353b79e6a8b437e5efb9d7f8a71a65cdf1ad49ee" diff --git a/pyproject.toml b/pyproject.toml index 23aa34e..230a48f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,5 @@ [tool.poetry] -name = "rialto-dev" +name = "rialto" version = "2.0.0" @@ -31,6 +31,7 @@ pandas = "^2.1.0" flake8-broken-line = "^1.0.0" loguru = "^0.7.2" importlib-metadata = "^7.2.1" +numpy = "<2.0.0" [tool.poetry.dev-dependencies] pyspark = "^3.4.1" diff --git a/rialto/common/utils.py b/rialto/common/utils.py index 6f5ed1f..296cba8 100644 --- a/rialto/common/utils.py +++ b/rialto/common/utils.py @@ -12,10 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. -__all__ = ["load_yaml"] +__all__ = ["load_yaml", "cast_decimals_to_floats", "get_caller_module"] +import inspect import os -from typing import Any +from typing import Any, List import pyspark.sql.functions as F import yaml @@ -51,3 +52,22 @@ def cast_decimals_to_floats(df: DataFrame) -> DataFrame: df = df.withColumn(c, F.col(c).cast(FloatType())) return df + + +def get_caller_module() -> Any: + """ + Ged module containing the function which is calling your function. + + Inspects the call stack, where: + 0th entry is this function + 1st entry is the function which needs to know who called it + 2nd entry is the calling function + + Therefore, we'll return a module which contains the function at the 2nd place on the stack. + + :return: Python Module containing the calling function. + """ + + stack = inspect.stack() + last_stack = stack[2] + return inspect.getmodule(last_stack[0]) diff --git a/rialto/jobs/__init__.py b/rialto/jobs/__init__.py index 0c3e01c..46eb756 100644 --- a/rialto/jobs/__init__.py +++ b/rialto/jobs/__init__.py @@ -13,3 +13,7 @@ # limitations under the License. 
from rialto.jobs.decorators import config_parser, datasource, job +from rialto.jobs.module_register import ( + register_dependency_callable, + register_dependency_module, +) diff --git a/rialto/jobs/decorators.py b/rialto/jobs/decorators.py index dd79bdd..68bb58f 100644 --- a/rialto/jobs/decorators.py +++ b/rialto/jobs/decorators.py @@ -14,14 +14,14 @@ __all__ = ["datasource", "job", "config_parser"] -import inspect import typing import importlib_metadata from loguru import logger +from rialto.common.utils import get_caller_module from rialto.jobs.job_base import JobBase -from rialto.jobs.resolver import Resolver +from rialto.jobs.module_register import ModuleRegister def config_parser(cf_getter: typing.Callable) -> typing.Callable: @@ -34,7 +34,7 @@ def config_parser(cf_getter: typing.Callable) -> typing.Callable: :param cf_getter: dataset reader function :return: raw function, unchanged """ - Resolver.register_callable(cf_getter) + ModuleRegister.register_callable(cf_getter) return cf_getter @@ -48,16 +48,10 @@ def datasource(ds_getter: typing.Callable) -> typing.Callable: :param ds_getter: dataset reader function :return: raw reader function, unchanged """ - Resolver.register_callable(ds_getter) + ModuleRegister.register_callable(ds_getter) return ds_getter -def _get_module(stack: typing.List) -> typing.Any: - last_stack = stack[1] - mod = inspect.getmodule(last_stack[0]) - return mod - - def _get_version(module: typing.Any) -> str: try: package_name, _, _ = module.__name__.partition(".") @@ -102,9 +96,7 @@ def job(*args, custom_name=None, disable_version=False): :return: One more job wrapper for run function (if custom name or version override specified). Otherwise, generates Rialto Transformation Type and returns it for in-module registration. """ - stack = inspect.stack() - - module = _get_module(stack) + module = get_caller_module() version = _get_version(module) # Use case where it's just raw @f. Otherwise, we get [] here. 
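The decorator changes above replace the global Resolver registry with per-module registration in ModuleRegister: @datasource and @config_parser getters are filed under the module that defines them, and jobs in other modules must declare that module as a dependency explicitly. The intended usage looks roughly like the following minimal sketch (the module and function names are hypothetical, not part of this patch):

# my_datasources.py
# @datasource files the getter in ModuleRegister under this module's name.
from rialto.jobs import datasource


@datasource
def sample_source(run_date):
    # run_date is itself resolved by the job's Resolver at run time
    return f"rows as of {run_date}"


# my_jobs.py
# A job resolves its arguments from its own module plus any modules
# registered as its dependencies.
import my_datasources

from rialto.jobs import job, register_dependency_module

register_dependency_module(my_datasources)


@job
def sample_job(sample_source):
    return sample_source
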
diff --git a/rialto/jobs/job_base.py b/rialto/jobs/job_base.py
index c65341d..0a4779d 100644
--- a/rialto/jobs/job_base.py
+++ b/rialto/jobs/job_base.py
@@ -17,7 +17,6 @@
 import abc
 import datetime
 import typing
-from contextlib import contextmanager

 import pyspark.sql.functions as F
 from loguru import logger
@@ -49,55 +48,33 @@ def get_job_name(self) -> str:
         """Job name getter"""
         pass

-    @contextmanager
-    def _setup_resolver(self, run_date: datetime.date) -> None:
-        Resolver.register_callable(lambda: run_date, "run_date")
-
-        Resolver.register_callable(self._get_spark, "spark")
-        Resolver.register_callable(self._get_table_reader, "table_reader")
-        Resolver.register_callable(self._get_config, "config")
-
-        if self._get_feature_loader() is not None:
-            Resolver.register_callable(self._get_feature_loader, "feature_loader")
-        if self._get_metadata_manager() is not None:
-            Resolver.register_callable(self._get_metadata_manager, "metadata_manager")
-
-        try:
-            yield
-        finally:
-            Resolver.cache_clear()
-
-    def _setup(
+    def _get_resolver(
         self,
         spark: SparkSession,
+        run_date: datetime.date,
         table_reader: TableReader,
         config: PipelineConfig = None,
         metadata_manager: MetadataManager = None,
         feature_loader: PysparkFeatureLoader = None,
-    ) -> None:
-        self._spark = spark
-        self._table_rader = table_reader
-        self._config = config
-        self._metadata = metadata_manager
-        self._feature_loader = feature_loader
+    ) -> Resolver:
+        resolver = Resolver()

-    def _get_spark(self) -> SparkSession:
-        return self._spark
+        # Static, always-available dependencies
+        resolver.register_object(spark, "spark")
+        resolver.register_object(run_date, "run_date")
+        resolver.register_object(config, "config")
+        resolver.register_object(table_reader, "table_reader")

-    def _get_table_reader(self) -> TableReader:
-        return self._table_rader
+        # Optionals
+        if feature_loader is not None:
+            resolver.register_object(feature_loader, "feature_loader")

-    def _get_config(self) -> PipelineConfig:
-        return self._config
+        if metadata_manager is not None:
+            resolver.register_object(metadata_manager, "metadata_manager")

-    def _get_feature_loader(self) -> PysparkFeatureLoader:
-        return self._feature_loader
+        return resolver

-    def _get_metadata_manager(self) -> MetadataManager:
-        return self._metadata
-
-    def _get_timestamp_holder_result(self) -> DataFrame:
-        spark = self._get_spark()
+    def _get_timestamp_holder_result(self, spark) -> DataFrame:
         return spark.createDataFrame(
             [(self.get_job_name(), datetime.datetime.now())], schema="JOB_NAME string, CREATION_TIME timestamp"
         )
@@ -110,17 +87,6 @@ def _add_job_version(self, df: DataFrame) -> DataFrame:

         return df

-    def _run_main_callable(self, run_date: datetime.date) -> DataFrame:
-        with self._setup_resolver(run_date):
-            custom_callable = self.get_custom_callable()
-            raw_result = Resolver.register_resolve(custom_callable)
-
-        if raw_result is None:
-            raw_result = self._get_timestamp_holder_result()
-
-        result_with_version = self._add_job_version(raw_result)
-        return result_with_version
-
     def run(
         self,
         reader: TableReader,
@@ -140,8 +106,17 @@ def run(
         :return: dataframe
         """
         try:
-            self._setup(spark, reader, config, metadata_manager, feature_loader)
-            return self._run_main_callable(run_date)
+            resolver = self._get_resolver(spark, run_date, reader, config, metadata_manager, feature_loader)
+
+            custom_callable = self.get_custom_callable()
+            raw_result = resolver.resolve(custom_callable)
+
+            if raw_result is None:
+                raw_result = self._get_timestamp_holder_result(spark)
+
+            result_with_version = self._add_job_version(raw_result)
+            return result_with_version
+
         except Exception as e:
             logger.exception(e)
             raise e
diff --git a/rialto/jobs/module_register.py b/rialto/jobs/module_register.py
new file mode 100644
index 0000000..8283454
--- /dev/null
+++ b/rialto/jobs/module_register.py
@@ -0,0 +1,121 @@
+# Copyright 2022 ABSA Group Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+__all__ = ["ModuleRegister", "register_dependency_module", "register_dependency_callable"]
+
+from rialto.common.utils import get_caller_module
+
+
+class ModuleRegisterException(Exception):
+    """Module Register Exception - usually means a clash in the dependencies"""
+
+    pass
+
+
+class ModuleRegister:
+    """
+    Module register. Class which is used by @datasource and @config_parser decorators to register callables / getters.
+
+    Resolver, when searching for a getter for f() defined in module M, uses find_callable("f", "M").
+    """
+
+    _storage = {}
+    _dependency_tree = {}
+
+    @classmethod
+    def add_callable_to_module(cls, callable, parent_name):
+        """
+        Add a callable to the specified module's storage.
+
+        :param callable: The callable to be added.
+        :param parent_name: The name of the module to which the callable is added.
+        """
+        module_callables = cls._storage.get(parent_name, [])
+        module_callables.append(callable)
+
+        cls._storage[parent_name] = module_callables
+
+    @classmethod
+    def register_callable(cls, callable):
+        """
+        Register a callable by adding it to the module's storage.
+
+        :param callable: The callable to be registered.
+        """
+        callable_module = callable.__module__
+        cls.add_callable_to_module(callable, callable_module)
+
+    @classmethod
+    def register_dependency(cls, module, parent_name):
+        """
+        Register a module as a dependency of the caller module.
+
+        :param module: The module to be registered as a dependency.
+        :param parent_name: The module that is registering the dependency.
+
+        """
+        module_dep_tree = cls._dependency_tree.get(parent_name, [])
+        module_dep_tree.append(module)
+
+        cls._dependency_tree[parent_name] = module_dep_tree
+
+    @classmethod
+    def find_callable(cls, callable_name, module_name):
+        """
+        Find a callable by its name in the specified module and its dependencies.
+
+        :param callable_name: The name of the callable to find.
+        :param module_name: The name of the module to search in.
+        :return: The found callable or None if not found.
+ """ + found_functions = [] + + # Loop through this module, and its dependencies + searched_modules = [module_name] + cls._dependency_tree.get(module_name, []) + for module in searched_modules: + # Loop through all functions registered in the module + for func in cls._storage.get(module, []): + if func.__name__ == callable_name: + found_functions.append(func) + + if len(found_functions) == 0: + return None + + if len(found_functions) > 1: + raise ModuleRegisterException(f"Multiple functions with the same name {callable_name} found !") + + else: + return found_functions[0] + + +def register_dependency_module(module): + """ + Register a module as a dependency of the caller module. + + :param module: The module to be registered as a dependency. + """ + caller_module = get_caller_module().__name__ + ModuleRegister.register_dependency(module.__name__, caller_module) + + +def register_dependency_callable(callable): + """ + Register a callable as a dependency of the caller module. + + Note that the function will be added to the module's list of available dependencies. + + :param callable: The callable to be registered as a dependency. + """ + caller_module = get_caller_module().__name__ + ModuleRegister.add_callable_to_module(callable, caller_module) diff --git a/rialto/jobs/resolver.py b/rialto/jobs/resolver.py index 26856d1..34b08e8 100644 --- a/rialto/jobs/resolver.py +++ b/rialto/jobs/resolver.py @@ -16,7 +16,8 @@ import inspect import typing -from functools import cache + +from rialto.jobs.module_register import ModuleRegister class ResolverException(Exception): @@ -33,20 +34,21 @@ class Resolver: Calling resolve() we attempt to resolve these dependencies. """ - _storage = {} + def __init__(self): + self._storage = {} - @classmethod - def _get_args_for_call(cls, function: typing.Callable) -> typing.Dict[str, typing.Any]: - result_dict = {} - signature = inspect.signature(function) + def register_object(self, object: typing.Any, name: str) -> None: + """ + Register an object with a given name for later resolution. - for param in signature.parameters.values(): - result_dict[param.name] = cls.resolve(param.name) + :param object: object to register (getter) + :param name: str, custom name + :return: None + """ - return result_dict + self.register_getter(lambda: object, name) - @classmethod - def register_callable(cls, callable: typing.Callable, name: str = None) -> str: + def register_getter(self, callable: typing.Callable, name: str = None) -> str: """ Register callable with a given name for later resolution. @@ -59,52 +61,44 @@ def register_callable(cls, callable: typing.Callable, name: str = None) -> str: if name is None: name = getattr(callable, "__name__", repr(callable)) - cls._storage[name] = callable - return name + if name in self._storage: + raise ResolverException(f"Resolver already registered {name}!") - @classmethod - @cache - def resolve(cls, name: str) -> typing.Any: - """ - Search for a callable registered prior and attempt to call it with correct arguents. 
+        self._storage[name] = callable
+        return name
 
-        Arguments are resolved recursively according to requirements; For example, if we have
-        a(b, c), b(d), and c(), d() registered, then we recursively call resolve() methods until we resolve
-        c, d -> b -> a
+    def _find_getter(self, name: str, module_name: str) -> typing.Callable:
+        if name in self._storage.keys():
+            return self._storage[name]
 
-        :param name: name of the callable to resolve
-        :return: result of the callable
-        """
-        if name not in cls._storage.keys():
+        callable_from_dependencies = ModuleRegister.find_callable(name, module_name)
+        if callable_from_dependencies is None:
             raise ResolverException(f"{name} declaration not found!")
 
-        getter = cls._storage[name]
-        args = cls._get_args_for_call(getter)
-
-        return getter(**args)
+        return callable_from_dependencies
 
-    @classmethod
-    def register_resolve(cls, callable: typing.Callable) -> typing.Any:
+    def resolve(self, callable: typing.Callable) -> typing.Any:
         """
-        Register and Resolve a callable.
+        Take a callable and resolve its dependencies / arguments. Arguments can be
+        a) objects registered via register_object
+        b) callables registered via register_getter
+        c) ModuleRegister registered callables via ModuleRegister.register_callable (+ dependencies)
 
-        Combination of the register() and resolve() methods for a simplified execution.
+        Arguments are resolved recursively according to requirements; For example, if we have
+        a(b, c), b(d), and c(), d() registered, then we recursively call resolve() methods until we resolve
+        c, d -> b -> a
 
-        :param callable: callable to register and immediately resolve
+        :param callable: function to resolve
         :return: result of the callable
         """
-        name = cls.register_callable(callable)
-        return cls.resolve(name)
+        arg_list = {}
 
-    @classmethod
-    def cache_clear(cls) -> None:
-        """
-        Clear resolver cache.
+        signature = inspect.signature(callable)
+        module_name = callable.__module__
 
-        The resolve method caches its results to avoid duplication of resolutions.
-        However, in case we re-register some callables, we need to clear cache
-        in order to ensure re-execution of all resolutions.
-
-        :return: None
-        """
-        cls.resolve.cache_clear()
+        for param in signature.parameters.values():
+            param_getter = self._find_getter(param.name, module_name)
+            arg_list[param.name] = self.resolve(param_getter)
+
+        return callable(**arg_list)
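To illustrate the instance-based Resolver above: getters depend on one another purely by parameter name, and resolve() walks the chain recursively. This is a sketch against the patched API, not repository code.

from rialto.jobs.resolver import Resolver

resolver = Resolver()
resolver.register_object(100, "d")  # plain objects are wrapped into getters


def b():
    return 1


def c(d):
    return d + 10


def a(b, c):
    return b + c


resolver.register_getter(b)
resolver.register_getter(c)

# resolve() recursively satisfies a's arguments: d -> c, and b, then a(1, 110).
assert resolver.resolve(a) == 111

# Re-registering a name is now an explicit error rather than a silent overwrite:
# resolver.register_getter(lambda: 2, "b") would raise ResolverException.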
diff --git a/rialto/jobs/test_utils.py b/rialto/jobs/test_utils.py
index 3f6e3e2..d8f2945 100644
--- a/rialto/jobs/test_utils.py
+++ b/rialto/jobs/test_utils.py
@@ -77,31 +77,37 @@ def resolver_resolves(spark, job: JobBase) -> bool:
 
     :return: bool, True if job can be resolved
     """
-
-    class SmartStorage:
-        def __init__(self):
-            self._storage = Resolver._storage.copy()
-            self._call_stack = []
-
-        def __setitem__(self, key, value):
-            self._storage[key] = value
-
-        def keys(self):
-            return self._storage.keys()
-
-        def __getitem__(self, func_name):
-            if func_name in self._call_stack:
-                raise ResolverException(f"Circular Dependence on {func_name}!")
-
-            self._call_stack.append(func_name)
-
-            real_method = self._storage[func_name]
-            fake_method = create_autospec(real_method)
-            fake_method.side_effect = lambda *args, **kwargs: self._call_stack.remove(func_name)
-
-            return fake_method
-
-    with patch("rialto.jobs.resolver.Resolver._storage", SmartStorage()):
-        job().run(reader=MagicMock(), run_date=MagicMock(), spark=spark)
-
-    return True
+    call_stack = []
+    original_resolve_method = Resolver.resolve
+
+    def stack_watching_resolver_resolve(self, callable):
+        # Check for cycles
+        if callable in call_stack:
+            raise ResolverException(f"Circular Dependence in {callable.__name__}!")
+
+        # Append to call stack
+        call_stack.append(callable)
+
+        # Create fake method
+        fake_method = create_autospec(callable)
+        fake_method.__module__ = callable.__module__
+
+        # Resolve fake method
+        result = original_resolve_method(self, fake_method)
+
+        # Remove from call stack
+        call_stack.remove(callable)
+
+        return result
+
+    with patch("rialto.jobs.job_base.Resolver.resolve", stack_watching_resolver_resolve):
+        with patch("rialto.jobs.job_base.JobBase._add_job_version", lambda _, x: x):
+            job().run(
+                reader=MagicMock(),
+                run_date=MagicMock(),
+                spark=spark,
+                config=MagicMock(),
+                metadata_manager=MagicMock(),
+                feature_loader=MagicMock(),
+            )
+    return True
diff --git a/tests/jobs/dependency_checks_job/complex_dependency_job.py b/tests/jobs/dependency_checks_job/complex_dependency_job.py
new file mode 100644
index 0000000..eabb70a
--- /dev/null
+++ b/tests/jobs/dependency_checks_job/complex_dependency_job.py
@@ -0,0 +1,26 @@
+import tests.jobs.dependency_checks_job.datasources_a as a
+import tests.jobs.dependency_checks_job.datasources_b as b
+from rialto.jobs import job, register_dependency_callable, register_dependency_module
+
+# module "A" has i(), j(), k()
+# module "B" has i(j), and a dependency on module C
+# module "C" has j(), k()
+
+register_dependency_module(b)
+register_dependency_callable(a.j)
+
+
+@job
+def complex_dependency_job(i, j):
+    # Importing module B and A.j causes no conflicts, because:
+    # A.i never gets imported, thus won't clash with B.i
+    # B has no j of its own; it only sees C.j as a registered dependency
+
+    assert i == "B.i-C.j"
+    assert j == "A.j"
+
+
+@job
+def unimported_dependency_job(k):
+    # k is in both A and C, but it's not imported here, thus won't get resolved
+    pass
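In a user's test suite, the rewritten helper is used roughly as below (the job module is hypothetical): every argument of the job and of its datasources is checked for resolvability while the actual getters are mocked away.

from rialto.jobs.test_utils import resolver_resolves

import my_project.jobs as my_jobs  # hypothetical module containing an @job


def test_my_job_resolves(spark):
    # Passes only if every dependency of my_job can be found by the Resolver.
    assert resolver_resolves(spark, my_jobs.my_job)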
diff --git a/tests/jobs/dependency_checks_job/datasources_a.py b/tests/jobs/dependency_checks_job/datasources_a.py
new file mode 100644
index 0000000..f8ff293
--- /dev/null
+++ b/tests/jobs/dependency_checks_job/datasources_a.py
@@ -0,0 +1,16 @@
+from rialto.jobs import datasource
+
+
+@datasource
+def i():
+    return "A.i"
+
+
+@datasource
+def j():
+    return "A.j"
+
+
+@datasource
+def k():
+    return "A.k"
diff --git a/tests/jobs/dependency_checks_job/datasources_b.py b/tests/jobs/dependency_checks_job/datasources_b.py
new file mode 100644
index 0000000..fce58bc
--- /dev/null
+++ b/tests/jobs/dependency_checks_job/datasources_b.py
@@ -0,0 +1,9 @@
+import tests.jobs.dependency_checks_job.datasources_c as c
+from rialto.jobs import datasource, register_dependency_module
+
+register_dependency_module(c)
+
+
+@datasource
+def i(j):
+    return f"B.i-{j}"
diff --git a/tests/jobs/dependency_checks_job/datasources_c.py b/tests/jobs/dependency_checks_job/datasources_c.py
new file mode 100644
index 0000000..5a08eb0
--- /dev/null
+++ b/tests/jobs/dependency_checks_job/datasources_c.py
@@ -0,0 +1,11 @@
+from rialto.jobs import datasource
+
+
+@datasource
+def j():
+    return "C.j"
+
+
+@datasource
+def k():
+    return "C.k"
diff --git a/tests/jobs/dependency_checks_job/dependency_checks_job.py b/tests/jobs/dependency_checks_job/dependency_checks_job.py
new file mode 100644
index 0000000..5952705
--- /dev/null
+++ b/tests/jobs/dependency_checks_job/dependency_checks_job.py
@@ -0,0 +1,32 @@
+import tests.jobs.dependency_checks_job.main_datasources as ds
+from rialto.jobs import job, register_dependency_module
+
+register_dependency_module(ds)
+
+
+@job
+def ok_dependency_job(c):
+    return c + 1
+
+
+@job
+def circular_dependency_job(circle_third):
+    return circle_third + 1
+
+
+@job
+def missing_dependency_job(a, x):
+    return x + a
+
+
+@job
+def self_dependency_job(self_dependency):
+    return self_dependency + 1
+
+
+@job
+def default_dependency_job(run_date, spark, config, table_reader):
+    assert run_date is not None
+    assert spark is not None
+    assert config is not None
+    assert table_reader is not None
diff --git a/tests/jobs/dependency_checks_job/duplicate_dependency_job.py b/tests/jobs/dependency_checks_job/duplicate_dependency_job.py
new file mode 100644
index 0000000..50f49b2
--- /dev/null
+++ b/tests/jobs/dependency_checks_job/duplicate_dependency_job.py
@@ -0,0 +1,15 @@
+import tests.jobs.dependency_checks_job.datasources_a as a
+import tests.jobs.dependency_checks_job.datasources_b as b
+from rialto.jobs import job, register_dependency_module
+
+# module "A" has i(), j(), k()
+# module "B" has i(j), and a dependency on module C
+
+register_dependency_module(b)
+register_dependency_module(a)
+
+
+@job
+def duplicate_dependency_job(i):
+    # i is in both A and B
+    pass
diff --git a/tests/jobs/dependency_checks_job/main_datasources.py b/tests/jobs/dependency_checks_job/main_datasources.py
new file mode 100644
index 0000000..8ac2d94
--- /dev/null
+++ b/tests/jobs/dependency_checks_job/main_datasources.py
@@ -0,0 +1,37 @@
+from rialto.jobs import datasource
+
+
+@datasource
+def a():
+    return 1
+
+
+@datasource
+def b(a):
+    return a + 10
+
+
+@datasource
+def c(a, b):
+    # 1 + 11 = 12
+    return a + b
+
+
+@datasource
+def circle_first(circle_second):
+    return circle_second + 1
+
+
+@datasource
+def circle_second(circle_third):
+    return circle_third + 1
+
+
+@datasource
+def circle_third(circle_first):
+    return circle_first + 1
+
+
+@datasource
+def self_dependency(a, b, c, self_dependency):
+    return a
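For reference, the values the fixtures above would produce if resolved for real; resolver_resolves() itself swaps the getters for mocks, so the tests only assert resolvability, not these numbers.

# a()                  -> 1
# b(a)                 -> 1 + 10 = 11
# c(a, b)              -> 1 + 11 = 12
# ok_dependency_job(c) -> 12 + 1 = 13
# circle_* and self_dependency never produce values; resolution aborts on the cycle.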
diff --git a/tests/jobs/resources.py b/tests/jobs/resources.py
index 273bf38..ddb8bf8 100644
--- a/tests/jobs/resources.py
+++ b/tests/jobs/resources.py
@@ -16,10 +16,18 @@ import pandas as pd
 
 from rialto.jobs.job_base import JobBase
 
 
 def custom_callable():
-    pass
+    return None
+
+
+def asserting_callable(run_date, config, spark, table_reader):
+    # The Resolver injects these arguments by parameter name.
+    assert run_date is not None
+    assert config is not None
+    assert spark is not None
+    assert table_reader is not None
 
 
 class CustomJobNoReturnVal(JobBase):
@@ -46,3 +54,8 @@ class CustomJobNoVersion(CustomJobNoReturnVal):
     def get_job_version(self) -> str:
         return None
+
+
+class CustomJobAssertResolverSetup(CustomJobNoReturnVal):
+    def get_custom_callable(self):
+        return asserting_callable
diff --git a/tests/jobs/test_decorators.py b/tests/jobs/test_decorators.py
index a09ee69..d1931c3 100644
--- a/tests/jobs/test_decorators.py
+++ b/tests/jobs/test_decorators.py
@@ -15,21 +15,17 @@
 from importlib import import_module
 
 from rialto.jobs.job_base import JobBase
-from rialto.jobs.resolver import Resolver
+from rialto.jobs.module_register import ModuleRegister
 
 
 def test_dataset_decorator():
     _ = import_module("tests.jobs.test_job.test_job")
-    test_dataset = Resolver.resolve("dataset")
-
-    assert test_dataset == "dataset_return"
+    assert ModuleRegister.find_callable("dataset", "tests.jobs.test_job.test_job") is not None
 
 
 def test_config_decorator():
     _ = import_module("tests.jobs.test_job.test_job")
-    test_dataset = Resolver.resolve("custom_config")
-
-    assert test_dataset == "config_return"
+    assert ModuleRegister.find_callable("custom_config", "tests.jobs.test_job.test_job") is not None
 
 
 def _rialto_import_stub(module_name, class_name):
diff --git a/tests/jobs/test_job/dependency_tests_job.py b/tests/jobs/test_job/dependency_tests_job.py
deleted file mode 100644
index 7452d02..0000000
--- a/tests/jobs/test_job/dependency_tests_job.py
+++ /dev/null
@@ -1,51 +0,0 @@
-from rialto.jobs import datasource, job
-
-
-@datasource
-def a():
-    return 1
-
-
-@datasource
-def b(a):
-    return a + 1
-
-
-@datasource
-def c(a, b):
-    return a + b
-
-
-@job
-def ok_dependency_job(c):
-    return c + 1
-
-
-@datasource
-def d(a, circle_1):
-    return circle_1 + a
-
-
-@datasource
-def circle_1(circle_2):
-    return circle_2 + 1
-
-
-@datasource
-def circle_2(circle_1):
-    return circle_1 + 1
-
-
-@job
-def circular_dependency_job(d):
-    return d + 1
-
-
-@job
-def missing_dependency_job(a, x):
-    return x + a
-
-
-@job
-def default_dependency_job(run_date, spark, config):
-    return 1
diff --git a/tests/jobs/test_job_base.py b/tests/jobs/test_job_base.py
index 2fb01ea..1514957 100644
--- a/tests/jobs/test_job_base.py
+++ b/tests/jobs/test_job_base.py
@@ -23,30 +23,13 @@
 from rialto.loader import PysparkFeatureLoader
 
 
-def test_setup_except_feature_loader(spark):
+def test_setup(spark):
     table_reader = MagicMock()
     config = MagicMock()
     date = datetime.date(2023, 1, 1)
 
     resources.CustomJobNoReturnVal().run(reader=table_reader, run_date=date, spark=spark, config=config)
 
-    assert Resolver.resolve("run_date") == date
-    assert Resolver.resolve("config") == config
-    assert Resolver.resolve("spark") == spark
-    assert Resolver.resolve("table_reader") == table_reader
-
-
-def test_setup_feature_loader(spark):
-    table_reader = MagicMock()
-    date = datetime.date(2023, 1, 1)
-    feature_loader = PysparkFeatureLoader(spark, "", "", "")
-
-    resources.CustomJobNoReturnVal().run(
-        reader=table_reader, run_date=date, spark=spark, config=None, feature_loader=feature_loader
-    )
-
-    assert type(Resolver.resolve("feature_loader")) == PysparkFeatureLoader
-
 
 def test_custom_callable_called(spark, mocker):
     spy_cc = mocker.spy(resources, "custom_callable")
diff --git a/tests/jobs/test_resolver.py b/tests/jobs/test_resolver.py
index c6ccdb0..443e27b 100644
--- a/tests/jobs/test_resolver.py
+++ b/tests/jobs/test_resolver.py
@@ -20,46 +20,51 @@ def test_simple_resolve_custom_name():
     def f():
         return 7
 
-    Resolver.register_callable(f, "hello")
+    resolver = Resolver()
+    resolver.register_getter(f, "hello")
 
-    assert Resolver.resolve("hello") == 7
+    assert resolver.resolve(lambda hello: hello) == 7
 
 
 def test_simple_resolve_infer_f_name():
     def f():
-        return 7
+        return 8
 
-    Resolver.register_callable(f)
+    resolver = Resolver()
+    resolver.register_getter(f)
 
-    assert Resolver.resolve("f") == 7
+    assert resolver.resolve(lambda f: f) == 8
 
 
-def test_dependency_resolve():
-    def f():
-        return 7
-
-    def g(f):
-        return f + 1
+def test_resolve_non_defined():
+    resolver = Resolver()
+    with pytest.raises(ResolverException):
+        resolver.resolve(lambda x: ...)
 
-    Resolver.register_callable(f)
-    Resolver.register_callable(g)
 
-    assert Resolver.resolve("g") == 8
+def test_resolve_multi_dependency():
+    def a(b, c):
+        return b + c
 
+    def b():
+        return 1
 
-def test_resolve_non_defined():
-    with pytest.raises(ResolverException):
-        Resolver.resolve("whatever")
+    def c(d):
+        return d + 10
 
+    def d():
+        return 100
 
-def test_register_resolve(mocker):
-    def f():
-        return 7
+    resolver = Resolver()
+    resolver.register_getter(a)
+    resolver.register_getter(b)
+    resolver.register_getter(c)
+    resolver.register_getter(d)
 
-    mocker.patch("rialto.jobs.resolver.Resolver.register_callable", return_value="f")
-    mocker.patch("rialto.jobs.resolver.Resolver.resolve")
+    assert resolver.resolve(a) == 111
 
-    Resolver.register_resolve(f)
 
-    Resolver.register_callable.assert_called_once_with(f)
-    Resolver.resolve.assert_called_once_with("f")
+def test_register_objects():
+    resolver = Resolver()
+    resolver.register_object(7, "seven")
+
+    assert resolver.resolve(lambda seven: seven) == 7
diff --git a/tests/jobs/test_test_utils.py b/tests/jobs/test_test_utils.py
index e6ef9da..dcf41ab 100644
--- a/tests/jobs/test_test_utils.py
+++ b/tests/jobs/test_test_utils.py
@@ -14,21 +14,20 @@
 import pytest
 
 import rialto.jobs.decorators as decorators
-import tests.jobs.test_job.dependency_tests_job as dependency_tests_job
+import tests.jobs.dependency_checks_job.complex_dependency_job as complex_dependency_job
+import tests.jobs.dependency_checks_job.dependency_checks_job as dependency_checks_job
+import tests.jobs.dependency_checks_job.duplicate_dependency_job as duplicate_dependency_job
 import tests.jobs.test_job.test_job as test_job
-from rialto.jobs.resolver import Resolver
 from rialto.jobs.test_utils import disable_job_decorators, resolver_resolves
 
 
 def test_raw_dataset_patch(mocker):
-    spy_rc = mocker.spy(Resolver, "register_callable")
     spy_dec = mocker.spy(decorators, "datasource")
 
     with disable_job_decorators(test_job):
         assert test_job.dataset() == "dataset_return"
 
-        spy_dec.assert_not_called()
-        spy_rc.assert_not_called()
+    spy_dec.assert_not_called()
 
 
 def test_job_function_patch(mocker):
@@ -37,7 +36,7 @@ def test_job_function_patch(mocker):
     with disable_job_decorators(test_job):
         assert test_job.job_function() == "job_function_return"
 
-        spy_dec.assert_not_called()
+    spy_dec.assert_not_called()
 
 
 def test_custom_name_job_function_patch(mocker):
@@ -46,28 +45,64 @@ def test_custom_name_job_function_patch(mocker):
     with disable_job_decorators(test_job):
         assert test_job.custom_name_job_function() == "custom_job_name_return"
 
-        spy_dec.assert_not_called()
+    spy_dec.assert_not_called()
 
 
 def test_resolver_resolves_ok_job(spark):
-    assert resolver_resolves(spark, dependency_tests_job.ok_dependency_job)
+    assert resolver_resolves(spark, dependency_checks_job.ok_dependency_job)
 
 
 def test_resolver_resolves_default_dependency(spark):
-    assert resolver_resolves(spark, dependency_tests_job.default_dependency_job)
+    assert resolver_resolves(spark, dependency_checks_job.default_dependency_job)
 
 
-def test_resolver_resolves_fails_circular_dependency(spark):
+def test_resolver_fails_circular_dependency(spark):
     with pytest.raises(Exception) as exc_info:
-        assert resolver_resolves(spark, dependency_tests_job.circular_dependency_job)
+        assert resolver_resolves(spark, dependency_checks_job.circular_dependency_job)
 
     assert exc_info is not None
-    assert str(exc_info.value) == "Circular Dependence on circle_1!"
+    assert str(exc_info.value) == "Circular Dependence in circle_third!"
 
 
-def test_resolver_resolves_fails_missing_dependency(spark):
+def test_resolver_fails_missing_dependency(spark):
     with pytest.raises(Exception) as exc_info:
-        assert resolver_resolves(spark, dependency_tests_job.missing_dependency_job)
+        assert resolver_resolves(spark, dependency_checks_job.missing_dependency_job)
 
     assert exc_info is not None
     assert str(exc_info.value) == "x declaration not found!"
+
+
+def test_resolver_fails_self_dependency(spark):
+    with pytest.raises(Exception) as exc_info:
+        assert resolver_resolves(spark, dependency_checks_job.self_dependency_job)
+
+    assert exc_info is not None
+    assert str(exc_info.value) == "Circular Dependence in self_dependency!"
+
+
+def test_complex_dependencies_resolves_correctly(spark):
+    assert resolver_resolves(spark, complex_dependency_job.complex_dependency_job)
+
+
+def test_complex_dependencies_fails_on_unimported(spark):
+    with pytest.raises(Exception) as exc_info:
+        assert resolver_resolves(spark, complex_dependency_job.unimported_dependency_job)
+
+    assert exc_info is not None
+    assert str(exc_info.value) == "k declaration not found!"
+
+
+def test_duplicate_dependency_fails_on_duplicate(spark):
+    with pytest.raises(Exception) as exc_info:
+        assert resolver_resolves(spark, duplicate_dependency_job.duplicate_dependency_job)
+
+    assert exc_info is not None
+    assert str(exc_info.value) == "Multiple functions with the same name i found!"
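Taken together, a hedged end-to-end sketch of the workflow this patch enables; module, table, and column names are illustrative only, and the reader call is an assumed TableReader method, not confirmed by this diff.

# my_project/datasources.py (hypothetical)
from rialto.jobs import datasource


@datasource
def accounts(table_reader, run_date):
    # table_reader and run_date are injected by the per-run Resolver;
    # get_table() is assumed here as the reader's access method.
    return table_reader.get_table("catalog.schema.accounts")


# my_project/jobs.py (hypothetical)
import my_project.datasources as datasources
from rialto.jobs import job, register_dependency_module

register_dependency_module(datasources)


@job
def account_report(accounts, run_date):
    # `accounts` resolves via the registered datasources module,
    # `run_date` via the Resolver built in JobBase.run().
    return accounts.filter(f"created_at <= '{run_date}'")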