From 20f33f0d7bb368b474501f6d5d255ffbdda1d150 Mon Sep 17 00:00:00 2001 From: fern-api <115122769+fern-api[bot]@users.noreply.github.com> Date: Thu, 23 May 2024 06:41:23 +0000 Subject: [PATCH] Release v0.2.40 --- poetry.lock | 109 +- pyproject.toml | 14 +- src/superagent/__init__.py | 2 + src/superagent/client.py | 80 +- src/superagent/core/__init__.py | 5 + src/superagent/core/client_wrapper.py | 20 +- src/superagent/core/http_client.py | 11 +- src/superagent/core/jsonable_encoder.py | 18 +- src/superagent/core/pydantic_utilities.py | 28 + src/superagent/core/query_encoder.py | 33 + src/superagent/resources/agent/client.py | 1453 ++++++++++++----- src/superagent/resources/api_key/client.py | 353 +++- src/superagent/resources/api_user/client.py | 339 +++- src/superagent/resources/datasource/client.py | 515 ++++-- src/superagent/resources/llm/client.py | 361 +++- src/superagent/resources/tool/client.py | 527 ++++-- .../resources/vector_database/client.py | 433 +++-- src/superagent/resources/workflow/client.py | 883 +++++++--- .../resources/workflow_config/client.py | 159 +- .../types/agent_datasosurce_list.py | 19 +- src/superagent/types/agent_list.py | 19 +- src/superagent/types/agent_tool_list.py | 19 +- src/superagent/types/api_key_create.py | 19 +- src/superagent/types/api_key_create_model.py | 30 +- src/superagent/types/api_key_list.py | 19 +- .../types/app_models_request_api_key.py | 17 +- .../types/app_models_request_api_user.py | 26 +- .../types/app_models_request_datasource.py | 32 +- .../types/app_models_request_llm.py | 22 +- .../types/app_models_request_vector_db.py | 17 +- .../types/app_models_request_workflow.py | 17 +- .../types/app_models_request_workflow_step.py | 20 +- .../types/app_models_response_agent.py | 19 +- .../types/app_models_response_agent_invoke.py | 19 +- .../types/app_models_response_api_key.py | 19 +- .../types/app_models_response_api_user.py | 19 +- .../types/app_models_response_datasource.py | 19 +- .../types/app_models_response_llm.py | 19 +- .../types/app_models_response_tool.py | 19 +- .../types/app_models_response_vector_db.py | 19 +- .../types/app_models_response_workflow.py | 19 +- .../app_models_response_workflow_step.py | 19 +- src/superagent/types/datasource_list.py | 19 +- src/superagent/types/function_definition.py | 23 +- src/superagent/types/http_validation_error.py | 19 +- src/superagent/types/llm_list.py | 19 +- src/superagent/types/llm_params.py | 21 +- .../types/open_ai_assistant_parameters.py | 24 +- ...open_ai_assistant_parameters_tools_item.py | 65 +- src/superagent/types/prisma_models_agent.py | 50 +- .../types/prisma_models_agent_datasource.py | 30 +- .../types/prisma_models_agent_llm.py | 30 +- .../types/prisma_models_agent_tool.py | 30 +- src/superagent/types/prisma_models_api_key.py | 28 +- .../types/prisma_models_api_user.py | 44 +- .../types/prisma_models_datasource.py | 40 +- src/superagent/types/prisma_models_llm.py | 32 +- src/superagent/types/prisma_models_tool.py | 34 +- .../types/prisma_models_vector_db.py | 30 +- .../types/prisma_models_workflow.py | 34 +- .../types/prisma_models_workflow_step.py | 34 +- .../types/tool_assistant_tools_code.py | 17 +- .../types/tool_assistant_tools_function.py | 19 +- .../types/tool_assistant_tools_retrieval.py | 17 +- src/superagent/types/tool_list.py | 19 +- src/superagent/types/validation_error.py | 17 +- src/superagent/types/vector_db_list.py | 19 +- src/superagent/types/workflow_config.py | 32 +- src/superagent/types/workflow_list.py | 19 +- src/superagent/types/workflow_step_list.py | 
19 +- src/superagent/version.py | 4 + tests/__init__.py | 0 tests/{ => custom}/test_client.py | 0 73 files changed, 4696 insertions(+), 1901 deletions(-) create mode 100644 src/superagent/core/pydantic_utilities.py create mode 100644 src/superagent/core/query_encoder.py create mode 100644 src/superagent/version.py delete mode 100644 tests/__init__.py rename tests/{ => custom}/test_client.py (100%) diff --git a/poetry.lock b/poetry.lock index 978678f..37cbe9b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,14 +1,14 @@ -# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. [[package]] name = "annotated-types" -version = "0.6.0" +version = "0.7.0" description = "Reusable constraint types to use with typing.Annotated" optional = false python-versions = ">=3.8" files = [ - {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, - {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, ] [package.dependencies] @@ -152,38 +152,38 @@ files = [ [[package]] name = "mypy" -version = "1.10.0" +version = "1.9.0" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:da1cbf08fb3b851ab3b9523a884c232774008267b1f83371ace57f412fe308c2"}, - {file = "mypy-1.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:12b6bfc1b1a66095ab413160a6e520e1dc076a28f3e22f7fb25ba3b000b4ef99"}, - {file = "mypy-1.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e36fb078cce9904c7989b9693e41cb9711e0600139ce3970c6ef814b6ebc2b2"}, - {file = "mypy-1.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2b0695d605ddcd3eb2f736cd8b4e388288c21e7de85001e9f85df9187f2b50f9"}, - {file = "mypy-1.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:cd777b780312ddb135bceb9bc8722a73ec95e042f911cc279e2ec3c667076051"}, - {file = "mypy-1.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3be66771aa5c97602f382230165b856c231d1277c511c9a8dd058be4784472e1"}, - {file = "mypy-1.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8b2cbaca148d0754a54d44121b5825ae71868c7592a53b7292eeb0f3fdae95ee"}, - {file = "mypy-1.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ec404a7cbe9fc0e92cb0e67f55ce0c025014e26d33e54d9e506a0f2d07fe5de"}, - {file = "mypy-1.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e22e1527dc3d4aa94311d246b59e47f6455b8729f4968765ac1eacf9a4760bc7"}, - {file = "mypy-1.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:a87dbfa85971e8d59c9cc1fcf534efe664d8949e4c0b6b44e8ca548e746a8d53"}, - {file = "mypy-1.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a781f6ad4bab20eef8b65174a57e5203f4be627b46291f4589879bf4e257b97b"}, - {file = "mypy-1.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b808e12113505b97d9023b0b5e0c0705a90571c6feefc6f215c1df9381256e30"}, - {file = "mypy-1.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:8f55583b12156c399dce2df7d16f8a5095291354f1e839c252ec6c0611e86e2e"}, - {file = "mypy-1.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4cf18f9d0efa1b16478c4c129eabec36148032575391095f73cae2e722fcf9d5"}, - {file = "mypy-1.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:bc6ac273b23c6b82da3bb25f4136c4fd42665f17f2cd850771cb600bdd2ebeda"}, - {file = "mypy-1.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9fd50226364cd2737351c79807775136b0abe084433b55b2e29181a4c3c878c0"}, - {file = "mypy-1.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f90cff89eea89273727d8783fef5d4a934be2fdca11b47def50cf5d311aff727"}, - {file = "mypy-1.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fcfc70599efde5c67862a07a1aaf50e55bce629ace26bb19dc17cece5dd31ca4"}, - {file = "mypy-1.10.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:075cbf81f3e134eadaf247de187bd604748171d6b79736fa9b6c9685b4083061"}, - {file = "mypy-1.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:3f298531bca95ff615b6e9f2fc0333aae27fa48052903a0ac90215021cdcfa4f"}, - {file = "mypy-1.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fa7ef5244615a2523b56c034becde4e9e3f9b034854c93639adb667ec9ec2976"}, - {file = "mypy-1.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3236a4c8f535a0631f85f5fcdffba71c7feeef76a6002fcba7c1a8e57c8be1ec"}, - {file = "mypy-1.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a2b5cdbb5dd35aa08ea9114436e0d79aceb2f38e32c21684dcf8e24e1e92821"}, - {file = "mypy-1.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92f93b21c0fe73dc00abf91022234c79d793318b8a96faac147cd579c1671746"}, - {file = "mypy-1.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:28d0e038361b45f099cc086d9dd99c15ff14d0188f44ac883010e172ce86c38a"}, - {file = "mypy-1.10.0-py3-none-any.whl", hash = "sha256:f8c083976eb530019175aabadb60921e73b4f45736760826aa1689dda8208aee"}, - {file = "mypy-1.10.0.tar.gz", hash = "sha256:3d087fcbec056c4ee34974da493a826ce316947485cef3901f511848e687c131"}, + {file = "mypy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f8a67616990062232ee4c3952f41c779afac41405806042a8126fe96e098419f"}, + {file = "mypy-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d357423fa57a489e8c47b7c85dfb96698caba13d66e086b412298a1a0ea3b0ed"}, + {file = "mypy-1.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49c87c15aed320de9b438ae7b00c1ac91cd393c1b854c2ce538e2a72d55df150"}, + {file = "mypy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:48533cdd345c3c2e5ef48ba3b0d3880b257b423e7995dada04248725c6f77374"}, + {file = "mypy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:4d3dbd346cfec7cb98e6cbb6e0f3c23618af826316188d587d1c1bc34f0ede03"}, + {file = "mypy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:653265f9a2784db65bfca694d1edd23093ce49740b2244cde583aeb134c008f3"}, + {file = "mypy-1.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a3c007ff3ee90f69cf0a15cbcdf0995749569b86b6d2f327af01fd1b8aee9dc"}, + {file = "mypy-1.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2418488264eb41f69cc64a69a745fad4a8f86649af4b1041a4c64ee61fc61129"}, + {file = "mypy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:68edad3dc7d70f2f17ae4c6c1b9471a56138ca22722487eebacfd1eb5321d612"}, + {file = "mypy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:85ca5fcc24f0b4aeedc1d02f93707bccc04733f21d41c88334c5482219b1ccb3"}, + {file = "mypy-1.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:aceb1db093b04db5cd390821464504111b8ec3e351eb85afd1433490163d60cd"}, + {file = "mypy-1.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0235391f1c6f6ce487b23b9dbd1327b4ec33bb93934aa986efe8a9563d9349e6"}, + {file = "mypy-1.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4d5ddc13421ba3e2e082a6c2d74c2ddb3979c39b582dacd53dd5d9431237185"}, + {file = "mypy-1.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:190da1ee69b427d7efa8aa0d5e5ccd67a4fb04038c380237a0d96829cb157913"}, + {file = "mypy-1.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:fe28657de3bfec596bbeef01cb219833ad9d38dd5393fc649f4b366840baefe6"}, + {file = "mypy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e54396d70be04b34f31d2edf3362c1edd023246c82f1730bbf8768c28db5361b"}, + {file = "mypy-1.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5e6061f44f2313b94f920e91b204ec600982961e07a17e0f6cd83371cb23f5c2"}, + {file = "mypy-1.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a10926e5473c5fc3da8abb04119a1f5811a236dc3a38d92015cb1e6ba4cb9e"}, + {file = "mypy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b685154e22e4e9199fc95f298661deea28aaede5ae16ccc8cbb1045e716b3e04"}, + {file = "mypy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:5d741d3fc7c4da608764073089e5f58ef6352bedc223ff58f2f038c2c4698a89"}, + {file = "mypy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:587ce887f75dd9700252a3abbc9c97bbe165a4a630597845c61279cf32dfbf02"}, + {file = "mypy-1.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f88566144752999351725ac623471661c9d1cd8caa0134ff98cceeea181789f4"}, + {file = "mypy-1.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61758fabd58ce4b0720ae1e2fea5cfd4431591d6d590b197775329264f86311d"}, + {file = "mypy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e49499be624dead83927e70c756970a0bc8240e9f769389cdf5714b0784ca6bf"}, + {file = "mypy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:571741dc4194b4f82d344b15e8837e8c5fcc462d66d076748142327626a1b6e9"}, + {file = "mypy-1.9.0-py3-none-any.whl", hash = "sha256:a260627a570559181a9ea5de61ac6297aa5af202f06fd7ab093ce74e7181e43e"}, + {file = "mypy-1.9.0.tar.gz", hash = "sha256:3cc5da0127e6a478cddd906068496a97a7618a21ce9b54bde5bf7e539c7af974"}, ] [package.dependencies] @@ -366,6 +366,49 @@ tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} [package.extras] testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +[[package]] +name = "pytest-asyncio" +version = "0.23.7" +description = "Pytest support for asyncio" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest_asyncio-0.23.7-py3-none-any.whl", hash = "sha256:009b48127fbe44518a547bddd25611551b0e43ccdbf1e67d12479f569832c20b"}, + {file = "pytest_asyncio-0.23.7.tar.gz", hash = "sha256:5f5c72948f4c49e7db4f29f2521d4031f1c27f86e57b046126654083d4770268"}, +] + +[package.dependencies] +pytest = ">=7.0.0,<9" + +[package.extras] +docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] +testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + 
{file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + [[package]] name = "sniffio" version = "1.3.1" @@ -402,4 +445,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "3c8fae8de68e5484c48073bf191e51acbe3b9a32fd98e6b5e4d165e42a7fc7aa" +content-hash = "6f0b8d5044d8d4461b9850fcdfd7c382335fc9481fcfd5bd42e760121c1e1450" diff --git a/pyproject.toml b/pyproject.toml index 67bb25d..23f4a6c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "superagent-py" -version = "v0.2.39" +version = "v0.2.40" description = "" readme = "README.md" authors = [] @@ -15,8 +15,18 @@ pydantic = ">= 1.9.2" typing_extensions = ">= 4.0.0" [tool.poetry.dev-dependencies] -mypy = "^1.8.0" +mypy = "1.9.0" pytest = "^7.4.0" +pytest-asyncio = "^0.23.5" +python-dateutil = "^2.9.0" + +[tool.pytest.ini_options] +testpaths = [ "tests" ] +asyncio_mode = "auto" + +[tool.mypy] +plugins = ["pydantic.mypy"] + [build-system] requires = ["poetry-core"] diff --git a/src/superagent/__init__.py b/src/superagent/__init__.py index 964d5e9..67d5a97 100644 --- a/src/superagent/__init__.py +++ b/src/superagent/__init__.py @@ -68,6 +68,7 @@ from .errors import UnprocessableEntityError from .resources import agent, api_key, api_user, datasource, llm, tool, vector_database, workflow, workflow_config from .environment import SuperagentEnvironment +from .version import __version__ __all__ = [ "AgentDatasosurceList", @@ -135,6 +136,7 @@ "WorkflowConfig", "WorkflowList", "WorkflowStepList", + "__version__", "agent", "api_key", "api_user", diff --git a/src/superagent/client.py b/src/superagent/client.py index 55301af..50c4009 100644 --- a/src/superagent/client.py +++ b/src/superagent/client.py @@ -21,19 +21,32 @@ class Superagent: """ Use this class to access the different functions within the SDK. You can instantiate any number of clients with different configuration that will propogate to these functions. - Parameters: - - base_url: typing.Optional[str]. The base url to use for requests from the client. + Parameters + ---------- + base_url : typing.Optional[str] + The base url to use for requests from the client. - - environment: SuperagentEnvironment. The environment to use for requests from the client. from .environment import SuperagentEnvironment + environment : SuperagentEnvironment + The environment to use for requests from the client. from .environment import SuperagentEnvironment - Defaults to SuperagentEnvironment.DEFAULT - - token: typing.Optional[typing.Union[str, typing.Callable[[], str]]]. - - timeout: typing.Optional[float]. The timeout to be used, in seconds, for requests by default the timeout is 60 seconds. + Defaults to SuperagentEnvironment.DEFAULT - - httpx_client: typing.Optional[httpx.Client]. The httpx client to use for making requests, a preconfigured client is used by default, however this is useful should you want to pass in any custom httpx configuration. 
- --- + + + token : typing.Optional[typing.Union[str, typing.Callable[[], str]]] + timeout : typing.Optional[float] + The timeout to be used, in seconds, for requests by default the timeout is 60 seconds, unless a custom httpx client is used, in which case a default is not set. + + follow_redirects : typing.Optional[bool] + Whether the default httpx client follows redirects or not, this is irrelevant if a custom httpx client is passed in. + + httpx_client : typing.Optional[httpx.Client] + The httpx client to use for making requests, a preconfigured client is used by default, however this is useful should you want to pass in any custom httpx configuration. + + Examples + -------- from superagent.client import Superagent client = Superagent( @@ -47,13 +60,20 @@ def __init__( base_url: typing.Optional[str] = None, environment: SuperagentEnvironment = SuperagentEnvironment.DEFAULT, token: typing.Optional[typing.Union[str, typing.Callable[[], str]]] = None, - timeout: typing.Optional[float] = 60, + timeout: typing.Optional[float] = None, + follow_redirects: typing.Optional[bool] = True, httpx_client: typing.Optional[httpx.Client] = None ): + _defaulted_timeout = timeout if timeout is not None else 60 if httpx_client is None else None self._client_wrapper = SyncClientWrapper( base_url=_get_base_url(base_url=base_url, environment=environment), token=token, - httpx_client=httpx.Client(timeout=timeout) if httpx_client is None else httpx_client, + httpx_client=httpx_client + if httpx_client is not None + else httpx.Client(timeout=_defaulted_timeout, follow_redirects=follow_redirects) + if follow_redirects is not None + else httpx.Client(timeout=_defaulted_timeout), + timeout=_defaulted_timeout, ) self.agent = AgentClient(client_wrapper=self._client_wrapper) self.llm = LlmClient(client_wrapper=self._client_wrapper) @@ -70,19 +90,32 @@ class AsyncSuperagent: """ Use this class to access the different functions within the SDK. You can instantiate any number of clients with different configuration that will propogate to these functions. - Parameters: - - base_url: typing.Optional[str]. The base url to use for requests from the client. + Parameters + ---------- + base_url : typing.Optional[str] + The base url to use for requests from the client. + + environment : SuperagentEnvironment + The environment to use for requests from the client. from .environment import SuperagentEnvironment + + + + Defaults to SuperagentEnvironment.DEFAULT + - - environment: SuperagentEnvironment. The environment to use for requests from the client. from .environment import SuperagentEnvironment - Defaults to SuperagentEnvironment.DEFAULT + token : typing.Optional[typing.Union[str, typing.Callable[[], str]]] + timeout : typing.Optional[float] + The timeout to be used, in seconds, for requests by default the timeout is 60 seconds, unless a custom httpx client is used, in which case a default is not set. - - token: typing.Optional[typing.Union[str, typing.Callable[[], str]]]. + follow_redirects : typing.Optional[bool] + Whether the default httpx client follows redirects or not, this is irrelevant if a custom httpx client is passed in. - - timeout: typing.Optional[float]. The timeout to be used, in seconds, for requests by default the timeout is 60 seconds. + httpx_client : typing.Optional[httpx.AsyncClient] + The httpx client to use for making requests, a preconfigured client is used by default, however this is useful should you want to pass in any custom httpx configuration. - - httpx_client: typing.Optional[httpx.AsyncClient]. 
The httpx client to use for making requests, a preconfigured client is used by default, however this is useful should you want to pass in any custom httpx configuration. - --- + Examples + -------- from superagent.client import AsyncSuperagent client = AsyncSuperagent( @@ -96,13 +129,20 @@ def __init__( base_url: typing.Optional[str] = None, environment: SuperagentEnvironment = SuperagentEnvironment.DEFAULT, token: typing.Optional[typing.Union[str, typing.Callable[[], str]]] = None, - timeout: typing.Optional[float] = 60, + timeout: typing.Optional[float] = None, + follow_redirects: typing.Optional[bool] = True, httpx_client: typing.Optional[httpx.AsyncClient] = None ): + _defaulted_timeout = timeout if timeout is not None else 60 if httpx_client is None else None self._client_wrapper = AsyncClientWrapper( base_url=_get_base_url(base_url=base_url, environment=environment), token=token, - httpx_client=httpx.AsyncClient(timeout=timeout) if httpx_client is None else httpx_client, + httpx_client=httpx_client + if httpx_client is not None + else httpx.AsyncClient(timeout=_defaulted_timeout, follow_redirects=follow_redirects) + if follow_redirects is not None + else httpx.AsyncClient(timeout=_defaulted_timeout), + timeout=_defaulted_timeout, ) self.agent = AsyncAgentClient(client_wrapper=self._client_wrapper) self.llm = AsyncLlmClient(client_wrapper=self._client_wrapper) diff --git a/src/superagent/core/__init__.py b/src/superagent/core/__init__.py index b3e43c2..58ad52a 100644 --- a/src/superagent/core/__init__.py +++ b/src/superagent/core/__init__.py @@ -6,6 +6,8 @@ from .file import File, convert_file_dict_to_httpx_tuples from .http_client import AsyncHttpClient, HttpClient from .jsonable_encoder import jsonable_encoder +from .pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .query_encoder import encode_query from .remove_none_from_dict import remove_none_from_dict from .request_options import RequestOptions @@ -19,7 +21,10 @@ "RequestOptions", "SyncClientWrapper", "convert_file_dict_to_httpx_tuples", + "deep_union_pydantic_dicts", + "encode_query", "jsonable_encoder", + "pydantic_v1", "remove_none_from_dict", "serialize_datetime", ] diff --git a/src/superagent/core/client_wrapper.py b/src/superagent/core/client_wrapper.py index 5d21b05..7f7bcc1 100644 --- a/src/superagent/core/client_wrapper.py +++ b/src/superagent/core/client_wrapper.py @@ -8,15 +8,22 @@ class BaseClientWrapper: - def __init__(self, *, token: typing.Optional[typing.Union[str, typing.Callable[[], str]]] = None, base_url: str): + def __init__( + self, + *, + token: typing.Optional[typing.Union[str, typing.Callable[[], str]]] = None, + base_url: str, + timeout: typing.Optional[float] = None, + ): self._token = token self._base_url = base_url + self._timeout = timeout def get_headers(self) -> typing.Dict[str, str]: headers: typing.Dict[str, str] = { "X-Fern-Language": "Python", "X-Fern-SDK-Name": "superagent-py", - "X-Fern-SDK-Version": "v0.2.39", + "X-Fern-SDK-Version": "v0.2.40", } token = self._get_token() if token is not None: @@ -32,6 +39,9 @@ def _get_token(self) -> typing.Optional[str]: def get_base_url(self) -> str: return self._base_url + def get_timeout(self) -> typing.Optional[float]: + return self._timeout + class SyncClientWrapper(BaseClientWrapper): def __init__( @@ -39,9 +49,10 @@ def __init__( *, token: typing.Optional[typing.Union[str, typing.Callable[[], str]]] = None, base_url: str, + timeout: typing.Optional[float] = None, httpx_client: httpx.Client, ): - super().__init__(token=token, 
base_url=base_url) + super().__init__(token=token, base_url=base_url, timeout=timeout) self.httpx_client = HttpClient(httpx_client=httpx_client) @@ -51,7 +62,8 @@ def __init__( *, token: typing.Optional[typing.Union[str, typing.Callable[[], str]]] = None, base_url: str, + timeout: typing.Optional[float] = None, httpx_client: httpx.AsyncClient, ): - super().__init__(token=token, base_url=base_url) + super().__init__(token=token, base_url=base_url, timeout=timeout) self.httpx_client = AsyncHttpClient(httpx_client=httpx_client) diff --git a/src/superagent/core/http_client.py b/src/superagent/core/http_client.py index fbbbc15..4e6877d 100644 --- a/src/superagent/core/http_client.py +++ b/src/superagent/core/http_client.py @@ -5,6 +5,7 @@ import re import time import typing +from contextlib import asynccontextmanager, contextmanager from functools import wraps from random import random @@ -98,8 +99,10 @@ def request( return response @wraps(httpx.Client.stream) + @contextmanager def stream(self, *args: typing.Any, max_retries: int = 0, retries: int = 0, **kwargs: typing.Any) -> typing.Any: - return self.httpx_client.stream(*args, **kwargs) + with self.httpx_client.stream(*args, **kwargs) as stream: + yield stream class AsyncHttpClient: @@ -118,8 +121,10 @@ async def request( return await self.request(max_retries=max_retries, retries=retries + 1, *args, **kwargs) return response - @wraps(httpx.AsyncClient.request) + @wraps(httpx.AsyncClient.stream) + @asynccontextmanager async def stream( self, *args: typing.Any, max_retries: int = 0, retries: int = 0, **kwargs: typing.Any ) -> typing.Any: - return self.httpx_client.stream(*args, **kwargs) + async with self.httpx_client.stream(*args, **kwargs) as stream: + yield stream diff --git a/src/superagent/core/jsonable_encoder.py b/src/superagent/core/jsonable_encoder.py index 37238ab..7f48273 100644 --- a/src/superagent/core/jsonable_encoder.py +++ b/src/superagent/core/jsonable_encoder.py @@ -16,12 +16,8 @@ from types import GeneratorType from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Union -try: - import pydantic.v1 as pydantic # type: ignore -except ImportError: - import pydantic # type: ignore - from .datetime_utils import serialize_datetime +from .pydantic_utilities import pydantic_v1 SetIntStr = Set[Union[int, str]] DictIntStrAny = Dict[Union[int, str], Any] @@ -36,7 +32,7 @@ def generate_encoders_by_class_tuples( return encoders_by_class_tuples -encoders_by_class_tuples = generate_encoders_by_class_tuples(pydantic.json.ENCODERS_BY_TYPE) +encoders_by_class_tuples = generate_encoders_by_class_tuples(pydantic_v1.json.ENCODERS_BY_TYPE) def jsonable_encoder(obj: Any, custom_encoder: Optional[Dict[Any, Callable[[Any], Any]]] = None) -> Any: @@ -48,7 +44,7 @@ def jsonable_encoder(obj: Any, custom_encoder: Optional[Dict[Any, Callable[[Any] for encoder_type, encoder_instance in custom_encoder.items(): if isinstance(obj, encoder_type): return encoder_instance(obj) - if isinstance(obj, pydantic.BaseModel): + if isinstance(obj, pydantic_v1.BaseModel): encoder = getattr(obj.__config__, "json_encoders", {}) if custom_encoder: encoder.update(custom_encoder) @@ -65,10 +61,10 @@ def jsonable_encoder(obj: Any, custom_encoder: Optional[Dict[Any, Callable[[Any] return str(obj) if isinstance(obj, (str, int, float, type(None))): return obj - if isinstance(obj, dt.date): - return str(obj) if isinstance(obj, dt.datetime): return serialize_datetime(obj) + if isinstance(obj, dt.date): + return str(obj) if isinstance(obj, dict): encoded_dict = {} 
allowed_keys = set(obj.keys()) @@ -84,8 +80,8 @@ def jsonable_encoder(obj: Any, custom_encoder: Optional[Dict[Any, Callable[[Any] encoded_list.append(jsonable_encoder(item, custom_encoder=custom_encoder)) return encoded_list - if type(obj) in pydantic.json.ENCODERS_BY_TYPE: - return pydantic.json.ENCODERS_BY_TYPE[type(obj)](obj) + if type(obj) in pydantic_v1.json.ENCODERS_BY_TYPE: + return pydantic_v1.json.ENCODERS_BY_TYPE[type(obj)](obj) for encoder, classes_tuple in encoders_by_class_tuples.items(): if isinstance(obj, classes_tuple): return encoder(obj) diff --git a/src/superagent/core/pydantic_utilities.py b/src/superagent/core/pydantic_utilities.py new file mode 100644 index 0000000..a72c1a5 --- /dev/null +++ b/src/superagent/core/pydantic_utilities.py @@ -0,0 +1,28 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import pydantic + +IS_PYDANTIC_V2 = pydantic.VERSION.startswith("2.") + +if IS_PYDANTIC_V2: + import pydantic.v1 as pydantic_v1 # type: ignore # nopycln: import +else: + import pydantic as pydantic_v1 # type: ignore # nopycln: import + + +def deep_union_pydantic_dicts( + source: typing.Dict[str, typing.Any], destination: typing.Dict[str, typing.Any] +) -> typing.Dict[str, typing.Any]: + for key, value in source.items(): + if isinstance(value, dict): + node = destination.setdefault(key, {}) + deep_union_pydantic_dicts(value, node) + else: + destination[key] = value + + return destination + + +__all__ = ["pydantic_v1"] diff --git a/src/superagent/core/query_encoder.py b/src/superagent/core/query_encoder.py new file mode 100644 index 0000000..ac7e257 --- /dev/null +++ b/src/superagent/core/query_encoder.py @@ -0,0 +1,33 @@ +# This file was auto-generated by Fern from our API Definition. + +from collections import ChainMap +from typing import Any, Dict, Optional + +from .pydantic_utilities import pydantic_v1 + + +# Flattens dicts to be of the form {"key[subkey][subkey2]": value} where value is not a dict +def traverse_query_dict(dict_flat: Dict[str, Any], key_prefix: Optional[str] = None) -> Dict[str, Any]: + result = {} + for k, v in dict_flat.items(): + key = f"{key_prefix}[{k}]" if key_prefix is not None else k + if isinstance(v, dict): + result.update(traverse_query_dict(v, key)) + else: + result[key] = v + return result + + +def single_query_encoder(query_key: str, query_value: Any) -> Dict[str, Any]: + if isinstance(query_value, pydantic_v1.BaseModel) or isinstance(query_value, dict): + if isinstance(query_value, pydantic_v1.BaseModel): + obj_dict = query_value.dict(by_alias=True) + else: + obj_dict = query_value + return traverse_query_dict(obj_dict, query_key) + + return {query_key: query_value} + + +def encode_query(query: Dict[str, Any]) -> Dict[str, Any]: + return dict(ChainMap(*[single_query_encoder(k, v) for k, v in query.items()])) diff --git a/src/superagent/resources/agent/client.py b/src/superagent/resources/agent/client.py index 1cc2100..2e2ce58 100644 --- a/src/superagent/resources/agent/client.py +++ b/src/superagent/resources/agent/client.py @@ -7,6 +7,8 @@ from ...core.api_error import ApiError from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import pydantic_v1 +from ...core.query_encoder import encode_query from ...core.remove_none_from_dict import remove_none_from_dict from ...core.request_options import RequestOptions from ...errors.unprocessable_entity_error import UnprocessableEntityError @@ -21,11 +23,6 
@@ from ...types.llm_provider import LlmProvider from ...types.open_ai_assistant_parameters import OpenAiAssistantParameters -try: - import pydantic.v1 as pydantic # type: ignore -except ImportError: - import pydantic # type: ignore - # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -44,27 +41,48 @@ def list( """ List all agents - Parameters: - - skip: typing.Optional[int]. + Parameters + ---------- + skip : typing.Optional[int] + + take : typing.Optional[int] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - - take: typing.Optional[int]. + Returns + ------- + AgentList + Successful Response - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. + Examples + -------- + from superagent.client import Superagent + + client = Superagent( + token="YOUR_TOKEN", + ) + client.agent.list( + skip=1, + take=1, + ) """ _response = self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/agents"), - params=jsonable_encoder( - remove_none_from_dict( - { - "skip": skip, - "take": take, - **( - request_options.get("additional_query_parameters", {}) - if request_options is not None - else {} - ), - } + method="GET", + url=urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/agents"), + params=encode_query( + jsonable_encoder( + remove_none_from_dict( + { + "skip": skip, + "take": take, + **( + request_options.get("additional_query_parameters", {}) + if request_options is not None + else {} + ), + } + ) ) ), headers=jsonable_encoder( @@ -77,14 +95,16 @@ def list( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AgentList, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AgentList, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -94,8 +114,8 @@ def list( def create( self, *, - is_active: typing.Optional[bool] = OMIT, name: str, + is_active: typing.Optional[bool] = OMIT, initial_message: typing.Optional[str] = OMIT, prompt: typing.Optional[str] = OMIT, llm_model: typing.Optional[str] = OMIT, @@ -111,32 +131,71 @@ def create( """ Create a new agent - Parameters: - - is_active: typing.Optional[bool]. + Parameters + ---------- + name : str + + is_active : typing.Optional[bool] - - name: str. + initial_message : typing.Optional[str] - - initial_message: typing.Optional[str]. + prompt : typing.Optional[str] - - prompt: typing.Optional[str]. + llm_model : typing.Optional[str] - - llm_model: typing.Optional[str]. + llm_provider : typing.Optional[LlmProvider] - - llm_provider: typing.Optional[LlmProvider]. + description : typing.Optional[str] - - description: typing.Optional[str]. + avatar : typing.Optional[str] - - avatar: typing.Optional[str]. + type : typing.Optional[AgentType] - - type: typing.Optional[AgentType]. 
+ parameters : typing.Optional[OpenAiAssistantParameters] - - parameters: typing.Optional[OpenAiAssistantParameters]. + metadata : typing.Optional[typing.Dict[str, typing.Any]] - - metadata: typing.Optional[typing.Dict[str, typing.Any]]. + output_schema : typing.Optional[str] - - output_schema: typing.Optional[str]. + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. + Returns + ------- + AppModelsResponseAgent + Successful Response + + Examples + -------- + from superagent import ( + AgentType, + LlmProvider, + OpenAiAssistantParameters, + OpenAiAssistantParametersToolsItem_CodeInterpreter, + ) + from superagent.client import Superagent + + client = Superagent( + token="YOUR_TOKEN", + ) + client.agent.create( + is_active=True, + name="string", + initial_message="string", + prompt="string", + llm_model="string", + llm_provider=LlmProvider.OPENAI, + description="string", + avatar="string", + type=AgentType.SUPERAGENT, + parameters=OpenAiAssistantParameters( + metadata={"string": {"key": "value"}}, + file_ids=["string"], + tools=[OpenAiAssistantParametersToolsItem_CodeInterpreter()], + ), + metadata={"string": {"key": "value"}}, + output_schema="string", + ) """ _request: typing.Dict[str, typing.Any] = {"name": name} if is_active is not OMIT: @@ -162,10 +221,12 @@ def create( if output_schema is not OMIT: _request["outputSchema"] = output_schema _response = self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/agents"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + method="POST", + url=urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/agents"), + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), json=jsonable_encoder(_request) if request_options is None or request_options.get("additional_body_parameters") is None @@ -183,14 +244,16 @@ def create( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseAgent, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseAgent, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -201,18 +264,38 @@ def get(self, agent_id: str, *, request_options: typing.Optional[RequestOptions] """ Get a single agent - Parameters: - - agent_id: str. + Parameters + ---------- + agent_id : str - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AppModelsResponseAgent + Successful Response + + Examples + -------- + from superagent.client import Superagent + + client = Superagent( + token="YOUR_TOKEN", + ) + client.agent.get( + agent_id="string", + ) """ _response = self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin( + method="GET", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{jsonable_encoder(agent_id)}" ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), headers=jsonable_encoder( remove_none_from_dict( @@ -224,14 +307,16 @@ def get(self, agent_id: str, *, request_options: typing.Optional[RequestOptions] ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseAgent, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseAgent, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -242,18 +327,38 @@ def delete(self, agent_id: str, *, request_options: typing.Optional[RequestOptio """ Delete an agent - Parameters: - - agent_id: str. + Parameters + ---------- + agent_id : str - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + typing.Any + Successful Response + + Examples + -------- + from superagent.client import Superagent + + client = Superagent( + token="YOUR_TOKEN", + ) + client.agent.delete( + agent_id="string", + ) """ _response = self._client_wrapper.httpx_client.request( - "DELETE", - urllib.parse.urljoin( + method="DELETE", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{jsonable_encoder(agent_id)}" ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), headers=jsonable_encoder( remove_none_from_dict( @@ -265,14 +370,16 @@ def delete(self, agent_id: str, *, request_options: typing.Optional[RequestOptio ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -298,30 +405,58 @@ def update( """ Patch an agent - Parameters: - - agent_id: str. + Parameters + ---------- + agent_id : str - - is_active: typing.Optional[bool]. + is_active : typing.Optional[bool] - - name: typing.Optional[str]. + name : typing.Optional[str] - - initial_message: typing.Optional[str]. + initial_message : typing.Optional[str] - - prompt: typing.Optional[str]. + prompt : typing.Optional[str] - - llm_model: typing.Optional[str]. + llm_model : typing.Optional[str] - - description: typing.Optional[str]. + description : typing.Optional[str] - - avatar: typing.Optional[str]. + avatar : typing.Optional[str] - - type: typing.Optional[str]. + type : typing.Optional[str] - - metadata: typing.Optional[typing.Dict[str, typing.Any]]. + metadata : typing.Optional[typing.Dict[str, typing.Any]] - - output_schema: typing.Optional[str]. + output_schema : typing.Optional[str] - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AppModelsResponseAgent + Successful Response + + Examples + -------- + from superagent.client import Superagent + + client = Superagent( + token="YOUR_TOKEN", + ) + client.agent.update( + agent_id="string", + is_active=True, + name="string", + initial_message="string", + prompt="string", + llm_model="string", + description="string", + avatar="string", + type="string", + metadata={"string": {"key": "value"}}, + output_schema="string", + ) """ _request: typing.Dict[str, typing.Any] = {} if is_active is not OMIT: @@ -345,12 +480,14 @@ def update( if output_schema is not OMIT: _request["outputSchema"] = output_schema _response = self._client_wrapper.httpx_client.request( - "PATCH", - urllib.parse.urljoin( + method="PATCH", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{jsonable_encoder(agent_id)}" ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), json=jsonable_encoder(_request) if request_options is None or request_options.get("additional_body_parameters") is None @@ -368,14 +505,16 @@ def update( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseAgent, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseAgent, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -387,8 +526,8 @@ def invoke( agent_id: str, *, input: str, - session_id: typing.Optional[str] = OMIT, enable_streaming: bool, + session_id: typing.Optional[str] = OMIT, output_schema: typing.Optional[str] = OMIT, llm_params: typing.Optional[LlmParams] = OMIT, request_options: typing.Optional[RequestOptions] = None, @@ -396,20 +535,47 @@ def invoke( """ Invoke an agent - Parameters: - - agent_id: str. + Parameters + ---------- + agent_id : str + + input : str - - input: str. + enable_streaming : bool - - session_id: typing.Optional[str]. + session_id : typing.Optional[str] - - enable_streaming: bool. + output_schema : typing.Optional[str] - - output_schema: typing.Optional[str]. + llm_params : typing.Optional[LlmParams] - - llm_params: typing.Optional[LlmParams]. + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ Returns + ------- + AppModelsResponseAgentInvoke + Successful Response + + Examples + -------- + from superagent import LlmParams + from superagent.client import Superagent + + client = Superagent( + token="YOUR_TOKEN", + ) + client.agent.invoke( + agent_id="string", + input="string", + session_id="string", + enable_streaming=True, + output_schema="string", + llm_params=LlmParams( + max_tokens=1, + temperature=1.1, + ), + ) """ _request: typing.Dict[str, typing.Any] = {"input": input, "enableStreaming": enable_streaming} if session_id is not OMIT: @@ -419,12 +585,14 @@ def invoke( if llm_params is not OMIT: _request["llm_params"] = llm_params _response = self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin( + method="POST", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{jsonable_encoder(agent_id)}/invoke" ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), json=jsonable_encoder(_request) if request_options is None or request_options.get("additional_body_parameters") is None @@ -442,14 +610,16 @@ def invoke( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseAgentInvoke, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseAgentInvoke, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -462,20 +632,41 @@ def add_llm( """ Add LLM to agent - Parameters: - - agent_id: str. + Parameters + ---------- + agent_id : str + + llm_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - - llm_id: str. + Returns + ------- + AppModelsResponseAgent + Successful Response - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ Examples + -------- + from superagent.client import Superagent + + client = Superagent( + token="YOUR_TOKEN", + ) + client.agent.add_llm( + agent_id="string", + llm_id="string", + ) """ _response = self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin( + method="POST", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{jsonable_encoder(agent_id)}/llms" ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), json=jsonable_encoder({"llmId": llm_id}) if request_options is None or request_options.get("additional_body_parameters") is None @@ -493,14 +684,16 @@ def add_llm( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseAgent, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseAgent, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -513,21 +706,42 @@ def remove_llm( """ Remove LLM from agent - Parameters: - - agent_id: str. + Parameters + ---------- + agent_id : str + + llm_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - - llm_id: str. + Returns + ------- + typing.Any + Successful Response - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ Examples + -------- + from superagent.client import Superagent + + client = Superagent( + token="YOUR_TOKEN", + ) + client.agent.remove_llm( + agent_id="string", + llm_id="string", + ) """ _response = self._client_wrapper.httpx_client.request( - "DELETE", - urllib.parse.urljoin( + method="DELETE", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{jsonable_encoder(agent_id)}/llms/{jsonable_encoder(llm_id)}", ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), headers=jsonable_encoder( remove_none_from_dict( @@ -539,14 +753,16 @@ def remove_llm( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -557,18 +773,38 @@ def list_tools(self, agent_id: str, *, request_options: typing.Optional[RequestO """ List agent tools - Parameters: - - agent_id: str. + Parameters + ---------- + agent_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AgentToolList + Successful Response - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ Examples + -------- + from superagent.client import Superagent + + client = Superagent( + token="YOUR_TOKEN", + ) + client.agent.list_tools( + agent_id="string", + ) """ _response = self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin( + method="GET", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{jsonable_encoder(agent_id)}/tools" ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), headers=jsonable_encoder( remove_none_from_dict( @@ -580,14 +816,16 @@ def list_tools(self, agent_id: str, *, request_options: typing.Optional[RequestO ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AgentToolList, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AgentToolList, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -600,20 +838,41 @@ def add_tool( """ Add tool to agent - Parameters: - - agent_id: str. + Parameters + ---------- + agent_id : str + + tool_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - - tool_id: str. + Returns + ------- + AppModelsResponseAgent + Successful Response - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ Examples + -------- + from superagent.client import Superagent + + client = Superagent( + token="YOUR_TOKEN", + ) + client.agent.add_tool( + agent_id="string", + tool_id="string", + ) """ _response = self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin( + method="POST", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{jsonable_encoder(agent_id)}/tools" ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), json=jsonable_encoder({"toolId": tool_id}) if request_options is None or request_options.get("additional_body_parameters") is None @@ -631,14 +890,16 @@ def add_tool( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseAgent, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseAgent, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -651,21 +912,42 @@ def remove_tool( """ Remove tool from agent - Parameters: - - agent_id: str. + Parameters + ---------- + agent_id : str + + tool_id : str - - tool_id: str. + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ Returns + ------- + typing.Any + Successful Response + + Examples + -------- + from superagent.client import Superagent + + client = Superagent( + token="YOUR_TOKEN", + ) + client.agent.remove_tool( + agent_id="string", + tool_id="string", + ) """ _response = self._client_wrapper.httpx_client.request( - "DELETE", - urllib.parse.urljoin( + method="DELETE", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{jsonable_encoder(agent_id)}/tools/{jsonable_encoder(tool_id)}", ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), headers=jsonable_encoder( remove_none_from_dict( @@ -677,14 +959,16 @@ def remove_tool( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -697,18 +981,38 @@ def list_datasources( """ List agent datasources - Parameters: - - agent_id: str. + Parameters + ---------- + agent_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ Returns + ------- + AgentDatasosurceList + Successful Response + + Examples + -------- + from superagent.client import Superagent + + client = Superagent( + token="YOUR_TOKEN", + ) + client.agent.list_datasources( + agent_id="string", + ) """ _response = self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin( + method="GET", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{jsonable_encoder(agent_id)}/datasources" ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), headers=jsonable_encoder( remove_none_from_dict( @@ -720,14 +1024,16 @@ def list_datasources( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AgentDatasosurceList, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AgentDatasosurceList, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -740,20 +1046,41 @@ def add_datasource( """ Add datasource to agent - Parameters: - - agent_id: str. + Parameters + ---------- + agent_id : str + + datasource_id : str - - datasource_id: str. + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ Returns + ------- + AppModelsResponseAgent + Successful Response + + Examples + -------- + from superagent.client import Superagent + + client = Superagent( + token="YOUR_TOKEN", + ) + client.agent.add_datasource( + agent_id="string", + datasource_id="string", + ) """ _response = self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin( + method="POST", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{jsonable_encoder(agent_id)}/datasources" ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), json=jsonable_encoder({"datasourceId": datasource_id}) if request_options is None or request_options.get("additional_body_parameters") is None @@ -771,14 +1098,16 @@ def add_datasource( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseAgent, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseAgent, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -791,21 +1120,42 @@ def remove_datasource( """ Remove datasource from agent - Parameters: - - agent_id: str. + Parameters + ---------- + agent_id : str + + datasource_id : str - - datasource_id: str. + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ Returns + ------- + typing.Any + Successful Response + + Examples + -------- + from superagent.client import Superagent + + client = Superagent( + token="YOUR_TOKEN", + ) + client.agent.remove_datasource( + agent_id="string", + datasource_id="string", + ) """ _response = self._client_wrapper.httpx_client.request( - "DELETE", - urllib.parse.urljoin( + method="DELETE", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{jsonable_encoder(agent_id)}/datasources/{jsonable_encoder(datasource_id)}", ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), headers=jsonable_encoder( remove_none_from_dict( @@ -817,14 +1167,16 @@ def remove_datasource( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -846,27 +1198,48 @@ async def list( """ List all agents - Parameters: - - skip: typing.Optional[int]. + Parameters + ---------- + skip : typing.Optional[int] - - take: typing.Optional[int]. + take : typing.Optional[int] - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AgentList + Successful Response + + Examples + -------- + from superagent.client import AsyncSuperagent + + client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.agent.list( + skip=1, + take=1, + ) """ _response = await self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/agents"), - params=jsonable_encoder( - remove_none_from_dict( - { - "skip": skip, - "take": take, - **( - request_options.get("additional_query_parameters", {}) - if request_options is not None - else {} - ), - } + method="GET", + url=urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/agents"), + params=encode_query( + jsonable_encoder( + remove_none_from_dict( + { + "skip": skip, + "take": take, + **( + request_options.get("additional_query_parameters", {}) + if request_options is not None + else {} + ), + } + ) ) ), headers=jsonable_encoder( @@ -879,14 +1252,16 @@ async def list( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AgentList, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AgentList, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -896,8 +1271,8 @@ async def list( async def create( self, *, - is_active: typing.Optional[bool] = OMIT, name: str, + is_active: typing.Optional[bool] = OMIT, initial_message: typing.Optional[str] = OMIT, prompt: typing.Optional[str] = OMIT, llm_model: typing.Optional[str] = OMIT, @@ -913,32 +1288,71 @@ async def create( """ Create a new agent - Parameters: - - is_active: typing.Optional[bool]. + Parameters + ---------- + name : str + + is_active : typing.Optional[bool] + + initial_message : typing.Optional[str] - - name: str. + prompt : typing.Optional[str] - - initial_message: typing.Optional[str]. + llm_model : typing.Optional[str] - - prompt: typing.Optional[str]. + llm_provider : typing.Optional[LlmProvider] - - llm_model: typing.Optional[str]. + description : typing.Optional[str] - - llm_provider: typing.Optional[LlmProvider]. + avatar : typing.Optional[str] - - description: typing.Optional[str]. + type : typing.Optional[AgentType] - - avatar: typing.Optional[str]. + parameters : typing.Optional[OpenAiAssistantParameters] - - type: typing.Optional[AgentType]. + metadata : typing.Optional[typing.Dict[str, typing.Any]] - - parameters: typing.Optional[OpenAiAssistantParameters]. + output_schema : typing.Optional[str] - - metadata: typing.Optional[typing.Dict[str, typing.Any]]. + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - - output_schema: typing.Optional[str]. + Returns + ------- + AppModelsResponseAgent + Successful Response - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ Examples + -------- + from superagent import ( + AgentType, + LlmProvider, + OpenAiAssistantParameters, + OpenAiAssistantParametersToolsItem_CodeInterpreter, + ) + from superagent.client import AsyncSuperagent + + client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.agent.create( + is_active=True, + name="string", + initial_message="string", + prompt="string", + llm_model="string", + llm_provider=LlmProvider.OPENAI, + description="string", + avatar="string", + type=AgentType.SUPERAGENT, + parameters=OpenAiAssistantParameters( + metadata={"string": {"key": "value"}}, + file_ids=["string"], + tools=[OpenAiAssistantParametersToolsItem_CodeInterpreter()], + ), + metadata={"string": {"key": "value"}}, + output_schema="string", + ) """ _request: typing.Dict[str, typing.Any] = {"name": name} if is_active is not OMIT: @@ -964,10 +1378,12 @@ async def create( if output_schema is not OMIT: _request["outputSchema"] = output_schema _response = await self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/agents"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + method="POST", + url=urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/agents"), + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), json=jsonable_encoder(_request) if request_options is None or request_options.get("additional_body_parameters") is None @@ -985,14 +1401,16 @@ async def create( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseAgent, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseAgent, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -1005,18 +1423,38 @@ async def get( """ Get a single agent - Parameters: - - agent_id: str. + Parameters + ---------- + agent_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ Returns + ------- + AppModelsResponseAgent + Successful Response + + Examples + -------- + from superagent.client import AsyncSuperagent + + client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.agent.get( + agent_id="string", + ) """ _response = await self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin( + method="GET", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{jsonable_encoder(agent_id)}" ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), headers=jsonable_encoder( remove_none_from_dict( @@ -1028,14 +1466,16 @@ async def get( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseAgent, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseAgent, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -1046,18 +1486,38 @@ async def delete(self, agent_id: str, *, request_options: typing.Optional[Reques """ Delete an agent - Parameters: - - agent_id: str. + Parameters + ---------- + agent_id : str - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + typing.Any + Successful Response + + Examples + -------- + from superagent.client import AsyncSuperagent + + client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.agent.delete( + agent_id="string", + ) """ _response = await self._client_wrapper.httpx_client.request( - "DELETE", - urllib.parse.urljoin( + method="DELETE", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{jsonable_encoder(agent_id)}" ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), headers=jsonable_encoder( remove_none_from_dict( @@ -1069,14 +1529,16 @@ async def delete(self, agent_id: str, *, request_options: typing.Optional[Reques ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -1102,30 +1564,58 @@ async def update( """ Patch an agent - Parameters: - - agent_id: str. + Parameters + ---------- + agent_id : str - - is_active: typing.Optional[bool]. + is_active : typing.Optional[bool] - - name: typing.Optional[str]. + name : typing.Optional[str] - - initial_message: typing.Optional[str]. + initial_message : typing.Optional[str] - - prompt: typing.Optional[str]. + prompt : typing.Optional[str] - - llm_model: typing.Optional[str]. + llm_model : typing.Optional[str] - - description: typing.Optional[str]. + description : typing.Optional[str] - - avatar: typing.Optional[str]. + avatar : typing.Optional[str] - - type: typing.Optional[str]. + type : typing.Optional[str] - - metadata: typing.Optional[typing.Dict[str, typing.Any]]. + metadata : typing.Optional[typing.Dict[str, typing.Any]] - - output_schema: typing.Optional[str]. + output_schema : typing.Optional[str] - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AppModelsResponseAgent + Successful Response + + Examples + -------- + from superagent.client import AsyncSuperagent + + client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.agent.update( + agent_id="string", + is_active=True, + name="string", + initial_message="string", + prompt="string", + llm_model="string", + description="string", + avatar="string", + type="string", + metadata={"string": {"key": "value"}}, + output_schema="string", + ) """ _request: typing.Dict[str, typing.Any] = {} if is_active is not OMIT: @@ -1149,12 +1639,14 @@ async def update( if output_schema is not OMIT: _request["outputSchema"] = output_schema _response = await self._client_wrapper.httpx_client.request( - "PATCH", - urllib.parse.urljoin( + method="PATCH", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{jsonable_encoder(agent_id)}" ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), json=jsonable_encoder(_request) if request_options is None or request_options.get("additional_body_parameters") is None @@ -1172,14 +1664,16 @@ async def update( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseAgent, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseAgent, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -1191,8 +1685,8 @@ async def invoke( agent_id: str, *, input: str, - session_id: typing.Optional[str] = OMIT, enable_streaming: bool, + session_id: typing.Optional[str] = OMIT, output_schema: typing.Optional[str] = OMIT, llm_params: typing.Optional[LlmParams] = OMIT, request_options: typing.Optional[RequestOptions] = None, @@ -1200,20 +1694,47 @@ async def invoke( """ Invoke an agent - Parameters: - - agent_id: str. + Parameters + ---------- + agent_id : str + + input : str - - input: str. + enable_streaming : bool - - session_id: typing.Optional[str]. + session_id : typing.Optional[str] - - enable_streaming: bool. + output_schema : typing.Optional[str] - - output_schema: typing.Optional[str]. + llm_params : typing.Optional[LlmParams] - - llm_params: typing.Optional[LlmParams]. + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ Returns + ------- + AppModelsResponseAgentInvoke + Successful Response + + Examples + -------- + from superagent import LlmParams + from superagent.client import AsyncSuperagent + + client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.agent.invoke( + agent_id="string", + input="string", + session_id="string", + enable_streaming=True, + output_schema="string", + llm_params=LlmParams( + max_tokens=1, + temperature=1.1, + ), + ) """ _request: typing.Dict[str, typing.Any] = {"input": input, "enableStreaming": enable_streaming} if session_id is not OMIT: @@ -1223,12 +1744,14 @@ async def invoke( if llm_params is not OMIT: _request["llm_params"] = llm_params _response = await self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin( + method="POST", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{jsonable_encoder(agent_id)}/invoke" ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), json=jsonable_encoder(_request) if request_options is None or request_options.get("additional_body_parameters") is None @@ -1246,14 +1769,16 @@ async def invoke( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseAgentInvoke, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseAgentInvoke, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -1266,20 +1791,41 @@ async def add_llm( """ Add LLM to agent - Parameters: - - agent_id: str. + Parameters + ---------- + agent_id : str + + llm_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - - llm_id: str. + Returns + ------- + AppModelsResponseAgent + Successful Response - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ Examples + -------- + from superagent.client import AsyncSuperagent + + client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.agent.add_llm( + agent_id="string", + llm_id="string", + ) """ _response = await self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin( + method="POST", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{jsonable_encoder(agent_id)}/llms" ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), json=jsonable_encoder({"llmId": llm_id}) if request_options is None or request_options.get("additional_body_parameters") is None @@ -1297,14 +1843,16 @@ async def add_llm( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseAgent, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseAgent, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -1317,21 +1865,42 @@ async def remove_llm( """ Remove LLM from agent - Parameters: - - agent_id: str. + Parameters + ---------- + agent_id : str + + llm_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - - llm_id: str. + Returns + ------- + typing.Any + Successful Response - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ Examples + -------- + from superagent.client import AsyncSuperagent + + client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.agent.remove_llm( + agent_id="string", + llm_id="string", + ) """ _response = await self._client_wrapper.httpx_client.request( - "DELETE", - urllib.parse.urljoin( + method="DELETE", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{jsonable_encoder(agent_id)}/llms/{jsonable_encoder(llm_id)}", ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), headers=jsonable_encoder( remove_none_from_dict( @@ -1343,14 +1912,16 @@ async def remove_llm( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -1363,18 +1934,38 @@ async def list_tools( """ List agent tools - Parameters: - - agent_id: str. + Parameters + ---------- + agent_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AgentToolList + Successful Response - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ Examples + -------- + from superagent.client import AsyncSuperagent + + client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.agent.list_tools( + agent_id="string", + ) """ _response = await self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin( + method="GET", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{jsonable_encoder(agent_id)}/tools" ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), headers=jsonable_encoder( remove_none_from_dict( @@ -1386,14 +1977,16 @@ async def list_tools( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AgentToolList, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AgentToolList, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -1406,20 +1999,41 @@ async def add_tool( """ Add tool to agent - Parameters: - - agent_id: str. + Parameters + ---------- + agent_id : str + + tool_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - - tool_id: str. + Returns + ------- + AppModelsResponseAgent + Successful Response - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ Examples + -------- + from superagent.client import AsyncSuperagent + + client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.agent.add_tool( + agent_id="string", + tool_id="string", + ) """ _response = await self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin( + method="POST", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{jsonable_encoder(agent_id)}/tools" ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), json=jsonable_encoder({"toolId": tool_id}) if request_options is None or request_options.get("additional_body_parameters") is None @@ -1437,14 +2051,16 @@ async def add_tool( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseAgent, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseAgent, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -1457,21 +2073,42 @@ async def remove_tool( """ Remove tool from agent - Parameters: - - agent_id: str. + Parameters + ---------- + agent_id : str + + tool_id : str - - tool_id: str. + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ Returns + ------- + typing.Any + Successful Response + + Examples + -------- + from superagent.client import AsyncSuperagent + + client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.agent.remove_tool( + agent_id="string", + tool_id="string", + ) """ _response = await self._client_wrapper.httpx_client.request( - "DELETE", - urllib.parse.urljoin( + method="DELETE", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{jsonable_encoder(agent_id)}/tools/{jsonable_encoder(tool_id)}", ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), headers=jsonable_encoder( remove_none_from_dict( @@ -1483,14 +2120,16 @@ async def remove_tool( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -1503,18 +2142,38 @@ async def list_datasources( """ List agent datasources - Parameters: - - agent_id: str. + Parameters + ---------- + agent_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ Returns + ------- + AgentDatasosurceList + Successful Response + + Examples + -------- + from superagent.client import AsyncSuperagent + + client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.agent.list_datasources( + agent_id="string", + ) """ _response = await self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin( + method="GET", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{jsonable_encoder(agent_id)}/datasources" ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), headers=jsonable_encoder( remove_none_from_dict( @@ -1526,14 +2185,16 @@ async def list_datasources( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AgentDatasosurceList, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AgentDatasosurceList, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -1546,20 +2207,41 @@ async def add_datasource( """ Add datasource to agent - Parameters: - - agent_id: str. + Parameters + ---------- + agent_id : str + + datasource_id : str - - datasource_id: str. + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ Returns + ------- + AppModelsResponseAgent + Successful Response + + Examples + -------- + from superagent.client import AsyncSuperagent + + client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.agent.add_datasource( + agent_id="string", + datasource_id="string", + ) """ _response = await self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin( + method="POST", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{jsonable_encoder(agent_id)}/datasources" ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), json=jsonable_encoder({"datasourceId": datasource_id}) if request_options is None or request_options.get("additional_body_parameters") is None @@ -1577,14 +2259,16 @@ async def add_datasource( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseAgent, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseAgent, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -1597,21 +2281,42 @@ async def remove_datasource( """ Remove datasource from agent - Parameters: - - agent_id: str. + Parameters + ---------- + agent_id : str + + datasource_id : str - - datasource_id: str. + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ Returns + ------- + typing.Any + Successful Response + + Examples + -------- + from superagent.client import AsyncSuperagent + + client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.agent.remove_datasource( + agent_id="string", + datasource_id="string", + ) """ _response = await self._client_wrapper.httpx_client.request( - "DELETE", - urllib.parse.urljoin( + method="DELETE", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{jsonable_encoder(agent_id)}/datasources/{jsonable_encoder(datasource_id)}", ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), headers=jsonable_encoder( remove_none_from_dict( @@ -1623,14 +2328,16 @@ async def remove_datasource( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: diff --git a/src/superagent/resources/api_key/client.py b/src/superagent/resources/api_key/client.py index 75410e7..c4c7a2a 100644 --- a/src/superagent/resources/api_key/client.py +++ b/src/superagent/resources/api_key/client.py @@ -7,6 +7,8 @@ from ...core.api_error import ApiError from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import pydantic_v1 +from ...core.query_encoder import encode_query from ...core.remove_none_from_dict import remove_none_from_dict from ...core.request_options import RequestOptions from ...errors.unprocessable_entity_error import UnprocessableEntityError @@ -16,11 +18,6 @@ from ...types.app_models_response_api_key import AppModelsResponseApiKey from ...types.http_validation_error import HttpValidationError -try: - import pydantic.v1 as pydantic # type: ignore -except ImportError: - import pydantic # type: ignore - # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -33,14 +30,32 @@ def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> Ap """ List API keys - Parameters: - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + ApiKeyList + Successful Response + + Examples + -------- + from superagent.client import Superagent + + client = Superagent( + token="YOUR_TOKEN", + ) + client.api_key.list() """ _response = self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/api-keys"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + method="GET", + url=urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/api-keys"), + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), headers=jsonable_encoder( remove_none_from_dict( @@ -52,12 +67,12 @@ def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> Ap ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(ApiKeyList, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(ApiKeyList, _response.json()) # type: ignore try: _response_json = _response.json() except JSONDecodeError: @@ -70,16 +85,39 @@ def create( """ Create a new API key - Parameters: - - request: AppModelsRequestApiKey. + Parameters + ---------- + request : AppModelsRequestApiKey + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ Returns + ------- + ApiKeyCreate + Successful Response + + Examples + -------- + from superagent import AppModelsRequestApiKey + from superagent.client import Superagent + + client = Superagent( + token="YOUR_TOKEN", + ) + client.api_key.create( + request=AppModelsRequestApiKey( + name="string", + ), + ) """ _response = self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/api-keys"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + method="POST", + url=urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/api-keys"), + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), json=jsonable_encoder(request) if request_options is None or request_options.get("additional_body_parameters") is None @@ -97,14 +135,16 @@ def create( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(ApiKeyCreate, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(ApiKeyCreate, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -115,16 +155,38 @@ def delete(self, id: str, *, request_options: typing.Optional[RequestOptions] = """ Delete an API key - Parameters: - - id: str. + Parameters + ---------- + id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ Returns + ------- + AppModelsResponseApiKey + Successful Response + + Examples + -------- + from superagent.client import Superagent + + client = Superagent( + token="YOUR_TOKEN", + ) + client.api_key.delete( + id="string", + ) """ _response = self._client_wrapper.httpx_client.request( - "DELETE", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/api-keys/{jsonable_encoder(id)}"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + method="DELETE", + url=urllib.parse.urljoin( + f"{self._client_wrapper.get_base_url()}/", f"api/v1/api-keys/{jsonable_encoder(id)}" + ), + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), headers=jsonable_encoder( remove_none_from_dict( @@ -136,14 +198,16 @@ def delete(self, id: str, *, request_options: typing.Optional[RequestOptions] = ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseApiKey, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseApiKey, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -156,18 +220,44 @@ def update( """ Update an API key - Parameters: - - id: str. + Parameters + ---------- + id : str + + request : AppModelsRequestApiKey - - request: AppModelsRequestApiKey. + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ Returns + ------- + AppModelsResponseApiKey + Successful Response + + Examples + -------- + from superagent import AppModelsRequestApiKey + from superagent.client import Superagent + + client = Superagent( + token="YOUR_TOKEN", + ) + client.api_key.update( + id="string", + request=AppModelsRequestApiKey( + name="string", + ), + ) """ _response = self._client_wrapper.httpx_client.request( - "PATCH", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/api-keys/{jsonable_encoder(id)}"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + method="PATCH", + url=urllib.parse.urljoin( + f"{self._client_wrapper.get_base_url()}/", f"api/v1/api-keys/{jsonable_encoder(id)}" + ), + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), json=jsonable_encoder(request) if request_options is None or request_options.get("additional_body_parameters") is None @@ -185,14 +275,16 @@ def update( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseApiKey, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseApiKey, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -208,14 +300,32 @@ async def list(self, *, request_options: typing.Optional[RequestOptions] = None) """ List API keys - Parameters: - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + ApiKeyList + Successful Response + + Examples + -------- + from superagent.client import AsyncSuperagent + + client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.api_key.list() """ _response = await self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/api-keys"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + method="GET", + url=urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/api-keys"), + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), headers=jsonable_encoder( remove_none_from_dict( @@ -227,12 +337,12 @@ async def list(self, *, request_options: typing.Optional[RequestOptions] = None) ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(ApiKeyList, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(ApiKeyList, _response.json()) # type: ignore try: _response_json = _response.json() except JSONDecodeError: @@ -245,16 +355,39 @@ async def create( """ Create a new API key - Parameters: - - request: AppModelsRequestApiKey. + Parameters + ---------- + request : AppModelsRequestApiKey + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + ApiKeyCreate + Successful Response - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ Examples + -------- + from superagent import AppModelsRequestApiKey + from superagent.client import AsyncSuperagent + + client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.api_key.create( + request=AppModelsRequestApiKey( + name="string", + ), + ) """ _response = await self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/api-keys"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + method="POST", + url=urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/api-keys"), + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), json=jsonable_encoder(request) if request_options is None or request_options.get("additional_body_parameters") is None @@ -272,14 +405,16 @@ async def create( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(ApiKeyCreate, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(ApiKeyCreate, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -292,16 +427,38 @@ async def delete( """ Delete an API key - Parameters: - - id: str. + Parameters + ---------- + id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AppModelsResponseApiKey + Successful Response - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ Examples + -------- + from superagent.client import AsyncSuperagent + + client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.api_key.delete( + id="string", + ) """ _response = await self._client_wrapper.httpx_client.request( - "DELETE", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/api-keys/{jsonable_encoder(id)}"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + method="DELETE", + url=urllib.parse.urljoin( + f"{self._client_wrapper.get_base_url()}/", f"api/v1/api-keys/{jsonable_encoder(id)}" + ), + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), headers=jsonable_encoder( remove_none_from_dict( @@ -313,14 +470,16 @@ async def delete( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseApiKey, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseApiKey, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -333,18 +492,44 @@ async def update( """ Update an API key - Parameters: - - id: str. + Parameters + ---------- + id : str + + request : AppModelsRequestApiKey + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - - request: AppModelsRequestApiKey. + Returns + ------- + AppModelsResponseApiKey + Successful Response - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ Examples + -------- + from superagent import AppModelsRequestApiKey + from superagent.client import AsyncSuperagent + + client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.api_key.update( + id="string", + request=AppModelsRequestApiKey( + name="string", + ), + ) """ _response = await self._client_wrapper.httpx_client.request( - "PATCH", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/api-keys/{jsonable_encoder(id)}"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + method="PATCH", + url=urllib.parse.urljoin( + f"{self._client_wrapper.get_base_url()}/", f"api/v1/api-keys/{jsonable_encoder(id)}" + ), + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), json=jsonable_encoder(request) if request_options is None or request_options.get("additional_body_parameters") is None @@ -362,14 +547,16 @@ async def update( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseApiKey, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseApiKey, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: diff --git a/src/superagent/resources/api_user/client.py b/src/superagent/resources/api_user/client.py index 1ececaa..8db06e1 100644 --- a/src/superagent/resources/api_user/client.py +++ b/src/superagent/resources/api_user/client.py @@ -7,6 +7,8 @@ from ...core.api_error import ApiError from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import pydantic_v1 +from ...core.query_encoder import encode_query from ...core.remove_none_from_dict import remove_none_from_dict from ...core.request_options import RequestOptions from ...errors.unprocessable_entity_error import UnprocessableEntityError @@ -14,11 +16,6 @@ from ...types.app_models_response_api_user import AppModelsResponseApiUser from ...types.http_validation_error import HttpValidationError -try: - import pydantic.v1 as pydantic # type: ignore -except ImportError: - import pydantic # type: ignore - # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -33,16 +30,43 @@ def create( """ Create a new API user - Parameters: - - request: AppModelsRequestApiUser. + Parameters + ---------- + request : AppModelsRequestApiUser + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AppModelsResponseApiUser + Successful Response + + Examples + -------- + from superagent import AppModelsRequestApiUser + from superagent.client import Superagent - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ client = Superagent( + token="YOUR_TOKEN", + ) + client.api_user.create( + request=AppModelsRequestApiUser( + email="string", + first_name="string", + last_name="string", + company="string", + anonymous_id="string", + ), + ) """ _response = self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/api-users"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + method="POST", + url=urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/api-users"), + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), json=jsonable_encoder(request) if request_options is None or request_options.get("additional_body_parameters") is None @@ -60,14 +84,16 @@ def create( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseApiUser, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseApiUser, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -78,14 +104,32 @@ def get(self, *, request_options: typing.Optional[RequestOptions] = None) -> App """ Get a single api user - Parameters: - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AppModelsResponseApiUser + Successful Response + + Examples + -------- + from superagent.client import Superagent + + client = Superagent( + token="YOUR_TOKEN", + ) + client.api_user.get() """ _response = self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/api-users/me"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + method="GET", + url=urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/api-users/me"), + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), headers=jsonable_encoder( remove_none_from_dict( @@ -97,12 +141,12 @@ def get(self, *, request_options: typing.Optional[RequestOptions] = None) -> App ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseApiUser, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseApiUser, _response.json()) # type: ignore try: _response_json = _response.json() except JSONDecodeError: @@ -113,14 +157,32 @@ def delete(self, *, request_options: typing.Optional[RequestOptions] = None) -> """ Delete an api user - Parameters: - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + typing.Any + Successful Response + + Examples + -------- + from superagent.client import Superagent + + client = Superagent( + token="YOUR_TOKEN", + ) + client.api_user.delete() """ _response = self._client_wrapper.httpx_client.request( - "DELETE", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/api-users/me"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + method="DELETE", + url=urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/api-users/me"), + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), headers=jsonable_encoder( remove_none_from_dict( @@ -132,12 +194,12 @@ def delete(self, *, request_options: typing.Optional[RequestOptions] = None) -> ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore try: _response_json = _response.json() except JSONDecodeError: @@ -150,16 +212,43 @@ def indentify( """ Indentify an api user - Parameters: - - request: AppModelsRequestApiUser. + Parameters + ---------- + request : AppModelsRequestApiUser + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
- - request_options: typing.Optional[RequestOptions]. Request-specific configuration. + Returns + ------- + typing.Any + Successful Response + + Examples + -------- + from superagent import AppModelsRequestApiUser + from superagent.client import Superagent + + client = Superagent( + token="YOUR_TOKEN", + ) + client.api_user.indentify( + request=AppModelsRequestApiUser( + email="string", + first_name="string", + last_name="string", + company="string", + anonymous_id="string", + ), + ) """ _response = self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/api-users/identify"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + method="POST", + url=urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/api-users/identify"), + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), json=jsonable_encoder(request) if request_options is None or request_options.get("additional_body_parameters") is None @@ -177,14 +266,16 @@ def indentify( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -202,16 +293,43 @@ async def create( """ Create a new API user - Parameters: - - request: AppModelsRequestApiUser. + Parameters + ---------- + request : AppModelsRequestApiUser - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AppModelsResponseApiUser + Successful Response + + Examples + -------- + from superagent import AppModelsRequestApiUser + from superagent.client import AsyncSuperagent + + client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.api_user.create( + request=AppModelsRequestApiUser( + email="string", + first_name="string", + last_name="string", + company="string", + anonymous_id="string", + ), + ) """ _response = await self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/api-users"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + method="POST", + url=urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/api-users"), + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), json=jsonable_encoder(request) if request_options is None or request_options.get("additional_body_parameters") is None @@ -229,14 +347,16 @@ async def create( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseApiUser, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseApiUser, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -247,14 +367,32 @@ async def get(self, *, request_options: typing.Optional[RequestOptions] = None) """ Get a single api user - Parameters: - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AppModelsResponseApiUser + Successful Response + + Examples + -------- + from superagent.client import AsyncSuperagent + + client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.api_user.get() """ _response = await self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/api-users/me"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + method="GET", + url=urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/api-users/me"), + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), headers=jsonable_encoder( remove_none_from_dict( @@ -266,12 +404,12 @@ async def get(self, *, request_options: typing.Optional[RequestOptions] = None) ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseApiUser, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseApiUser, _response.json()) # type: ignore try: _response_json = _response.json() except JSONDecodeError: @@ -282,14 +420,32 @@ async def delete(self, *, request_options: typing.Optional[RequestOptions] = Non """ Delete an api user - Parameters: - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + typing.Any + Successful Response + + Examples + -------- + from superagent.client import AsyncSuperagent + + client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.api_user.delete() """ _response = await self._client_wrapper.httpx_client.request( - "DELETE", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/api-users/me"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + method="DELETE", + url=urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/api-users/me"), + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), headers=jsonable_encoder( remove_none_from_dict( @@ -301,12 +457,12 @@ async def delete(self, *, request_options: typing.Optional[RequestOptions] = Non ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore try: _response_json = _response.json() except JSONDecodeError: @@ -319,16 +475,43 @@ async def indentify( """ Indentify an api user - Parameters: - - request: AppModelsRequestApiUser. 
+ Parameters + ---------- + request : AppModelsRequestApiUser + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. + Returns + ------- + typing.Any + Successful Response + + Examples + -------- + from superagent import AppModelsRequestApiUser + from superagent.client import AsyncSuperagent + + client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.api_user.indentify( + request=AppModelsRequestApiUser( + email="string", + first_name="string", + last_name="string", + company="string", + anonymous_id="string", + ), + ) """ _response = await self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/api-users/identify"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + method="POST", + url=urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/api-users/identify"), + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), json=jsonable_encoder(request) if request_options is None or request_options.get("additional_body_parameters") is None @@ -346,14 +529,16 @@ async def indentify( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: diff --git a/src/superagent/resources/datasource/client.py b/src/superagent/resources/datasource/client.py index f13a498..58d969a 100644 --- a/src/superagent/resources/datasource/client.py +++ b/src/superagent/resources/datasource/client.py @@ -7,6 +7,8 @@ from ...core.api_error import ApiError from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import pydantic_v1 +from ...core.query_encoder import encode_query from ...core.remove_none_from_dict import remove_none_from_dict from ...core.request_options import RequestOptions from ...errors.unprocessable_entity_error import UnprocessableEntityError @@ -15,11 +17,6 @@ from ...types.datasource_list import DatasourceList from ...types.http_validation_error import HttpValidationError -try: - import pydantic.v1 as pydantic # type: ignore -except ImportError: - import pydantic # type: ignore - # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -38,27 +35,48 @@ def list( """ List all datasources - Parameters: - - skip: typing.Optional[int]. + Parameters + ---------- + skip : typing.Optional[int] + + take : typing.Optional[int] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
- - take: typing.Optional[int]. + Returns + ------- + DatasourceList + Successful Response - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. + Examples + -------- + from superagent.client import Superagent + + client = Superagent( + token="YOUR_TOKEN", + ) + client.datasource.list( + skip=1, + take=1, + ) """ _response = self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/datasources"), - params=jsonable_encoder( - remove_none_from_dict( - { - "skip": skip, - "take": take, - **( - request_options.get("additional_query_parameters", {}) - if request_options is not None - else {} - ), - } + method="GET", + url=urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/datasources"), + params=encode_query( + jsonable_encoder( + remove_none_from_dict( + { + "skip": skip, + "take": take, + **( + request_options.get("additional_query_parameters", {}) + if request_options is not None + else {} + ), + } + ) ) ), headers=jsonable_encoder( @@ -71,14 +89,16 @@ def list( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(DatasourceList, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(DatasourceList, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -91,16 +111,46 @@ def create( """ Create a new datasource - Parameters: - - request: AppModelsRequestDatasource. + Parameters + ---------- + request : AppModelsRequestDatasource - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AppModelsResponseDatasource + Successful Response + + Examples + -------- + from superagent import AppModelsRequestDatasource, EmbeddingsModelProvider + from superagent.client import Superagent + + client = Superagent( + token="YOUR_TOKEN", + ) + client.datasource.create( + request=AppModelsRequestDatasource( + name="string", + description="string", + type="string", + content="string", + url="string", + metadata={"string": {"key": "value"}}, + vector_db_id="string", + embeddings_model_provider=EmbeddingsModelProvider.OPENAI, + ), + ) """ _response = self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/datasources"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + method="POST", + url=urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/datasources"), + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), json=jsonable_encoder(request) if request_options is None or request_options.get("additional_body_parameters") is None @@ -118,14 +168,16 @@ def create( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseDatasource, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseDatasource, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -138,18 +190,38 @@ def get( """ Get a specific datasource - Parameters: - - datasource_id: str. + Parameters + ---------- + datasource_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AppModelsResponseDatasource + Successful Response - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ Examples + -------- + from superagent.client import Superagent + + client = Superagent( + token="YOUR_TOKEN", + ) + client.datasource.get( + datasource_id="string", + ) """ _response = self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin( + method="GET", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/datasources/{jsonable_encoder(datasource_id)}" ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), headers=jsonable_encoder( remove_none_from_dict( @@ -161,14 +233,16 @@ def get( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseDatasource, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseDatasource, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -179,18 +253,38 @@ def delete(self, datasource_id: str, *, request_options: typing.Optional[Request """ Delete a specific datasource - Parameters: - - datasource_id: str. + Parameters + ---------- + datasource_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ Returns + ------- + typing.Any + Successful Response + + Examples + -------- + from superagent.client import Superagent + + client = Superagent( + token="YOUR_TOKEN", + ) + client.datasource.delete( + datasource_id="string", + ) """ _response = self._client_wrapper.httpx_client.request( - "DELETE", - urllib.parse.urljoin( + method="DELETE", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/datasources/{jsonable_encoder(datasource_id)}" ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), headers=jsonable_encoder( remove_none_from_dict( @@ -202,14 +296,16 @@ def delete(self, datasource_id: str, *, request_options: typing.Optional[Request ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -226,20 +322,51 @@ def update( """ Update a specific datasource - Parameters: - - datasource_id: str. + Parameters + ---------- + datasource_id : str + + request : AppModelsRequestDatasource + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AppModelsResponseDatasource + Successful Response - - request: AppModelsRequestDatasource. + Examples + -------- + from superagent import AppModelsRequestDatasource, EmbeddingsModelProvider + from superagent.client import Superagent - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ client = Superagent( + token="YOUR_TOKEN", + ) + client.datasource.update( + datasource_id="string", + request=AppModelsRequestDatasource( + name="string", + description="string", + type="string", + content="string", + url="string", + metadata={"string": {"key": "value"}}, + vector_db_id="string", + embeddings_model_provider=EmbeddingsModelProvider.OPENAI, + ), + ) """ _response = self._client_wrapper.httpx_client.request( - "PATCH", - urllib.parse.urljoin( + method="PATCH", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/datasources/{jsonable_encoder(datasource_id)}" ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), json=jsonable_encoder(request) if request_options is None or request_options.get("additional_body_parameters") is None @@ -257,14 +384,16 @@ def update( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseDatasource, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseDatasource, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -286,27 +415,48 @@ async def list( """ List all datasources - Parameters: - - skip: typing.Optional[int]. + Parameters + ---------- + skip : typing.Optional[int] - - take: typing.Optional[int]. + take : typing.Optional[int] - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + DatasourceList + Successful Response + + Examples + -------- + from superagent.client import AsyncSuperagent + + client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.datasource.list( + skip=1, + take=1, + ) """ _response = await self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/datasources"), - params=jsonable_encoder( - remove_none_from_dict( - { - "skip": skip, - "take": take, - **( - request_options.get("additional_query_parameters", {}) - if request_options is not None - else {} - ), - } + method="GET", + url=urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/datasources"), + params=encode_query( + jsonable_encoder( + remove_none_from_dict( + { + "skip": skip, + "take": take, + **( + request_options.get("additional_query_parameters", {}) + if request_options is not None + else {} + ), + } + ) ) ), headers=jsonable_encoder( @@ -319,14 +469,16 @@ async def list( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(DatasourceList, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(DatasourceList, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -339,16 +491,46 @@ async def create( """ Create a new datasource - Parameters: - - request: AppModelsRequestDatasource. + Parameters + ---------- + request : AppModelsRequestDatasource + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ Returns + ------- + AppModelsResponseDatasource + Successful Response + + Examples + -------- + from superagent import AppModelsRequestDatasource, EmbeddingsModelProvider + from superagent.client import AsyncSuperagent + + client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.datasource.create( + request=AppModelsRequestDatasource( + name="string", + description="string", + type="string", + content="string", + url="string", + metadata={"string": {"key": "value"}}, + vector_db_id="string", + embeddings_model_provider=EmbeddingsModelProvider.OPENAI, + ), + ) """ _response = await self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/datasources"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + method="POST", + url=urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/datasources"), + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), json=jsonable_encoder(request) if request_options is None or request_options.get("additional_body_parameters") is None @@ -366,14 +548,16 @@ async def create( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseDatasource, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseDatasource, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -386,18 +570,38 @@ async def get( """ Get a specific datasource - Parameters: - - datasource_id: str. + Parameters + ---------- + datasource_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AppModelsResponseDatasource + Successful Response - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ Examples + -------- + from superagent.client import AsyncSuperagent + + client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.datasource.get( + datasource_id="string", + ) """ _response = await self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin( + method="GET", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/datasources/{jsonable_encoder(datasource_id)}" ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), headers=jsonable_encoder( remove_none_from_dict( @@ -409,14 +613,16 @@ async def get( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseDatasource, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseDatasource, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -429,18 +635,38 @@ async def delete( """ Delete a specific datasource - Parameters: - - datasource_id: str. + Parameters + ---------- + datasource_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ Returns + ------- + typing.Any + Successful Response + + Examples + -------- + from superagent.client import AsyncSuperagent + + client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.datasource.delete( + datasource_id="string", + ) """ _response = await self._client_wrapper.httpx_client.request( - "DELETE", - urllib.parse.urljoin( + method="DELETE", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/datasources/{jsonable_encoder(datasource_id)}" ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), headers=jsonable_encoder( remove_none_from_dict( @@ -452,14 +678,16 @@ async def delete( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -476,20 +704,51 @@ async def update( """ Update a specific datasource - Parameters: - - datasource_id: str. + Parameters + ---------- + datasource_id : str + + request : AppModelsRequestDatasource + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AppModelsResponseDatasource + Successful Response - - request: AppModelsRequestDatasource. + Examples + -------- + from superagent import AppModelsRequestDatasource, EmbeddingsModelProvider + from superagent.client import AsyncSuperagent - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.datasource.update( + datasource_id="string", + request=AppModelsRequestDatasource( + name="string", + description="string", + type="string", + content="string", + url="string", + metadata={"string": {"key": "value"}}, + vector_db_id="string", + embeddings_model_provider=EmbeddingsModelProvider.OPENAI, + ), + ) """ _response = await self._client_wrapper.httpx_client.request( - "PATCH", - urllib.parse.urljoin( + method="PATCH", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/datasources/{jsonable_encoder(datasource_id)}" ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), json=jsonable_encoder(request) if request_options is None or request_options.get("additional_body_parameters") is None @@ -507,14 +766,16 @@ async def update( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseDatasource, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseDatasource, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: diff --git a/src/superagent/resources/llm/client.py b/src/superagent/resources/llm/client.py index a460274..44eb370 100644 --- a/src/superagent/resources/llm/client.py +++ b/src/superagent/resources/llm/client.py @@ -7,6 +7,8 @@ from ...core.api_error import ApiError from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import pydantic_v1 +from ...core.query_encoder import encode_query from ...core.remove_none_from_dict import remove_none_from_dict from ...core.request_options import RequestOptions from ...errors.unprocessable_entity_error import UnprocessableEntityError @@ -15,11 +17,6 @@ from ...types.http_validation_error import HttpValidationError from ...types.llm_list import LlmList -try: - import pydantic.v1 as pydantic # type: ignore -except ImportError: - import pydantic # type: ignore - # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -32,14 +29,32 @@ def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> Ll """ List all LLMs - Parameters: - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + LlmList + Successful Response + + Examples + -------- + from superagent.client import Superagent + + client = Superagent( + token="YOUR_TOKEN", + ) + client.llm.list() """ _response = self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/llms"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + method="GET", + url=urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/llms"), + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), headers=jsonable_encoder( remove_none_from_dict( @@ -51,12 +66,12 @@ def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> Ll ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(LlmList, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(LlmList, _response.json()) # type: ignore try: _response_json = _response.json() except JSONDecodeError: @@ -69,16 +84,41 @@ def create( """ Create a new LLM - Parameters: - - request: AppModelsRequestLlm. + Parameters + ---------- + request : AppModelsRequestLlm + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. + Returns + ------- + AppModelsResponseLlm + Successful Response + + Examples + -------- + from superagent import AppModelsRequestLlm + from superagent.client import Superagent + + client = Superagent( + token="YOUR_TOKEN", + ) + client.llm.create( + request=AppModelsRequestLlm( + provider="string", + api_key="string", + options={"string": {"key": "value"}}, + ), + ) """ _response = self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/llms"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + method="POST", + url=urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/llms"), + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), json=jsonable_encoder(request) if request_options is None or request_options.get("additional_body_parameters") is None @@ -96,14 +136,16 @@ def create( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseLlm, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseLlm, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, 
_response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -114,16 +156,38 @@ def get(self, llm_id: str, *, request_options: typing.Optional[RequestOptions] = """ Get a single LLM - Parameters: - - llm_id: str. + Parameters + ---------- + llm_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. + Returns + ------- + AppModelsResponseLlm + Successful Response + + Examples + -------- + from superagent.client import Superagent + + client = Superagent( + token="YOUR_TOKEN", + ) + client.llm.get( + llm_id="string", + ) """ _response = self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/llms/{jsonable_encoder(llm_id)}"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + method="GET", + url=urllib.parse.urljoin( + f"{self._client_wrapper.get_base_url()}/", f"api/v1/llms/{jsonable_encoder(llm_id)}" + ), + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), headers=jsonable_encoder( remove_none_from_dict( @@ -135,14 +199,16 @@ def get(self, llm_id: str, *, request_options: typing.Optional[RequestOptions] = ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseLlm, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseLlm, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -155,18 +221,46 @@ def update( """ Patch an LLM - Parameters: - - llm_id: str. + Parameters + ---------- + llm_id : str + + request : AppModelsRequestLlm - - request: AppModelsRequestLlm. + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ Returns + ------- + AppModelsResponseLlm + Successful Response + + Examples + -------- + from superagent import AppModelsRequestLlm + from superagent.client import Superagent + + client = Superagent( + token="YOUR_TOKEN", + ) + client.llm.update( + llm_id="string", + request=AppModelsRequestLlm( + provider="string", + api_key="string", + options={"string": {"key": "value"}}, + ), + ) """ _response = self._client_wrapper.httpx_client.request( - "PATCH", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/llms/{jsonable_encoder(llm_id)}"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + method="PATCH", + url=urllib.parse.urljoin( + f"{self._client_wrapper.get_base_url()}/", f"api/v1/llms/{jsonable_encoder(llm_id)}" + ), + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), json=jsonable_encoder(request) if request_options is None or request_options.get("additional_body_parameters") is None @@ -184,14 +278,16 @@ def update( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseLlm, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseLlm, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -207,14 +303,32 @@ async def list(self, *, request_options: typing.Optional[RequestOptions] = None) """ List all LLMs - Parameters: - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + LlmList + Successful Response + + Examples + -------- + from superagent.client import AsyncSuperagent + + client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.llm.list() """ _response = await self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/llms"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + method="GET", + url=urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/llms"), + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), headers=jsonable_encoder( remove_none_from_dict( @@ -226,12 +340,12 @@ async def list(self, *, request_options: typing.Optional[RequestOptions] = None) ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(LlmList, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(LlmList, _response.json()) # type: ignore try: _response_json = _response.json() except JSONDecodeError: @@ -244,16 +358,41 @@ async def create( """ Create a new LLM - Parameters: - - request: AppModelsRequestLlm. + Parameters + ---------- + request : AppModelsRequestLlm + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AppModelsResponseLlm + Successful Response - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ Examples + -------- + from superagent import AppModelsRequestLlm + from superagent.client import AsyncSuperagent + + client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.llm.create( + request=AppModelsRequestLlm( + provider="string", + api_key="string", + options={"string": {"key": "value"}}, + ), + ) """ _response = await self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/llms"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + method="POST", + url=urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/llms"), + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), json=jsonable_encoder(request) if request_options is None or request_options.get("additional_body_parameters") is None @@ -271,14 +410,16 @@ async def create( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseLlm, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseLlm, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -291,16 +432,38 @@ async def get( """ Get a single LLM - Parameters: - - llm_id: str. + Parameters + ---------- + llm_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AppModelsResponseLlm + Successful Response - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ Examples + -------- + from superagent.client import AsyncSuperagent + + client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.llm.get( + llm_id="string", + ) """ _response = await self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/llms/{jsonable_encoder(llm_id)}"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + method="GET", + url=urllib.parse.urljoin( + f"{self._client_wrapper.get_base_url()}/", f"api/v1/llms/{jsonable_encoder(llm_id)}" + ), + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), headers=jsonable_encoder( remove_none_from_dict( @@ -312,14 +475,16 @@ async def get( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseLlm, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseLlm, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -332,18 +497,46 @@ async def update( """ Patch an LLM - Parameters: - - llm_id: str. + Parameters + ---------- + llm_id : str + + request : AppModelsRequestLlm + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - - request: AppModelsRequestLlm. + Returns + ------- + AppModelsResponseLlm + Successful Response - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ Examples + -------- + from superagent import AppModelsRequestLlm + from superagent.client import AsyncSuperagent + + client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.llm.update( + llm_id="string", + request=AppModelsRequestLlm( + provider="string", + api_key="string", + options={"string": {"key": "value"}}, + ), + ) """ _response = await self._client_wrapper.httpx_client.request( - "PATCH", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/llms/{jsonable_encoder(llm_id)}"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + method="PATCH", + url=urllib.parse.urljoin( + f"{self._client_wrapper.get_base_url()}/", f"api/v1/llms/{jsonable_encoder(llm_id)}" + ), + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), json=jsonable_encoder(request) if request_options is None or request_options.get("additional_body_parameters") is None @@ -361,14 +554,16 @@ async def update( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseLlm, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseLlm, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: diff --git a/src/superagent/resources/tool/client.py b/src/superagent/resources/tool/client.py index 69a444a..ba909d6 100644 --- a/src/superagent/resources/tool/client.py +++ b/src/superagent/resources/tool/client.py @@ -7,6 +7,8 @@ from ...core.api_error import ApiError from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import pydantic_v1 +from ...core.query_encoder import encode_query from ...core.remove_none_from_dict import remove_none_from_dict from ...core.request_options import RequestOptions from ...errors.unprocessable_entity_error import UnprocessableEntityError @@ -14,11 +16,6 @@ from ...types.http_validation_error import HttpValidationError from ...types.tool_list import ToolList -try: - import pydantic.v1 as pydantic # type: ignore -except ImportError: - import pydantic # type: ignore - # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -37,27 +34,48 @@ def list( """ List all tools - Parameters: - - skip: typing.Optional[int]. + Parameters + ---------- + skip : typing.Optional[int] + + take : typing.Optional[int] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - - take: typing.Optional[int]. + Returns + ------- + ToolList + Successful Response - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ Examples + -------- + from superagent.client import Superagent + + client = Superagent( + token="YOUR_TOKEN", + ) + client.tool.list( + skip=1, + take=1, + ) """ _response = self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/tools"), - params=jsonable_encoder( - remove_none_from_dict( - { - "skip": skip, - "take": take, - **( - request_options.get("additional_query_parameters", {}) - if request_options is not None - else {} - ), - } + method="GET", + url=urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/tools"), + params=encode_query( + jsonable_encoder( + remove_none_from_dict( + { + "skip": skip, + "take": take, + **( + request_options.get("additional_query_parameters", {}) + if request_options is not None + else {} + ), + } + ) ) ), headers=jsonable_encoder( @@ -70,14 +88,16 @@ def list( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(ToolList, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(ToolList, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -88,8 +108,8 @@ def create( self, *, name: str, - description: typing.Optional[str] = OMIT, type: str, + description: typing.Optional[str] = OMIT, metadata: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, return_direct: typing.Optional[bool] = OMIT, request_options: typing.Optional[RequestOptions] = None, @@ -97,18 +117,40 @@ def create( """ Create a new tool - Parameters: - - name: str. + Parameters + ---------- + name : str + + type : str + + description : typing.Optional[str] + + metadata : typing.Optional[typing.Dict[str, typing.Any]] - - description: typing.Optional[str]. + return_direct : typing.Optional[bool] - - type: str. + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - - metadata: typing.Optional[typing.Dict[str, typing.Any]]. + Returns + ------- + AppModelsResponseTool + Successful Response - - return_direct: typing.Optional[bool]. + Examples + -------- + from superagent.client import Superagent - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ client = Superagent( + token="YOUR_TOKEN", + ) + client.tool.create( + name="string", + description="string", + type="string", + metadata={"string": {"key": "value"}}, + return_direct=True, + ) """ _request: typing.Dict[str, typing.Any] = {"name": name, "type": type} if description is not OMIT: @@ -118,10 +160,12 @@ def create( if return_direct is not OMIT: _request["returnDirect"] = return_direct _response = self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/tools"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + method="POST", + url=urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/tools"), + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), json=jsonable_encoder(_request) if request_options is None or request_options.get("additional_body_parameters") is None @@ -139,14 +183,16 @@ def create( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseTool, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseTool, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -157,18 +203,38 @@ def get(self, tool_id: str, *, request_options: typing.Optional[RequestOptions] """ Get a specific tool - Parameters: - - tool_id: str. + Parameters + ---------- + tool_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ Returns + ------- + AppModelsResponseTool + Successful Response + + Examples + -------- + from superagent.client import Superagent + + client = Superagent( + token="YOUR_TOKEN", + ) + client.tool.get( + tool_id="string", + ) """ _response = self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin( + method="GET", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/tools/{jsonable_encoder(tool_id)}" ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), headers=jsonable_encoder( remove_none_from_dict( @@ -180,14 +246,16 @@ def get(self, tool_id: str, *, request_options: typing.Optional[RequestOptions] ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseTool, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseTool, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -198,18 +266,38 @@ def delete(self, tool_id: str, *, request_options: typing.Optional[RequestOption """ Delete a specific tool - Parameters: - - tool_id: str. + Parameters + ---------- + tool_id : str - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + typing.Any + Successful Response + + Examples + -------- + from superagent.client import Superagent + + client = Superagent( + token="YOUR_TOKEN", + ) + client.tool.delete( + tool_id="string", + ) """ _response = self._client_wrapper.httpx_client.request( - "DELETE", - urllib.parse.urljoin( + method="DELETE", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/tools/{jsonable_encoder(tool_id)}" ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), headers=jsonable_encoder( remove_none_from_dict( @@ -221,14 +309,16 @@ def delete(self, tool_id: str, *, request_options: typing.Optional[RequestOption ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -249,20 +339,43 @@ def update( """ Update a specific tool - Parameters: - - tool_id: str. + Parameters + ---------- + tool_id : str + + name : typing.Optional[str] + + description : typing.Optional[str] - - name: typing.Optional[str]. + type : typing.Optional[str] - - description: typing.Optional[str]. + metadata : typing.Optional[typing.Dict[str, typing.Any]] - - type: typing.Optional[str]. + return_direct : typing.Optional[bool] - - metadata: typing.Optional[typing.Dict[str, typing.Any]]. + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - - return_direct: typing.Optional[bool]. + Returns + ------- + AppModelsResponseTool + Successful Response - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ Examples + -------- + from superagent.client import Superagent + + client = Superagent( + token="YOUR_TOKEN", + ) + client.tool.update( + tool_id="string", + name="string", + description="string", + type="string", + metadata={"string": {"key": "value"}}, + return_direct=True, + ) """ _request: typing.Dict[str, typing.Any] = {} if name is not OMIT: @@ -276,12 +389,14 @@ def update( if return_direct is not OMIT: _request["returnDirect"] = return_direct _response = self._client_wrapper.httpx_client.request( - "PATCH", - urllib.parse.urljoin( + method="PATCH", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/tools/{jsonable_encoder(tool_id)}" ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), json=jsonable_encoder(_request) if request_options is None or request_options.get("additional_body_parameters") is None @@ -299,14 +414,16 @@ def update( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseTool, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseTool, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -328,27 +445,48 @@ async def list( """ List all tools - Parameters: - - skip: typing.Optional[int]. + Parameters + ---------- + skip : typing.Optional[int] - - take: typing.Optional[int]. + take : typing.Optional[int] - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + ToolList + Successful Response + + Examples + -------- + from superagent.client import AsyncSuperagent + + client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.tool.list( + skip=1, + take=1, + ) """ _response = await self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/tools"), - params=jsonable_encoder( - remove_none_from_dict( - { - "skip": skip, - "take": take, - **( - request_options.get("additional_query_parameters", {}) - if request_options is not None - else {} - ), - } + method="GET", + url=urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/tools"), + params=encode_query( + jsonable_encoder( + remove_none_from_dict( + { + "skip": skip, + "take": take, + **( + request_options.get("additional_query_parameters", {}) + if request_options is not None + else {} + ), + } + ) ) ), headers=jsonable_encoder( @@ -361,14 +499,16 @@ async def list( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(ToolList, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(ToolList, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -379,8 +519,8 @@ async def create( self, *, name: str, - description: typing.Optional[str] = OMIT, type: str, + description: typing.Optional[str] = OMIT, metadata: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, return_direct: typing.Optional[bool] = OMIT, request_options: typing.Optional[RequestOptions] = None, @@ -388,18 +528,40 @@ async def create( """ Create a new tool - Parameters: - - name: str. + Parameters + ---------- + name : str + + type : str + + description : typing.Optional[str] + + metadata : typing.Optional[typing.Dict[str, typing.Any]] - - description: typing.Optional[str]. + return_direct : typing.Optional[bool] - - type: str. + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - - metadata: typing.Optional[typing.Dict[str, typing.Any]]. + Returns + ------- + AppModelsResponseTool + Successful Response - - return_direct: typing.Optional[bool]. + Examples + -------- + from superagent.client import AsyncSuperagent - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.tool.create( + name="string", + description="string", + type="string", + metadata={"string": {"key": "value"}}, + return_direct=True, + ) """ _request: typing.Dict[str, typing.Any] = {"name": name, "type": type} if description is not OMIT: @@ -409,10 +571,12 @@ async def create( if return_direct is not OMIT: _request["returnDirect"] = return_direct _response = await self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/tools"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + method="POST", + url=urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/tools"), + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), json=jsonable_encoder(_request) if request_options is None or request_options.get("additional_body_parameters") is None @@ -430,14 +594,16 @@ async def create( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseTool, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseTool, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -450,18 +616,38 @@ async def get( """ Get a specific tool - Parameters: - - tool_id: str. + Parameters + ---------- + tool_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ Returns + ------- + AppModelsResponseTool + Successful Response + + Examples + -------- + from superagent.client import AsyncSuperagent + + client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.tool.get( + tool_id="string", + ) """ _response = await self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin( + method="GET", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/tools/{jsonable_encoder(tool_id)}" ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), headers=jsonable_encoder( remove_none_from_dict( @@ -473,14 +659,16 @@ async def get( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseTool, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseTool, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -491,18 +679,38 @@ async def delete(self, tool_id: str, *, request_options: typing.Optional[Request """ Delete a specific tool - Parameters: - - tool_id: str. + Parameters + ---------- + tool_id : str - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + typing.Any + Successful Response + + Examples + -------- + from superagent.client import AsyncSuperagent + + client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.tool.delete( + tool_id="string", + ) """ _response = await self._client_wrapper.httpx_client.request( - "DELETE", - urllib.parse.urljoin( + method="DELETE", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/tools/{jsonable_encoder(tool_id)}" ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), headers=jsonable_encoder( remove_none_from_dict( @@ -514,14 +722,16 @@ async def delete(self, tool_id: str, *, request_options: typing.Optional[Request ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -542,20 +752,43 @@ async def update( """ Update a specific tool - Parameters: - - tool_id: str. + Parameters + ---------- + tool_id : str + + name : typing.Optional[str] + + description : typing.Optional[str] - - name: typing.Optional[str]. + type : typing.Optional[str] - - description: typing.Optional[str]. + metadata : typing.Optional[typing.Dict[str, typing.Any]] - - type: typing.Optional[str]. + return_direct : typing.Optional[bool] - - metadata: typing.Optional[typing.Dict[str, typing.Any]]. + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - - return_direct: typing.Optional[bool]. + Returns + ------- + AppModelsResponseTool + Successful Response - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ Examples + -------- + from superagent.client import AsyncSuperagent + + client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.tool.update( + tool_id="string", + name="string", + description="string", + type="string", + metadata={"string": {"key": "value"}}, + return_direct=True, + ) """ _request: typing.Dict[str, typing.Any] = {} if name is not OMIT: @@ -569,12 +802,14 @@ async def update( if return_direct is not OMIT: _request["returnDirect"] = return_direct _response = await self._client_wrapper.httpx_client.request( - "PATCH", - urllib.parse.urljoin( + method="PATCH", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/tools/{jsonable_encoder(tool_id)}" ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), json=jsonable_encoder(_request) if request_options is None or request_options.get("additional_body_parameters") is None @@ -592,14 +827,16 @@ async def update( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseTool, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseTool, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: diff --git a/src/superagent/resources/vector_database/client.py b/src/superagent/resources/vector_database/client.py index 7bb085e..1364355 100644 --- a/src/superagent/resources/vector_database/client.py +++ b/src/superagent/resources/vector_database/client.py @@ -7,6 +7,8 @@ from ...core.api_error import ApiError from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import pydantic_v1 +from ...core.query_encoder import encode_query from ...core.remove_none_from_dict import remove_none_from_dict from ...core.request_options import RequestOptions from ...errors.unprocessable_entity_error import UnprocessableEntityError @@ -15,11 +17,6 @@ from ...types.http_validation_error import HttpValidationError from ...types.vector_db_list import VectorDbList -try: - import pydantic.v1 as pydantic # type: ignore -except ImportError: - import pydantic # type: ignore - # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -34,16 +31,40 @@ def create( """ Create a new Vector Database - Parameters: - - request: AppModelsRequestVectorDb. + Parameters + ---------- + request : AppModelsRequestVectorDb + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AppModelsResponseVectorDb + Successful Response + + Examples + -------- + from superagent import AppModelsRequestVectorDb, VectorDbProvider + from superagent.client import Superagent - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. + client = Superagent( + token="YOUR_TOKEN", + ) + client.vector_database.create( + request=AppModelsRequestVectorDb( + provider=VectorDbProvider.PINECONE, + options={"string": {"key": "value"}}, + ), + ) """ _response = self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/vector-db"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + method="POST", + url=urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/vector-db"), + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), json=jsonable_encoder(request) if request_options is None or request_options.get("additional_body_parameters") is None @@ -61,14 +82,16 @@ def create( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseVectorDb, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseVectorDb, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -79,14 +102,32 @@ def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> Ve """ List all Vector Databases - Parameters: - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + VectorDbList + Successful Response + + Examples + -------- + from superagent.client import Superagent + + client = Superagent( + token="YOUR_TOKEN", + ) + client.vector_database.list() """ _response = self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/vector-dbs"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + method="GET", + url=urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/vector-dbs"), + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), headers=jsonable_encoder( remove_none_from_dict( @@ -98,12 +139,12 @@ def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> Ve ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(VectorDbList, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(VectorDbList, _response.json()) # type: ignore try: _response_json = _response.json() except JSONDecodeError: @@ -116,18 +157,38 @@ def get( """ Get a single Vector Database - Parameters: - - vector_db_id: str. + Parameters + ---------- + vector_db_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AppModelsResponseVectorDb + Successful Response + + Examples + -------- + from superagent.client import Superagent - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ client = Superagent( + token="YOUR_TOKEN", + ) + client.vector_database.get( + vector_db_id="string", + ) """ _response = self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin( + method="GET", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/vector-dbs/{jsonable_encoder(vector_db_id)}" ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), headers=jsonable_encoder( remove_none_from_dict( @@ -139,14 +200,16 @@ def get( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseVectorDb, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseVectorDb, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -157,18 +220,38 @@ def delete(self, vector_db_id: str, *, request_options: typing.Optional[RequestO """ Delete a Vector Database - Parameters: - - vector_db_id: str. + Parameters + ---------- + vector_db_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ Returns + ------- + typing.Any + Successful Response + + Examples + -------- + from superagent.client import Superagent + + client = Superagent( + token="YOUR_TOKEN", + ) + client.vector_database.delete( + vector_db_id="string", + ) """ _response = self._client_wrapper.httpx_client.request( - "DELETE", - urllib.parse.urljoin( + method="DELETE", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/vector-dbs/{jsonable_encoder(vector_db_id)}" ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), headers=jsonable_encoder( remove_none_from_dict( @@ -180,14 +263,16 @@ def delete(self, vector_db_id: str, *, request_options: typing.Optional[RequestO ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -204,20 +289,45 @@ def update( """ Patch a Vector Database - Parameters: - - vector_db_id: str. + Parameters + ---------- + vector_db_id : str + + request : AppModelsRequestVectorDb + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AppModelsResponseVectorDb + Successful Response - - request: AppModelsRequestVectorDb. + Examples + -------- + from superagent import AppModelsRequestVectorDb, VectorDbProvider + from superagent.client import Superagent - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ client = Superagent( + token="YOUR_TOKEN", + ) + client.vector_database.update( + vector_db_id="string", + request=AppModelsRequestVectorDb( + provider=VectorDbProvider.PINECONE, + options={"string": {"key": "value"}}, + ), + ) """ _response = self._client_wrapper.httpx_client.request( - "PATCH", - urllib.parse.urljoin( + method="PATCH", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/vector-dbs/{jsonable_encoder(vector_db_id)}" ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), json=jsonable_encoder(request) if request_options is None or request_options.get("additional_body_parameters") is None @@ -235,14 +345,16 @@ def update( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseVectorDb, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseVectorDb, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -260,16 +372,40 @@ async def create( """ Create a new Vector Database - Parameters: - - request: AppModelsRequestVectorDb. + Parameters + ---------- + request : AppModelsRequestVectorDb - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AppModelsResponseVectorDb + Successful Response + + Examples + -------- + from superagent import AppModelsRequestVectorDb, VectorDbProvider + from superagent.client import AsyncSuperagent + + client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.vector_database.create( + request=AppModelsRequestVectorDb( + provider=VectorDbProvider.PINECONE, + options={"string": {"key": "value"}}, + ), + ) """ _response = await self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/vector-db"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + method="POST", + url=urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/vector-db"), + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), json=jsonable_encoder(request) if request_options is None or request_options.get("additional_body_parameters") is None @@ -287,14 +423,16 @@ async def create( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseVectorDb, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseVectorDb, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -305,14 +443,32 @@ async def list(self, *, request_options: typing.Optional[RequestOptions] = None) """ List all Vector Databases - Parameters: - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + VectorDbList + Successful Response + + Examples + -------- + from superagent.client import AsyncSuperagent + + client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.vector_database.list() """ _response = await self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/vector-dbs"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + method="GET", + url=urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/vector-dbs"), + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), headers=jsonable_encoder( remove_none_from_dict( @@ -324,12 +480,12 @@ async def list(self, *, request_options: typing.Optional[RequestOptions] = None) ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(VectorDbList, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(VectorDbList, _response.json()) # type: ignore try: _response_json = _response.json() except JSONDecodeError: @@ -342,18 +498,38 @@ async def get( """ Get a single Vector Database - Parameters: - - vector_db_id: str. + Parameters + ---------- + vector_db_id : str - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AppModelsResponseVectorDb + Successful Response + + Examples + -------- + from superagent.client import AsyncSuperagent + + client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.vector_database.get( + vector_db_id="string", + ) """ _response = await self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin( + method="GET", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/vector-dbs/{jsonable_encoder(vector_db_id)}" ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), headers=jsonable_encoder( remove_none_from_dict( @@ -365,14 +541,16 @@ async def get( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseVectorDb, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseVectorDb, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -383,18 +561,38 @@ async def delete(self, vector_db_id: str, *, request_options: typing.Optional[Re """ Delete a Vector Database - Parameters: - - vector_db_id: str. + Parameters + ---------- + vector_db_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + typing.Any + Successful Response - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ Examples + -------- + from superagent.client import AsyncSuperagent + + client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.vector_database.delete( + vector_db_id="string", + ) """ _response = await self._client_wrapper.httpx_client.request( - "DELETE", - urllib.parse.urljoin( + method="DELETE", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/vector-dbs/{jsonable_encoder(vector_db_id)}" ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), headers=jsonable_encoder( remove_none_from_dict( @@ -406,14 +604,16 @@ async def delete(self, vector_db_id: str, *, request_options: typing.Optional[Re ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -430,20 +630,45 @@ async def update( """ Patch a Vector Database - Parameters: - - vector_db_id: str. + Parameters + ---------- + vector_db_id : str - - request: AppModelsRequestVectorDb. + request : AppModelsRequestVectorDb - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AppModelsResponseVectorDb + Successful Response + + Examples + -------- + from superagent import AppModelsRequestVectorDb, VectorDbProvider + from superagent.client import AsyncSuperagent + + client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.vector_database.update( + vector_db_id="string", + request=AppModelsRequestVectorDb( + provider=VectorDbProvider.PINECONE, + options={"string": {"key": "value"}}, + ), + ) """ _response = await self._client_wrapper.httpx_client.request( - "PATCH", - urllib.parse.urljoin( + method="PATCH", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/vector-dbs/{jsonable_encoder(vector_db_id)}" ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), json=jsonable_encoder(request) if request_options is None or request_options.get("additional_body_parameters") is None @@ -461,14 +686,16 @@ async def update( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseVectorDb, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseVectorDb, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: diff --git a/src/superagent/resources/workflow/client.py b/src/superagent/resources/workflow/client.py index fdfc5c9..3201b35 100644 --- a/src/superagent/resources/workflow/client.py +++ b/src/superagent/resources/workflow/client.py @@ -7,6 +7,8 @@ from ...core.api_error import ApiError from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import pydantic_v1 +from ...core.query_encoder import encode_query from ...core.remove_none_from_dict import remove_none_from_dict from ...core.request_options import RequestOptions from ...errors.unprocessable_entity_error import UnprocessableEntityError @@ -18,11 +20,6 @@ from ...types.workflow_list import WorkflowList from ...types.workflow_step_list import WorkflowStepList -try: - import pydantic.v1 as pydantic # type: ignore -except ImportError: - import pydantic # type: ignore - # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -41,27 +38,48 @@ def list( """ List all workflows - Parameters: - - skip: typing.Optional[int]. + Parameters + ---------- + skip : typing.Optional[int] + + take : typing.Optional[int] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + WorkflowList + Successful Response - - take: typing.Optional[int]. + Examples + -------- + from superagent.client import Superagent - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ client = Superagent( + token="YOUR_TOKEN", + ) + client.workflow.list( + skip=1, + take=1, + ) """ _response = self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/workflows"), - params=jsonable_encoder( - remove_none_from_dict( - { - "skip": skip, - "take": take, - **( - request_options.get("additional_query_parameters", {}) - if request_options is not None - else {} - ), - } + method="GET", + url=urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/workflows"), + params=encode_query( + jsonable_encoder( + remove_none_from_dict( + { + "skip": skip, + "take": take, + **( + request_options.get("additional_query_parameters", {}) + if request_options is not None + else {} + ), + } + ) ) ), headers=jsonable_encoder( @@ -74,14 +92,16 @@ def list( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(WorkflowList, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(WorkflowList, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -94,16 +114,40 @@ def create( """ Create a new workflow - Parameters: - - request: AppModelsRequestWorkflow. + Parameters + ---------- + request : AppModelsRequestWorkflow + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AppModelsResponseWorkflow + Successful Response + + Examples + -------- + from superagent import AppModelsRequestWorkflow + from superagent.client import Superagent - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ client = Superagent( + token="YOUR_TOKEN", + ) + client.workflow.create( + request=AppModelsRequestWorkflow( + name="string", + description="string", + ), + ) """ _response = self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/workflows"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + method="POST", + url=urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/workflows"), + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), json=jsonable_encoder(request) if request_options is None or request_options.get("additional_body_parameters") is None @@ -121,14 +165,16 @@ def create( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseWorkflow, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseWorkflow, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -141,18 +187,38 @@ def get( """ Get a single workflow - Parameters: - - workflow_id: str. + Parameters + ---------- + workflow_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AppModelsResponseWorkflow + Successful Response + + Examples + -------- + from superagent.client import Superagent - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ client = Superagent( + token="YOUR_TOKEN", + ) + client.workflow.get( + workflow_id="string", + ) """ _response = self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin( + method="GET", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/workflows/{jsonable_encoder(workflow_id)}" ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), headers=jsonable_encoder( remove_none_from_dict( @@ -164,14 +230,16 @@ def get( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseWorkflow, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseWorkflow, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -182,18 +250,38 @@ def delete(self, workflow_id: str, *, request_options: typing.Optional[RequestOp """ Delete a specific workflow - Parameters: - - workflow_id: str. + Parameters + ---------- + workflow_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ Returns + ------- + typing.Any + Successful Response + + Examples + -------- + from superagent.client import Superagent + + client = Superagent( + token="YOUR_TOKEN", + ) + client.workflow.delete( + workflow_id="string", + ) """ _response = self._client_wrapper.httpx_client.request( - "DELETE", - urllib.parse.urljoin( + method="DELETE", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/workflows/{jsonable_encoder(workflow_id)}" ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), headers=jsonable_encoder( remove_none_from_dict( @@ -205,14 +293,16 @@ def delete(self, workflow_id: str, *, request_options: typing.Optional[RequestOp ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -230,23 +320,49 @@ def update( """ Patch a workflow step - Parameters: - - workflow_id: str. + Parameters + ---------- + workflow_id : str + + step_id : str + + request : AppModelsRequestWorkflowStep - - step_id: str. + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - - request: AppModelsRequestWorkflowStep. + Returns + ------- + AppModelsResponseWorkflowStep + Successful Response - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ Examples + -------- + from superagent import AppModelsRequestWorkflowStep + from superagent.client import Superagent + + client = Superagent( + token="YOUR_TOKEN", + ) + client.workflow.update( + workflow_id="string", + step_id="string", + request=AppModelsRequestWorkflowStep( + order=1, + agent_id="string", + ), + ) """ _response = self._client_wrapper.httpx_client.request( - "PATCH", - urllib.parse.urljoin( + method="PATCH", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/workflows/{jsonable_encoder(workflow_id)}/steps/{jsonable_encoder(step_id)}", ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), json=jsonable_encoder(request) if request_options is None or request_options.get("additional_body_parameters") is None @@ -264,14 +380,16 @@ def update( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseWorkflowStep, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseWorkflowStep, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -292,20 +410,43 @@ def invoke( """ Invoke a specific workflow - Parameters: - - workflow_id: str. + Parameters + ---------- + workflow_id : str + + input : str + + enable_streaming : bool + + session_id : typing.Optional[str] - - input: str. + output_schemas : typing.Optional[typing.Dict[str, str]] - - enable_streaming: bool. + output_schema : typing.Optional[str] - - session_id: typing.Optional[str]. + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - - output_schemas: typing.Optional[typing.Dict[str, str]]. + Returns + ------- + typing.Any + Successful Response - - output_schema: typing.Optional[str]. + Examples + -------- + from superagent.client import Superagent - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ client = Superagent( + token="YOUR_TOKEN", + ) + client.workflow.invoke( + workflow_id="string", + input="string", + enable_streaming=True, + session_id="string", + output_schemas={"string": "string"}, + output_schema="string", + ) """ _request: typing.Dict[str, typing.Any] = {"input": input, "enableStreaming": enable_streaming} if session_id is not OMIT: @@ -315,12 +456,14 @@ def invoke( if output_schema is not OMIT: _request["outputSchema"] = output_schema _response = self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin( + method="POST", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/workflows/{jsonable_encoder(workflow_id)}/invoke" ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), json=jsonable_encoder(_request) if request_options is None or request_options.get("additional_body_parameters") is None @@ -338,14 +481,16 @@ def invoke( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -358,18 +503,38 @@ def list_steps( """ List all steps of a workflow - Parameters: - - workflow_id: str. + Parameters + ---------- + workflow_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ Returns + ------- + WorkflowStepList + Successful Response + + Examples + -------- + from superagent.client import Superagent + + client = Superagent( + token="YOUR_TOKEN", + ) + client.workflow.list_steps( + workflow_id="string", + ) """ _response = self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin( + method="GET", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/workflows/{jsonable_encoder(workflow_id)}/steps" ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), headers=jsonable_encoder( remove_none_from_dict( @@ -381,14 +546,16 @@ def list_steps( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(WorkflowStepList, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(WorkflowStepList, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -405,20 +572,45 @@ def add_step( """ Create a new workflow step - Parameters: - - workflow_id: str. + Parameters + ---------- + workflow_id : str + + request : AppModelsRequestWorkflowStep + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - - request: AppModelsRequestWorkflowStep. + Returns + ------- + AppModelsResponseWorkflowStep + Successful Response - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ Examples + -------- + from superagent import AppModelsRequestWorkflowStep + from superagent.client import Superagent + + client = Superagent( + token="YOUR_TOKEN", + ) + client.workflow.add_step( + workflow_id="string", + request=AppModelsRequestWorkflowStep( + order=1, + agent_id="string", + ), + ) """ _response = self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin( + method="POST", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/workflows/{jsonable_encoder(workflow_id)}/steps" ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), json=jsonable_encoder(request) if request_options is None or request_options.get("additional_body_parameters") is None @@ -436,14 +628,16 @@ def add_step( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseWorkflowStep, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseWorkflowStep, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -456,21 +650,42 @@ def delete_step( """ Delete a specific workflow step - Parameters: - - workflow_id: str. + Parameters + ---------- + workflow_id : str + + step_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + typing.Any + Successful Response - - step_id: str. + Examples + -------- + from superagent.client import Superagent - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ client = Superagent( + token="YOUR_TOKEN", + ) + client.workflow.delete_step( + workflow_id="string", + step_id="string", + ) """ _response = self._client_wrapper.httpx_client.request( - "DELETE", - urllib.parse.urljoin( + method="DELETE", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/workflows/{jsonable_encoder(workflow_id)}/steps/{jsonable_encoder(step_id)}", ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), headers=jsonable_encoder( remove_none_from_dict( @@ -482,14 +697,16 @@ def delete_step( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -511,27 +728,48 @@ async def list( """ List all workflows - Parameters: - - skip: typing.Optional[int]. + Parameters + ---------- + skip : typing.Optional[int] - - take: typing.Optional[int]. + take : typing.Optional[int] - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + WorkflowList + Successful Response + + Examples + -------- + from superagent.client import AsyncSuperagent + + client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.workflow.list( + skip=1, + take=1, + ) """ _response = await self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/workflows"), - params=jsonable_encoder( - remove_none_from_dict( - { - "skip": skip, - "take": take, - **( - request_options.get("additional_query_parameters", {}) - if request_options is not None - else {} - ), - } + method="GET", + url=urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/workflows"), + params=encode_query( + jsonable_encoder( + remove_none_from_dict( + { + "skip": skip, + "take": take, + **( + request_options.get("additional_query_parameters", {}) + if request_options is not None + else {} + ), + } + ) ) ), headers=jsonable_encoder( @@ -544,14 +782,16 @@ async def list( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(WorkflowList, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(WorkflowList, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -564,16 +804,40 @@ async def create( """ Create a new workflow - Parameters: - - request: AppModelsRequestWorkflow. + Parameters + ---------- + request : AppModelsRequestWorkflow - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AppModelsResponseWorkflow + Successful Response + + Examples + -------- + from superagent import AppModelsRequestWorkflow + from superagent.client import AsyncSuperagent + + client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.workflow.create( + request=AppModelsRequestWorkflow( + name="string", + description="string", + ), + ) """ _response = await self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/workflows"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + method="POST", + url=urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/workflows"), + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), json=jsonable_encoder(request) if request_options is None or request_options.get("additional_body_parameters") is None @@ -591,14 +855,16 @@ async def create( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseWorkflow, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseWorkflow, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -611,18 +877,38 @@ async def get( """ Get a single workflow - Parameters: - - workflow_id: str. + Parameters + ---------- + workflow_id : str - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AppModelsResponseWorkflow + Successful Response + + Examples + -------- + from superagent.client import AsyncSuperagent + + client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.workflow.get( + workflow_id="string", + ) """ _response = await self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin( + method="GET", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/workflows/{jsonable_encoder(workflow_id)}" ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), headers=jsonable_encoder( remove_none_from_dict( @@ -634,14 +920,16 @@ async def get( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseWorkflow, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseWorkflow, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -652,18 +940,38 @@ async def delete(self, workflow_id: str, *, request_options: typing.Optional[Req """ Delete a specific workflow - Parameters: - - workflow_id: str. + Parameters + ---------- + workflow_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + typing.Any + Successful Response - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ Examples + -------- + from superagent.client import AsyncSuperagent + + client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.workflow.delete( + workflow_id="string", + ) """ _response = await self._client_wrapper.httpx_client.request( - "DELETE", - urllib.parse.urljoin( + method="DELETE", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/workflows/{jsonable_encoder(workflow_id)}" ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), headers=jsonable_encoder( remove_none_from_dict( @@ -675,14 +983,16 @@ async def delete(self, workflow_id: str, *, request_options: typing.Optional[Req ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -700,23 +1010,49 @@ async def update( """ Patch a workflow step - Parameters: - - workflow_id: str. + Parameters + ---------- + workflow_id : str + + step_id : str + + request : AppModelsRequestWorkflowStep + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - - step_id: str. + Returns + ------- + AppModelsResponseWorkflowStep + Successful Response - - request: AppModelsRequestWorkflowStep. + Examples + -------- + from superagent import AppModelsRequestWorkflowStep + from superagent.client import AsyncSuperagent - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.workflow.update( + workflow_id="string", + step_id="string", + request=AppModelsRequestWorkflowStep( + order=1, + agent_id="string", + ), + ) """ _response = await self._client_wrapper.httpx_client.request( - "PATCH", - urllib.parse.urljoin( + method="PATCH", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/workflows/{jsonable_encoder(workflow_id)}/steps/{jsonable_encoder(step_id)}", ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), json=jsonable_encoder(request) if request_options is None or request_options.get("additional_body_parameters") is None @@ -734,14 +1070,16 @@ async def update( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseWorkflowStep, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseWorkflowStep, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -762,20 +1100,43 @@ async def invoke( """ Invoke a specific workflow - Parameters: - - workflow_id: str. + Parameters + ---------- + workflow_id : str - - input: str. + input : str - - enable_streaming: bool. + enable_streaming : bool - - session_id: typing.Optional[str]. + session_id : typing.Optional[str] - - output_schemas: typing.Optional[typing.Dict[str, str]]. + output_schemas : typing.Optional[typing.Dict[str, str]] - - output_schema: typing.Optional[str]. + output_schema : typing.Optional[str] - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + typing.Any + Successful Response + + Examples + -------- + from superagent.client import AsyncSuperagent + + client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.workflow.invoke( + workflow_id="string", + input="string", + enable_streaming=True, + session_id="string", + output_schemas={"string": "string"}, + output_schema="string", + ) """ _request: typing.Dict[str, typing.Any] = {"input": input, "enableStreaming": enable_streaming} if session_id is not OMIT: @@ -785,12 +1146,14 @@ async def invoke( if output_schema is not OMIT: _request["outputSchema"] = output_schema _response = await self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin( + method="POST", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/workflows/{jsonable_encoder(workflow_id)}/invoke" ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), json=jsonable_encoder(_request) if request_options is None or request_options.get("additional_body_parameters") is None @@ -808,14 +1171,16 @@ async def invoke( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -828,18 +1193,38 @@ async def list_steps( """ List all steps of a workflow - Parameters: - - workflow_id: str. + Parameters + ---------- + workflow_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + WorkflowStepList + Successful Response - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ Examples + -------- + from superagent.client import AsyncSuperagent + + client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.workflow.list_steps( + workflow_id="string", + ) """ _response = await self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin( + method="GET", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/workflows/{jsonable_encoder(workflow_id)}/steps" ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), headers=jsonable_encoder( remove_none_from_dict( @@ -851,14 +1236,16 @@ async def list_steps( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(WorkflowStepList, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(WorkflowStepList, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -875,20 +1262,45 @@ async def add_step( """ Create a new workflow step - Parameters: - - workflow_id: str. + Parameters + ---------- + workflow_id : str + + request : AppModelsRequestWorkflowStep + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AppModelsResponseWorkflowStep + Successful Response - - request: AppModelsRequestWorkflowStep. + Examples + -------- + from superagent import AppModelsRequestWorkflowStep + from superagent.client import AsyncSuperagent - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.workflow.add_step( + workflow_id="string", + request=AppModelsRequestWorkflowStep( + order=1, + agent_id="string", + ), + ) """ _response = await self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin( + method="POST", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/workflows/{jsonable_encoder(workflow_id)}/steps" ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), json=jsonable_encoder(request) if request_options is None or request_options.get("additional_body_parameters") is None @@ -906,14 +1318,16 @@ async def add_step( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AppModelsResponseWorkflowStep, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(AppModelsResponseWorkflowStep, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -926,21 +1340,42 @@ async def delete_step( """ Delete a specific workflow step - Parameters: - - workflow_id: str. + Parameters + ---------- + workflow_id : str + + step_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + typing.Any + Successful Response - - step_id: str. + Examples + -------- + from superagent.client import AsyncSuperagent - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.workflow.delete_step( + workflow_id="string", + step_id="string", + ) """ _response = await self._client_wrapper.httpx_client.request( - "DELETE", - urllib.parse.urljoin( + method="DELETE", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/workflows/{jsonable_encoder(workflow_id)}/steps/{jsonable_encoder(step_id)}", ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), headers=jsonable_encoder( remove_none_from_dict( @@ -952,14 +1387,16 @@ async def delete_step( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: diff --git a/src/superagent/resources/workflow_config/client.py b/src/superagent/resources/workflow_config/client.py index 8572573..c9382d2 100644 --- a/src/superagent/resources/workflow_config/client.py +++ b/src/superagent/resources/workflow_config/client.py @@ -7,16 +7,13 @@ from ...core.api_error import ApiError from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import pydantic_v1 +from ...core.query_encoder import encode_query from ...core.remove_none_from_dict import remove_none_from_dict from ...core.request_options import RequestOptions from ...errors.unprocessable_entity_error import UnprocessableEntityError from ...types.http_validation_error import HttpValidationError -try: - import pydantic.v1 as pydantic # type: ignore -except ImportError: - import pydantic # type: ignore - class WorkflowConfigClient: def __init__(self, *, client_wrapper: SyncClientWrapper): @@ -24,14 +21,32 @@ def __init__(self, *, client_wrapper: SyncClientWrapper): def get_schema(self, *, request_options: typing.Optional[RequestOptions] = None) -> typing.Any: """ - Parameters: - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + typing.Any + Successful Response + + Examples + -------- + from superagent.client import Superagent + + client = Superagent( + token="YOUR_TOKEN", + ) + client.workflow_config.get_schema() """ _response = self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/workflows/config/schema"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + method="GET", + url=urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/workflows/config/schema"), + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), headers=jsonable_encoder( remove_none_from_dict( @@ -43,12 +58,12 @@ def get_schema(self, *, request_options: typing.Optional[RequestOptions] = None) ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore try: _response_json = _response.json() except JSONDecodeError: @@ -57,18 +72,38 @@ def get_schema(self, *, request_options: typing.Optional[RequestOptions] = None) def add_config(self, workflow_id: str, *, request_options: typing.Optional[RequestOptions] = None) -> typing.Any: """ - Parameters: - - workflow_id: str. + Parameters + ---------- + workflow_id : str - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + typing.Any + Successful Response + + Examples + -------- + from superagent.client import Superagent + + client = Superagent( + token="YOUR_TOKEN", + ) + client.workflow_config.add_config( + workflow_id="string", + ) """ _response = self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin( + method="POST", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/workflows/{jsonable_encoder(workflow_id)}/config" ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), json=jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {}))) if request_options is not None @@ -83,14 +118,16 @@ def add_config(self, workflow_id: str, *, request_options: typing.Optional[Reque ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -104,14 +141,32 @@ def __init__(self, *, client_wrapper: AsyncClientWrapper): async def get_schema(self, *, request_options: typing.Optional[RequestOptions] = None) -> typing.Any: """ - Parameters: - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + typing.Any + Successful Response + + Examples + -------- + from superagent.client import AsyncSuperagent + + client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.workflow_config.get_schema() """ _response = await self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/workflows/config/schema"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + method="GET", + url=urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/workflows/config/schema"), + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), headers=jsonable_encoder( remove_none_from_dict( @@ -123,12 +178,12 @@ async def get_schema(self, *, request_options: typing.Optional[RequestOptions] = ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore try: _response_json = _response.json() except JSONDecodeError: @@ -139,18 +194,38 @@ async def add_config( self, workflow_id: str, *, request_options: typing.Optional[RequestOptions] = None ) -> typing.Any: """ - Parameters: - - workflow_id: str. + Parameters + ---------- + workflow_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
+ Returns + ------- + typing.Any + Successful Response + + Examples + -------- + from superagent.client import AsyncSuperagent + + client = AsyncSuperagent( + token="YOUR_TOKEN", + ) + await client.workflow_config.add_config( + workflow_id="string", + ) """ _response = await self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin( + method="POST", + url=urllib.parse.urljoin( f"{self._client_wrapper.get_base_url()}/", f"api/v1/workflows/{jsonable_encoder(workflow_id)}/config" ), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None + params=encode_query( + jsonable_encoder( + request_options.get("additional_query_parameters") if request_options is not None else None + ) ), json=jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {}))) if request_options is not None @@ -165,14 +240,16 @@ async def add_config( ), timeout=request_options.get("timeout_in_seconds") if request_options is not None and request_options.get("timeout_in_seconds") is not None - else 60, + else self._client_wrapper.get_timeout(), retries=0, max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + raise UnprocessableEntityError( + pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: diff --git a/src/superagent/types/agent_datasosurce_list.py b/src/superagent/types/agent_datasosurce_list.py index 3e3181e..f61a4ff 100644 --- a/src/superagent/types/agent_datasosurce_list.py +++ b/src/superagent/types/agent_datasosurce_list.py @@ -4,27 +4,28 @@ import typing from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 from .prisma_models_agent_datasource import PrismaModelsAgentDatasource -try: - import pydantic.v1 as pydantic # type: ignore -except ImportError: - import pydantic # type: ignore - -class AgentDatasosurceList(pydantic.BaseModel): +class AgentDatasosurceList(pydantic_v1.BaseModel): success: bool - data: typing.Optional[typing.List[PrismaModelsAgentDatasource]] = None + data: typing.Optional[typing.List[PrismaModelsAgentDatasource]] def json(self, **kwargs: typing.Any) -> str: kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().dict(**kwargs_with_defaults) + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) class Config: frozen = True smart_union = True + extra = pydantic_v1.Extra.forbid json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/superagent/types/agent_list.py 
b/src/superagent/types/agent_list.py index 8f4b28b..e9eeea2 100644 --- a/src/superagent/types/agent_list.py +++ b/src/superagent/types/agent_list.py @@ -4,17 +4,13 @@ import typing from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 from .prisma_models_agent import PrismaModelsAgent -try: - import pydantic.v1 as pydantic # type: ignore -except ImportError: - import pydantic # type: ignore - -class AgentList(pydantic.BaseModel): +class AgentList(pydantic_v1.BaseModel): success: bool - data: typing.Optional[typing.List[PrismaModelsAgent]] = None + data: typing.Optional[typing.List[PrismaModelsAgent]] total_pages: int def json(self, **kwargs: typing.Any) -> str: @@ -22,10 +18,15 @@ def json(self, **kwargs: typing.Any) -> str: return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().dict(**kwargs_with_defaults) + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) class Config: frozen = True smart_union = True + extra = pydantic_v1.Extra.forbid json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/superagent/types/agent_tool_list.py b/src/superagent/types/agent_tool_list.py index cdc8ea7..70c7edd 100644 --- a/src/superagent/types/agent_tool_list.py +++ b/src/superagent/types/agent_tool_list.py @@ -4,27 +4,28 @@ import typing from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 from .prisma_models_agent_tool import PrismaModelsAgentTool -try: - import pydantic.v1 as pydantic # type: ignore -except ImportError: - import pydantic # type: ignore - -class AgentToolList(pydantic.BaseModel): +class AgentToolList(pydantic_v1.BaseModel): success: bool - data: typing.Optional[typing.List[PrismaModelsAgentTool]] = None + data: typing.Optional[typing.List[PrismaModelsAgentTool]] def json(self, **kwargs: typing.Any) -> str: kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().dict(**kwargs_with_defaults) + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) class Config: frozen = True smart_union = True + extra = pydantic_v1.Extra.forbid json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/superagent/types/api_key_create.py b/src/superagent/types/api_key_create.py index 7f7d55b..c34e8a1 100644 --- a/src/superagent/types/api_key_create.py +++ b/src/superagent/types/api_key_create.py @@ -4,27 +4,28 @@ import typing from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, 
pydantic_v1 from .api_key_create_model import ApiKeyCreateModel -try: - import pydantic.v1 as pydantic # type: ignore -except ImportError: - import pydantic # type: ignore - -class ApiKeyCreate(pydantic.BaseModel): +class ApiKeyCreate(pydantic_v1.BaseModel): success: bool - data: typing.Optional[ApiKeyCreateModel] = None + data: typing.Optional[ApiKeyCreateModel] def json(self, **kwargs: typing.Any) -> str: kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().dict(**kwargs_with_defaults) + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) class Config: frozen = True smart_union = True + extra = pydantic_v1.Extra.forbid json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/superagent/types/api_key_create_model.py b/src/superagent/types/api_key_create_model.py index 495aedb..ceca761 100644 --- a/src/superagent/types/api_key_create_model.py +++ b/src/superagent/types/api_key_create_model.py @@ -4,38 +4,40 @@ import typing from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 from .prisma_models_api_user import PrismaModelsApiUser -try: - import pydantic.v1 as pydantic # type: ignore -except ImportError: - import pydantic # type: ignore - -class ApiKeyCreateModel(pydantic.BaseModel): +class ApiKeyCreateModel(pydantic_v1.BaseModel): """ Represents a ApiKey record """ id: str name: str - display_api_key: str = pydantic.Field(alias="displayApiKey") - created_at: dt.datetime = pydantic.Field(alias="createdAt") - updated_at: dt.datetime = pydantic.Field(alias="updatedAt") - api_user_id: str = pydantic.Field(alias="apiUserId") - api_user: typing.Optional[PrismaModelsApiUser] = pydantic.Field(alias="apiUser", default=None) - api_key: str = pydantic.Field(alias="apiKey") + display_api_key: str = pydantic_v1.Field(alias="displayApiKey") + created_at: dt.datetime = pydantic_v1.Field(alias="createdAt") + updated_at: dt.datetime = pydantic_v1.Field(alias="updatedAt") + api_user_id: str = pydantic_v1.Field(alias="apiUserId") + api_user: typing.Optional[PrismaModelsApiUser] = pydantic_v1.Field(alias="apiUser") + api_key: str = pydantic_v1.Field(alias="apiKey") def json(self, **kwargs: typing.Any) -> str: kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().dict(**kwargs_with_defaults) + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) class Config: frozen = True smart_union = True allow_population_by_field_name = True + 
populate_by_name = True + extra = pydantic_v1.Extra.forbid json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/superagent/types/api_key_list.py b/src/superagent/types/api_key_list.py index 6eaa163..0081c34 100644 --- a/src/superagent/types/api_key_list.py +++ b/src/superagent/types/api_key_list.py @@ -4,27 +4,28 @@ import typing from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 from .prisma_models_api_key import PrismaModelsApiKey -try: - import pydantic.v1 as pydantic # type: ignore -except ImportError: - import pydantic # type: ignore - -class ApiKeyList(pydantic.BaseModel): +class ApiKeyList(pydantic_v1.BaseModel): success: bool - data: typing.Optional[typing.List[PrismaModelsApiKey]] = None + data: typing.Optional[typing.List[PrismaModelsApiKey]] def json(self, **kwargs: typing.Any) -> str: kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().dict(**kwargs_with_defaults) + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) class Config: frozen = True smart_union = True + extra = pydantic_v1.Extra.forbid json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/superagent/types/app_models_request_api_key.py b/src/superagent/types/app_models_request_api_key.py index dddf880..0b34933 100644 --- a/src/superagent/types/app_models_request_api_key.py +++ b/src/superagent/types/app_models_request_api_key.py @@ -4,14 +4,10 @@ import typing from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -try: - import pydantic.v1 as pydantic # type: ignore -except ImportError: - import pydantic # type: ignore - -class AppModelsRequestApiKey(pydantic.BaseModel): +class AppModelsRequestApiKey(pydantic_v1.BaseModel): name: str def json(self, **kwargs: typing.Any) -> str: @@ -19,10 +15,15 @@ def json(self, **kwargs: typing.Any) -> str: return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().dict(**kwargs_with_defaults) + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) class Config: frozen = True smart_union = True + extra = pydantic_v1.Extra.forbid json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/superagent/types/app_models_request_api_user.py b/src/superagent/types/app_models_request_api_user.py index bf3f182..6719c4b 100644 --- a/src/superagent/types/app_models_request_api_user.py +++ b/src/superagent/types/app_models_request_api_user.py @@ -4,30 +4,32 @@ import typing from ..core.datetime_utils import 
serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -try: - import pydantic.v1 as pydantic # type: ignore -except ImportError: - import pydantic # type: ignore - -class AppModelsRequestApiUser(pydantic.BaseModel): +class AppModelsRequestApiUser(pydantic_v1.BaseModel): email: str - first_name: typing.Optional[str] = pydantic.Field(alias="firstName", default=None) - last_name: typing.Optional[str] = pydantic.Field(alias="lastName", default=None) - company: typing.Optional[str] = None - anonymous_id: typing.Optional[str] = pydantic.Field(alias="anonymousId", default=None) + first_name: typing.Optional[str] = pydantic_v1.Field(alias="firstName") + last_name: typing.Optional[str] = pydantic_v1.Field(alias="lastName") + company: typing.Optional[str] + anonymous_id: typing.Optional[str] = pydantic_v1.Field(alias="anonymousId") def json(self, **kwargs: typing.Any) -> str: kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().dict(**kwargs_with_defaults) + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) class Config: frozen = True smart_union = True allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.forbid json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/superagent/types/app_models_request_datasource.py b/src/superagent/types/app_models_request_datasource.py index 1209e25..987c0bc 100644 --- a/src/superagent/types/app_models_request_datasource.py +++ b/src/superagent/types/app_models_request_datasource.py @@ -4,24 +4,20 @@ import typing from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 from .embeddings_model_provider import EmbeddingsModelProvider -try: - import pydantic.v1 as pydantic # type: ignore -except ImportError: - import pydantic # type: ignore - -class AppModelsRequestDatasource(pydantic.BaseModel): +class AppModelsRequestDatasource(pydantic_v1.BaseModel): name: str - description: typing.Optional[str] = None + description: typing.Optional[str] type: str - content: typing.Optional[str] = None - url: typing.Optional[str] = None - metadata: typing.Optional[typing.Dict[str, typing.Any]] = None - vector_db_id: typing.Optional[str] = pydantic.Field(alias="vectorDbId", default=None) - embeddings_model_provider: typing.Optional[EmbeddingsModelProvider] = pydantic.Field( - alias="embeddingsModelProvider", default=None + content: typing.Optional[str] + url: typing.Optional[str] + metadata: typing.Optional[typing.Dict[str, typing.Any]] + vector_db_id: typing.Optional[str] = pydantic_v1.Field(alias="vectorDbId") + embeddings_model_provider: typing.Optional[EmbeddingsModelProvider] = pydantic_v1.Field( + alias="embeddingsModelProvider" ) def json(self, **kwargs: typing.Any) -> str: @@ -29,11 +25,17 @@ def json(self, **kwargs: typing.Any) -> str: return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - 
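The rewritten dict() override recurs in every model in this patch, so its effect is worth spelling out once: a field survives serialization if it was explicitly set (the exclude_unset dump) or carries a non-None value (the exclude_none dump), and the two dumps are merged with deep_union_pydantic_dicts. An explicitly passed None still serializes, since it appears in the exclude_unset dump. A small usage sketch against the AppModelsRequestApiUser hunk above; the values are invented.

from superagent.types.app_models_request_api_user import AppModelsRequestApiUser

user = AppModelsRequestApiUser(email="jane@example.com", firstName="Jane")

# lastName, company and anonymousId were never set and are None, so they are
# absent from both the exclude_unset and the exclude_none dump; the deep union
# keeps only the two populated fields, keyed by their wire aliases.
assert user.dict() == {"email": "jane@example.com", "firstName": "Jane"}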
kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().dict(**kwargs_with_defaults) + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) class Config: frozen = True smart_union = True allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.forbid json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/superagent/types/app_models_request_llm.py b/src/superagent/types/app_models_request_llm.py index 966f7aa..47dc40e 100644 --- a/src/superagent/types/app_models_request_llm.py +++ b/src/superagent/types/app_models_request_llm.py @@ -4,28 +4,30 @@ import typing from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -try: - import pydantic.v1 as pydantic # type: ignore -except ImportError: - import pydantic # type: ignore - -class AppModelsRequestLlm(pydantic.BaseModel): +class AppModelsRequestLlm(pydantic_v1.BaseModel): provider: str - api_key: str = pydantic.Field(alias="apiKey") - options: typing.Optional[typing.Dict[str, typing.Any]] = None + api_key: str = pydantic_v1.Field(alias="apiKey") + options: typing.Optional[typing.Dict[str, typing.Any]] def json(self, **kwargs: typing.Any) -> str: kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().dict(**kwargs_with_defaults) + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) class Config: frozen = True smart_union = True allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.forbid json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/superagent/types/app_models_request_vector_db.py b/src/superagent/types/app_models_request_vector_db.py index d421e8b..b3c3539 100644 --- a/src/superagent/types/app_models_request_vector_db.py +++ b/src/superagent/types/app_models_request_vector_db.py @@ -4,15 +4,11 @@ import typing from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 from .vector_db_provider import VectorDbProvider -try: - import pydantic.v1 as pydantic # type: ignore -except ImportError: - import pydantic # type: ignore - -class AppModelsRequestVectorDb(pydantic.BaseModel): +class AppModelsRequestVectorDb(pydantic_v1.BaseModel): provider: VectorDbProvider options: typing.Dict[str, typing.Any] @@ -21,10 +17,15 @@ def json(self, **kwargs: typing.Any) -> str: return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().dict(**kwargs_with_defaults) + 
kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) class Config: frozen = True smart_union = True + extra = pydantic_v1.Extra.forbid json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/superagent/types/app_models_request_workflow.py b/src/superagent/types/app_models_request_workflow.py index 534748b..9ff1830 100644 --- a/src/superagent/types/app_models_request_workflow.py +++ b/src/superagent/types/app_models_request_workflow.py @@ -4,14 +4,10 @@ import typing from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -try: - import pydantic.v1 as pydantic # type: ignore -except ImportError: - import pydantic # type: ignore - -class AppModelsRequestWorkflow(pydantic.BaseModel): +class AppModelsRequestWorkflow(pydantic_v1.BaseModel): name: str description: str @@ -20,10 +16,15 @@ def json(self, **kwargs: typing.Any) -> str: return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().dict(**kwargs_with_defaults) + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) class Config: frozen = True smart_union = True + extra = pydantic_v1.Extra.forbid json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/superagent/types/app_models_request_workflow_step.py b/src/superagent/types/app_models_request_workflow_step.py index 710c583..eafa3bd 100644 --- a/src/superagent/types/app_models_request_workflow_step.py +++ b/src/superagent/types/app_models_request_workflow_step.py @@ -4,27 +4,29 @@ import typing from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -try: - import pydantic.v1 as pydantic # type: ignore -except ImportError: - import pydantic # type: ignore - -class AppModelsRequestWorkflowStep(pydantic.BaseModel): +class AppModelsRequestWorkflowStep(pydantic_v1.BaseModel): order: int - agent_id: str = pydantic.Field(alias="agentId") + agent_id: str = pydantic_v1.Field(alias="agentId") def json(self, **kwargs: typing.Any) -> str: kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().dict(**kwargs_with_defaults) + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) class Config: frozen = True smart_union = True allow_population_by_field_name = True 
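Every Config block also gains extra = pydantic_v1.Extra.forbid, which turns previously ignored unexpected keys into validation errors. A minimal sketch with the AppModelsRequestWorkflow model from the hunk above; the field values and the "owner" key are illustrative.

from superagent.core.pydantic_utilities import pydantic_v1
from superagent.types.app_models_request_workflow import AppModelsRequestWorkflow

AppModelsRequestWorkflow(name="Support flow", description="Routes new tickets")  # still accepted

try:
    AppModelsRequestWorkflow(name="Support flow", description="Routes new tickets", owner="me")
except pydantic_v1.ValidationError:
    # unknown fields such as "owner" are now rejected instead of being dropped
    print("extra fields forbidden")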
+ populate_by_name = True + extra = pydantic_v1.Extra.forbid json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/superagent/types/app_models_response_agent.py b/src/superagent/types/app_models_response_agent.py index 4b3e130..d3417d0 100644 --- a/src/superagent/types/app_models_response_agent.py +++ b/src/superagent/types/app_models_response_agent.py @@ -4,27 +4,28 @@ import typing from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 from .prisma_models_agent import PrismaModelsAgent -try: - import pydantic.v1 as pydantic # type: ignore -except ImportError: - import pydantic # type: ignore - -class AppModelsResponseAgent(pydantic.BaseModel): +class AppModelsResponseAgent(pydantic_v1.BaseModel): success: bool - data: typing.Optional[PrismaModelsAgent] = None + data: typing.Optional[PrismaModelsAgent] def json(self, **kwargs: typing.Any) -> str: kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().dict(**kwargs_with_defaults) + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) class Config: frozen = True smart_union = True + extra = pydantic_v1.Extra.forbid json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/superagent/types/app_models_response_agent_invoke.py b/src/superagent/types/app_models_response_agent_invoke.py index c8e5c64..c6dcafb 100644 --- a/src/superagent/types/app_models_response_agent_invoke.py +++ b/src/superagent/types/app_models_response_agent_invoke.py @@ -4,26 +4,27 @@ import typing from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -try: - import pydantic.v1 as pydantic # type: ignore -except ImportError: - import pydantic # type: ignore - -class AppModelsResponseAgentInvoke(pydantic.BaseModel): +class AppModelsResponseAgentInvoke(pydantic_v1.BaseModel): success: bool - data: typing.Optional[typing.Any] = None + data: typing.Optional[typing.Any] def json(self, **kwargs: typing.Any) -> str: kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().dict(**kwargs_with_defaults) + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) class Config: frozen = True smart_union = True + extra = pydantic_v1.Extra.forbid json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/superagent/types/app_models_response_api_key.py b/src/superagent/types/app_models_response_api_key.py index bf803fb..13f61b1 100644 --- 
a/src/superagent/types/app_models_response_api_key.py +++ b/src/superagent/types/app_models_response_api_key.py @@ -4,27 +4,28 @@ import typing from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 from .prisma_models_api_key import PrismaModelsApiKey -try: - import pydantic.v1 as pydantic # type: ignore -except ImportError: - import pydantic # type: ignore - -class AppModelsResponseApiKey(pydantic.BaseModel): +class AppModelsResponseApiKey(pydantic_v1.BaseModel): success: bool - data: typing.Optional[PrismaModelsApiKey] = None + data: typing.Optional[PrismaModelsApiKey] def json(self, **kwargs: typing.Any) -> str: kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().dict(**kwargs_with_defaults) + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) class Config: frozen = True smart_union = True + extra = pydantic_v1.Extra.forbid json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/superagent/types/app_models_response_api_user.py b/src/superagent/types/app_models_response_api_user.py index e793e7c..380e5ab 100644 --- a/src/superagent/types/app_models_response_api_user.py +++ b/src/superagent/types/app_models_response_api_user.py @@ -4,27 +4,28 @@ import typing from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 from .prisma_models_api_user import PrismaModelsApiUser -try: - import pydantic.v1 as pydantic # type: ignore -except ImportError: - import pydantic # type: ignore - -class AppModelsResponseApiUser(pydantic.BaseModel): +class AppModelsResponseApiUser(pydantic_v1.BaseModel): success: bool - data: typing.Optional[PrismaModelsApiUser] = None + data: typing.Optional[PrismaModelsApiUser] def json(self, **kwargs: typing.Any) -> str: kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().dict(**kwargs_with_defaults) + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) class Config: frozen = True smart_union = True + extra = pydantic_v1.Extra.forbid json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/superagent/types/app_models_response_datasource.py b/src/superagent/types/app_models_response_datasource.py index 09ac454..178f599 100644 --- a/src/superagent/types/app_models_response_datasource.py +++ b/src/superagent/types/app_models_response_datasource.py @@ -4,27 +4,28 @@ import typing from ..core.datetime_utils import 
serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 from .prisma_models_datasource import PrismaModelsDatasource -try: - import pydantic.v1 as pydantic # type: ignore -except ImportError: - import pydantic # type: ignore - -class AppModelsResponseDatasource(pydantic.BaseModel): +class AppModelsResponseDatasource(pydantic_v1.BaseModel): success: bool - data: typing.Optional[PrismaModelsDatasource] = None + data: typing.Optional[PrismaModelsDatasource] def json(self, **kwargs: typing.Any) -> str: kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().dict(**kwargs_with_defaults) + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) class Config: frozen = True smart_union = True + extra = pydantic_v1.Extra.forbid json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/superagent/types/app_models_response_llm.py b/src/superagent/types/app_models_response_llm.py index 0a5f094..6e899d4 100644 --- a/src/superagent/types/app_models_response_llm.py +++ b/src/superagent/types/app_models_response_llm.py @@ -4,27 +4,28 @@ import typing from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 from .prisma_models_llm import PrismaModelsLlm -try: - import pydantic.v1 as pydantic # type: ignore -except ImportError: - import pydantic # type: ignore - -class AppModelsResponseLlm(pydantic.BaseModel): +class AppModelsResponseLlm(pydantic_v1.BaseModel): success: bool - data: typing.Optional[PrismaModelsLlm] = None + data: typing.Optional[PrismaModelsLlm] def json(self, **kwargs: typing.Any) -> str: kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().dict(**kwargs_with_defaults) + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) class Config: frozen = True smart_union = True + extra = pydantic_v1.Extra.forbid json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/superagent/types/app_models_response_tool.py b/src/superagent/types/app_models_response_tool.py index d9be61a..2df9b7f 100644 --- a/src/superagent/types/app_models_response_tool.py +++ b/src/superagent/types/app_models_response_tool.py @@ -4,27 +4,28 @@ import typing from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 from .prisma_models_tool import PrismaModelsTool -try: - import pydantic.v1 as pydantic # type: ignore -except ImportError: - import 
pydantic # type: ignore - -class AppModelsResponseTool(pydantic.BaseModel): +class AppModelsResponseTool(pydantic_v1.BaseModel): success: bool - data: typing.Optional[PrismaModelsTool] = None + data: typing.Optional[PrismaModelsTool] def json(self, **kwargs: typing.Any) -> str: kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().dict(**kwargs_with_defaults) + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) class Config: frozen = True smart_union = True + extra = pydantic_v1.Extra.forbid json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/superagent/types/app_models_response_vector_db.py b/src/superagent/types/app_models_response_vector_db.py index 817cbe8..bb0f20d 100644 --- a/src/superagent/types/app_models_response_vector_db.py +++ b/src/superagent/types/app_models_response_vector_db.py @@ -4,27 +4,28 @@ import typing from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 from .prisma_models_vector_db import PrismaModelsVectorDb -try: - import pydantic.v1 as pydantic # type: ignore -except ImportError: - import pydantic # type: ignore - -class AppModelsResponseVectorDb(pydantic.BaseModel): +class AppModelsResponseVectorDb(pydantic_v1.BaseModel): success: bool - data: typing.Optional[PrismaModelsVectorDb] = None + data: typing.Optional[PrismaModelsVectorDb] def json(self, **kwargs: typing.Any) -> str: kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().dict(**kwargs_with_defaults) + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) class Config: frozen = True smart_union = True + extra = pydantic_v1.Extra.forbid json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/superagent/types/app_models_response_workflow.py b/src/superagent/types/app_models_response_workflow.py index eeca527..2f8bfdd 100644 --- a/src/superagent/types/app_models_response_workflow.py +++ b/src/superagent/types/app_models_response_workflow.py @@ -4,27 +4,28 @@ import typing from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 from .prisma_models_workflow import PrismaModelsWorkflow -try: - import pydantic.v1 as pydantic # type: ignore -except ImportError: - import pydantic # type: ignore - -class AppModelsResponseWorkflow(pydantic.BaseModel): +class AppModelsResponseWorkflow(pydantic_v1.BaseModel): success: bool - data: 
typing.Optional[PrismaModelsWorkflow] = None + data: typing.Optional[PrismaModelsWorkflow] def json(self, **kwargs: typing.Any) -> str: kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().dict(**kwargs_with_defaults) + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) class Config: frozen = True smart_union = True + extra = pydantic_v1.Extra.forbid json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/superagent/types/app_models_response_workflow_step.py b/src/superagent/types/app_models_response_workflow_step.py index 5861b36..25106ff 100644 --- a/src/superagent/types/app_models_response_workflow_step.py +++ b/src/superagent/types/app_models_response_workflow_step.py @@ -4,27 +4,28 @@ import typing from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 from .prisma_models_workflow_step import PrismaModelsWorkflowStep -try: - import pydantic.v1 as pydantic # type: ignore -except ImportError: - import pydantic # type: ignore - -class AppModelsResponseWorkflowStep(pydantic.BaseModel): +class AppModelsResponseWorkflowStep(pydantic_v1.BaseModel): success: bool - data: typing.Optional[PrismaModelsWorkflowStep] = None + data: typing.Optional[PrismaModelsWorkflowStep] def json(self, **kwargs: typing.Any) -> str: kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().dict(**kwargs_with_defaults) + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) class Config: frozen = True smart_union = True + extra = pydantic_v1.Extra.forbid json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/superagent/types/datasource_list.py b/src/superagent/types/datasource_list.py index 42bb36f..44c03a6 100644 --- a/src/superagent/types/datasource_list.py +++ b/src/superagent/types/datasource_list.py @@ -4,17 +4,13 @@ import typing from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 from .prisma_models_datasource import PrismaModelsDatasource -try: - import pydantic.v1 as pydantic # type: ignore -except ImportError: - import pydantic # type: ignore - -class DatasourceList(pydantic.BaseModel): +class DatasourceList(pydantic_v1.BaseModel): success: bool - data: typing.Optional[typing.List[PrismaModelsDatasource]] = None + data: typing.Optional[typing.List[PrismaModelsDatasource]] total_pages: int def json(self, **kwargs: typing.Any) -> str: @@ -22,10 +18,15 @@ def 
json(self, **kwargs: typing.Any) -> str: return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().dict(**kwargs_with_defaults) + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) class Config: frozen = True smart_union = True + extra = pydantic_v1.Extra.forbid json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/superagent/types/function_definition.py b/src/superagent/types/function_definition.py index 844011e..7a4c9c5 100644 --- a/src/superagent/types/function_definition.py +++ b/src/superagent/types/function_definition.py @@ -4,27 +4,28 @@ import typing from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -try: - import pydantic.v1 as pydantic # type: ignore -except ImportError: - import pydantic # type: ignore - -class FunctionDefinition(pydantic.BaseModel): - name: typing.Optional[str] = None - description: typing.Optional[str] = None - parameters: typing.Optional[typing.Dict[str, typing.Any]] = None +class FunctionDefinition(pydantic_v1.BaseModel): + name: typing.Optional[str] + description: typing.Optional[str] + parameters: typing.Optional[typing.Dict[str, typing.Any]] def json(self, **kwargs: typing.Any) -> str: kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().dict(**kwargs_with_defaults) + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) class Config: frozen = True smart_union = True + extra = pydantic_v1.Extra.forbid json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/superagent/types/http_validation_error.py b/src/superagent/types/http_validation_error.py index 267b46a..8e3430f 100644 --- a/src/superagent/types/http_validation_error.py +++ b/src/superagent/types/http_validation_error.py @@ -4,26 +4,27 @@ import typing from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 from .validation_error import ValidationError -try: - import pydantic.v1 as pydantic # type: ignore -except ImportError: - import pydantic # type: ignore - -class HttpValidationError(pydantic.BaseModel): - detail: typing.Optional[typing.List[ValidationError]] = None +class HttpValidationError(pydantic_v1.BaseModel): + detail: typing.Optional[typing.List[ValidationError]] def json(self, **kwargs: typing.Any) -> str: kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - 
kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().dict(**kwargs_with_defaults) + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) class Config: frozen = True smart_union = True + extra = pydantic_v1.Extra.forbid json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/superagent/types/llm_list.py b/src/superagent/types/llm_list.py index c4d1cb5..2c5a244 100644 --- a/src/superagent/types/llm_list.py +++ b/src/superagent/types/llm_list.py @@ -4,27 +4,28 @@ import typing from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 from .prisma_models_llm import PrismaModelsLlm -try: - import pydantic.v1 as pydantic # type: ignore -except ImportError: - import pydantic # type: ignore - -class LlmList(pydantic.BaseModel): +class LlmList(pydantic_v1.BaseModel): success: bool - data: typing.Optional[typing.List[PrismaModelsLlm]] = None + data: typing.Optional[typing.List[PrismaModelsLlm]] def json(self, **kwargs: typing.Any) -> str: kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().dict(**kwargs_with_defaults) + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) class Config: frozen = True smart_union = True + extra = pydantic_v1.Extra.forbid json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/superagent/types/llm_params.py b/src/superagent/types/llm_params.py index 85f1a2d..b8adbb1 100644 --- a/src/superagent/types/llm_params.py +++ b/src/superagent/types/llm_params.py @@ -4,26 +4,27 @@ import typing from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -try: - import pydantic.v1 as pydantic # type: ignore -except ImportError: - import pydantic # type: ignore - -class LlmParams(pydantic.BaseModel): - max_tokens: typing.Optional[int] = None - temperature: typing.Optional[float] = None +class LlmParams(pydantic_v1.BaseModel): + max_tokens: typing.Optional[int] + temperature: typing.Optional[float] def json(self, **kwargs: typing.Any) -> str: kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().dict(**kwargs_with_defaults) + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + 
super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) class Config: frozen = True smart_union = True + extra = pydantic_v1.Extra.forbid json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/superagent/types/open_ai_assistant_parameters.py b/src/superagent/types/open_ai_assistant_parameters.py index 649d53d..98d07e5 100644 --- a/src/superagent/types/open_ai_assistant_parameters.py +++ b/src/superagent/types/open_ai_assistant_parameters.py @@ -4,29 +4,31 @@ import typing from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 from .open_ai_assistant_parameters_tools_item import OpenAiAssistantParametersToolsItem -try: - import pydantic.v1 as pydantic # type: ignore -except ImportError: - import pydantic # type: ignore - -class OpenAiAssistantParameters(pydantic.BaseModel): - metadata: typing.Optional[typing.Dict[str, typing.Any]] = None - file_ids: typing.Optional[typing.List[str]] = pydantic.Field(alias="fileIds", default=None) - tools: typing.Optional[typing.List[OpenAiAssistantParametersToolsItem]] = None +class OpenAiAssistantParameters(pydantic_v1.BaseModel): + metadata: typing.Optional[typing.Dict[str, typing.Any]] + file_ids: typing.Optional[typing.List[str]] = pydantic_v1.Field(alias="fileIds") + tools: typing.Optional[typing.List[OpenAiAssistantParametersToolsItem]] def json(self, **kwargs: typing.Any) -> str: kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().dict(**kwargs_with_defaults) + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) class Config: frozen = True smart_union = True allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.forbid json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/superagent/types/open_ai_assistant_parameters_tools_item.py b/src/superagent/types/open_ai_assistant_parameters_tools_item.py index e561140..d2125dd 100644 --- a/src/superagent/types/open_ai_assistant_parameters_tools_item.py +++ b/src/superagent/types/open_ai_assistant_parameters_tools_item.py @@ -2,38 +2,79 @@ from __future__ import annotations +import datetime as dt import typing -from .tool_assistant_tools_code import ToolAssistantToolsCode -from .tool_assistant_tools_function import ToolAssistantToolsFunction -from .tool_assistant_tools_retrieval import ToolAssistantToolsRetrieval +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .function_definition import FunctionDefinition -class OpenAiAssistantParametersToolsItem_CodeInterpreter(ToolAssistantToolsCode): - type: typing.Literal["code_interpreter"] +class OpenAiAssistantParametersToolsItem_CodeInterpreter(pydantic_v1.BaseModel): + type: typing.Literal["code_interpreter"] = "code_interpreter" + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": 
True, **kwargs} + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) class Config: frozen = True smart_union = True - allow_population_by_field_name = True + extra = pydantic_v1.Extra.forbid + json_encoders = {dt.datetime: serialize_datetime} + + +class OpenAiAssistantParametersToolsItem_Retrieval(pydantic_v1.BaseModel): + type: typing.Literal["retrieval"] = "retrieval" + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + return super().json(**kwargs_with_defaults) -class OpenAiAssistantParametersToolsItem_Retrieval(ToolAssistantToolsRetrieval): - type: typing.Literal["retrieval"] + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) class Config: frozen = True smart_union = True - allow_population_by_field_name = True + extra = pydantic_v1.Extra.forbid + json_encoders = {dt.datetime: serialize_datetime} + + +class OpenAiAssistantParametersToolsItem_Function(pydantic_v1.BaseModel): + function: typing.Optional[FunctionDefinition] + type: typing.Literal["function"] = "function" + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + return super().json(**kwargs_with_defaults) + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} -class OpenAiAssistantParametersToolsItem_Function(ToolAssistantToolsFunction): - type: typing.Literal["function"] + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) class Config: frozen = True smart_union = True - allow_population_by_field_name = True + extra = pydantic_v1.Extra.forbid + json_encoders = {dt.datetime: serialize_datetime} OpenAiAssistantParametersToolsItem = typing.Union[ diff --git a/src/superagent/types/prisma_models_agent.py b/src/superagent/types/prisma_models_agent.py index bd39c28..1df2a3e 100644 --- a/src/superagent/types/prisma_models_agent.py +++ b/src/superagent/types/prisma_models_agent.py @@ -6,16 +6,12 @@ import typing from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 from .agent_type import AgentType from .llm_model import LlmModel -try: - import pydantic.v1 as pydantic # type: ignore -except ImportError: - import pydantic # type: ignore - -class PrismaModelsAgent(pydantic.BaseModel): +class PrismaModelsAgent(pydantic_v1.BaseModel): """ Represents a Agent record """ @@ -23,37 +19,41 @@ class 
PrismaModelsAgent(pydantic.BaseModel): id: str type: AgentType name: str - avatar: typing.Optional[str] = None - initial_message: typing.Optional[str] = pydantic.Field(alias="initialMessage", default=None) + avatar: typing.Optional[str] + initial_message: typing.Optional[str] = pydantic_v1.Field(alias="initialMessage") description: str - is_active: bool = pydantic.Field(alias="isActive") - created_at: dt.datetime = pydantic.Field(alias="createdAt") - updated_at: dt.datetime = pydantic.Field(alias="updatedAt") - llms: typing.Optional[typing.List[PrismaModelsAgentLlm]] = None - llm_model: typing.Optional[LlmModel] = pydantic.Field(alias="llmModel", default=None) - prompt: typing.Optional[str] = None - api_user_id: str = pydantic.Field(alias="apiUserId") - api_user: typing.Optional[PrismaModelsApiUser] = pydantic.Field(alias="apiUser", default=None) - datasources: typing.Optional[typing.List[PrismaModelsAgentDatasource]] = None - tools: typing.Optional[typing.List[PrismaModelsAgentTool]] = None - workflow_steps: typing.Optional[typing.List[PrismaModelsWorkflowStep]] = pydantic.Field( - alias="workflowSteps", default=None - ) - metadata: typing.Optional[typing.Any] = None - output_schema: typing.Optional[str] = pydantic.Field(alias="outputSchema", default=None) + is_active: bool = pydantic_v1.Field(alias="isActive") + created_at: dt.datetime = pydantic_v1.Field(alias="createdAt") + updated_at: dt.datetime = pydantic_v1.Field(alias="updatedAt") + llms: typing.Optional[typing.List[PrismaModelsAgentLlm]] + llm_model: typing.Optional[LlmModel] = pydantic_v1.Field(alias="llmModel") + prompt: typing.Optional[str] + api_user_id: str = pydantic_v1.Field(alias="apiUserId") + api_user: typing.Optional[PrismaModelsApiUser] = pydantic_v1.Field(alias="apiUser") + datasources: typing.Optional[typing.List[PrismaModelsAgentDatasource]] + tools: typing.Optional[typing.List[PrismaModelsAgentTool]] + workflow_steps: typing.Optional[typing.List[PrismaModelsWorkflowStep]] = pydantic_v1.Field(alias="workflowSteps") + metadata: typing.Optional[typing.Any] + output_schema: typing.Optional[str] = pydantic_v1.Field(alias="outputSchema") def json(self, **kwargs: typing.Any) -> str: kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().dict(**kwargs_with_defaults) + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) class Config: frozen = True smart_union = True allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.forbid json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/superagent/types/prisma_models_agent_datasource.py b/src/superagent/types/prisma_models_agent_datasource.py index b8ddd60..b7c84b1 100644 --- a/src/superagent/types/prisma_models_agent_datasource.py +++ b/src/superagent/types/prisma_models_agent_datasource.py @@ -6,37 +6,39 @@ import typing from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -try: - import pydantic.v1 as pydantic # 
type: ignore -except ImportError: - import pydantic # type: ignore - -class PrismaModelsAgentDatasource(pydantic.BaseModel): +class PrismaModelsAgentDatasource(pydantic_v1.BaseModel): """ Represents a AgentDatasource record """ - agent_id: str = pydantic.Field(alias="agentId") - datasource_id: str = pydantic.Field(alias="datasourceId") - agent: typing.Optional[PrismaModelsAgent] = None - datasource: typing.Optional[PrismaModelsDatasource] = None - created_at: dt.datetime = pydantic.Field(alias="createdAt") - updated_at: dt.datetime = pydantic.Field(alias="updatedAt") + agent_id: str = pydantic_v1.Field(alias="agentId") + datasource_id: str = pydantic_v1.Field(alias="datasourceId") + agent: typing.Optional[PrismaModelsAgent] + datasource: typing.Optional[PrismaModelsDatasource] + created_at: dt.datetime = pydantic_v1.Field(alias="createdAt") + updated_at: dt.datetime = pydantic_v1.Field(alias="updatedAt") def json(self, **kwargs: typing.Any) -> str: kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().dict(**kwargs_with_defaults) + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) class Config: frozen = True smart_union = True allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.forbid json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/superagent/types/prisma_models_agent_llm.py b/src/superagent/types/prisma_models_agent_llm.py index 90ebb1d..57d8bd3 100644 --- a/src/superagent/types/prisma_models_agent_llm.py +++ b/src/superagent/types/prisma_models_agent_llm.py @@ -6,37 +6,39 @@ import typing from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -try: - import pydantic.v1 as pydantic # type: ignore -except ImportError: - import pydantic # type: ignore - -class PrismaModelsAgentLlm(pydantic.BaseModel): +class PrismaModelsAgentLlm(pydantic_v1.BaseModel): """ Represents a AgentLLM record """ - agent_id: str = pydantic.Field(alias="agentId") - llm_id: str = pydantic.Field(alias="llmId") - agent: typing.Optional[PrismaModelsAgent] = None - llm: typing.Optional[PrismaModelsLlm] = None - created_at: dt.datetime = pydantic.Field(alias="createdAt") - updated_at: dt.datetime = pydantic.Field(alias="updatedAt") + agent_id: str = pydantic_v1.Field(alias="agentId") + llm_id: str = pydantic_v1.Field(alias="llmId") + agent: typing.Optional[PrismaModelsAgent] + llm: typing.Optional[PrismaModelsLlm] + created_at: dt.datetime = pydantic_v1.Field(alias="createdAt") + updated_at: dt.datetime = pydantic_v1.Field(alias="updatedAt") def json(self, **kwargs: typing.Any) -> str: kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().dict(**kwargs_with_defaults) + 
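As the open_ai_assistant_parameters_tools_item.py hunks above show, the tool variants stop inheriting from the ToolAssistantTools* models and become standalone models whose type field defaults to its Literal value. A hypothetical construction sketch, with identifiers taken from those hunks; the file id and function definition are invented, and the exact membership of the OpenAiAssistantParametersToolsItem union is assumed from context.

from superagent.types.function_definition import FunctionDefinition
from superagent.types.open_ai_assistant_parameters import OpenAiAssistantParameters
from superagent.types.open_ai_assistant_parameters_tools_item import (
    OpenAiAssistantParametersToolsItem_CodeInterpreter,
    OpenAiAssistantParametersToolsItem_Function,
)

params = OpenAiAssistantParameters(
    fileIds=["file-123"],  # passed via the "fileIds" alias
    tools=[
        # "type" now has a Literal default, so callers no longer pass it explicitly
        OpenAiAssistantParametersToolsItem_CodeInterpreter(),
        OpenAiAssistantParametersToolsItem_Function(
            function=FunctionDefinition(name="lookup_order", parameters={"type": "object"})
        ),
    ],
)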
kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) class Config: frozen = True smart_union = True allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.forbid json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/superagent/types/prisma_models_agent_tool.py b/src/superagent/types/prisma_models_agent_tool.py index d697dc7..9300243 100644 --- a/src/superagent/types/prisma_models_agent_tool.py +++ b/src/superagent/types/prisma_models_agent_tool.py @@ -6,37 +6,39 @@ import typing from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -try: - import pydantic.v1 as pydantic # type: ignore -except ImportError: - import pydantic # type: ignore - -class PrismaModelsAgentTool(pydantic.BaseModel): +class PrismaModelsAgentTool(pydantic_v1.BaseModel): """ Represents a AgentTool record """ - agent_id: str = pydantic.Field(alias="agentId") - tool_id: str = pydantic.Field(alias="toolId") - agent: typing.Optional[PrismaModelsAgent] = None - tool: typing.Optional[PrismaModelsTool] = None - created_at: dt.datetime = pydantic.Field(alias="createdAt") - updated_at: dt.datetime = pydantic.Field(alias="updatedAt") + agent_id: str = pydantic_v1.Field(alias="agentId") + tool_id: str = pydantic_v1.Field(alias="toolId") + agent: typing.Optional[PrismaModelsAgent] + tool: typing.Optional[PrismaModelsTool] + created_at: dt.datetime = pydantic_v1.Field(alias="createdAt") + updated_at: dt.datetime = pydantic_v1.Field(alias="updatedAt") def json(self, **kwargs: typing.Any) -> str: kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().dict(**kwargs_with_defaults) + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) class Config: frozen = True smart_union = True allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.forbid json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/superagent/types/prisma_models_api_key.py b/src/superagent/types/prisma_models_api_key.py index f345c34..a0ea419 100644 --- a/src/superagent/types/prisma_models_api_key.py +++ b/src/superagent/types/prisma_models_api_key.py @@ -6,38 +6,40 @@ import typing from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -try: - import pydantic.v1 as pydantic # type: ignore -except ImportError: - import pydantic # type: ignore - -class PrismaModelsApiKey(pydantic.BaseModel): +class PrismaModelsApiKey(pydantic_v1.BaseModel): """ Represents a ApiKey record """ id: str name: str - display_api_key: str = pydantic.Field(alias="displayApiKey") - created_at: dt.datetime = 
pydantic.Field(alias="createdAt") - updated_at: dt.datetime = pydantic.Field(alias="updatedAt") - api_user_id: str = pydantic.Field(alias="apiUserId") - api_user: typing.Optional[PrismaModelsApiUser] = pydantic.Field(alias="apiUser", default=None) + display_api_key: str = pydantic_v1.Field(alias="displayApiKey") + created_at: dt.datetime = pydantic_v1.Field(alias="createdAt") + updated_at: dt.datetime = pydantic_v1.Field(alias="updatedAt") + api_user_id: str = pydantic_v1.Field(alias="apiUserId") + api_user: typing.Optional[PrismaModelsApiUser] = pydantic_v1.Field(alias="apiUser") def json(self, **kwargs: typing.Any) -> str: kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().dict(**kwargs_with_defaults) + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) class Config: frozen = True smart_union = True allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.forbid json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/superagent/types/prisma_models_api_user.py b/src/superagent/types/prisma_models_api_user.py index 6073d31..4f6e30b 100644 --- a/src/superagent/types/prisma_models_api_user.py +++ b/src/superagent/types/prisma_models_api_user.py @@ -6,46 +6,46 @@ import typing from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -try: - import pydantic.v1 as pydantic # type: ignore -except ImportError: - import pydantic # type: ignore - -class PrismaModelsApiUser(pydantic.BaseModel): +class PrismaModelsApiUser(pydantic_v1.BaseModel): """ Represents a ApiUser record """ id: str - token: typing.Optional[str] = None - email: typing.Optional[str] = None - created_at: dt.datetime = pydantic.Field(alias="createdAt") - updated_at: dt.datetime = pydantic.Field(alias="updatedAt") - agents: typing.Optional[typing.List[PrismaModelsAgent]] = None - llms: typing.Optional[typing.List[PrismaModelsLlm]] = None - datasources: typing.Optional[typing.List[PrismaModelsDatasource]] = None - tools: typing.Optional[typing.List[PrismaModelsTool]] = None - workflows: typing.Optional[typing.List[PrismaModelsWorkflow]] = None - vector_db: typing.Optional[typing.List[PrismaModelsVectorDb]] = pydantic.Field(alias="vectorDb", default=None) - workflow_configs: typing.Optional[typing.List[WorkflowConfig]] = pydantic.Field( - alias="workflowConfigs", default=None - ) - api_keys: typing.Optional[typing.List[PrismaModelsApiKey]] = pydantic.Field(alias="apiKeys", default=None) + token: typing.Optional[str] + email: typing.Optional[str] + created_at: dt.datetime = pydantic_v1.Field(alias="createdAt") + updated_at: dt.datetime = pydantic_v1.Field(alias="updatedAt") + agents: typing.Optional[typing.List[PrismaModelsAgent]] + llms: typing.Optional[typing.List[PrismaModelsLlm]] + datasources: typing.Optional[typing.List[PrismaModelsDatasource]] + tools: typing.Optional[typing.List[PrismaModelsTool]] + workflows: typing.Optional[typing.List[PrismaModelsWorkflow]] + 
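The Prisma-backed models keep their camelCase wire aliases while exposing snake_case attributes, and the json()/dict() overrides emit aliases by default. A small round-trip sketch with PrismaModelsApiKey from the hunk above; all values are invented.

import datetime as dt

from superagent.types.prisma_models_api_key import PrismaModelsApiKey

# Wire payloads use the camelCase aliases; parse_obj maps them onto the
# snake_case fields declared in the model.
record = PrismaModelsApiKey.parse_obj(
    {
        "id": "key_1",
        "name": "ci",
        "displayApiKey": "sk-****1234",
        "createdAt": "2024-05-23T06:41:23Z",
        "updatedAt": "2024-05-23T06:41:23Z",
        "apiUserId": "user_1",
    }
)

assert record.display_api_key == "sk-****1234"
assert isinstance(record.created_at, dt.datetime)
# json() re-emits the aliases and serializes datetimes via serialize_datetime
print(record.json())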
vector_db: typing.Optional[typing.List[PrismaModelsVectorDb]] = pydantic_v1.Field(alias="vectorDb") + workflow_configs: typing.Optional[typing.List[WorkflowConfig]] = pydantic_v1.Field(alias="workflowConfigs") + api_keys: typing.Optional[typing.List[PrismaModelsApiKey]] = pydantic_v1.Field(alias="apiKeys") def json(self, **kwargs: typing.Any) -> str: kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().dict(**kwargs_with_defaults) + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) class Config: frozen = True smart_union = True allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.forbid json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/superagent/types/prisma_models_datasource.py b/src/superagent/types/prisma_models_datasource.py index fb813f9..74bf68a 100644 --- a/src/superagent/types/prisma_models_datasource.py +++ b/src/superagent/types/prisma_models_datasource.py @@ -6,48 +6,50 @@ import typing from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 from .datasource_status import DatasourceStatus from .datasource_type import DatasourceType -try: - import pydantic.v1 as pydantic # type: ignore -except ImportError: - import pydantic # type: ignore - -class PrismaModelsDatasource(pydantic.BaseModel): +class PrismaModelsDatasource(pydantic_v1.BaseModel): """ Represents a Datasource record """ id: str name: str - content: typing.Optional[str] = None - description: typing.Optional[str] = None - url: typing.Optional[str] = None + content: typing.Optional[str] + description: typing.Optional[str] + url: typing.Optional[str] type: DatasourceType - api_user_id: str = pydantic.Field(alias="apiUserId") - api_user: typing.Optional[PrismaModelsApiUser] = pydantic.Field(alias="apiUser", default=None) - created_at: dt.datetime = pydantic.Field(alias="createdAt") - updated_at: dt.datetime = pydantic.Field(alias="updatedAt") - metadata: typing.Optional[str] = None + api_user_id: str = pydantic_v1.Field(alias="apiUserId") + api_user: typing.Optional[PrismaModelsApiUser] = pydantic_v1.Field(alias="apiUser") + created_at: dt.datetime = pydantic_v1.Field(alias="createdAt") + updated_at: dt.datetime = pydantic_v1.Field(alias="updatedAt") + metadata: typing.Optional[str] status: DatasourceStatus - datasources: typing.Optional[typing.List[PrismaModelsAgentDatasource]] = None - vector_db: typing.Optional[PrismaModelsVectorDb] = pydantic.Field(alias="vectorDb", default=None) - vector_db_id: typing.Optional[str] = pydantic.Field(alias="vectorDbId", default=None) + datasources: typing.Optional[typing.List[PrismaModelsAgentDatasource]] + vector_db: typing.Optional[PrismaModelsVectorDb] = pydantic_v1.Field(alias="vectorDb") + vector_db_id: typing.Optional[str] = pydantic_v1.Field(alias="vectorDbId") def json(self, **kwargs: typing.Any) -> str: kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} return 
 
     def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().dict(**kwargs_with_defaults)
+        kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs}
+
+        return deep_union_pydantic_dicts(
+            super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none)
+        )
 
     class Config:
         frozen = True
         smart_union = True
         allow_population_by_field_name = True
+        populate_by_name = True
+        extra = pydantic_v1.Extra.forbid
         json_encoders = {dt.datetime: serialize_datetime}
diff --git a/src/superagent/types/prisma_models_llm.py b/src/superagent/types/prisma_models_llm.py
index a5fa74d..159bd73 100644
--- a/src/superagent/types/prisma_models_llm.py
+++ b/src/superagent/types/prisma_models_llm.py
@@ -6,41 +6,43 @@
 import typing
 
 from ..core.datetime_utils import serialize_datetime
+from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1
 from .llm_provider import LlmProvider
 
-try:
-    import pydantic.v1 as pydantic  # type: ignore
-except ImportError:
-    import pydantic  # type: ignore
-
 
-class PrismaModelsLlm(pydantic.BaseModel):
+class PrismaModelsLlm(pydantic_v1.BaseModel):
     """
     Represents a LLM record
    """
 
     id: str
     provider: LlmProvider
-    api_key: str = pydantic.Field(alias="apiKey")
-    options: typing.Optional[typing.Any] = None
-    agents: typing.Optional[typing.List[PrismaModelsAgentLlm]] = None
-    created_at: dt.datetime = pydantic.Field(alias="createdAt")
-    updated_at: dt.datetime = pydantic.Field(alias="updatedAt")
-    api_user_id: str = pydantic.Field(alias="apiUserId")
-    api_user: typing.Optional[PrismaModelsApiUser] = pydantic.Field(alias="apiUser", default=None)
+    api_key: str = pydantic_v1.Field(alias="apiKey")
+    options: typing.Optional[typing.Any]
+    agents: typing.Optional[typing.List[PrismaModelsAgentLlm]]
+    created_at: dt.datetime = pydantic_v1.Field(alias="createdAt")
+    updated_at: dt.datetime = pydantic_v1.Field(alias="updatedAt")
+    api_user_id: str = pydantic_v1.Field(alias="apiUserId")
+    api_user: typing.Optional[PrismaModelsApiUser] = pydantic_v1.Field(alias="apiUser")
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
         return super().json(**kwargs_with_defaults)
 
     def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().dict(**kwargs_with_defaults)
+        kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs}
+
+        return deep_union_pydantic_dicts(
+            super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none)
+        )
 
     class Config:
         frozen = True
         smart_union = True
         allow_population_by_field_name = True
+        populate_by_name = True
+        extra = pydantic_v1.Extra.forbid
         json_encoders = {dt.datetime: serialize_datetime}
diff --git a/src/superagent/types/prisma_models_tool.py b/src/superagent/types/prisma_models_tool.py
index 273a269..cee3b7b 100644
--- a/src/superagent/types/prisma_models_tool.py
+++ b/src/superagent/types/prisma_models_tool.py
@@ -6,15 +6,11 @@
 import typing
 
 from ..core.datetime_utils import serialize_datetime
+from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1
 from .tool_type import ToolType
 
-try:
-    import pydantic.v1 as pydantic  # type: ignore
-except ImportError:
-    import pydantic  # type: ignore
-
 
-class PrismaModelsTool(pydantic.BaseModel):
+class PrismaModelsTool(pydantic_v1.BaseModel):
     """
     Represents a Tool record
     """
@@ -23,27 +19,33 @@ class PrismaModelsTool(pydantic.BaseModel):
     name: str
     description: str
     type: ToolType
-    return_direct: bool = pydantic.Field(alias="returnDirect")
-    metadata: typing.Optional[str] = None
-    created_at: dt.datetime = pydantic.Field(alias="createdAt")
-    updated_at: dt.datetime = pydantic.Field(alias="updatedAt")
-    api_user_id: str = pydantic.Field(alias="apiUserId")
-    api_user: typing.Optional[PrismaModelsApiUser] = pydantic.Field(alias="apiUser", default=None)
-    tools: typing.Optional[typing.List[PrismaModelsAgentTool]] = None
-    tool_config: typing.Optional[typing.Any] = pydantic.Field(alias="toolConfig", default=None)
+    return_direct: bool = pydantic_v1.Field(alias="returnDirect")
+    metadata: typing.Optional[str]
+    created_at: dt.datetime = pydantic_v1.Field(alias="createdAt")
+    updated_at: dt.datetime = pydantic_v1.Field(alias="updatedAt")
+    api_user_id: str = pydantic_v1.Field(alias="apiUserId")
+    api_user: typing.Optional[PrismaModelsApiUser] = pydantic_v1.Field(alias="apiUser")
+    tools: typing.Optional[typing.List[PrismaModelsAgentTool]]
+    tool_config: typing.Optional[typing.Any] = pydantic_v1.Field(alias="toolConfig")
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
         return super().json(**kwargs_with_defaults)
 
     def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().dict(**kwargs_with_defaults)
+        kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs}
+
+        return deep_union_pydantic_dicts(
+            super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none)
+        )
 
     class Config:
         frozen = True
         smart_union = True
         allow_population_by_field_name = True
+        populate_by_name = True
+        extra = pydantic_v1.Extra.forbid
         json_encoders = {dt.datetime: serialize_datetime}
diff --git a/src/superagent/types/prisma_models_vector_db.py b/src/superagent/types/prisma_models_vector_db.py
index 32f3825..b0ace47 100644
--- a/src/superagent/types/prisma_models_vector_db.py
+++ b/src/superagent/types/prisma_models_vector_db.py
@@ -6,40 +6,42 @@
 import typing
 
 from ..core.datetime_utils import serialize_datetime
+from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1
 from .vector_db_provider import VectorDbProvider
 
-try:
-    import pydantic.v1 as pydantic  # type: ignore
-except ImportError:
-    import pydantic  # type: ignore
-
 
-class PrismaModelsVectorDb(pydantic.BaseModel):
+class PrismaModelsVectorDb(pydantic_v1.BaseModel):
     """
     Represents a VectorDb record
     """
 
     id: str
     provider: VectorDbProvider
-    options: typing.Optional[typing.Any] = None
-    datasources: typing.Optional[typing.List[PrismaModelsDatasource]] = None
-    created_at: dt.datetime = pydantic.Field(alias="createdAt")
-    updated_at: dt.datetime = pydantic.Field(alias="updatedAt")
-    api_user_id: str = pydantic.Field(alias="apiUserId")
-    api_user: typing.Optional[PrismaModelsApiUser] = pydantic.Field(alias="apiUser", default=None)
+    options: typing.Optional[typing.Any]
+    datasources: typing.Optional[typing.List[PrismaModelsDatasource]]
+    created_at: dt.datetime = pydantic_v1.Field(alias="createdAt")
+    updated_at: dt.datetime = pydantic_v1.Field(alias="updatedAt")
+    api_user_id: str = pydantic_v1.Field(alias="apiUserId")
+    api_user: typing.Optional[PrismaModelsApiUser] = pydantic_v1.Field(alias="apiUser")
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
         return super().json(**kwargs_with_defaults)
 
     def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().dict(**kwargs_with_defaults)
+        kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs}
+
+        return deep_union_pydantic_dicts(
+            super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none)
+        )
 
     class Config:
         frozen = True
         smart_union = True
         allow_population_by_field_name = True
+        populate_by_name = True
+        extra = pydantic_v1.Extra.forbid
         json_encoders = {dt.datetime: serialize_datetime}
diff --git a/src/superagent/types/prisma_models_workflow.py b/src/superagent/types/prisma_models_workflow.py
index 983b04b..dc8aef3 100644
--- a/src/superagent/types/prisma_models_workflow.py
+++ b/src/superagent/types/prisma_models_workflow.py
@@ -6,42 +6,42 @@
 import typing
 
 from ..core.datetime_utils import serialize_datetime
+from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1
 
-try:
-    import pydantic.v1 as pydantic  # type: ignore
-except ImportError:
-    import pydantic  # type: ignore
-
 
-class PrismaModelsWorkflow(pydantic.BaseModel):
+class PrismaModelsWorkflow(pydantic_v1.BaseModel):
     """
     Represents a Workflow record
     """
 
     id: str
     name: str
-    description: typing.Optional[str] = None
-    created_at: dt.datetime = pydantic.Field(alias="createdAt")
-    updated_at: dt.datetime = pydantic.Field(alias="updatedAt")
-    steps: typing.Optional[typing.List[PrismaModelsWorkflowStep]] = None
-    api_user_id: str = pydantic.Field(alias="apiUserId")
-    api_user: typing.Optional[PrismaModelsApiUser] = pydantic.Field(alias="apiUser", default=None)
-    workflow_configs: typing.Optional[typing.List[WorkflowConfig]] = pydantic.Field(
-        alias="workflowConfigs", default=None
-    )
+    description: typing.Optional[str]
+    created_at: dt.datetime = pydantic_v1.Field(alias="createdAt")
+    updated_at: dt.datetime = pydantic_v1.Field(alias="updatedAt")
+    steps: typing.Optional[typing.List[PrismaModelsWorkflowStep]]
+    api_user_id: str = pydantic_v1.Field(alias="apiUserId")
+    api_user: typing.Optional[PrismaModelsApiUser] = pydantic_v1.Field(alias="apiUser")
+    workflow_configs: typing.Optional[typing.List[WorkflowConfig]] = pydantic_v1.Field(alias="workflowConfigs")
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
         return super().json(**kwargs_with_defaults)
 
     def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().dict(**kwargs_with_defaults)
+        kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs}
+
+        return deep_union_pydantic_dicts(
+            super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none)
+        )
 
     class Config:
         frozen = True
         smart_union = True
         allow_population_by_field_name = True
+        populate_by_name = True
+        extra = pydantic_v1.Extra.forbid
         json_encoders = {dt.datetime: serialize_datetime}
diff --git a/src/superagent/types/prisma_models_workflow_step.py b/src/superagent/types/prisma_models_workflow_step.py
index ec23f2d..1412f3c 100644
--- a/src/superagent/types/prisma_models_workflow_step.py
+++ b/src/superagent/types/prisma_models_workflow_step.py
@@ -6,41 +6,43 @@
 import typing
 
 from ..core.datetime_utils import serialize_datetime
+from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1
 
-try:
-    import pydantic.v1 as pydantic  # type: ignore
-except ImportError:
-    import pydantic  # type: ignore
-
 
-class PrismaModelsWorkflowStep(pydantic.BaseModel):
+class PrismaModelsWorkflowStep(pydantic_v1.BaseModel):
     """
     Represents a WorkflowStep record
     """
 
     id: str
     order: int
-    workflow_id: str = pydantic.Field(alias="workflowId")
-    workflow: typing.Optional[PrismaModelsWorkflow] = None
-    created_at: dt.datetime = pydantic.Field(alias="createdAt")
-    updated_at: dt.datetime = pydantic.Field(alias="updatedAt")
-    input: typing.Optional[str] = None
-    output: typing.Optional[str] = None
-    agent_id: str = pydantic.Field(alias="agentId")
-    agent: typing.Optional[PrismaModelsAgent] = None
+    workflow_id: str = pydantic_v1.Field(alias="workflowId")
+    workflow: typing.Optional[PrismaModelsWorkflow]
+    created_at: dt.datetime = pydantic_v1.Field(alias="createdAt")
+    updated_at: dt.datetime = pydantic_v1.Field(alias="updatedAt")
+    input: typing.Optional[str]
+    output: typing.Optional[str]
+    agent_id: str = pydantic_v1.Field(alias="agentId")
+    agent: typing.Optional[PrismaModelsAgent]
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
         return super().json(**kwargs_with_defaults)
 
     def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().dict(**kwargs_with_defaults)
+        kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs}
+
+        return deep_union_pydantic_dicts(
+            super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none)
+        )
 
     class Config:
         frozen = True
         smart_union = True
         allow_population_by_field_name = True
+        populate_by_name = True
+        extra = pydantic_v1.Extra.forbid
         json_encoders = {dt.datetime: serialize_datetime}
diff --git a/src/superagent/types/tool_assistant_tools_code.py b/src/superagent/types/tool_assistant_tools_code.py
index e02b361..40edcf5 100644
--- a/src/superagent/types/tool_assistant_tools_code.py
+++ b/src/superagent/types/tool_assistant_tools_code.py
@@ -4,23 +4,24 @@
 import typing
 
 from ..core.datetime_utils import serialize_datetime
+from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1
 
-try:
-    import pydantic.v1 as pydantic  # type: ignore
-except ImportError:
-    import pydantic  # type: ignore
-
 
-class ToolAssistantToolsCode(pydantic.BaseModel):
+class ToolAssistantToolsCode(pydantic_v1.BaseModel):
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
         return super().json(**kwargs_with_defaults)
 
     def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().dict(**kwargs_with_defaults)
+        kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs}
+
+        return deep_union_pydantic_dicts(
+            super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none)
+        )
 
     class Config:
         frozen = True
         smart_union = True
+        extra = pydantic_v1.Extra.forbid
         json_encoders = {dt.datetime: serialize_datetime}
diff --git a/src/superagent/types/tool_assistant_tools_function.py b/src/superagent/types/tool_assistant_tools_function.py
index 90effdd..d3970a7 100644
--- a/src/superagent/types/tool_assistant_tools_function.py
+++ b/src/superagent/types/tool_assistant_tools_function.py
@@ -4,26 +4,27 @@
 import typing
 
 from ..core.datetime_utils import serialize_datetime
+from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1
 from .function_definition import FunctionDefinition
 
-try:
-    import pydantic.v1 as pydantic  # type: ignore
-except ImportError:
-    import pydantic  # type: ignore
-
 
-class ToolAssistantToolsFunction(pydantic.BaseModel):
-    function: typing.Optional[FunctionDefinition] = None
+class ToolAssistantToolsFunction(pydantic_v1.BaseModel):
+    function: typing.Optional[FunctionDefinition]
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
         return super().json(**kwargs_with_defaults)
 
     def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().dict(**kwargs_with_defaults)
+        kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs}
+
+        return deep_union_pydantic_dicts(
+            super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none)
+        )
 
     class Config:
         frozen = True
         smart_union = True
+        extra = pydantic_v1.Extra.forbid
         json_encoders = {dt.datetime: serialize_datetime}
diff --git a/src/superagent/types/tool_assistant_tools_retrieval.py b/src/superagent/types/tool_assistant_tools_retrieval.py
index e5c9627..922f372 100644
--- a/src/superagent/types/tool_assistant_tools_retrieval.py
+++ b/src/superagent/types/tool_assistant_tools_retrieval.py
@@ -4,23 +4,24 @@
 import typing
 
 from ..core.datetime_utils import serialize_datetime
+from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1
 
-try:
-    import pydantic.v1 as pydantic  # type: ignore
-except ImportError:
-    import pydantic  # type: ignore
-
 
-class ToolAssistantToolsRetrieval(pydantic.BaseModel):
+class ToolAssistantToolsRetrieval(pydantic_v1.BaseModel):
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
         return super().json(**kwargs_with_defaults)
 
     def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().dict(**kwargs_with_defaults)
+        kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs}
+
+        return deep_union_pydantic_dicts(
+            super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none)
+        )
 
     class Config:
         frozen = True
         smart_union = True
+        extra = pydantic_v1.Extra.forbid
         json_encoders = {dt.datetime: serialize_datetime}
diff --git a/src/superagent/types/tool_list.py b/src/superagent/types/tool_list.py
index 21cf9ba..66d3c77 100644
--- a/src/superagent/types/tool_list.py
+++ b/src/superagent/types/tool_list.py
@@ -4,17 +4,13 @@
 import typing
 
 from ..core.datetime_utils import serialize_datetime
+from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1
 from .prisma_models_tool import PrismaModelsTool
 
-try:
-    import pydantic.v1 as pydantic  # type: ignore
-except ImportError:
-    import pydantic  # type: ignore
-
 
-class ToolList(pydantic.BaseModel):
+class ToolList(pydantic_v1.BaseModel):
     success: bool
-    data: typing.Optional[typing.List[PrismaModelsTool]] = None
+    data: typing.Optional[typing.List[PrismaModelsTool]]
     total_pages: int
 
     def json(self, **kwargs: typing.Any) -> str:
@@ -22,10 +18,15 @@ def json(self, **kwargs: typing.Any) -> str:
         return super().json(**kwargs_with_defaults)
 
     def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().dict(**kwargs_with_defaults)
+        kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs}
+
+        return deep_union_pydantic_dicts(
+            super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none)
+        )
 
     class Config:
         frozen = True
         smart_union = True
+        extra = pydantic_v1.Extra.forbid
         json_encoders = {dt.datetime: serialize_datetime}
diff --git a/src/superagent/types/validation_error.py b/src/superagent/types/validation_error.py
index 67b5ba7..3d50f49 100644
--- a/src/superagent/types/validation_error.py
+++ b/src/superagent/types/validation_error.py
@@ -4,15 +4,11 @@
 import typing
 
 from ..core.datetime_utils import serialize_datetime
+from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1
 from .validation_error_loc_item import ValidationErrorLocItem
 
-try:
-    import pydantic.v1 as pydantic  # type: ignore
-except ImportError:
-    import pydantic  # type: ignore
-
 
-class ValidationError(pydantic.BaseModel):
+class ValidationError(pydantic_v1.BaseModel):
     loc: typing.List[ValidationErrorLocItem]
     msg: str
     type: str
@@ -22,10 +18,15 @@ def json(self, **kwargs: typing.Any) -> str:
         return super().json(**kwargs_with_defaults)
 
     def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().dict(**kwargs_with_defaults)
+        kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs}
+
+        return deep_union_pydantic_dicts(
+            super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none)
+        )
 
     class Config:
         frozen = True
         smart_union = True
+        extra = pydantic_v1.Extra.forbid
         json_encoders = {dt.datetime: serialize_datetime}
diff --git a/src/superagent/types/vector_db_list.py b/src/superagent/types/vector_db_list.py
index 3a5e858..edd2404 100644
--- a/src/superagent/types/vector_db_list.py
+++ b/src/superagent/types/vector_db_list.py
@@ -4,27 +4,28 @@
 import typing
 
 from ..core.datetime_utils import serialize_datetime
+from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1
 from .prisma_models_vector_db import PrismaModelsVectorDb
 
-try:
-    import pydantic.v1 as pydantic  # type: ignore
-except ImportError:
-    import pydantic  # type: ignore
-
 
-class VectorDbList(pydantic.BaseModel):
+class VectorDbList(pydantic_v1.BaseModel):
     success: bool
-    data: typing.Optional[typing.List[PrismaModelsVectorDb]] = None
+    data: typing.Optional[typing.List[PrismaModelsVectorDb]]
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
         return super().json(**kwargs_with_defaults)
 
     def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().dict(**kwargs_with_defaults)
+        kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs}
+
+        return deep_union_pydantic_dicts(
+            super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none)
+        )
 
     class Config:
         frozen = True
         smart_union = True
+        extra = pydantic_v1.Extra.forbid
         json_encoders = {dt.datetime: serialize_datetime}
diff --git a/src/superagent/types/workflow_config.py b/src/superagent/types/workflow_config.py
index 6274413..4a88187 100644
--- a/src/superagent/types/workflow_config.py
+++ b/src/superagent/types/workflow_config.py
@@ -6,39 +6,41 @@
 import typing
 
 from ..core.datetime_utils import serialize_datetime
+from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1
 
-try:
-    import pydantic.v1 as pydantic  # type: ignore
-except ImportError:
-    import pydantic  # type: ignore
-
 
-class WorkflowConfig(pydantic.BaseModel):
+class WorkflowConfig(pydantic_v1.BaseModel):
     """
     Represents a WorkflowConfig record
     """
 
     id: str
-    config: typing.Optional[typing.Any] = None
-    created_at: dt.datetime = pydantic.Field(alias="createdAt")
-    updated_at: dt.datetime = pydantic.Field(alias="updatedAt")
-    workflow_id: str = pydantic.Field(alias="workflowId")
-    workflow: typing.Optional[PrismaModelsWorkflow] = None
-    api_user: typing.Optional[PrismaModelsApiUser] = pydantic.Field(alias="ApiUser", default=None)
-    api_user_id: typing.Optional[str] = pydantic.Field(alias="apiUserId", default=None)
+    config: typing.Optional[typing.Any]
+    created_at: dt.datetime = pydantic_v1.Field(alias="createdAt")
+    updated_at: dt.datetime = pydantic_v1.Field(alias="updatedAt")
+    workflow_id: str = pydantic_v1.Field(alias="workflowId")
+    workflow: typing.Optional[PrismaModelsWorkflow]
+    api_user: typing.Optional[PrismaModelsApiUser] = pydantic_v1.Field(alias="ApiUser")
+    api_user_id: typing.Optional[str] = pydantic_v1.Field(alias="apiUserId")
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
         return super().json(**kwargs_with_defaults)
 
     def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().dict(**kwargs_with_defaults)
+        kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs}
+
+        return deep_union_pydantic_dicts(
+            super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none)
+        )
 
     class Config:
         frozen = True
         smart_union = True
         allow_population_by_field_name = True
+        populate_by_name = True
+        extra = pydantic_v1.Extra.forbid
         json_encoders = {dt.datetime: serialize_datetime}
diff --git a/src/superagent/types/workflow_list.py b/src/superagent/types/workflow_list.py
index 01d1447..3d887c5 100644
--- a/src/superagent/types/workflow_list.py
+++ b/src/superagent/types/workflow_list.py
@@ -4,17 +4,13 @@
 import typing
 
 from ..core.datetime_utils import serialize_datetime
+from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1
 from .prisma_models_workflow import PrismaModelsWorkflow
 
-try:
-    import pydantic.v1 as pydantic  # type: ignore
-except ImportError:
-    import pydantic  # type: ignore
-
 
-class WorkflowList(pydantic.BaseModel):
+class WorkflowList(pydantic_v1.BaseModel):
     success: bool
-    data: typing.Optional[typing.List[PrismaModelsWorkflow]] = None
+    data: typing.Optional[typing.List[PrismaModelsWorkflow]]
     total_pages: int
 
     def json(self, **kwargs: typing.Any) -> str:
@@ -22,10 +18,15 @@ def json(self, **kwargs: typing.Any) -> str:
         return super().json(**kwargs_with_defaults)
 
     def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().dict(**kwargs_with_defaults)
+        kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs}
+
+        return deep_union_pydantic_dicts(
+            super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none)
+        )
 
     class Config:
         frozen = True
         smart_union = True
+        extra = pydantic_v1.Extra.forbid
         json_encoders = {dt.datetime: serialize_datetime}
diff --git a/src/superagent/types/workflow_step_list.py b/src/superagent/types/workflow_step_list.py
index cb2f97b..84325f1 100644
--- a/src/superagent/types/workflow_step_list.py
+++ b/src/superagent/types/workflow_step_list.py
@@ -4,27 +4,28 @@
 import typing
 
 from ..core.datetime_utils import serialize_datetime
+from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1
 from .prisma_models_workflow_step import PrismaModelsWorkflowStep
 
-try:
-    import pydantic.v1 as pydantic  # type: ignore
-except ImportError:
-    import pydantic  # type: ignore
-
 
-class WorkflowStepList(pydantic.BaseModel):
+class WorkflowStepList(pydantic_v1.BaseModel):
     success: bool
-    data: typing.Optional[typing.List[PrismaModelsWorkflowStep]] = None
+    data: typing.Optional[typing.List[PrismaModelsWorkflowStep]]
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
         return super().json(**kwargs_with_defaults)
 
     def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().dict(**kwargs_with_defaults)
+        kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs}
+
+        return deep_union_pydantic_dicts(
+            super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none)
+        )
 
     class Config:
         frozen = True
         smart_union = True
+        extra = pydantic_v1.Extra.forbid
         json_encoders = {dt.datetime: serialize_datetime}
diff --git a/src/superagent/version.py b/src/superagent/version.py
new file mode 100644
index 0000000..2ac062c
--- /dev/null
+++ b/src/superagent/version.py
@@ -0,0 +1,4 @@
+
+from importlib import metadata
+
+__version__ = metadata.version("superagent-py")
diff --git a/tests/__init__.py b/tests/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/tests/test_client.py b/tests/custom/test_client.py
similarity index 100%
rename from tests/test_client.py
rename to tests/custom/test_client.py
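---
A quick sanity check of the regenerated serialization behaviour (a minimal sketch, not part of the patch: it assumes superagent-py 0.2.40 is installed and that VectorDbList is still re-exported from superagent.types; the field values are made up):

    from superagent.types import VectorDbList

    # "data" is optional and deliberately left unset here.
    page = VectorDbList(success=True)

    # dict() now merges the exclude_unset and exclude_none views via
    # deep_union_pydantic_dicts, so the unset "data" field is dropped from the
    # output instead of being emitted as "data": None.
    print(page.dict())  # {'success': True}

    # Config now sets extra = Extra.forbid, so unknown keys are rejected.
    try:
        VectorDbList(success=True, bogus_field=123)
    except Exception as exc:
        print(type(exc).__name__)  # pydantic's ValidationError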