From 3eeeabc46ce3f692fa5dba8dbd869edba68f7d1a Mon Sep 17 00:00:00 2001
From: Pavlo Paliychuk
Date: Thu, 17 Oct 2024 22:22:39 -0400
Subject: [PATCH] Graph client (#221)

* SDK regeneration
* SDK regeneration
* SDK regeneration
* chore: Bump version
* chore: Bump version
* SDK regeneration
* SDK regeneration
* SDK regeneration
* feat: Update memory example to include latest api
* fix: Decrease sleep in example
* SDK regeneration
* SDK regeneration
* chore: Bump version
* SDK regeneration
* SDK regeneration
* SDK regeneration
* SDK regeneration
* SDK regeneration
* SDK regeneration
* SDK regeneration
* SDK regeneration
* chore: Add graph example
* SDK regeneration
* SDK regeneration
* SDK regeneration
* SDK regeneration
* chore: Update graph examples
* add get facts call
* fix: examples
* chore: fix memory example
* SDK regeneration
* SDK regeneration
* update examples
* SDK regeneration
* SDK regeneration
* SDK regeneration
* SDK regeneration
* SDK regeneration
* SDK regeneration
* update sleep times
* SDK regeneration
* SDK regeneration
* SDK regeneration
* SDK regeneration
* chore: version bump
* chore: cleanup comments
* remove langchain component
* remove langchain example
* SDK regeneration
* poetry lock
* SDK regeneration

---------

Co-authored-by: fern-api <115122769+fern-api[bot]@users.noreply.github.com>
---
 .fernignore | 2 +-
 .gitignore | 5 +
 LICENSE | 201 --
 examples/chat_history/memory.py | 6 +-
 examples/graph_example/conversations.py | 289 +++
 examples/graph_example/group_graph_example.py | 95 +
 examples/graph_example/user_graph_example.py | 173 ++
 poetry.lock | 2032 ++++++++---------
 pyproject.toml | 14 +-
 src/zep_cloud/__init__.py | 52 +-
 src/zep_cloud/base_client.py | 6 +
 src/zep_cloud/core/client_wrapper.py | 2 +-
 src/zep_cloud/document/client.py | 94 +-
 src/zep_cloud/graph/__init__.py | 5 +
 src/zep_cloud/graph/client.py | 351 +++
 src/zep_cloud/graph/edge/__init__.py | 2 +
 src/zep_cloud/graph/edge/client.py | 398 ++++
 src/zep_cloud/graph/episode/__init__.py | 2 +
 src/zep_cloud/graph/episode/client.py | 343 +++
 src/zep_cloud/graph/node/__init__.py | 2 +
 src/zep_cloud/graph/node/client.py | 307 +++
 src/zep_cloud/group/__init__.py | 2 +
 src/zep_cloud/group/client.py | 239 ++
 src/zep_cloud/langchain/__init__.py | 7 -
 src/zep_cloud/langchain/helpers.py | 16 -
 src/zep_cloud/langchain/history.py | 201 --
 src/zep_cloud/langchain/vectorstore.py | 553 -----
 src/zep_cloud/memory/client.py | 147 +-
 src/zep_cloud/types/__init__.py | 48 +-
 ...cument_response.py => apidata_document.py} | 2 +-
 ...onse.py => apidata_document_collection.py} | 14 +-
 ...py => apidata_document_search_response.py} | 6 +-
 ...sult.py => apidata_document_with_score.py} | 2 +-
 src/zep_cloud/types/end_session_response.py | 4 +-
 src/zep_cloud/types/entity_edge.py | 79 +
 src/zep_cloud/types/entity_node.py | 54 +
 src/zep_cloud/types/episode.py | 37 +
 src/zep_cloud/types/episode_response.py | 30 +
 src/zep_cloud/types/fact.py | 12 +-
 src/zep_cloud/types/graph_data_type.py | 5 +
 src/zep_cloud/types/graph_search_results.py | 32 +
 src/zep_cloud/types/graph_search_scope.py | 5 +
 src/zep_cloud/types/group.py | 37 +
 src/zep_cloud/types/memory.py | 11 +-
 src/zep_cloud/types/memory_type.py | 5 -
 src/zep_cloud/types/message.py | 4 +-
 .../types/{added_fact.py => new_fact.py} | 2 +-
 src/zep_cloud/types/reranker.py | 5 +
 src/zep_cloud/types/session.py | 5 +-
 ..._response.py => session_classification.py} | 4 +-
 .../types/session_fact_rating_examples.py | 31 +
 .../types/session_fact_rating_instruction.py | 45 +
 src/zep_cloud/user/client.py
| 93 + 53 files changed, 3808 insertions(+), 2310 deletions(-) delete mode 100644 LICENSE create mode 100644 examples/graph_example/conversations.py create mode 100644 examples/graph_example/group_graph_example.py create mode 100644 examples/graph_example/user_graph_example.py create mode 100644 src/zep_cloud/graph/__init__.py create mode 100644 src/zep_cloud/graph/client.py create mode 100644 src/zep_cloud/graph/edge/__init__.py create mode 100644 src/zep_cloud/graph/edge/client.py create mode 100644 src/zep_cloud/graph/episode/__init__.py create mode 100644 src/zep_cloud/graph/episode/client.py create mode 100644 src/zep_cloud/graph/node/__init__.py create mode 100644 src/zep_cloud/graph/node/client.py create mode 100644 src/zep_cloud/group/__init__.py create mode 100644 src/zep_cloud/group/client.py delete mode 100644 src/zep_cloud/langchain/__init__.py delete mode 100644 src/zep_cloud/langchain/helpers.py delete mode 100644 src/zep_cloud/langchain/history.py delete mode 100644 src/zep_cloud/langchain/vectorstore.py rename src/zep_cloud/types/{document_response.py => apidata_document.py} (97%) rename src/zep_cloud/types/{document_collection_response.py => apidata_document_collection.py} (83%) rename src/zep_cloud/types/{document_search_result_page.py => apidata_document_search_response.py} (86%) rename src/zep_cloud/types/{document_search_result.py => apidata_document_with_score.py} (96%) create mode 100644 src/zep_cloud/types/entity_edge.py create mode 100644 src/zep_cloud/types/entity_node.py create mode 100644 src/zep_cloud/types/episode.py create mode 100644 src/zep_cloud/types/episode_response.py create mode 100644 src/zep_cloud/types/graph_data_type.py create mode 100644 src/zep_cloud/types/graph_search_results.py create mode 100644 src/zep_cloud/types/graph_search_scope.py create mode 100644 src/zep_cloud/types/group.py delete mode 100644 src/zep_cloud/types/memory_type.py rename src/zep_cloud/types/{added_fact.py => new_fact.py} (96%) create mode 100644 src/zep_cloud/types/reranker.py rename src/zep_cloud/types/{classify_session_response.py => session_classification.py} (92%) create mode 100644 src/zep_cloud/types/session_fact_rating_examples.py create mode 100644 src/zep_cloud/types/session_fact_rating_instruction.py diff --git a/.fernignore b/.fernignore index 118c89ea..ef101b8d 100644 --- a/.fernignore +++ b/.fernignore @@ -9,4 +9,4 @@ poetry.lock README.md .github Makefile -LICENSE \ No newline at end of file +.gitignore \ No newline at end of file diff --git a/.gitignore b/.gitignore index bf4fa947..495f33f3 100644 --- a/.gitignore +++ b/.gitignore @@ -3,3 +3,8 @@ dist/ .mypy_cache/ __pycache__/ poetry.toml +.idea/ +.vscode/ +*.env +.venv/ +venv/ \ No newline at end of file diff --git a/LICENSE b/LICENSE deleted file mode 100644 index f49a4e16..00000000 --- a/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. 
For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
\ No newline at end of file diff --git a/examples/chat_history/memory.py b/examples/chat_history/memory.py index d2a0459d..358dcaf6 100644 --- a/examples/chat_history/memory.py +++ b/examples/chat_history/memory.py @@ -72,7 +72,7 @@ async def main() -> None: await client.memory.add(session_id=session_id, messages=[Message(**m)]) # Wait for the messages to be processed - await asyncio.sleep(5) + await asyncio.sleep(20) # Synthesize a question from most recent messages. # Useful for RAG apps. This is faster than using an LLM chain. @@ -100,7 +100,7 @@ async def main() -> None: # Get Memory for session print(f"\n---Get Perpetual Memory for Session: {session_id}") - memory = await client.memory.get(session_id, memory_type="perpetual") + memory = await client.memory.get(session_id) print(f"Memory: {memory}") print("\n---End of Memory") @@ -115,7 +115,7 @@ async def main() -> None: query = "What are Jane's favorite shoe brands?" print(f"\n---Searching over facts for: '{query}'") facts_result = await client.memory.search_sessions( - session_ids=[session_id], text=query, search_scope="facts" + user_id=user_id, text=query, search_scope="facts" ) print("facts_result: ", facts_result) diff --git a/examples/graph_example/conversations.py b/examples/graph_example/conversations.py new file mode 100644 index 00000000..9f486e82 --- /dev/null +++ b/examples/graph_example/conversations.py @@ -0,0 +1,289 @@ +history = [ + # Conversation 1: Japan + [ + { + "role": "Paul", + "role_type": "user", + "content": "Hi, I'm planning a vacation to Japan. Can you give me some advice?", + }, + { + "role": "assistant", + "role_type": "assistant", + "content": "Of course! Japan is a fascinating destination. Are you more interested in modern cities like Tokyo, or historical sites like Kyoto?", + }, + { + "role": "Paul", + "role_type": "user", + "content": "I think I'd like to experience both. Can you suggest an itinerary?", + }, + { + "role": "assistant", + "role_type": "assistant", + "content": "Certainly! You could start with 3 days in Tokyo, then take the bullet train to Kyoto for 3 days. This way, you'll experience both the modern and traditional aspects of Japan.", + }, + { + "role": "Paul", + "role_type": "user", + "content": "That sounds perfect! What are some must-visit places in Tokyo?", + }, + ], + # Conversation 2: Italy + [ + { + "role": "Paul", + "role_type": "user", + "content": "I'm thinking about visiting Italy next summer. Any recommendations?", + }, + { + "role": "assistant", + "role_type": "assistant", + "content": "Italy is a wonderful choice! Are you more interested in art and history, or would you prefer to focus on food and wine experiences?", + }, + { + "role": "Paul", + "role_type": "user", + "content": "I love both, but I think I'm leaning towards the food and wine experiences.", + }, + { + "role": "assistant", + "role_type": "assistant", + "content": "Great! In that case, you might want to consider regions like Tuscany or Emilia-Romagna. Would you like more information about these areas?", + }, + { + "role": "Paul", + "role_type": "user", + "content": "Yes, please tell me more about Tuscany. 
What are some must-try dishes and wines there?", + }, + ], + [ + { + "role": "Paul", + "role_type": "user", + "content": "Apples are my favorite fruit", + }, + { + "role": "Paul", + "role_type": "user", + "content": "now bananas are my favorite fruit", + }, + { + "role": "Paul", + "role_type": "user", + "content": "I went to Eric Clapton's concert last night", + }, + ], + [ + { + "role": "Paul", + "role_type": "user", + "content": "I'm thinking about taking a road trip across the United States. Any suggestions?", + }, + { + "role": "assistant", + "role_type": "assistant", + "content": "That sounds exciting! A cross-country road trip can be an amazing experience. Do you have any specific regions or landmarks you'd like to see?", + }, + { + "role": "Paul", + "role_type": "user", + "content": "I'd love to see some national parks and maybe hit a few major cities. I'm starting from New York.", + }, + { + "role": "assistant", + "role_type": "assistant", + "content": "Great! How about a route that takes you through some major cities and national parks? We could plan a route from New York to California, hitting spots like Chicago, Yellowstone, and the Grand Canyon along the way.", + }, + { + "role": "Paul", + "role_type": "user", + "content": "That sounds perfect! How long do you think a trip like that would take?", + }, + { + "role": "assistant", + "role_type": "assistant", + "content": "A trip like that could take anywhere from 2-4 weeks, depending on how much time you want to spend at each location. Would you prefer a faster-paced trip or a more leisurely one?", + }, + { + "role": "Paul", + "role_type": "user", + "content": "I think I'd prefer a more leisurely pace. Let's aim for about 3 weeks. Can you break down a potential itinerary?", + }, + { + "role": "assistant", + "role_type": "assistant", + "content": "Certainly! Here's a rough 3-week itinerary: Days 1-2: New York to Chicago, Days 3-4: Chicago, Days 5-7: Drive to and explore Yellowstone, Days 8-10: Drive to and explore Grand Canyon, Days 11-13: Las Vegas and Death Valley, Days 14-16: Drive to and explore Yosemite, Days 17-19: San Francisco, Days 20-21: Drive down the Pacific Coast Highway to Los Angeles.", + }, + { + "role": "Paul", + "role_type": "user", + "content": "That sounds amazing! What kind of vehicle would you recommend for a trip like this?", + }, + { + "role": "assistant", + "role_type": "assistant", + "content": "For a long road trip like this, comfort and reliability are key. A mid-size SUV could be a good choice. It offers space for luggage, comfort for long drives, and can handle various road conditions you might encounter in national parks.", + }, + { + "role": "Paul", + "role_type": "user", + "content": "Good idea. Are there any specific preparations I should make for the national parks?", + }, + { + "role": "assistant", + "role_type": "assistant", + "content": "Absolutely! For national parks, it's a good idea to get an America the Beautiful Pass, which covers entrance fees to all national parks for a year. Also, make reservations for campsites or lodges well in advance, especially for popular parks like Yellowstone and Yosemite.", + }, + { + "role": "Paul", + "role_type": "user", + "content": "Great tip about the pass! What about food? Should I plan to cook or eat out mostly?", + }, + { + "role": "assistant", + "role_type": "assistant", + "content": "A mix of both can be good. In cities, you'll want to try local cuisines. For national parks, having some cooking equipment can save money and be convenient. 
Maybe plan to cook simple meals while camping and eat out in cities or when you want to treat yourself.", + }, + { + "role": "Paul", + "role_type": "user", + "content": "That makes sense. Are there any must-try foods in the cities we'll be visiting?", + }, + { + "role": "assistant", + "role_type": "assistant", + "content": "Absolutely! In Chicago, try deep-dish pizza and a Chicago-style hot dog. In San Francisco, don't miss the clam chowder in a sourdough bread bowl at Fisherman's Wharf. Las Vegas has world-class restaurants from celebrity chefs. And in Los Angeles, try some authentic tacos or Korean BBQ.", + }, + { + "role": "Paul", + "role_type": "user", + "content": "Yum! Now, what about budgeting? How much should I expect to spend on a trip like this?", + }, + { + "role": "assistant", + "role_type": "assistant", + "content": "Budget can vary widely, but for a 3-week trip, you might budget around $4000-$6000 for one person. This would cover gas, accommodations, food, and some activities. You can save by camping in national parks and cooking some meals, or spend more for hotels and restaurants.", + }, + { + "role": "Paul", + "role_type": "user", + "content": "That's helpful, thanks. Last question: what are some essential items I shouldn't forget to pack?", + }, + { + "role": "assistant", + "role_type": "assistant", + "content": "Great question! Don't forget: a good map or GPS, first-aid kit, flashlight, multi-tool, reusable water bottles, sunscreen, insect repellent, comfortable hiking shoes, and layers of clothing for various climates. Also, a camera to capture memories, and portable chargers for your devices.", + }, + { + "role": "Paul", + "role_type": "user", + "content": "Perfect, I think I'm all set to start planning in detail. Thanks for all your help!", + }, + { + "role": "assistant", + "role_type": "assistant", + "content": "You're welcome, Paul! Enjoy your epic road trip across the US. Don't hesitate to ask if you need any more advice as you plan. Safe travels!", + }, + ], + [ + { + "role": "Alex", + "role_type": "user", + "content": "Hi there! I thought I'd share some facts about myself today.", + }, + { + "role": "assistant", + "role_type": "assistant", + "content": "That's great, Alex! I'd love to learn more about you. Please, go ahead and share whatever you're comfortable with.", + }, + { + "role": "Alex", + "role_type": "user", + "content": "Well, I'm 28 years old and I work as a software engineer at a tech startup.", + }, + { + "role": "assistant", + "role_type": "assistant", + "content": "That's interesting! Software engineering is a dynamic field. How long have you been working in this role?", + }, + { + "role": "Alex", + "role_type": "user", + "content": "I've been in this role for about 3 years now. Before that, I studied Computer Science at UCLA.", + }, + { + "role": "assistant", + "role_type": "assistant", + "content": "UCLA is a great school! Do you have any hobbies or interests outside of work?", + }, + { + "role": "Alex", + "role_type": "user", + "content": "Yes, I'm really into rock climbing. I try to go to the climbing gym at least twice a week, and I love outdoor climbing trips when I can manage them.", + }, + { + "role": "assistant", + "role_type": "assistant", + "content": "Rock climbing sounds exciting! It's a great way to stay active and challenge yourself. Have you been on any memorable climbing trips recently?", + }, + { + "role": "Alex", + "role_type": "user", + "content": "Actually, yes! I just got back from a trip to Joshua Tree National Park. 
The rock formations there are amazing for bouldering.", + }, + { + "role": "assistant", + "role_type": "assistant", + "content": "Joshua Tree is beautiful! It's wonderful that you can combine your love for the outdoors with your hobby. Do you have any other interests or things you're passionate about?", + }, + { + "role": "Alex", + "role_type": "user", + "content": "I'm also really into cooking. I love trying out new recipes, especially dishes from different cultures. Last week, I attempted to make sushi for the first time!", + }, + { + "role": "assistant", + "role_type": "assistant", + "content": "Cooking is a great skill to have! How did your sushi-making attempt go? It can be quite challenging for beginners.", + }, + { + "role": "Alex", + "role_type": "user", + "content": "It was definitely a learning experience! The rolls weren't as tight as I'd like, but the flavor was good. I'm determined to keep practicing.", + }, + { + "role": "assistant", + "role_type": "assistant", + "content": "That's a great attitude! Practice makes perfect. Is there anything else you'd like to share about yourself?", + }, + { + "role": "Alex", + "role_type": "user", + "content": "Well, I'm originally from Boston, but I moved to San Francisco for work about 5 years ago. I love the tech scene here, but I do miss the changing seasons back east.", + }, + { + "role": "assistant", + "role_type": "assistant", + "content": "Moving across the country is a big change! San Francisco and Boston are quite different. Do you get to visit your hometown often?", + }, + { + "role": "Alex", + "role_type": "user", + "content": "I try to go back for the holidays each year. Oh, and I should mention, I have a rescue dog named Pixel. She's a mix of who-knows-what, but she's the best companion I could ask for.", + }, + { + "role": "assistant", + "role_type": "assistant", + "content": "Pixel sounds lovely! Rescue dogs often make the most loyal companions. Does she join you on any of your outdoor adventures?", + }, + { + "role": "Alex", + "role_type": "user", + "content": "She does! Pixel loves hiking, and she's even been camping with me a few times. She's not much for rock climbing though - she prefers to watch from the ground.", + }, + { + "role": "assistant", + "role_type": "assistant", + "content": "It's great that you and Pixel can share outdoor experiences together. Thank you for sharing all these interesting facts about yourself, Alex. You seem to lead a rich and varied life!", + }, + ], +] diff --git a/examples/graph_example/group_graph_example.py b/examples/graph_example/group_graph_example.py new file mode 100644 index 00000000..50c0e24d --- /dev/null +++ b/examples/graph_example/group_graph_example.py @@ -0,0 +1,95 @@ +""" +Example of using the Zep Python SDK asynchronously with Graph functionality. + +This script demonstrates the following functionality: +- Creating a group. +- Updating a group. +- Adding episodes to the group (text and JSON). +- Retrieving nodes from the group. +- Retrieving edges from the group. +- Searching the group for specific content. + +The script showcases various operations using the Zep Graph API, including +group management, adding different types of episodes, and querying the graph structure. 
+""" + +import asyncio +import os +import uuid + +from dotenv import find_dotenv, load_dotenv + +from zep_cloud.client import AsyncZep + +load_dotenv( + dotenv_path=find_dotenv() +) # load environment variables from .env file, if present + +API_KEY = os.environ.get("ZEP_API_KEY") or "YOUR_API_KEY" + + +async def main() -> None: + client = AsyncZep( + api_key=API_KEY, + ) + + group_id = uuid.uuid4().hex + print(f"Creating group {group_id}...") + group = await client.group.add( + group_id=group_id, + name="My Group", + description="This is my group", + ) + print(f"Group {group_id} created {group}") + + print(f"Adding episode to group {group_id}...") + await client.graph.add( + group_id=group_id, + data="This is a test episode", + type="text", + ) + + print(f"Adding more meaningful episode to group {group_id}...") + await client.graph.add( + group_id=group_id, + data="Eric Clapton is a rock star", + type="text", + ) + + print(f"Adding a JSON episode to group {group_id}...") + json_string = '{"name": "Eric Clapton", "age": 78, "genre": "Rock"}' + await client.graph.add( + group_id=group_id, + data=json_string, + type="json", + ) + await asyncio.sleep(20) + + # TODO: Need to enable non-message episodic content retrieval + print(f"Getting episodes from group {group_id}...") + results = await client.graph.episode.get_by_group_id(group_id, lastn=2) + print(f"Episodes from group {group_id} {results.episodes}") + episode = await client.graph.episode.get(results.episodes[0].uuid_) + print(f"Episode {episode.uuid_} from group {group_id} {episode}") + + print(f"Getting nodes from group {group_id}...") + nodes = await client.graph.node.get_by_group_id(group_id) + print(f"Nodes from group {group_id} {nodes}") + + print(f"Getting edges from group {group_id}...") + edges = await client.graph.edge.get_by_group_id(group_id) + print(f"Edges from group {group_id} {edges}") + + print(f"Searching group {group_id}...") + search_results = await client.graph.search( + group_id=group_id, + query="Eric Clapton", + ) + print(f"Search results from group {group_id} {search_results}") + + await client.group.delete(group_id) + print(f"Group {group_id} deleted") + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/examples/graph_example/user_graph_example.py b/examples/graph_example/user_graph_example.py new file mode 100644 index 00000000..fec6626a --- /dev/null +++ b/examples/graph_example/user_graph_example.py @@ -0,0 +1,173 @@ +""" +Example of using the Zep Python SDK asynchronously with Graph functionality. + +This script demonstrates the following functionality: +- Creating a user. +- Creating a session associated with the created user. +- Adding messages to the session. +- Retrieving episodes, edges, and nodes for a user. +- Searching the user's graph memory. +- Adding text and JSON episodes to the graph. +- Performing a centered search on a specific node. + +The script showcases various operations using the Zep Graph API, including +user and session management, adding different types of episodes, and querying +the graph structure. 
+""" + +import asyncio +import os +import uuid +import json +from dotenv import find_dotenv, load_dotenv +from conversations import history + +from zep_cloud.client import AsyncZep +from zep_cloud.types import Message + +load_dotenv( + dotenv_path=find_dotenv() +) # load environment variables from .env file, if present + +API_KEY = os.environ.get("ZEP_API_KEY") or "YOUR_API_KEY" + + +async def main() -> None: + client = AsyncZep( + api_key=API_KEY, + ) + user_id = uuid.uuid4().hex + session_id = uuid.uuid4().hex + await client.user.add(user_id=user_id, first_name="Paul") + print(f"User {user_id} created") + await client.memory.add_session(session_id=session_id, user_id=user_id) + print(f"Session {session_id} created") + for message in history[2]: + await client.memory.add( + session_id, + messages=[ + Message( + role_type=message["role_type"], + content=message["content"], + ) + ], + ) + + print("Waiting for the graph to be updated...") + await asyncio.sleep(30) + print("Getting memory for session") + session_memory = await client.memory.get(session_id) + print(session_memory) + print("Searching user memory...") + search_results = await client.memory.search_sessions( + text="What is the weather in San Francisco?", + user_id=user_id, + search_scope="facts", + ) + print(search_results) + sessions = await client.user.get_sessions(user_id) + print(sessions) + print("Getting episodes for user") + episode_result = await client.graph.episode.get_by_user_id(user_id, lastn=3) + episodes = episode_result.episodes + print(f"Episodes for user {user_id}:") + print(episodes) + episode = await client.graph.episode.get(episodes[0].uuid_) + print(episode) + + edges = await client.graph.edge.get_by_user_id(user_id) + print(f"Edges for user {user_id}:") + print(edges) + edge = await client.graph.edge.get(edges[0].uuid_) + print(edge) + + nodes = await client.graph.node.get_by_user_id(user_id) + print(f"Nodes for user {user_id}:") + print(nodes) + node = await client.graph.node.get(nodes[0].uuid_) + print(node) + + print("Searching user graph memory...") + search_results = await client.graph.search( + user_id=user_id, + query="What is the weather in San Francisco?", + ) + print(search_results.edges) + + print("Adding a new text episode to the graph...") + await client.graph.add( + user_id=user_id, + type="text", + data="The user is an avid fan of Eric Clapton", + ) + print("Text episode added") + print("Adding a new JSON episode to the graph...") + json_data = { + "name": "Eric Clapton", + "age": 78, + "genre": "Rock", + "favorite_user_id": user_id, + } + json_string = json.dumps(json_data) + await client.graph.add( + user_id=user_id, + type="json", + data=json_string, + ) + print("JSON episode added") + + print("Adding a new message episode to the graph...") + message = "Paul (user): I went to Eric Clapton concert last night" + await client.graph.add( + user_id=user_id, + type="message", + data=message, + ) + print("Message episode added") + + print("Waiting for the graph to be updated...") + # wait for the graph to be updated + await asyncio.sleep(30) + + print("Getting nodes from the graph...") + nodes = await client.graph.node.get_by_user_id(user_id) + print(nodes) + + print("Finding Eric Clapton in the graph...") + clapton_node = [node for node in nodes if node.name == "Eric Clapton"] + print(clapton_node) + + print("Performing Eric Clapton centered edge search...") + search_results = await client.graph.search( + user_id=user_id, + query="Eric Clapton", + center_node_uuid=clapton_node[0].uuid_, + 
scope="edges", + ) + print(search_results.edges) + + print("Performing Eric Clapton centered node search...") + search_results = await client.graph.search( + user_id=user_id, + query="Eric Clapton", + center_node_uuid=clapton_node[0].uuid_, + scope="nodes", + ) + print(search_results.nodes) + print("Getting all user facts") + result = await client.user.get_facts(user_id) + print(result.facts) + + for fact in result.facts: + if fact.valid_at or fact.invalid_at: + print( + f"Fact {fact.fact} is valid at {fact.valid_at} and invalid at {fact.invalid_at}\n " + ) + + # Uncomment to delete the user + # await client.user.delete(user_id) + # print(f"User {user_id} deleted") + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/poetry.lock b/poetry.lock index 71c2f47e..5a78655f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,141 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. + +[[package]] +name = "aiohappyeyeballs" +version = "2.4.3" +description = "Happy Eyeballs for asyncio" +optional = false +python-versions = ">=3.8" +files = [ + {file = "aiohappyeyeballs-2.4.3-py3-none-any.whl", hash = "sha256:8a7a83727b2756f394ab2895ea0765a0a8c475e3c71e98d43d76f22b4b435572"}, + {file = "aiohappyeyeballs-2.4.3.tar.gz", hash = "sha256:75cf88a15106a5002a8eb1dab212525c00d1f4c0fa96e551c9fbe6f09a621586"}, +] + +[[package]] +name = "aiohttp" +version = "3.10.10" +description = "Async http client/server framework (asyncio)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "aiohttp-3.10.10-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:be7443669ae9c016b71f402e43208e13ddf00912f47f623ee5994e12fc7d4b3f"}, + {file = "aiohttp-3.10.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7b06b7843929e41a94ea09eb1ce3927865387e3e23ebe108e0d0d09b08d25be9"}, + {file = "aiohttp-3.10.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:333cf6cf8e65f6a1e06e9eb3e643a0c515bb850d470902274239fea02033e9a8"}, + {file = "aiohttp-3.10.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:274cfa632350225ce3fdeb318c23b4a10ec25c0e2c880eff951a3842cf358ac1"}, + {file = "aiohttp-3.10.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9e5e4a85bdb56d224f412d9c98ae4cbd032cc4f3161818f692cd81766eee65a"}, + {file = "aiohttp-3.10.10-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b606353da03edcc71130b52388d25f9a30a126e04caef1fd637e31683033abd"}, + {file = "aiohttp-3.10.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab5a5a0c7a7991d90446a198689c0535be89bbd6b410a1f9a66688f0880ec026"}, + {file = "aiohttp-3.10.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:578a4b875af3e0daaf1ac6fa983d93e0bbfec3ead753b6d6f33d467100cdc67b"}, + {file = "aiohttp-3.10.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8105fd8a890df77b76dd3054cddf01a879fc13e8af576805d667e0fa0224c35d"}, + {file = "aiohttp-3.10.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3bcd391d083f636c06a68715e69467963d1f9600f85ef556ea82e9ef25f043f7"}, + {file = "aiohttp-3.10.10-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fbc6264158392bad9df19537e872d476f7c57adf718944cc1e4495cbabf38e2a"}, + {file = "aiohttp-3.10.10-cp310-cp310-musllinux_1_2_s390x.whl", hash = 
"sha256:e48d5021a84d341bcaf95c8460b152cfbad770d28e5fe14a768988c461b821bc"}, + {file = "aiohttp-3.10.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2609e9ab08474702cc67b7702dbb8a80e392c54613ebe80db7e8dbdb79837c68"}, + {file = "aiohttp-3.10.10-cp310-cp310-win32.whl", hash = "sha256:84afcdea18eda514c25bc68b9af2a2b1adea7c08899175a51fe7c4fb6d551257"}, + {file = "aiohttp-3.10.10-cp310-cp310-win_amd64.whl", hash = "sha256:9c72109213eb9d3874f7ac8c0c5fa90e072d678e117d9061c06e30c85b4cf0e6"}, + {file = "aiohttp-3.10.10-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c30a0eafc89d28e7f959281b58198a9fa5e99405f716c0289b7892ca345fe45f"}, + {file = "aiohttp-3.10.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:258c5dd01afc10015866114e210fb7365f0d02d9d059c3c3415382ab633fcbcb"}, + {file = "aiohttp-3.10.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:15ecd889a709b0080f02721255b3f80bb261c2293d3c748151274dfea93ac871"}, + {file = "aiohttp-3.10.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3935f82f6f4a3820270842e90456ebad3af15810cf65932bd24da4463bc0a4c"}, + {file = "aiohttp-3.10.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:413251f6fcf552a33c981c4709a6bba37b12710982fec8e558ae944bfb2abd38"}, + {file = "aiohttp-3.10.10-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d1720b4f14c78a3089562b8875b53e36b51c97c51adc53325a69b79b4b48ebcb"}, + {file = "aiohttp-3.10.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:679abe5d3858b33c2cf74faec299fda60ea9de62916e8b67e625d65bf069a3b7"}, + {file = "aiohttp-3.10.10-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:79019094f87c9fb44f8d769e41dbb664d6e8fcfd62f665ccce36762deaa0e911"}, + {file = "aiohttp-3.10.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fe2fb38c2ed905a2582948e2de560675e9dfbee94c6d5ccdb1301c6d0a5bf092"}, + {file = "aiohttp-3.10.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:a3f00003de6eba42d6e94fabb4125600d6e484846dbf90ea8e48a800430cc142"}, + {file = "aiohttp-3.10.10-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:1bbb122c557a16fafc10354b9d99ebf2f2808a660d78202f10ba9d50786384b9"}, + {file = "aiohttp-3.10.10-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:30ca7c3b94708a9d7ae76ff281b2f47d8eaf2579cd05971b5dc681db8caac6e1"}, + {file = "aiohttp-3.10.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:df9270660711670e68803107d55c2b5949c2e0f2e4896da176e1ecfc068b974a"}, + {file = "aiohttp-3.10.10-cp311-cp311-win32.whl", hash = "sha256:aafc8ee9b742ce75044ae9a4d3e60e3d918d15a4c2e08a6c3c3e38fa59b92d94"}, + {file = "aiohttp-3.10.10-cp311-cp311-win_amd64.whl", hash = "sha256:362f641f9071e5f3ee6f8e7d37d5ed0d95aae656adf4ef578313ee585b585959"}, + {file = "aiohttp-3.10.10-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:9294bbb581f92770e6ed5c19559e1e99255e4ca604a22c5c6397b2f9dd3ee42c"}, + {file = "aiohttp-3.10.10-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a8fa23fe62c436ccf23ff930149c047f060c7126eae3ccea005f0483f27b2e28"}, + {file = "aiohttp-3.10.10-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5c6a5b8c7926ba5d8545c7dd22961a107526562da31a7a32fa2456baf040939f"}, + {file = "aiohttp-3.10.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:007ec22fbc573e5eb2fb7dec4198ef8f6bf2fe4ce20020798b2eb5d0abda6138"}, + {file = "aiohttp-3.10.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", 
hash = "sha256:9627cc1a10c8c409b5822a92d57a77f383b554463d1884008e051c32ab1b3742"}, + {file = "aiohttp-3.10.10-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:50edbcad60d8f0e3eccc68da67f37268b5144ecc34d59f27a02f9611c1d4eec7"}, + {file = "aiohttp-3.10.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a45d85cf20b5e0d0aa5a8dca27cce8eddef3292bc29d72dcad1641f4ed50aa16"}, + {file = "aiohttp-3.10.10-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0b00807e2605f16e1e198f33a53ce3c4523114059b0c09c337209ae55e3823a8"}, + {file = "aiohttp-3.10.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f2d4324a98062be0525d16f768a03e0bbb3b9fe301ceee99611dc9a7953124e6"}, + {file = "aiohttp-3.10.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:438cd072f75bb6612f2aca29f8bd7cdf6e35e8f160bc312e49fbecab77c99e3a"}, + {file = "aiohttp-3.10.10-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:baa42524a82f75303f714108fea528ccacf0386af429b69fff141ffef1c534f9"}, + {file = "aiohttp-3.10.10-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:a7d8d14fe962153fc681f6366bdec33d4356f98a3e3567782aac1b6e0e40109a"}, + {file = "aiohttp-3.10.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c1277cd707c465cd09572a774559a3cc7c7a28802eb3a2a9472588f062097205"}, + {file = "aiohttp-3.10.10-cp312-cp312-win32.whl", hash = "sha256:59bb3c54aa420521dc4ce3cc2c3fe2ad82adf7b09403fa1f48ae45c0cbde6628"}, + {file = "aiohttp-3.10.10-cp312-cp312-win_amd64.whl", hash = "sha256:0e1b370d8007c4ae31ee6db7f9a2fe801a42b146cec80a86766e7ad5c4a259cf"}, + {file = "aiohttp-3.10.10-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ad7593bb24b2ab09e65e8a1d385606f0f47c65b5a2ae6c551db67d6653e78c28"}, + {file = "aiohttp-3.10.10-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1eb89d3d29adaf533588f209768a9c02e44e4baf832b08118749c5fad191781d"}, + {file = "aiohttp-3.10.10-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3fe407bf93533a6fa82dece0e74dbcaaf5d684e5a51862887f9eaebe6372cd79"}, + {file = "aiohttp-3.10.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50aed5155f819873d23520919e16703fc8925e509abbb1a1491b0087d1cd969e"}, + {file = "aiohttp-3.10.10-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4f05e9727ce409358baa615dbeb9b969db94324a79b5a5cea45d39bdb01d82e6"}, + {file = "aiohttp-3.10.10-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dffb610a30d643983aeb185ce134f97f290f8935f0abccdd32c77bed9388b42"}, + {file = "aiohttp-3.10.10-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa6658732517ddabe22c9036479eabce6036655ba87a0224c612e1ae6af2087e"}, + {file = "aiohttp-3.10.10-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:741a46d58677d8c733175d7e5aa618d277cd9d880301a380fd296975a9cdd7bc"}, + {file = "aiohttp-3.10.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e00e3505cd80440f6c98c6d69269dcc2a119f86ad0a9fd70bccc59504bebd68a"}, + {file = "aiohttp-3.10.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ffe595f10566f8276b76dc3a11ae4bb7eba1aac8ddd75811736a15b0d5311414"}, + {file = "aiohttp-3.10.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:bdfcf6443637c148c4e1a20c48c566aa694fa5e288d34b20fcdc58507882fed3"}, + {file = "aiohttp-3.10.10-cp313-cp313-musllinux_1_2_s390x.whl", hash = 
"sha256:d183cf9c797a5291e8301790ed6d053480ed94070637bfaad914dd38b0981f67"}, + {file = "aiohttp-3.10.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:77abf6665ae54000b98b3c742bc6ea1d1fb31c394bcabf8b5d2c1ac3ebfe7f3b"}, + {file = "aiohttp-3.10.10-cp313-cp313-win32.whl", hash = "sha256:4470c73c12cd9109db8277287d11f9dd98f77fc54155fc71a7738a83ffcc8ea8"}, + {file = "aiohttp-3.10.10-cp313-cp313-win_amd64.whl", hash = "sha256:486f7aabfa292719a2753c016cc3a8f8172965cabb3ea2e7f7436c7f5a22a151"}, + {file = "aiohttp-3.10.10-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:1b66ccafef7336a1e1f0e389901f60c1d920102315a56df85e49552308fc0486"}, + {file = "aiohttp-3.10.10-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:acd48d5b80ee80f9432a165c0ac8cbf9253eaddb6113269a5e18699b33958dbb"}, + {file = "aiohttp-3.10.10-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3455522392fb15ff549d92fbf4b73b559d5e43dc522588f7eb3e54c3f38beee7"}, + {file = "aiohttp-3.10.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45c3b868724137f713a38376fef8120c166d1eadd50da1855c112fe97954aed8"}, + {file = "aiohttp-3.10.10-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:da1dee8948d2137bb51fbb8a53cce6b1bcc86003c6b42565f008438b806cccd8"}, + {file = "aiohttp-3.10.10-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c5ce2ce7c997e1971b7184ee37deb6ea9922ef5163c6ee5aa3c274b05f9e12fa"}, + {file = "aiohttp-3.10.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28529e08fde6f12eba8677f5a8608500ed33c086f974de68cc65ab218713a59d"}, + {file = "aiohttp-3.10.10-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f7db54c7914cc99d901d93a34704833568d86c20925b2762f9fa779f9cd2e70f"}, + {file = "aiohttp-3.10.10-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:03a42ac7895406220124c88911ebee31ba8b2d24c98507f4a8bf826b2937c7f2"}, + {file = "aiohttp-3.10.10-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:7e338c0523d024fad378b376a79faff37fafb3c001872a618cde1d322400a572"}, + {file = "aiohttp-3.10.10-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:038f514fe39e235e9fef6717fbf944057bfa24f9b3db9ee551a7ecf584b5b480"}, + {file = "aiohttp-3.10.10-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:64f6c17757251e2b8d885d728b6433d9d970573586a78b78ba8929b0f41d045a"}, + {file = "aiohttp-3.10.10-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:93429602396f3383a797a2a70e5f1de5df8e35535d7806c9f91df06f297e109b"}, + {file = "aiohttp-3.10.10-cp38-cp38-win32.whl", hash = "sha256:c823bc3971c44ab93e611ab1a46b1eafeae474c0c844aff4b7474287b75fe49c"}, + {file = "aiohttp-3.10.10-cp38-cp38-win_amd64.whl", hash = "sha256:54ca74df1be3c7ca1cf7f4c971c79c2daf48d9aa65dea1a662ae18926f5bc8ce"}, + {file = "aiohttp-3.10.10-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:01948b1d570f83ee7bbf5a60ea2375a89dfb09fd419170e7f5af029510033d24"}, + {file = "aiohttp-3.10.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9fc1500fd2a952c5c8e3b29aaf7e3cc6e27e9cfc0a8819b3bce48cc1b849e4cc"}, + {file = "aiohttp-3.10.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f614ab0c76397661b90b6851a030004dac502e48260ea10f2441abd2207fbcc7"}, + {file = "aiohttp-3.10.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00819de9e45d42584bed046314c40ea7e9aea95411b38971082cad449392b08c"}, + {file = "aiohttp-3.10.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:05646ebe6b94cc93407b3bf34b9eb26c20722384d068eb7339de802154d61bc5"}, + {file = "aiohttp-3.10.10-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:998f3bd3cfc95e9424a6acd7840cbdd39e45bc09ef87533c006f94ac47296090"}, + {file = "aiohttp-3.10.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d9010c31cd6fa59438da4e58a7f19e4753f7f264300cd152e7f90d4602449762"}, + {file = "aiohttp-3.10.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ea7ffc6d6d6f8a11e6f40091a1040995cdff02cfc9ba4c2f30a516cb2633554"}, + {file = "aiohttp-3.10.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ef9c33cc5cbca35808f6c74be11eb7f5f6b14d2311be84a15b594bd3e58b5527"}, + {file = "aiohttp-3.10.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ce0cdc074d540265bfeb31336e678b4e37316849d13b308607efa527e981f5c2"}, + {file = "aiohttp-3.10.10-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:597a079284b7ee65ee102bc3a6ea226a37d2b96d0418cc9047490f231dc09fe8"}, + {file = "aiohttp-3.10.10-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:7789050d9e5d0c309c706953e5e8876e38662d57d45f936902e176d19f1c58ab"}, + {file = "aiohttp-3.10.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e7f8b04d83483577fd9200461b057c9f14ced334dcb053090cea1da9c8321a91"}, + {file = "aiohttp-3.10.10-cp39-cp39-win32.whl", hash = "sha256:c02a30b904282777d872266b87b20ed8cc0d1501855e27f831320f471d54d983"}, + {file = "aiohttp-3.10.10-cp39-cp39-win_amd64.whl", hash = "sha256:edfe3341033a6b53a5c522c802deb2079eee5cbfbb0af032a55064bd65c73a23"}, + {file = "aiohttp-3.10.10.tar.gz", hash = "sha256:0631dd7c9f0822cc61c88586ca76d5b5ada26538097d0f1df510b082bad3411a"}, +] + +[package.dependencies] +aiohappyeyeballs = ">=2.3.0" +aiosignal = ">=1.1.2" +async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""} +attrs = ">=17.3.0" +frozenlist = ">=1.1.1" +multidict = ">=4.5,<7.0" +yarl = ">=1.12.0,<2.0" + +[package.extras] +speedups = ["Brotli", "aiodns (>=3.2.0)", "brotlicffi"] + +[[package]] +name = "aiosignal" +version = "1.3.1" +description = "aiosignal: a list of registered asynchronous callbacks" +optional = false +python-versions = ">=3.7" +files = [ + {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, + {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, +] + +[package.dependencies] +frozenlist = ">=1.1.0" [[package]] name = "annotated-types" @@ -44,82 +181,6 @@ files = [ {file = "appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee"}, ] -[[package]] -name = "argon2-cffi" -version = "23.1.0" -description = "Argon2 for Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "argon2_cffi-23.1.0-py3-none-any.whl", hash = "sha256:c670642b78ba29641818ab2e68bd4e6a78ba53b7eff7b4c3815ae16abf91c7ea"}, - {file = "argon2_cffi-23.1.0.tar.gz", hash = "sha256:879c3e79a2729ce768ebb7d36d4609e3a78a4ca2ec3a9f12286ca057e3d0db08"}, -] - -[package.dependencies] -argon2-cffi-bindings = "*" - -[package.extras] -dev = ["argon2-cffi[tests,typing]", "tox (>4)"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-copybutton", "sphinx-notfound-page"] -tests = ["hypothesis", "pytest"] -typing = ["mypy"] - -[[package]] -name = "argon2-cffi-bindings" -version = "21.2.0" -description = "Low-level CFFI bindings for Argon2" -optional = false -python-versions = 
">=3.6" -files = [ - {file = "argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9524464572e12979364b7d600abf96181d3541da11e23ddf565a32e70bd4dc0d"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b746dba803a79238e925d9046a63aa26bf86ab2a2fe74ce6b009a1c3f5c8f2ae"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58ed19212051f49a523abb1dbe954337dc82d947fb6e5a0da60f7c8471a8476c"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bd46088725ef7f58b5a1ef7ca06647ebaf0eb4baff7d1d0d177c6cc8744abd86"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_i686.whl", hash = "sha256:8cd69c07dd875537a824deec19f978e0f2078fdda07fd5c42ac29668dda5f40f"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f1152ac548bd5b8bcecfb0b0371f082037e47128653df2e8ba6e914d384f3c3e"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win32.whl", hash = "sha256:603ca0aba86b1349b147cab91ae970c63118a0f30444d4bc80355937c950c082"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:b2ef1c30440dbbcba7a5dc3e319408b59676e2e039e2ae11a8775ecf482b192f"}, - {file = "argon2_cffi_bindings-21.2.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e415e3f62c8d124ee16018e491a009937f8cf7ebf5eb430ffc5de21b900dad93"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3e385d1c39c520c08b53d63300c3ecc28622f076f4c2b0e6d7e796e9f6502194"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3e3cc67fdb7d82c4718f19b4e7a87123caf8a93fde7e23cf66ac0337d3cb3f"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a22ad9800121b71099d0fb0a65323810a15f2e292f2ba450810a7316e128ee5"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9f8b450ed0547e3d473fdc8612083fd08dd2120d6ac8f73828df9b7d45bb351"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:93f9bf70084f97245ba10ee36575f0c3f1e7d7724d67d8e5b08e61787c320ed7"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3b9ef65804859d335dc6b31582cad2c5166f0c3e7975f324d9ffaa34ee7e6583"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4966ef5848d820776f5f562a7d45fdd70c2f330c961d0d745b784034bd9f48d"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ef543a89dee4db46a1a6e206cd015360e5a75822f76df533845c3cbaf72670"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed2937d286e2ad0cc79a7087d3c272832865f779430e0cc2b4f3718d3159b0cb"}, - {file = 
"argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5e00316dabdaea0b2dd82d141cc66889ced0cdcbfa599e8b471cf22c620c329a"}, -] - -[package.dependencies] -cffi = ">=1.0.1" - -[package.extras] -dev = ["cogapp", "pre-commit", "pytest", "wheel"] -tests = ["pytest"] - -[[package]] -name = "arrow" -version = "1.3.0" -description = "Better dates & times for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "arrow-1.3.0-py3-none-any.whl", hash = "sha256:c728b120ebc00eb84e01882a6f5e7927a53960aa990ce7dd2b10f39005a67f80"}, - {file = "arrow-1.3.0.tar.gz", hash = "sha256:d4540617648cb5f895730f1ad8c82a65f2dad0166f57b75f3ca54759c4d67a85"}, -] - -[package.dependencies] -python-dateutil = ">=2.7.0" -types-python-dateutil = ">=2.8.10" - -[package.extras] -doc = ["doc8", "sphinx (>=7.0.0)", "sphinx-autobuild", "sphinx-autodoc-typehints", "sphinx_rtd_theme (>=1.3.0)"] -test = ["dateparser (==1.*)", "pre-commit", "pytest", "pytest-cov", "pytest-mock", "pytz (==2021.1)", "simplejson (==3.*)"] - [[package]] name = "asttokens" version = "2.4.1" @@ -139,19 +200,16 @@ astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"] test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] [[package]] -name = "async-lru" -version = "2.0.4" -description = "Simple LRU cache for asyncio" +name = "async-timeout" +version = "4.0.3" +description = "Timeout context manager for asyncio programs" optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ - {file = "async-lru-2.0.4.tar.gz", hash = "sha256:b8a59a5df60805ff63220b2a0c5b5393da5521b113cd5465a44eb037d81a5627"}, - {file = "async_lru-2.0.4-py3-none-any.whl", hash = "sha256:ff02944ce3c288c5be660c42dbcca0742b32c3b279d6dceda655190240b99224"}, + {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, + {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, ] -[package.dependencies] -typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} - [[package]] name = "attrs" version = "24.2.0" @@ -171,41 +229,6 @@ docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphi tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] -[[package]] -name = "babel" -version = "2.16.0" -description = "Internationalization utilities" -optional = false -python-versions = ">=3.8" -files = [ - {file = "babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b"}, - {file = "babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316"}, -] - -[package.extras] -dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] - -[[package]] -name = "beautifulsoup4" -version = "4.12.3" -description = "Screen-scraping library" -optional = false -python-versions = ">=3.6.0" -files = [ - {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, - {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, -] - -[package.dependencies] -soupsieve = ">1.2" - -[package.extras] -cchardet = ["cchardet"] -chardet = ["chardet"] -charset-normalizer = ["charset-normalizer"] -html5lib = ["html5lib"] -lxml = 
["lxml"] - [[package]] name = "black" version = "24.8.0" @@ -252,24 +275,6 @@ d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] -[[package]] -name = "bleach" -version = "6.1.0" -description = "An easy safelist-based HTML-sanitizing tool." -optional = false -python-versions = ">=3.8" -files = [ - {file = "bleach-6.1.0-py3-none-any.whl", hash = "sha256:3225f354cfc436b9789c66c4ee030194bee0568fbf9cbdad3bc8b5c26c5f12b6"}, - {file = "bleach-6.1.0.tar.gz", hash = "sha256:0a31f1837963c41d46bbf1331b8778e1308ea0791db03cc4e7357b97cf42a8fe"}, -] - -[package.dependencies] -six = ">=1.9.0" -webencodings = "*" - -[package.extras] -css = ["tinycss2 (>=1.1.0,<1.3)"] - [[package]] name = "certifi" version = "2024.8.30" @@ -501,6 +506,21 @@ traitlets = ">=4" [package.extras] test = ["pytest"] +[[package]] +name = "dataclasses-json" +version = "0.6.7" +description = "Easily serialize dataclasses to and from JSON." +optional = false +python-versions = "<4.0,>=3.7" +files = [ + {file = "dataclasses_json-0.6.7-py3-none-any.whl", hash = "sha256:0dbf33f26c8d5305befd61b39d2b3414e8a407bedc2834dea9b8d642666fb40a"}, + {file = "dataclasses_json-0.6.7.tar.gz", hash = "sha256:b6b3e528266ea45b9535223bc53ca645f5208833c29229e847b3f26a1cc55fc0"}, +] + +[package.dependencies] +marshmallow = ">=3.18.0,<4.0.0" +typing-inspect = ">=0.4.0,<1" + [[package]] name = "debugpy" version = "1.8.6" @@ -543,17 +563,6 @@ files = [ {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, ] -[[package]] -name = "defusedxml" -version = "0.7.1" -description = "XML bomb protection for Python stdlib modules" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, - {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, -] - [[package]] name = "distro" version = "1.9.0" @@ -594,29 +603,176 @@ files = [ tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] [[package]] -name = "fastjsonschema" -version = "2.20.0" -description = "Fastest Python implementation of JSON schema" +name = "frozenlist" +version = "1.4.1" +description = "A list-like structure which implements collections.abc.MutableSequence" +optional = false +python-versions = ">=3.8" +files = [ + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"}, + {file = "frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"}, + {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"}, + {file = 
"frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"}, + {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"}, + {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"}, + {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"}, + {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7"}, + {file = "frozenlist-1.4.1-cp38-cp38-win32.whl", hash = "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497"}, + {file = "frozenlist-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = 
"sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6"}, + {file = "frozenlist-1.4.1-cp39-cp39-win32.whl", hash = "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932"}, + {file = "frozenlist-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0"}, + {file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"}, + {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"}, +] + +[[package]] +name = "greenlet" +version = "3.1.1" +description = "Lightweight in-process concurrent programming" optional = false -python-versions = "*" +python-versions = ">=3.7" files = [ - {file = "fastjsonschema-2.20.0-py3-none-any.whl", hash = "sha256:5875f0b0fa7a0043a91e93a9b8f793bcbbba9691e7fd83dca95c28ba26d21f0a"}, - {file = "fastjsonschema-2.20.0.tar.gz", hash = "sha256:3d48fc5300ee96f5d116f10fe6f28d938e6008f59a6a025c2649475b87f76a23"}, + {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36b89d13c49216cadb828db8dfa6ce86bbbc476a82d3a6c397f0efae0525bdd0"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b6150a85e1b33b40b1464a3f9988dcc5251d6ed06842abff82e42632fac120"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93147c513fac16385d1036b7e5b102c7fbbdb163d556b791f0f11eada7ba65dc"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da7a9bff22ce038e19bf62c4dd1ec8391062878710ded0a845bcf47cc0200617"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b2795058c23988728eec1f36a4e5e4ebad22f8320c85f3587b539b9ac84128d7"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ed10eac5830befbdd0c32f83e8aa6288361597550ba669b04c48f0f9a2c843c6"}, + {file = "greenlet-3.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:77c386de38a60d1dfb8e55b8c1101d68c79dfdd25c7095d51fec2dd800892b80"}, + {file = "greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383"}, + {file = 
"greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395"}, + {file = "greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39"}, + {file = "greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942"}, + {file = "greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01"}, + {file = "greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c"}, + {file = "greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761"}, + {file = 
"greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822"}, + {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01"}, + {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47da355d8687fd65240c364c90a31569a133b7b60de111c255ef5b606f2ae291"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98884ecf2ffb7d7fe6bd517e8eb99d31ff7855a840fa6d0d63cd07c037f6a981"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1d4aeb8891338e60d1ab6127af1fe45def5259def8094b9c7e34690c8858803"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db32b5348615a04b82240cc67983cb315309e88d444a288934ee6ceaebcad6cc"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dcc62f31eae24de7f8dce72134c8651c58000d3b1868e01392baea7c32c247de"}, + {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1d3755bcb2e02de341c55b4fca7a745a24a9e7212ac953f6b3a48d117d7257aa"}, + {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b8da394b34370874b4572676f36acabac172602abf054cbc4ac910219f3340af"}, + {file = "greenlet-3.1.1-cp37-cp37m-win32.whl", hash = "sha256:a0dfc6c143b519113354e780a50381508139b07d2177cb6ad6a08278ec655798"}, + {file = "greenlet-3.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:54558ea205654b50c438029505def3834e80f0869a70fb15b871c29b4575ddef"}, + {file = "greenlet-3.1.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:346bed03fe47414091be4ad44786d1bd8bef0c3fcad6ed3dee074a032ab408a9"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfc59d69fc48664bc693842bd57acfdd490acafda1ab52c7836e3fc75c90a111"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21e10da6ec19b457b82636209cbe2331ff4306b54d06fa04b7c138ba18c8a81"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37b9de5a96111fc15418819ab4c4432e4f3c2ede61e660b1e33971eba26ef9ba"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef9ea3f137e5711f0dbe5f9263e8c009b7069d8a1acea822bd5e9dae0ae49c8"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85f3ff71e2e60bd4b4932a043fbbe0f499e263c628390b285cb599154a3b03b1"}, + 
{file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:95ffcf719966dd7c453f908e208e14cde192e09fde6c7186c8f1896ef778d8cd"}, + {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:03a088b9de532cbfe2ba2034b2b85e82df37874681e8c470d6fb2f8c04d7e4b7"}, + {file = "greenlet-3.1.1-cp38-cp38-win32.whl", hash = "sha256:8b8b36671f10ba80e159378df9c4f15c14098c4fd73a36b9ad715f057272fbef"}, + {file = "greenlet-3.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:7017b2be767b9d43cc31416aba48aab0d2309ee31b4dbf10a1d38fb7972bdf9d"}, + {file = "greenlet-3.1.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:396979749bd95f018296af156201d6211240e7a23090f50a8d5d18c370084dc3"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9d0ff5ad43e785350894d97e13633a66e2b50000e8a183a50a88d834752d42"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f6ff3b14f2df4c41660a7dec01045a045653998784bf8cfcb5a525bdffffbc8f"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94ebba31df2aa506d7b14866fed00ac141a867e63143fe5bca82a8e503b36437"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aaad12ac0ff500f62cebed98d8789198ea0e6f233421059fa68a5aa7220145"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63e4844797b975b9af3a3fb8f7866ff08775f5426925e1e0bbcfe7932059a12c"}, + {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7939aa3ca7d2a1593596e7ac6d59391ff30281ef280d8632fa03d81f7c5f955e"}, + {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d0028e725ee18175c6e422797c407874da24381ce0690d6b9396c204c7f7276e"}, + {file = "greenlet-3.1.1-cp39-cp39-win32.whl", hash = "sha256:5e06afd14cbaf9e00899fae69b24a32f2196c19de08fcb9f4779dd4f004e5e7c"}, + {file = "greenlet-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22"}, + {file = "greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467"}, ] [package.extras] -devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] - -[[package]] -name = "fqdn" -version = "1.5.1" -description = "Validates fully-qualified domain names against RFC 1123, so that they are acceptable to modern bowsers" -optional = false -python-versions = ">=2.7, !=3.0, !=3.1, !=3.2, !=3.3, !=3.4, <4" -files = [ - {file = "fqdn-1.5.1-py3-none-any.whl", hash = "sha256:3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014"}, - {file = "fqdn-1.5.1.tar.gz", hash = "sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f"}, -] +docs = ["Sphinx", "furo"] +test = ["objgraph", "psutil"] [[package]] name = "h11" @@ -793,41 +949,6 @@ qtconsole = ["qtconsole"] test = ["pickleshare", "pytest (<7.1)", "pytest-asyncio (<0.22)", "testpath"] test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.22)", "pandas", "pickleshare", "pytest (<7.1)", "pytest-asyncio (<0.22)", "testpath", "trio"] -[[package]] -name = "ipywidgets" -version = "8.1.5" -description = "Jupyter interactive widgets" -optional = false -python-versions = ">=3.7" -files = [ - {file = "ipywidgets-8.1.5-py3-none-any.whl", hash = "sha256:3290f526f87ae6e77655555baba4f36681c555b8bdbbff430b70e52c34c86245"}, - {file = 
"ipywidgets-8.1.5.tar.gz", hash = "sha256:870e43b1a35656a80c18c9503bbf2d16802db1cb487eec6fab27d683381dde17"}, -] - -[package.dependencies] -comm = ">=0.1.3" -ipython = ">=6.1.0" -jupyterlab-widgets = ">=3.0.12,<3.1.0" -traitlets = ">=4.3.1" -widgetsnbextension = ">=4.0.12,<4.1.0" - -[package.extras] -test = ["ipykernel", "jsonschema", "pytest (>=3.6.0)", "pytest-cov", "pytz"] - -[[package]] -name = "isoduration" -version = "20.11.0" -description = "Operations with ISO 8601 durations" -optional = false -python-versions = ">=3.7" -files = [ - {file = "isoduration-20.11.0-py3-none-any.whl", hash = "sha256:b2904c2a4228c3d44f409c8ae8e2370eb21a26f7ac2ec5446df141dde3452042"}, - {file = "isoduration-20.11.0.tar.gz", hash = "sha256:ac2f9015137935279eac671f94f89eb00584f940f5dc49462a0c4ee692ba1bd9"}, -] - -[package.dependencies] -arrow = ">=0.15.0" - [[package]] name = "jedi" version = "0.19.1" @@ -847,23 +968,6 @@ docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alab qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] -[[package]] -name = "jinja2" -version = "3.1.4" -description = "A very fast and expressive template engine." -optional = false -python-versions = ">=3.7" -files = [ - {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, - {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, -] - -[package.dependencies] -MarkupSafe = ">=2.0" - -[package.extras] -i18n = ["Babel (>=2.7)"] - [[package]] name = "jiter" version = "0.5.0" @@ -934,17 +1038,6 @@ files = [ {file = "jiter-0.5.0.tar.gz", hash = "sha256:1d916ba875bcab5c5f7d927df998c4cb694d27dceddf3392e58beaf10563368a"}, ] -[[package]] -name = "json5" -version = "0.9.25" -description = "A Python implementation of the JSON5 data format." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "json5-0.9.25-py3-none-any.whl", hash = "sha256:34ed7d834b1341a86987ed52f3f76cd8ee184394906b6e22a1e0deb9ab294e8f"}, - {file = "json5-0.9.25.tar.gz", hash = "sha256:548e41b9be043f9426776f05df8635a00fe06104ea51ed24b67f908856e151ae"}, -] - [[package]] name = "jsonpatch" version = "1.33" @@ -970,49 +1063,6 @@ files = [ {file = "jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef"}, ] -[[package]] -name = "jsonschema" -version = "4.23.0" -description = "An implementation of JSON Schema validation for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566"}, - {file = "jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4"}, -] - -[package.dependencies] -attrs = ">=22.2.0" -fqdn = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} -idna = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} -isoduration = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} -jsonpointer = {version = ">1.13", optional = true, markers = "extra == \"format-nongpl\""} -jsonschema-specifications = ">=2023.03.6" -referencing = ">=0.28.4" -rfc3339-validator = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} -rfc3986-validator = {version = ">0.1.0", optional = true, markers = "extra == \"format-nongpl\""} -rpds-py = ">=0.7.1" -uri-template = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} -webcolors = {version = ">=24.6.0", optional = true, markers = "extra == \"format-nongpl\""} - -[package.extras] -format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] -format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=24.6.0)"] - -[[package]] -name = "jsonschema-specifications" -version = "2023.12.1" -description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"}, - {file = "jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"}, -] - -[package.dependencies] -referencing = ">=0.31.0" - [[package]] name = "jupyter-client" version = "8.6.3" @@ -1057,251 +1107,126 @@ docs = ["myst-parser", "pydata-sphinx-theme", "sphinx-autodoc-typehints", "sphin test = ["ipykernel", "pre-commit", "pytest (<8)", "pytest-cov", "pytest-timeout"] [[package]] -name = "jupyter-events" -version = "0.10.0" -description = "Jupyter Event System library" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyter_events-0.10.0-py3-none-any.whl", hash = "sha256:4b72130875e59d57716d327ea70d3ebc3af1944d3717e5a498b8a06c6c159960"}, - {file = "jupyter_events-0.10.0.tar.gz", hash = "sha256:670b8229d3cc882ec782144ed22e0d29e1c2d639263f92ca8383e66682845e22"}, -] - -[package.dependencies] -jsonschema = {version = ">=4.18.0", extras = ["format-nongpl"]} -python-json-logger = ">=2.0.4" -pyyaml = ">=5.3" -referencing = "*" -rfc3339-validator = "*" -rfc3986-validator = ">=0.1.1" 
-traitlets = ">=5.3" - -[package.extras] -cli = ["click", "rich"] -docs = ["jupyterlite-sphinx", "myst-parser", "pydata-sphinx-theme", "sphinxcontrib-spelling"] -test = ["click", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.19.0)", "pytest-console-scripts", "rich"] - -[[package]] -name = "jupyter-lsp" -version = "2.2.5" -description = "Multi-Language Server WebSocket proxy for Jupyter Notebook/Lab server" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyter-lsp-2.2.5.tar.gz", hash = "sha256:793147a05ad446f809fd53ef1cd19a9f5256fd0a2d6b7ce943a982cb4f545001"}, - {file = "jupyter_lsp-2.2.5-py3-none-any.whl", hash = "sha256:45fbddbd505f3fbfb0b6cb2f1bc5e15e83ab7c79cd6e89416b248cb3c00c11da"}, -] - -[package.dependencies] -importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} -jupyter-server = ">=1.1.2" - -[[package]] -name = "jupyter-server" -version = "2.14.2" -description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." -optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyter_server-2.14.2-py3-none-any.whl", hash = "sha256:47ff506127c2f7851a17bf4713434208fc490955d0e8632e95014a9a9afbeefd"}, - {file = "jupyter_server-2.14.2.tar.gz", hash = "sha256:66095021aa9638ced276c248b1d81862e4c50f292d575920bbe960de1c56b12b"}, -] - -[package.dependencies] -anyio = ">=3.1.0" -argon2-cffi = ">=21.1" -jinja2 = ">=3.0.3" -jupyter-client = ">=7.4.4" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" -jupyter-events = ">=0.9.0" -jupyter-server-terminals = ">=0.4.4" -nbconvert = ">=6.4.4" -nbformat = ">=5.3.0" -overrides = ">=5.0" -packaging = ">=22.0" -prometheus-client = ">=0.9" -pywinpty = {version = ">=2.0.1", markers = "os_name == \"nt\""} -pyzmq = ">=24" -send2trash = ">=1.8.2" -terminado = ">=0.8.3" -tornado = ">=6.2.0" -traitlets = ">=5.6.0" -websocket-client = ">=1.7" - -[package.extras] -docs = ["ipykernel", "jinja2", "jupyter-client", "myst-parser", "nbformat", "prometheus-client", "pydata-sphinx-theme", "send2trash", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-openapi (>=0.8.0)", "sphinxcontrib-spelling", "sphinxemoji", "tornado", "typing-extensions"] -test = ["flaky", "ipykernel", "pre-commit", "pytest (>=7.0,<9)", "pytest-console-scripts", "pytest-jupyter[server] (>=0.7)", "pytest-timeout", "requests"] - -[[package]] -name = "jupyter-server-terminals" -version = "0.5.3" -description = "A Jupyter Server Extension Providing Terminals." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyter_server_terminals-0.5.3-py3-none-any.whl", hash = "sha256:41ee0d7dc0ebf2809c668e0fc726dfaf258fcd3e769568996ca731b6194ae9aa"}, - {file = "jupyter_server_terminals-0.5.3.tar.gz", hash = "sha256:5ae0295167220e9ace0edcfdb212afd2b01ee8d179fe6f23c899590e9b8a5269"}, -] - -[package.dependencies] -pywinpty = {version = ">=2.0.3", markers = "os_name == \"nt\""} -terminado = ">=0.8.3" - -[package.extras] -docs = ["jinja2", "jupyter-server", "mistune (<4.0)", "myst-parser", "nbformat", "packaging", "pydata-sphinx-theme", "sphinxcontrib-github-alt", "sphinxcontrib-openapi", "sphinxcontrib-spelling", "sphinxemoji", "tornado"] -test = ["jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-jupyter[server] (>=0.5.3)", "pytest-timeout"] - -[[package]] -name = "jupyterlab" -version = "4.2.5" -description = "JupyterLab computational environment" +name = "langchain" +version = "0.1.20" +description = "Building applications with LLMs through composability" optional = false -python-versions = ">=3.8" +python-versions = "<4.0,>=3.8.1" files = [ - {file = "jupyterlab-4.2.5-py3-none-any.whl", hash = "sha256:73b6e0775d41a9fee7ee756c80f58a6bed4040869ccc21411dc559818874d321"}, - {file = "jupyterlab-4.2.5.tar.gz", hash = "sha256:ae7f3a1b8cb88b4f55009ce79fa7c06f99d70cd63601ee4aa91815d054f46f75"}, + {file = "langchain-0.1.20-py3-none-any.whl", hash = "sha256:09991999fbd6c3421a12db3c7d1f52d55601fc41d9b2a3ef51aab2e0e9c38da9"}, + {file = "langchain-0.1.20.tar.gz", hash = "sha256:f35c95eed8c8375e02dce95a34f2fd4856a4c98269d6dc34547a23dba5beab7e"}, ] [package.dependencies] -async-lru = ">=1.0.0" -httpx = ">=0.25.0" -importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} -ipykernel = ">=6.5.0" -jinja2 = ">=3.0.3" -jupyter-core = "*" -jupyter-lsp = ">=2.0.0" -jupyter-server = ">=2.4.0,<3" -jupyterlab-server = ">=2.27.1,<3" -notebook-shim = ">=0.2" -packaging = "*" -setuptools = ">=40.1.0" -tomli = {version = ">=1.2.2", markers = "python_version < \"3.11\""} -tornado = ">=6.2.0" -traitlets = "*" +aiohttp = ">=3.8.3,<4.0.0" +async-timeout = {version = ">=4.0.0,<5.0.0", markers = "python_version < \"3.11\""} +dataclasses-json = ">=0.5.7,<0.7" +langchain-community = ">=0.0.38,<0.1" +langchain-core = ">=0.1.52,<0.2.0" +langchain-text-splitters = ">=0.0.1,<0.1" +langsmith = ">=0.1.17,<0.2.0" +numpy = ">=1,<2" +pydantic = ">=1,<3" +PyYAML = ">=5.3" +requests = ">=2,<3" +SQLAlchemy = ">=1.4,<3" +tenacity = ">=8.1.0,<9.0.0" [package.extras] -dev = ["build", "bump2version", "coverage", "hatch", "pre-commit", "pytest-cov", "ruff (==0.3.5)"] -docs = ["jsx-lexer", "myst-parser", "pydata-sphinx-theme (>=0.13.0)", "pytest", "pytest-check-links", "pytest-jupyter", "sphinx (>=1.8,<7.3.0)", "sphinx-copybutton"] -docs-screenshots = ["altair (==5.3.0)", "ipython (==8.16.1)", "ipywidgets (==8.1.2)", "jupyterlab-geojson (==3.4.0)", "jupyterlab-language-pack-zh-cn (==4.1.post2)", "matplotlib (==3.8.3)", "nbconvert (>=7.0.0)", "pandas (==2.2.1)", "scipy (==1.12.0)", "vega-datasets (==0.9.0)"] -test = ["coverage", "pytest (>=7.0)", "pytest-check-links (>=0.7)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter (>=0.5.3)", "pytest-timeout", "pytest-tornasync", "requests", "requests-cache", "virtualenv"] -upgrade-extension = ["copier (>=9,<10)", "jinja2-time (<0.3)", "pydantic (<3.0)", "pyyaml-include (<3.0)", "tomli-w (<2.0)"] - -[[package]] -name = "jupyterlab-pygments" -version = "0.3.0" -description = "Pygments theme using JupyterLab CSS 
variables" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyterlab_pygments-0.3.0-py3-none-any.whl", hash = "sha256:841a89020971da1d8693f1a99997aefc5dc424bb1b251fd6322462a1b8842780"}, - {file = "jupyterlab_pygments-0.3.0.tar.gz", hash = "sha256:721aca4d9029252b11cfa9d185e5b5af4d54772bb8072f9b7036f4170054d35d"}, -] - -[[package]] -name = "jupyterlab-server" -version = "2.27.3" -description = "A set of server components for JupyterLab and JupyterLab like applications." +azure = ["azure-ai-formrecognizer (>=3.2.1,<4.0.0)", "azure-ai-textanalytics (>=5.3.0,<6.0.0)", "azure-cognitiveservices-speech (>=1.28.0,<2.0.0)", "azure-core (>=1.26.4,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "azure-search-documents (==11.4.0b8)", "openai (<2)"] +clarifai = ["clarifai (>=9.1.0)"] +cli = ["typer (>=0.9.0,<0.10.0)"] +cohere = ["cohere (>=4,<6)"] +docarray = ["docarray[hnswlib] (>=0.32.0,<0.33.0)"] +embeddings = ["sentence-transformers (>=2,<3)"] +extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cohere (>=4,<6)", "couchbase (>=4.1.9,<5.0.0)", "dashvector (>=1.0.1,<2.0.0)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "langchain-openai (>=0.0.2,<0.1)", "lxml (>=4.9.3,<6.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"] +javascript = ["esprima (>=4.0.1,<5.0.0)"] +llms = ["clarifai (>=9.1.0)", "cohere (>=4,<6)", "huggingface_hub (>=0,<1)", "manifest-ml (>=0.0.1,<0.0.2)", "nlpcloud (>=1,<2)", "openai (<2)", "openlm (>=0.0.5,<0.0.6)", "torch (>=1,<3)", "transformers (>=4,<5)"] +openai = ["openai (<2)", "tiktoken (>=0.3.2,<0.6.0)"] +qdrant = ["qdrant-client (>=1.3.1,<2.0.0)"] +text-helpers = ["chardet (>=5.1.0,<6.0.0)"] + +[[package]] +name = "langchain-community" +version = "0.0.38" +description = "Community contributed 
LangChain integrations." optional = false -python-versions = ">=3.8" +python-versions = "<4.0,>=3.8.1" files = [ - {file = "jupyterlab_server-2.27.3-py3-none-any.whl", hash = "sha256:e697488f66c3db49df675158a77b3b017520d772c6e1548c7d9bcc5df7944ee4"}, - {file = "jupyterlab_server-2.27.3.tar.gz", hash = "sha256:eb36caca59e74471988f0ae25c77945610b887f777255aa21f8065def9e51ed4"}, + {file = "langchain_community-0.0.38-py3-none-any.whl", hash = "sha256:ecb48660a70a08c90229be46b0cc5f6bc9f38f2833ee44c57dfab9bf3a2c121a"}, + {file = "langchain_community-0.0.38.tar.gz", hash = "sha256:127fc4b75bc67b62fe827c66c02e715a730fef8fe69bd2023d466bab06b5810d"}, ] [package.dependencies] -babel = ">=2.10" -importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} -jinja2 = ">=3.0.3" -json5 = ">=0.9.0" -jsonschema = ">=4.18.0" -jupyter-server = ">=1.21,<3" -packaging = ">=21.3" -requests = ">=2.31" +aiohttp = ">=3.8.3,<4.0.0" +dataclasses-json = ">=0.5.7,<0.7" +langchain-core = ">=0.1.52,<0.2.0" +langsmith = ">=0.1.0,<0.2.0" +numpy = ">=1,<2" +PyYAML = ">=5.3" +requests = ">=2,<3" +SQLAlchemy = ">=1.4,<3" +tenacity = ">=8.1.0,<9.0.0" [package.extras] -docs = ["autodoc-traits", "jinja2 (<3.2.0)", "mistune (<4)", "myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-copybutton", "sphinxcontrib-openapi (>0.8)"] -openapi = ["openapi-core (>=0.18.0,<0.19.0)", "ruamel-yaml"] -test = ["hatch", "ipykernel", "openapi-core (>=0.18.0,<0.19.0)", "openapi-spec-validator (>=0.6.0,<0.8.0)", "pytest (>=7.0,<8)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter[server] (>=0.6.2)", "pytest-timeout", "requests-mock", "ruamel-yaml", "sphinxcontrib-spelling", "strict-rfc3339", "werkzeug"] - -[[package]] -name = "jupyterlab-widgets" -version = "3.0.13" -description = "Jupyter interactive widgets for JupyterLab" -optional = false -python-versions = ">=3.7" -files = [ - {file = "jupyterlab_widgets-3.0.13-py3-none-any.whl", hash = "sha256:e3cda2c233ce144192f1e29914ad522b2f4c40e77214b0cc97377ca3d323db54"}, - {file = "jupyterlab_widgets-3.0.13.tar.gz", hash = "sha256:a2966d385328c1942b683a8cd96b89b8dd82c8b8f81dda902bb2bc06d46f5bed"}, -] +cli = ["typer (>=0.9.0,<0.10.0)"] +extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "azure-ai-documentintelligence (>=1.0.0b1,<2.0.0)", "azure-identity (>=1.15.0,<2.0.0)", "azure-search-documents (==11.4.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.6,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cloudpickle (>=2.0.0)", "cohere (>=4,<5)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "elasticsearch (>=8.12.0,<9.0.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "friendli-client (>=1.2.4,<2.0.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "gradientai (>=1.4.0,<2.0.0)", "hdbcli (>=2.19.21,<3.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "httpx (>=0.24.1,<0.25.0)", "httpx-sse (>=0.4.0,<0.5.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.3,<6.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", 
"mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "nvidia-riva-client (>=2.14.0,<3.0.0)", "oci (>=2.119.1,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "oracle-ads (>=2.9.1,<3.0.0)", "oracledb (>=2.2.0,<3.0.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "premai (>=0.3.25,<0.4.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pyjwt (>=2.8.0,<3.0.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "tidb-vector (>=0.0.3,<1.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "tree-sitter (>=0.20.2,<0.21.0)", "tree-sitter-languages (>=1.8.0,<2.0.0)", "upstash-redis (>=0.15.0,<0.16.0)", "vdms (>=0.0.20,<0.0.21)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"] [[package]] name = "langchain-core" -version = "0.2.41" +version = "0.1.52" description = "Building applications with LLMs through composability" optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langchain_core-0.2.41-py3-none-any.whl", hash = "sha256:3278fda5ba9a05defae8bb19f1226032add6aab21917db7b3bc74e750e263e84"}, - {file = "langchain_core-0.2.41.tar.gz", hash = "sha256:bc12032c5a298d85be754ccb129bc13ea21ccb1d6e22f8d7ba18b8da64315bb5"}, + {file = "langchain_core-0.1.52-py3-none-any.whl", hash = "sha256:62566749c92e8a1181c255c788548dc16dbc319d896cd6b9c95dc17af9b2a6db"}, + {file = "langchain_core-0.1.52.tar.gz", hash = "sha256:084c3fc452f5a6966c28ab3ec5dbc8b8d26fc3f63378073928f4e29d90b6393f"}, ] [package.dependencies] jsonpatch = ">=1.33,<2.0" -langsmith = ">=0.1.112,<0.2.0" -packaging = ">=23.2,<25" -pydantic = [ - {version = ">=2.7.4,<3.0.0", markers = "python_full_version >= \"3.12.4\""}, - {version = ">=1,<3", markers = "python_full_version < \"3.12.4\""}, -] +langsmith = ">=0.1.0,<0.2.0" +packaging = ">=23.2,<24.0" +pydantic = ">=1,<3" PyYAML = ">=5.3" -tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<9.0.0" -typing-extensions = ">=4.7" +tenacity = ">=8.1.0,<9.0.0" + +[package.extras] +extended-testing = ["jinja2 (>=3,<4)"] [[package]] name = "langchain-openai" -version = "0.1.25" +version = "0.0.3" description = "An integration package connecting OpenAI and LangChain" optional = false -python-versions = "<4.0,>=3.8.1" +python-versions = ">=3.8.1,<4.0" files = [ - {file = "langchain_openai-0.1.25-py3-none-any.whl", hash = "sha256:f0b34a233d0d9cb8fce6006c903e57085c493c4f0e32862b99063b96eaedb109"}, - {file = "langchain_openai-0.1.25.tar.gz", hash = "sha256:eb116f744f820247a72f54313fb7c01524fba0927120d4e899e5e4ab41ad3928"}, + {file = "langchain_openai-0.0.3-py3-none-any.whl", hash = "sha256:32d8ae288e212ed47af418ffd216c8af3b8115514bb39127ca9e2910c06fc6b2"}, + {file = "langchain_openai-0.0.3.tar.gz", hash = "sha256:19720510abcd7d6217a47d551def7779dc001aebbf978bda5c03e0a8c8167ac3"}, ] [package.dependencies] -langchain-core = ">=0.2.40,<0.3.0" -openai = ">=1.40.0,<2.0.0" -tiktoken = ">=0.7,<1" +langchain-core = ">=0.1.13,<0.2" +numpy = ">=1,<2" +openai = ">=1.6.1,<2.0.0" +tiktoken = ">=0.5.2,<0.6.0" [[package]] -name = "langgraph" -version = "0.2.34" 
-description = "Building stateful, multi-actor applications with LLMs" +name = "langchain-text-splitters" +version = "0.0.2" +description = "LangChain text splitting utilities" optional = false -python-versions = "<4.0,>=3.9.0" +python-versions = "<4.0,>=3.8.1" files = [ - {file = "langgraph-0.2.34-py3-none-any.whl", hash = "sha256:93727158769f3dfad7cc5440405fca5a8cba77f79cbc0928b97f4341e561556f"}, - {file = "langgraph-0.2.34.tar.gz", hash = "sha256:2a7f94d9a2cabbd13e09db6bd87aa99548e98613c4fe762329088b17138cec15"}, + {file = "langchain_text_splitters-0.0.2-py3-none-any.whl", hash = "sha256:13887f32705862c1e1454213cb7834a63aae57c26fcd80346703a1d09c46168d"}, + {file = "langchain_text_splitters-0.0.2.tar.gz", hash = "sha256:ac8927dc0ba08eba702f6961c9ed7df7cead8de19a9f7101ab2b5ea34201b3c1"}, ] [package.dependencies] -langchain-core = ">=0.2.39,<0.4" -langgraph-checkpoint = ">=2.0.0,<3.0.0" - -[[package]] -name = "langgraph-checkpoint" -version = "2.0.0" -description = "Library with base interfaces for LangGraph checkpoint savers." -optional = false -python-versions = "<4.0.0,>=3.9.0" -files = [ - {file = "langgraph_checkpoint-2.0.0-py3-none-any.whl", hash = "sha256:a8d7dbfb86fc381ad9619e15de47eb37451b6051b2ce2749ee25cbb41935ef30"}, - {file = "langgraph_checkpoint-2.0.0.tar.gz", hash = "sha256:884ea7fd7c06904865e8fdcae01233e469ad4bc22871d5376b76f8eeab33464c"}, -] +langchain-core = ">=0.1.28,<0.3" -[package.dependencies] -langchain-core = ">=0.2.38,<0.4" -msgpack = ">=1.1.0,<2.0.0" +[package.extras] +extended-testing = ["beautifulsoup4 (>=4.12.3,<5.0.0)", "lxml (>=4.9.3,<6.0)"] [[package]] name = "langsmith" @@ -1325,74 +1250,24 @@ requests = ">=2,<3" requests-toolbelt = ">=1.0.0,<2.0.0" [[package]] -name = "markupsafe" -version = "2.1.5" -description = "Safely add untrusted strings to HTML/XML markup." +name = "marshmallow" +version = "3.23.0" +description = "A lightweight library for converting complex datatypes to and from native Python datatypes." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" files = [ - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, - {file = 
"MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, - {file = 
"MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, - {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, + {file = "marshmallow-3.23.0-py3-none-any.whl", hash = "sha256:82f20a2397834fe6d9611b241f2f7e7b680ed89c49f84728a1ad937be6b4bdf4"}, + {file = "marshmallow-3.23.0.tar.gz", hash = "sha256:98d8827a9f10c03d44ead298d2e99c6aea8197df18ccfad360dae7f89a50da2e"}, ] +[package.dependencies] +packaging = ">=17.0" + +[package.extras] +dev = ["marshmallow[tests]", "pre-commit (>=3.5,<5.0)", "tox"] +docs = ["alabaster (==1.0.0)", "autodocsumm (==0.2.13)", "sphinx (==8.1.3)", "sphinx-issues (==5.0.0)", "sphinx-version-warning (==1.1.2)"] +tests = ["pytest", "simplejson"] + [[package]] name = "matplotlib-inline" version = "0.1.7" @@ -1408,88 +1283,108 @@ files = [ traitlets = "*" [[package]] -name = "mistune" -version = "3.0.2" -description = "A sane and fast Markdown parser with useful plugins and renderers" -optional = false -python-versions = ">=3.7" -files = [ - {file = "mistune-3.0.2-py3-none-any.whl", hash = "sha256:71481854c30fdbc938963d3605b72501f5c10a9320ecd412c121c163a1c7d205"}, - {file = "mistune-3.0.2.tar.gz", hash = "sha256:fc7f93ded930c92394ef2cb6f04a8aabab4117a91449e72dcc8dfa646a508be8"}, +name = "multidict" +version = "6.1.0" +description = "multidict 
implementation" +optional = false +python-versions = ">=3.8" +files = [ + {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3380252550e372e8511d49481bd836264c009adb826b23fefcc5dd3c69692f60"}, + {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:99f826cbf970077383d7de805c0681799491cb939c25450b9b5b3ced03ca99f1"}, + {file = "multidict-6.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a114d03b938376557927ab23f1e950827c3b893ccb94b62fd95d430fd0e5cf53"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1c416351ee6271b2f49b56ad7f308072f6f44b37118d69c2cad94f3fa8a40d5"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b5d83030255983181005e6cfbac1617ce9746b219bc2aad52201ad121226581"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3e97b5e938051226dc025ec80980c285b053ffb1e25a3db2a3aa3bc046bf7f56"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d618649d4e70ac6efcbba75be98b26ef5078faad23592f9b51ca492953012429"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10524ebd769727ac77ef2278390fb0068d83f3acb7773792a5080f2b0abf7748"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ff3827aef427c89a25cc96ded1759271a93603aba9fb977a6d264648ebf989db"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:06809f4f0f7ab7ea2cabf9caca7d79c22c0758b58a71f9d32943ae13c7ace056"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:f179dee3b863ab1c59580ff60f9d99f632f34ccb38bf67a33ec6b3ecadd0fd76"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:aaed8b0562be4a0876ee3b6946f6869b7bcdb571a5d1496683505944e268b160"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3c8b88a2ccf5493b6c8da9076fb151ba106960a2df90c2633f342f120751a9e7"}, + {file = "multidict-6.1.0-cp310-cp310-win32.whl", hash = "sha256:4a9cb68166a34117d6646c0023c7b759bf197bee5ad4272f420a0141d7eb03a0"}, + {file = "multidict-6.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:20b9b5fbe0b88d0bdef2012ef7dee867f874b72528cf1d08f1d59b0e3850129d"}, + {file = "multidict-6.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3efe2c2cb5763f2f1b275ad2bf7a287d3f7ebbef35648a9726e3b69284a4f3d6"}, + {file = "multidict-6.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7053d3b0353a8b9de430a4f4b4268ac9a4fb3481af37dfe49825bf45ca24156"}, + {file = "multidict-6.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:27e5fc84ccef8dfaabb09d82b7d179c7cf1a3fbc8a966f8274fcb4ab2eb4cadb"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e2b90b43e696f25c62656389d32236e049568b39320e2735d51f08fd362761b"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d83a047959d38a7ff552ff94be767b7fd79b831ad1cd9920662db05fec24fe72"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d1a9dd711d0877a1ece3d2e4fea11a8e75741ca21954c919406b44e7cf971304"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec2abea24d98246b94913b76a125e855eb5c434f7c46546046372fe60f666351"}, + 
{file = "multidict-6.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4867cafcbc6585e4b678876c489b9273b13e9fff9f6d6d66add5e15d11d926cb"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5b48204e8d955c47c55b72779802b219a39acc3ee3d0116d5080c388970b76e3"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d8fff389528cad1618fb4b26b95550327495462cd745d879a8c7c2115248e399"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a7a9541cd308eed5e30318430a9c74d2132e9a8cb46b901326272d780bf2d423"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:da1758c76f50c39a2efd5e9859ce7d776317eb1dd34317c8152ac9251fc574a3"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c943a53e9186688b45b323602298ab727d8865d8c9ee0b17f8d62d14b56f0753"}, + {file = "multidict-6.1.0-cp311-cp311-win32.whl", hash = "sha256:90f8717cb649eea3504091e640a1b8568faad18bd4b9fcd692853a04475a4b80"}, + {file = "multidict-6.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:82176036e65644a6cc5bd619f65f6f19781e8ec2e5330f51aa9ada7504cc1926"}, + {file = "multidict-6.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b04772ed465fa3cc947db808fa306d79b43e896beb677a56fb2347ca1a49c1fa"}, + {file = "multidict-6.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6180c0ae073bddeb5a97a38c03f30c233e0a4d39cd86166251617d1bbd0af436"}, + {file = "multidict-6.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:071120490b47aa997cca00666923a83f02c7fbb44f71cf7f136df753f7fa8761"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50b3a2710631848991d0bf7de077502e8994c804bb805aeb2925a981de58ec2e"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b58c621844d55e71c1b7f7c498ce5aa6985d743a1a59034c57a905b3f153c1ef"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55b6d90641869892caa9ca42ff913f7ff1c5ece06474fbd32fb2cf6834726c95"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b820514bfc0b98a30e3d85462084779900347e4d49267f747ff54060cc33925"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10a9b09aba0c5b48c53761b7c720aaaf7cf236d5fe394cd399c7ba662d5f9966"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e16bf3e5fc9f44632affb159d30a437bfe286ce9e02754759be5536b169b305"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:76f364861c3bfc98cbbcbd402d83454ed9e01a5224bb3a28bf70002a230f73e2"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:820c661588bd01a0aa62a1283f20d2be4281b086f80dad9e955e690c75fb54a2"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:0e5f362e895bc5b9e67fe6e4ded2492d8124bdf817827f33c5b46c2fe3ffaca6"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3ec660d19bbc671e3a6443325f07263be452c453ac9e512f5eb935e7d4ac28b3"}, + {file = "multidict-6.1.0-cp312-cp312-win32.whl", hash = "sha256:58130ecf8f7b8112cdb841486404f1282b9c86ccb30d3519faf301b2e5659133"}, + {file = "multidict-6.1.0-cp312-cp312-win_amd64.whl", hash = 
"sha256:188215fc0aafb8e03341995e7c4797860181562380f81ed0a87ff455b70bf1f1"}, + {file = "multidict-6.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:d569388c381b24671589335a3be6e1d45546c2988c2ebe30fdcada8457a31008"}, + {file = "multidict-6.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:052e10d2d37810b99cc170b785945421141bf7bb7d2f8799d431e7db229c385f"}, + {file = "multidict-6.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f90c822a402cb865e396a504f9fc8173ef34212a342d92e362ca498cad308e28"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b225d95519a5bf73860323e633a664b0d85ad3d5bede6d30d95b35d4dfe8805b"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:23bfd518810af7de1116313ebd9092cb9aa629beb12f6ed631ad53356ed6b86c"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c09fcfdccdd0b57867577b719c69e347a436b86cd83747f179dbf0cc0d4c1f3"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf6bea52ec97e95560af5ae576bdac3aa3aae0b6758c6efa115236d9e07dae44"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57feec87371dbb3520da6192213c7d6fc892d5589a93db548331954de8248fd2"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0c3f390dc53279cbc8ba976e5f8035eab997829066756d811616b652b00a23a3"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:59bfeae4b25ec05b34f1956eaa1cb38032282cd4dfabc5056d0a1ec4d696d3aa"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b2f59caeaf7632cc633b5cf6fc449372b83bbdf0da4ae04d5be36118e46cc0aa"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:37bb93b2178e02b7b618893990941900fd25b6b9ac0fa49931a40aecdf083fe4"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4e9f48f58c2c523d5a06faea47866cd35b32655c46b443f163d08c6d0ddb17d6"}, + {file = "multidict-6.1.0-cp313-cp313-win32.whl", hash = "sha256:3a37ffb35399029b45c6cc33640a92bef403c9fd388acce75cdc88f58bd19a81"}, + {file = "multidict-6.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:e9aa71e15d9d9beaad2c6b9319edcdc0a49a43ef5c0a4c8265ca9ee7d6c67774"}, + {file = "multidict-6.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:db7457bac39421addd0c8449933ac32d8042aae84a14911a757ae6ca3eef1392"}, + {file = "multidict-6.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d094ddec350a2fb899fec68d8353c78233debde9b7d8b4beeafa70825f1c281a"}, + {file = "multidict-6.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5845c1fd4866bb5dd3125d89b90e57ed3138241540897de748cdf19de8a2fca2"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9079dfc6a70abe341f521f78405b8949f96db48da98aeb43f9907f342f627cdc"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3914f5aaa0f36d5d60e8ece6a308ee1c9784cd75ec8151062614657a114c4478"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c08be4f460903e5a9d0f76818db3250f12e9c344e79314d1d570fc69d7f4eae4"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d093be959277cb7dee84b801eb1af388b6ad3ca6a6b6bf1ed7585895789d027d"}, + {file = 
"multidict-6.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3702ea6872c5a2a4eeefa6ffd36b042e9773f05b1f37ae3ef7264b1163c2dcf6"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:2090f6a85cafc5b2db085124d752757c9d251548cedabe9bd31afe6363e0aff2"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:f67f217af4b1ff66c68a87318012de788dd95fcfeb24cc889011f4e1c7454dfd"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:189f652a87e876098bbc67b4da1049afb5f5dfbaa310dd67c594b01c10388db6"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:6bb5992037f7a9eff7991ebe4273ea7f51f1c1c511e6a2ce511d0e7bdb754492"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:ac10f4c2b9e770c4e393876e35a7046879d195cd123b4f116d299d442b335bcd"}, + {file = "multidict-6.1.0-cp38-cp38-win32.whl", hash = "sha256:e27bbb6d14416713a8bd7aaa1313c0fc8d44ee48d74497a0ff4c3a1b6ccb5167"}, + {file = "multidict-6.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:22f3105d4fb15c8f57ff3959a58fcab6ce36814486500cd7485651230ad4d4ef"}, + {file = "multidict-6.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:4e18b656c5e844539d506a0a06432274d7bd52a7487e6828c63a63d69185626c"}, + {file = "multidict-6.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a185f876e69897a6f3325c3f19f26a297fa058c5e456bfcff8015e9a27e83ae1"}, + {file = "multidict-6.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ab7c4ceb38d91570a650dba194e1ca87c2b543488fe9309b4212694174fd539c"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e617fb6b0b6953fffd762669610c1c4ffd05632c138d61ac7e14ad187870669c"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16e5f4bf4e603eb1fdd5d8180f1a25f30056f22e55ce51fb3d6ad4ab29f7d96f"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4c035da3f544b1882bac24115f3e2e8760f10a0107614fc9839fd232200b875"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:957cf8e4b6e123a9eea554fa7ebc85674674b713551de587eb318a2df3e00255"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:483a6aea59cb89904e1ceabd2b47368b5600fb7de78a6e4a2c2987b2d256cf30"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:87701f25a2352e5bf7454caa64757642734da9f6b11384c1f9d1a8e699758057"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:682b987361e5fd7a139ed565e30d81fd81e9629acc7d925a205366877d8c8657"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ce2186a7df133a9c895dea3331ddc5ddad42cdd0d1ea2f0a51e5d161e4762f28"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:9f636b730f7e8cb19feb87094949ba54ee5357440b9658b2a32a5ce4bce53972"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:73eae06aa53af2ea5270cc066dcaf02cc60d2994bbb2c4ef5764949257d10f43"}, + {file = "multidict-6.1.0-cp39-cp39-win32.whl", hash = "sha256:1ca0083e80e791cffc6efce7660ad24af66c8d4079d2a750b29001b53ff59ada"}, + {file = "multidict-6.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:aa466da5b15ccea564bdab9c89175c762bc12825f4659c11227f515cee76fa4a"}, + {file = 
"multidict-6.1.0-py3-none-any.whl", hash = "sha256:48e171e52d1c4d33888e529b999e5900356b9ae588c2f09a52dcefb158b27506"}, + {file = "multidict-6.1.0.tar.gz", hash = "sha256:22ae2ebf9b0c69d206c003e2f6a914ea33f0a932d4aa16f236afc049d9958f4a"}, ] -[[package]] -name = "msgpack" -version = "1.1.0" -description = "MessagePack serializer" -optional = false -python-versions = ">=3.8" -files = [ - {file = "msgpack-1.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7ad442d527a7e358a469faf43fda45aaf4ac3249c8310a82f0ccff9164e5dccd"}, - {file = "msgpack-1.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:74bed8f63f8f14d75eec75cf3d04ad581da6b914001b474a5d3cd3372c8cc27d"}, - {file = "msgpack-1.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:914571a2a5b4e7606997e169f64ce53a8b1e06f2cf2c3a7273aa106236d43dd5"}, - {file = "msgpack-1.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c921af52214dcbb75e6bdf6a661b23c3e6417f00c603dd2070bccb5c3ef499f5"}, - {file = "msgpack-1.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8ce0b22b890be5d252de90d0e0d119f363012027cf256185fc3d474c44b1b9e"}, - {file = "msgpack-1.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:73322a6cc57fcee3c0c57c4463d828e9428275fb85a27aa2aa1a92fdc42afd7b"}, - {file = "msgpack-1.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e1f3c3d21f7cf67bcf2da8e494d30a75e4cf60041d98b3f79875afb5b96f3a3f"}, - {file = "msgpack-1.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:64fc9068d701233effd61b19efb1485587560b66fe57b3e50d29c5d78e7fef68"}, - {file = "msgpack-1.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:42f754515e0f683f9c79210a5d1cad631ec3d06cea5172214d2176a42e67e19b"}, - {file = "msgpack-1.1.0-cp310-cp310-win32.whl", hash = "sha256:3df7e6b05571b3814361e8464f9304c42d2196808e0119f55d0d3e62cd5ea044"}, - {file = "msgpack-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:685ec345eefc757a7c8af44a3032734a739f8c45d1b0ac45efc5d8977aa4720f"}, - {file = "msgpack-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3d364a55082fb2a7416f6c63ae383fbd903adb5a6cf78c5b96cc6316dc1cedc7"}, - {file = "msgpack-1.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:79ec007767b9b56860e0372085f8504db5d06bd6a327a335449508bbee9648fa"}, - {file = "msgpack-1.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6ad622bf7756d5a497d5b6836e7fc3752e2dd6f4c648e24b1803f6048596f701"}, - {file = "msgpack-1.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e59bca908d9ca0de3dc8684f21ebf9a690fe47b6be93236eb40b99af28b6ea6"}, - {file = "msgpack-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e1da8f11a3dd397f0a32c76165cf0c4eb95b31013a94f6ecc0b280c05c91b59"}, - {file = "msgpack-1.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:452aff037287acb1d70a804ffd022b21fa2bb7c46bee884dbc864cc9024128a0"}, - {file = "msgpack-1.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8da4bf6d54ceed70e8861f833f83ce0814a2b72102e890cbdfe4b34764cdd66e"}, - {file = "msgpack-1.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:41c991beebf175faf352fb940bf2af9ad1fb77fd25f38d9142053914947cdbf6"}, - {file = "msgpack-1.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a52a1f3a5af7ba1c9ace055b659189f6c669cf3657095b50f9602af3a3ba0fe5"}, - {file = "msgpack-1.1.0-cp311-cp311-win32.whl", hash = 
"sha256:58638690ebd0a06427c5fe1a227bb6b8b9fdc2bd07701bec13c2335c82131a88"}, - {file = "msgpack-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:fd2906780f25c8ed5d7b323379f6138524ba793428db5d0e9d226d3fa6aa1788"}, - {file = "msgpack-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:d46cf9e3705ea9485687aa4001a76e44748b609d260af21c4ceea7f2212a501d"}, - {file = "msgpack-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5dbad74103df937e1325cc4bfeaf57713be0b4f15e1c2da43ccdd836393e2ea2"}, - {file = "msgpack-1.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:58dfc47f8b102da61e8949708b3eafc3504509a5728f8b4ddef84bd9e16ad420"}, - {file = "msgpack-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4676e5be1b472909b2ee6356ff425ebedf5142427842aa06b4dfd5117d1ca8a2"}, - {file = "msgpack-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17fb65dd0bec285907f68b15734a993ad3fc94332b5bb21b0435846228de1f39"}, - {file = "msgpack-1.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a51abd48c6d8ac89e0cfd4fe177c61481aca2d5e7ba42044fd218cfd8ea9899f"}, - {file = "msgpack-1.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2137773500afa5494a61b1208619e3871f75f27b03bcfca7b3a7023284140247"}, - {file = "msgpack-1.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:398b713459fea610861c8a7b62a6fec1882759f308ae0795b5413ff6a160cf3c"}, - {file = "msgpack-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:06f5fd2f6bb2a7914922d935d3b8bb4a7fff3a9a91cfce6d06c13bc42bec975b"}, - {file = "msgpack-1.1.0-cp312-cp312-win32.whl", hash = "sha256:ad33e8400e4ec17ba782f7b9cf868977d867ed784a1f5f2ab46e7ba53b6e1e1b"}, - {file = "msgpack-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:115a7af8ee9e8cddc10f87636767857e7e3717b7a2e97379dc2054712693e90f"}, - {file = "msgpack-1.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:071603e2f0771c45ad9bc65719291c568d4edf120b44eb36324dcb02a13bfddf"}, - {file = "msgpack-1.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0f92a83b84e7c0749e3f12821949d79485971f087604178026085f60ce109330"}, - {file = "msgpack-1.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a1964df7b81285d00a84da4e70cb1383f2e665e0f1f2a7027e683956d04b734"}, - {file = "msgpack-1.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59caf6a4ed0d164055ccff8fe31eddc0ebc07cf7326a2aaa0dbf7a4001cd823e"}, - {file = "msgpack-1.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0907e1a7119b337971a689153665764adc34e89175f9a34793307d9def08e6ca"}, - {file = "msgpack-1.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65553c9b6da8166e819a6aa90ad15288599b340f91d18f60b2061f402b9a4915"}, - {file = "msgpack-1.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7a946a8992941fea80ed4beae6bff74ffd7ee129a90b4dd5cf9c476a30e9708d"}, - {file = "msgpack-1.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4b51405e36e075193bc051315dbf29168d6141ae2500ba8cd80a522964e31434"}, - {file = "msgpack-1.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b4c01941fd2ff87c2a934ee6055bda4ed353a7846b8d4f341c428109e9fcde8c"}, - {file = "msgpack-1.1.0-cp313-cp313-win32.whl", hash = "sha256:7c9a35ce2c2573bada929e0b7b3576de647b0defbd25f5139dcdaba0ae35a4cc"}, - {file = "msgpack-1.1.0-cp313-cp313-win_amd64.whl", hash = 
"sha256:bce7d9e614a04d0883af0b3d4d501171fbfca038f12c77fa838d9f198147a23f"}, - {file = "msgpack-1.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c40ffa9a15d74e05ba1fe2681ea33b9caffd886675412612d93ab17b58ea2fec"}, - {file = "msgpack-1.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1ba6136e650898082d9d5a5217d5906d1e138024f836ff48691784bbe1adf96"}, - {file = "msgpack-1.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0856a2b7e8dcb874be44fea031d22e5b3a19121be92a1e098f46068a11b0870"}, - {file = "msgpack-1.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:471e27a5787a2e3f974ba023f9e265a8c7cfd373632247deb225617e3100a3c7"}, - {file = "msgpack-1.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:646afc8102935a388ffc3914b336d22d1c2d6209c773f3eb5dd4d6d3b6f8c1cb"}, - {file = "msgpack-1.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:13599f8829cfbe0158f6456374e9eea9f44eee08076291771d8ae93eda56607f"}, - {file = "msgpack-1.1.0-cp38-cp38-win32.whl", hash = "sha256:8a84efb768fb968381e525eeeb3d92857e4985aacc39f3c47ffd00eb4509315b"}, - {file = "msgpack-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:879a7b7b0ad82481c52d3c7eb99bf6f0645dbdec5134a4bddbd16f3506947feb"}, - {file = "msgpack-1.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:53258eeb7a80fc46f62fd59c876957a2d0e15e6449a9e71842b6d24419d88ca1"}, - {file = "msgpack-1.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7e7b853bbc44fb03fbdba34feb4bd414322180135e2cb5164f20ce1c9795ee48"}, - {file = "msgpack-1.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3e9b4936df53b970513eac1758f3882c88658a220b58dcc1e39606dccaaf01c"}, - {file = "msgpack-1.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46c34e99110762a76e3911fc923222472c9d681f1094096ac4102c18319e6468"}, - {file = "msgpack-1.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a706d1e74dd3dea05cb54580d9bd8b2880e9264856ce5068027eed09680aa74"}, - {file = "msgpack-1.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:534480ee5690ab3cbed89d4c8971a5c631b69a8c0883ecfea96c19118510c846"}, - {file = "msgpack-1.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8cf9e8c3a2153934a23ac160cc4cba0ec035f6867c8013cc6077a79823370346"}, - {file = "msgpack-1.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3180065ec2abbe13a4ad37688b61b99d7f9e012a535b930e0e683ad6bc30155b"}, - {file = "msgpack-1.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c5a91481a3cc573ac8c0d9aace09345d989dc4a0202b7fcb312c88c26d4e71a8"}, - {file = "msgpack-1.1.0-cp39-cp39-win32.whl", hash = "sha256:f80bc7d47f76089633763f952e67f8214cb7b3ee6bfa489b3cb6a84cfac114cd"}, - {file = "msgpack-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:4d1b7ff2d6146e16e8bd665ac726a89c74163ef8cd39fa8c1087d4e52d3a2325"}, - {file = "msgpack-1.1.0.tar.gz", hash = "sha256:dd432ccc2c72b914e4cb77afce64aab761c1137cc698be3984eee260bcb2896e"}, -] +[package.dependencies] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.11\""} [[package]] name = "mypy" @@ -1549,87 +1444,6 @@ files = [ {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] -[[package]] -name = "nbclient" -version = "0.10.0" -description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." 
-optional = false -python-versions = ">=3.8.0" -files = [ - {file = "nbclient-0.10.0-py3-none-any.whl", hash = "sha256:f13e3529332a1f1f81d82a53210322476a168bb7090a0289c795fe9cc11c9d3f"}, - {file = "nbclient-0.10.0.tar.gz", hash = "sha256:4b3f1b7dba531e498449c4db4f53da339c91d449dc11e9af3a43b4eb5c5abb09"}, -] - -[package.dependencies] -jupyter-client = ">=6.1.12" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" -nbformat = ">=5.1" -traitlets = ">=5.4" - -[package.extras] -dev = ["pre-commit"] -docs = ["autodoc-traits", "mock", "moto", "myst-parser", "nbclient[test]", "sphinx (>=1.7)", "sphinx-book-theme", "sphinxcontrib-spelling"] -test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>=7.0.0)", "pytest (>=7.0,<8)", "pytest-asyncio", "pytest-cov (>=4.0)", "testpath", "xmltodict"] - -[[package]] -name = "nbconvert" -version = "7.16.4" -description = "Converting Jupyter Notebooks (.ipynb files) to other formats. Output formats include asciidoc, html, latex, markdown, pdf, py, rst, script. nbconvert can be used both as a Python library (`import nbconvert`) or as a command line tool (invoked as `jupyter nbconvert ...`)." -optional = false -python-versions = ">=3.8" -files = [ - {file = "nbconvert-7.16.4-py3-none-any.whl", hash = "sha256:05873c620fe520b6322bf8a5ad562692343fe3452abda5765c7a34b7d1aa3eb3"}, - {file = "nbconvert-7.16.4.tar.gz", hash = "sha256:86ca91ba266b0a448dc96fa6c5b9d98affabde2867b363258703536807f9f7f4"}, -] - -[package.dependencies] -beautifulsoup4 = "*" -bleach = "!=5.0.0" -defusedxml = "*" -importlib-metadata = {version = ">=3.6", markers = "python_version < \"3.10\""} -jinja2 = ">=3.0" -jupyter-core = ">=4.7" -jupyterlab-pygments = "*" -markupsafe = ">=2.0" -mistune = ">=2.0.3,<4" -nbclient = ">=0.5.0" -nbformat = ">=5.7" -packaging = "*" -pandocfilters = ">=1.4.1" -pygments = ">=2.4.1" -tinycss2 = "*" -traitlets = ">=5.1" - -[package.extras] -all = ["flaky", "ipykernel", "ipython", "ipywidgets (>=7.5)", "myst-parser", "nbsphinx (>=0.2.12)", "playwright", "pydata-sphinx-theme", "pyqtwebengine (>=5.15)", "pytest (>=7)", "sphinx (==5.0.2)", "sphinxcontrib-spelling", "tornado (>=6.1)"] -docs = ["ipykernel", "ipython", "myst-parser", "nbsphinx (>=0.2.12)", "pydata-sphinx-theme", "sphinx (==5.0.2)", "sphinxcontrib-spelling"] -qtpdf = ["pyqtwebengine (>=5.15)"] -qtpng = ["pyqtwebengine (>=5.15)"] -serve = ["tornado (>=6.1)"] -test = ["flaky", "ipykernel", "ipywidgets (>=7.5)", "pytest (>=7)"] -webpdf = ["playwright"] - -[[package]] -name = "nbformat" -version = "5.10.4" -description = "The Jupyter Notebook format" -optional = false -python-versions = ">=3.8" -files = [ - {file = "nbformat-5.10.4-py3-none-any.whl", hash = "sha256:3b48d6c8fbca4b299bf3982ea7db1af21580e4fec269ad087b9e81588891200b"}, - {file = "nbformat-5.10.4.tar.gz", hash = "sha256:322168b14f937a5d11362988ecac2a4952d3d8e3a2cbeb2319584631226d5b3a"}, -] - -[package.dependencies] -fastjsonschema = ">=2.15" -jsonschema = ">=2.6" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" -traitlets = ">=5.1" - -[package.extras] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] -test = ["pep440", "pre-commit", "pytest", "testpath"] - [[package]] name = "nest-asyncio" version = "1.6.0" @@ -1642,45 +1456,50 @@ files = [ ] [[package]] -name = "notebook" -version = "7.2.2" -description = "Jupyter Notebook - A web-based notebook environment for interactive computing" -optional = false -python-versions = ">=3.8" -files = [ - {file = 
"notebook-7.2.2-py3-none-any.whl", hash = "sha256:c89264081f671bc02eec0ed470a627ed791b9156cad9285226b31611d3e9fe1c"}, - {file = "notebook-7.2.2.tar.gz", hash = "sha256:2ef07d4220421623ad3fe88118d687bc0450055570cdd160814a59cf3a1c516e"}, -] - -[package.dependencies] -jupyter-server = ">=2.4.0,<3" -jupyterlab = ">=4.2.0,<4.3" -jupyterlab-server = ">=2.27.1,<3" -notebook-shim = ">=0.2,<0.3" -tornado = ">=6.2.0" - -[package.extras] -dev = ["hatch", "pre-commit"] -docs = ["myst-parser", "nbsphinx", "pydata-sphinx-theme", "sphinx (>=1.3.6)", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] -test = ["importlib-resources (>=5.0)", "ipykernel", "jupyter-server[test] (>=2.4.0,<3)", "jupyterlab-server[test] (>=2.27.1,<3)", "nbval", "pytest (>=7.0)", "pytest-console-scripts", "pytest-timeout", "pytest-tornasync", "requests"] - -[[package]] -name = "notebook-shim" -version = "0.2.4" -description = "A shim layer for notebook traits and config" +name = "numpy" +version = "1.26.4" +description = "Fundamental package for array computing in Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" files = [ - {file = "notebook_shim-0.2.4-py3-none-any.whl", hash = "sha256:411a5be4e9dc882a074ccbcae671eda64cceb068767e9a3419096986560e1cef"}, - {file = "notebook_shim-0.2.4.tar.gz", hash = "sha256:b4b2cfa1b65d98307ca24361f5b30fe785b53c3fd07b7a47e89acb5e6ac638cb"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, + {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, + {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, + {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = 
"sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, + {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, + {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, + {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, + {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, + {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, + {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, ] -[package.dependencies] -jupyter-server = ">=1.8,<3" - -[package.extras] -test = ["pytest", "pytest-console-scripts", "pytest-jupyter", "pytest-tornasync"] - [[package]] name = "openai" version = "1.51.0" @@ -1771,37 +1590,15 @@ files = [ {file = "orjson-3.10.7.tar.gz", hash = "sha256:75ef0640403f945f3a1f9f6400686560dbfb0fb5b16589ad62cd477043c4eee3"}, ] -[[package]] -name = "overrides" -version = "7.7.0" 
-description = "A decorator to automatically detect mismatch when overriding a method." -optional = false -python-versions = ">=3.6" -files = [ - {file = "overrides-7.7.0-py3-none-any.whl", hash = "sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49"}, - {file = "overrides-7.7.0.tar.gz", hash = "sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a"}, -] - [[package]] name = "packaging" -version = "24.1" +version = "23.2" description = "Core utilities for Python packages" optional = false -python-versions = ">=3.8" -files = [ - {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, - {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, -] - -[[package]] -name = "pandocfilters" -version = "1.5.1" -description = "Utilities for writing pandoc filters in python" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.7" files = [ - {file = "pandocfilters-1.5.1-py2.py3-none-any.whl", hash = "sha256:93be382804a9cdb0a7267585f157e5d1731bbe5545a85b268d6f5fe6232de2bc"}, - {file = "pandocfilters-1.5.1.tar.gz", hash = "sha256:002b4a555ee4ebc03f8b66307e287fa492e4a77b4ea14d3f934328297bb4939e"}, + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, ] [[package]] @@ -1875,20 +1672,6 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] -[[package]] -name = "prometheus-client" -version = "0.21.0" -description = "Python client for the Prometheus monitoring system." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "prometheus_client-0.21.0-py3-none-any.whl", hash = "sha256:4fa6b4dd0ac16d58bb587c04b1caae65b8c5043e85f778f42f5f632f6af2e166"}, - {file = "prometheus_client-0.21.0.tar.gz", hash = "sha256:96c83c606b71ff2b0a433c98889d275f51ffec6c5e267de37c7a2b5c9aa9233e"}, -] - -[package.extras] -twisted = ["twisted"] - [[package]] name = "prompt-toolkit" version = "3.0.48" @@ -1903,6 +1686,113 @@ files = [ [package.dependencies] wcwidth = "*" +[[package]] +name = "propcache" +version = "0.2.0" +description = "Accelerated property cache" +optional = false +python-versions = ">=3.8" +files = [ + {file = "propcache-0.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:c5869b8fd70b81835a6f187c5fdbe67917a04d7e52b6e7cc4e5fe39d55c39d58"}, + {file = "propcache-0.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:952e0d9d07609d9c5be361f33b0d6d650cd2bae393aabb11d9b719364521984b"}, + {file = "propcache-0.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:33ac8f098df0585c0b53009f039dfd913b38c1d2edafed0cedcc0c32a05aa110"}, + {file = "propcache-0.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97e48e8875e6c13909c800fa344cd54cc4b2b0db1d5f911f840458a500fde2c2"}, + {file = "propcache-0.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:388f3217649d6d59292b722d940d4d2e1e6a7003259eb835724092a1cca0203a"}, + {file = "propcache-0.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f571aea50ba5623c308aa146eb650eebf7dbe0fd8c5d946e28343cb3b5aad577"}, + {file = "propcache-0.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3dfafb44f7bb35c0c06eda6b2ab4bfd58f02729e7c4045e179f9a861b07c9850"}, + {file = "propcache-0.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3ebe9a75be7ab0b7da2464a77bb27febcb4fab46a34f9288f39d74833db7f61"}, + {file = "propcache-0.2.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d2f0d0f976985f85dfb5f3d685697ef769faa6b71993b46b295cdbbd6be8cc37"}, + {file = "propcache-0.2.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:a3dc1a4b165283bd865e8f8cb5f0c64c05001e0718ed06250d8cac9bec115b48"}, + {file = "propcache-0.2.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:9e0f07b42d2a50c7dd2d8675d50f7343d998c64008f1da5fef888396b7f84630"}, + {file = "propcache-0.2.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e63e3e1e0271f374ed489ff5ee73d4b6e7c60710e1f76af5f0e1a6117cd26394"}, + {file = "propcache-0.2.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:56bb5c98f058a41bb58eead194b4db8c05b088c93d94d5161728515bd52b052b"}, + {file = "propcache-0.2.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7665f04d0c7f26ff8bb534e1c65068409bf4687aa2534faf7104d7182debb336"}, + {file = "propcache-0.2.0-cp310-cp310-win32.whl", hash = "sha256:7cf18abf9764746b9c8704774d8b06714bcb0a63641518a3a89c7f85cc02c2ad"}, + {file = "propcache-0.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:cfac69017ef97db2438efb854edf24f5a29fd09a536ff3a992b75990720cdc99"}, + {file = "propcache-0.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:63f13bf09cc3336eb04a837490b8f332e0db41da66995c9fd1ba04552e516354"}, + {file = "propcache-0.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:608cce1da6f2672a56b24a015b42db4ac612ee709f3d29f27a00c943d9e851de"}, + {file = "propcache-0.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:466c219deee4536fbc83c08d09115249db301550625c7fef1c5563a584c9bc87"}, + {file = "propcache-0.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc2db02409338bf36590aa985a461b2c96fce91f8e7e0f14c50c5fcc4f229016"}, + {file = "propcache-0.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a6ed8db0a556343d566a5c124ee483ae113acc9a557a807d439bcecc44e7dfbb"}, + {file = "propcache-0.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:91997d9cb4a325b60d4e3f20967f8eb08dfcb32b22554d5ef78e6fd1dda743a2"}, + {file = "propcache-0.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c7dde9e533c0a49d802b4f3f218fa9ad0a1ce21f2c2eb80d5216565202acab4"}, + {file = "propcache-0.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffcad6c564fe6b9b8916c1aefbb37a362deebf9394bd2974e9d84232e3e08504"}, + {file = "propcache-0.2.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:97a58a28bcf63284e8b4d7b460cbee1edaab24634e82059c7b8c09e65284f178"}, + {file = "propcache-0.2.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:945db8ee295d3af9dbdbb698cce9bbc5c59b5c3fe328bbc4387f59a8a35f998d"}, + {file = "propcache-0.2.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:39e104da444a34830751715f45ef9fc537475ba21b7f1f5b0f4d71a3b60d7fe2"}, + {file = "propcache-0.2.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:c5ecca8f9bab618340c8e848d340baf68bcd8ad90a8ecd7a4524a81c1764b3db"}, + {file = "propcache-0.2.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:c436130cc779806bdf5d5fae0d848713105472b8566b75ff70048c47d3961c5b"}, + {file = "propcache-0.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:191db28dc6dcd29d1a3e063c3be0b40688ed76434622c53a284e5427565bbd9b"}, + {file = "propcache-0.2.0-cp311-cp311-win32.whl", hash = "sha256:5f2564ec89058ee7c7989a7b719115bdfe2a2fb8e7a4543b8d1c0cc4cf6478c1"}, + {file = "propcache-0.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:6e2e54267980349b723cff366d1e29b138b9a60fa376664a157a342689553f71"}, + {file = "propcache-0.2.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:2ee7606193fb267be4b2e3b32714f2d58cad27217638db98a60f9efb5efeccc2"}, + {file = "propcache-0.2.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:91ee8fc02ca52e24bcb77b234f22afc03288e1dafbb1f88fe24db308910c4ac7"}, + {file = "propcache-0.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2e900bad2a8456d00a113cad8c13343f3b1f327534e3589acc2219729237a2e8"}, + {file = "propcache-0.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f52a68c21363c45297aca15561812d542f8fc683c85201df0bebe209e349f793"}, + {file = "propcache-0.2.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e41d67757ff4fbc8ef2af99b338bfb955010444b92929e9e55a6d4dcc3c4f09"}, + {file = "propcache-0.2.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a64e32f8bd94c105cc27f42d3b658902b5bcc947ece3c8fe7bc1b05982f60e89"}, + {file = "propcache-0.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55346705687dbd7ef0d77883ab4f6fabc48232f587925bdaf95219bae072491e"}, + {file = "propcache-0.2.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00181262b17e517df2cd85656fcd6b4e70946fe62cd625b9d74ac9977b64d8d9"}, + {file = "propcache-0.2.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:6994984550eaf25dd7fc7bd1b700ff45c894149341725bb4edc67f0ffa94efa4"}, + {file = "propcache-0.2.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:56295eb1e5f3aecd516d91b00cfd8bf3a13991de5a479df9e27dd569ea23959c"}, + {file = "propcache-0.2.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:439e76255daa0f8151d3cb325f6dd4a3e93043e6403e6491813bcaaaa8733887"}, + {file = "propcache-0.2.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:f6475a1b2ecb310c98c28d271a30df74f9dd436ee46d09236a6b750a7599ce57"}, + {file = "propcache-0.2.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:3444cdba6628accf384e349014084b1cacd866fbb88433cd9d279d90a54e0b23"}, + {file = "propcache-0.2.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4a9d9b4d0a9b38d1c391bb4ad24aa65f306c6f01b512e10a8a34a2dc5675d348"}, + {file = "propcache-0.2.0-cp312-cp312-win32.whl", hash = "sha256:69d3a98eebae99a420d4b28756c8ce6ea5a29291baf2dc9ff9414b42676f61d5"}, + {file = "propcache-0.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:ad9c9b99b05f163109466638bd30ada1722abb01bbb85c739c50b6dc11f92dc3"}, + {file = "propcache-0.2.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ecddc221a077a8132cf7c747d5352a15ed763b674c0448d811f408bf803d9ad7"}, + {file = "propcache-0.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0e53cb83fdd61cbd67202735e6a6687a7b491c8742dfc39c9e01e80354956763"}, + {file = "propcache-0.2.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92fe151145a990c22cbccf9ae15cae8ae9eddabfc949a219c9f667877e40853d"}, + {file = "propcache-0.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6a21ef516d36909931a2967621eecb256018aeb11fc48656e3257e73e2e247a"}, + {file = "propcache-0.2.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f88a4095e913f98988f5b338c1d4d5d07dbb0b6bad19892fd447484e483ba6b"}, + {file = "propcache-0.2.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a5b3bb545ead161be780ee85a2b54fdf7092815995661947812dde94a40f6fb"}, + {file = "propcache-0.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67aeb72e0f482709991aa91345a831d0b707d16b0257e8ef88a2ad246a7280bf"}, + {file = "propcache-0.2.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c997f8c44ec9b9b0bcbf2d422cc00a1d9b9c681f56efa6ca149a941e5560da2"}, + {file = "propcache-0.2.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2a66df3d4992bc1d725b9aa803e8c5a66c010c65c741ad901e260ece77f58d2f"}, + {file = "propcache-0.2.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:3ebbcf2a07621f29638799828b8d8668c421bfb94c6cb04269130d8de4fb7136"}, + {file = "propcache-0.2.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:1235c01ddaa80da8235741e80815ce381c5267f96cc49b1477fdcf8c047ef325"}, + {file = "propcache-0.2.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3947483a381259c06921612550867b37d22e1df6d6d7e8361264b6d037595f44"}, + {file = "propcache-0.2.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:d5bed7f9805cc29c780f3aee05de3262ee7ce1f47083cfe9f77471e9d6777e83"}, + {file = "propcache-0.2.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e4a91d44379f45f5e540971d41e4626dacd7f01004826a18cb048e7da7e96544"}, + {file = "propcache-0.2.0-cp313-cp313-win32.whl", hash = "sha256:f902804113e032e2cdf8c71015651c97af6418363bea8d78dc0911d56c335032"}, + {file = "propcache-0.2.0-cp313-cp313-win_amd64.whl", hash = 
"sha256:8f188cfcc64fb1266f4684206c9de0e80f54622c3f22a910cbd200478aeae61e"}, + {file = "propcache-0.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:53d1bd3f979ed529f0805dd35ddaca330f80a9a6d90bc0121d2ff398f8ed8861"}, + {file = "propcache-0.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:83928404adf8fb3d26793665633ea79b7361efa0287dfbd372a7e74311d51ee6"}, + {file = "propcache-0.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:77a86c261679ea5f3896ec060be9dc8e365788248cc1e049632a1be682442063"}, + {file = "propcache-0.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:218db2a3c297a3768c11a34812e63b3ac1c3234c3a086def9c0fee50d35add1f"}, + {file = "propcache-0.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7735e82e3498c27bcb2d17cb65d62c14f1100b71723b68362872bca7d0913d90"}, + {file = "propcache-0.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:20a617c776f520c3875cf4511e0d1db847a076d720714ae35ffe0df3e440be68"}, + {file = "propcache-0.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67b69535c870670c9f9b14a75d28baa32221d06f6b6fa6f77a0a13c5a7b0a5b9"}, + {file = "propcache-0.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4569158070180c3855e9c0791c56be3ceeb192defa2cdf6a3f39e54319e56b89"}, + {file = "propcache-0.2.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:db47514ffdbd91ccdc7e6f8407aac4ee94cc871b15b577c1c324236b013ddd04"}, + {file = "propcache-0.2.0-cp38-cp38-musllinux_1_2_armv7l.whl", hash = "sha256:2a60ad3e2553a74168d275a0ef35e8c0a965448ffbc3b300ab3a5bb9956c2162"}, + {file = "propcache-0.2.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:662dd62358bdeaca0aee5761de8727cfd6861432e3bb828dc2a693aa0471a563"}, + {file = "propcache-0.2.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:25a1f88b471b3bc911d18b935ecb7115dff3a192b6fef46f0bfaf71ff4f12418"}, + {file = "propcache-0.2.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:f60f0ac7005b9f5a6091009b09a419ace1610e163fa5deaba5ce3484341840e7"}, + {file = "propcache-0.2.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:74acd6e291f885678631b7ebc85d2d4aec458dd849b8c841b57ef04047833bed"}, + {file = "propcache-0.2.0-cp38-cp38-win32.whl", hash = "sha256:d9b6ddac6408194e934002a69bcaadbc88c10b5f38fb9307779d1c629181815d"}, + {file = "propcache-0.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:676135dcf3262c9c5081cc8f19ad55c8a64e3f7282a21266d05544450bffc3a5"}, + {file = "propcache-0.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:25c8d773a62ce0451b020c7b29a35cfbc05de8b291163a7a0f3b7904f27253e6"}, + {file = "propcache-0.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:375a12d7556d462dc64d70475a9ee5982465fbb3d2b364f16b86ba9135793638"}, + {file = "propcache-0.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1ec43d76b9677637a89d6ab86e1fef70d739217fefa208c65352ecf0282be957"}, + {file = "propcache-0.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f45eec587dafd4b2d41ac189c2156461ebd0c1082d2fe7013571598abb8505d1"}, + {file = "propcache-0.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc092ba439d91df90aea38168e11f75c655880c12782facf5cf9c00f3d42b562"}, + {file = "propcache-0.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fa1076244f54bb76e65e22cb6910365779d5c3d71d1f18b275f1dfc7b0d71b4d"}, + {file = 
"propcache-0.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:682a7c79a2fbf40f5dbb1eb6bfe2cd865376deeac65acf9beb607505dced9e12"}, + {file = "propcache-0.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8e40876731f99b6f3c897b66b803c9e1c07a989b366c6b5b475fafd1f7ba3fb8"}, + {file = "propcache-0.2.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:363ea8cd3c5cb6679f1c2f5f1f9669587361c062e4899fce56758efa928728f8"}, + {file = "propcache-0.2.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:140fbf08ab3588b3468932974a9331aff43c0ab8a2ec2c608b6d7d1756dbb6cb"}, + {file = "propcache-0.2.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e70fac33e8b4ac63dfc4c956fd7d85a0b1139adcfc0d964ce288b7c527537fea"}, + {file = "propcache-0.2.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:b33d7a286c0dc1a15f5fc864cc48ae92a846df287ceac2dd499926c3801054a6"}, + {file = "propcache-0.2.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:f6d5749fdd33d90e34c2efb174c7e236829147a2713334d708746e94c4bde40d"}, + {file = "propcache-0.2.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:22aa8f2272d81d9317ff5756bb108021a056805ce63dd3630e27d042c8092798"}, + {file = "propcache-0.2.0-cp39-cp39-win32.whl", hash = "sha256:73e4b40ea0eda421b115248d7e79b59214411109a5bc47d0d48e4c73e3b8fcf9"}, + {file = "propcache-0.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:9517d5e9e0731957468c29dbfd0f976736a0e55afaea843726e887f36fe017df"}, + {file = "propcache-0.2.0-py3-none-any.whl", hash = "sha256:2ccc28197af5313706511fab3a8b66dcd6da067a1331372c82ea1cb74285e036"}, + {file = "propcache-0.2.0.tar.gz", hash = "sha256:df81779732feb9d01e5d513fad0122efb3d53bbc75f61b2a4f29a020bc985e70"}, +] + [[package]] name = "psutil" version = "6.0.0" @@ -2160,17 +2050,6 @@ files = [ [package.dependencies] six = ">=1.5" -[[package]] -name = "python-json-logger" -version = "2.0.7" -description = "A python library adding a json log formatter" -optional = false -python-versions = ">=3.6" -files = [ - {file = "python-json-logger-2.0.7.tar.gz", hash = "sha256:23e7ec02d34237c5aa1e29a070193a4ea87583bb4e7f8fd06d3de8264c4b2e1c"}, - {file = "python_json_logger-2.0.7-py3-none-any.whl", hash = "sha256:f380b826a991ebbe3de4d897aeec42760035ac760345e57b812938dc8b35e2bd"}, -] - [[package]] name = "pywin32" version = "306" @@ -2194,21 +2073,6 @@ files = [ {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, ] -[[package]] -name = "pywinpty" -version = "2.0.13" -description = "Pseudo terminal support for Windows from Python." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "pywinpty-2.0.13-cp310-none-win_amd64.whl", hash = "sha256:697bff211fb5a6508fee2dc6ff174ce03f34a9a233df9d8b5fe9c8ce4d5eaf56"}, - {file = "pywinpty-2.0.13-cp311-none-win_amd64.whl", hash = "sha256:b96fb14698db1284db84ca38c79f15b4cfdc3172065b5137383910567591fa99"}, - {file = "pywinpty-2.0.13-cp312-none-win_amd64.whl", hash = "sha256:2fd876b82ca750bb1333236ce98488c1be96b08f4f7647cfdf4129dfad83c2d4"}, - {file = "pywinpty-2.0.13-cp38-none-win_amd64.whl", hash = "sha256:61d420c2116c0212808d31625611b51caf621fe67f8a6377e2e8b617ea1c1f7d"}, - {file = "pywinpty-2.0.13-cp39-none-win_amd64.whl", hash = "sha256:71cb613a9ee24174730ac7ae439fd179ca34ccb8c5349e8d7b72ab5dea2c6f4b"}, - {file = "pywinpty-2.0.13.tar.gz", hash = "sha256:c34e32351a3313ddd0d7da23d27f835c860d32fe4ac814d372a3ea9594f41dde"}, -] - [[package]] name = "pyyaml" version = "6.0.2" @@ -2392,21 +2256,6 @@ files = [ [package.dependencies] cffi = {version = "*", markers = "implementation_name == \"pypy\""} -[[package]] -name = "referencing" -version = "0.35.1" -description = "JSON Referencing + Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "referencing-0.35.1-py3-none-any.whl", hash = "sha256:eda6d3234d62814d1c64e305c1331c9a3a6132da475ab6382eaa997b21ee75de"}, - {file = "referencing-0.35.1.tar.gz", hash = "sha256:25b42124a6c8b632a425174f24087783efb348a6f1e0008e63cd4466fedf703c"}, -] - -[package.dependencies] -attrs = ">=22.2.0" -rpds-py = ">=0.7.0" - [[package]] name = "regex" version = "2024.9.11" @@ -2545,143 +2394,6 @@ files = [ [package.dependencies] requests = ">=2.0.1,<3.0.0" -[[package]] -name = "rfc3339-validator" -version = "0.1.4" -description = "A pure python RFC3339 validator" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa"}, - {file = "rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b"}, -] - -[package.dependencies] -six = "*" - -[[package]] -name = "rfc3986-validator" -version = "0.1.1" -description = "Pure python rfc3986 validator" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "rfc3986_validator-0.1.1-py2.py3-none-any.whl", hash = "sha256:2f235c432ef459970b4306369336b9d5dbdda31b510ca1e327636e01f528bfa9"}, - {file = "rfc3986_validator-0.1.1.tar.gz", hash = "sha256:3d44bde7921b3b9ec3ae4e3adca370438eccebc676456449b145d533b240d055"}, -] - -[[package]] -name = "rpds-py" -version = "0.20.0" -description = "Python bindings to Rust's persistent data structures (rpds)" -optional = false -python-versions = ">=3.8" -files = [ - {file = "rpds_py-0.20.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3ad0fda1635f8439cde85c700f964b23ed5fc2d28016b32b9ee5fe30da5c84e2"}, - {file = "rpds_py-0.20.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9bb4a0d90fdb03437c109a17eade42dfbf6190408f29b2744114d11586611d6f"}, - {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6377e647bbfd0a0b159fe557f2c6c602c159fc752fa316572f012fc0bf67150"}, - {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb851b7df9dda52dc1415ebee12362047ce771fc36914586b2e9fcbd7d293b3e"}, - {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", 
hash = "sha256:1e0f80b739e5a8f54837be5d5c924483996b603d5502bfff79bf33da06164ee2"}, - {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a8c94dad2e45324fc74dce25e1645d4d14df9a4e54a30fa0ae8bad9a63928e3"}, - {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8e604fe73ba048c06085beaf51147eaec7df856824bfe7b98657cf436623daf"}, - {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:df3de6b7726b52966edf29663e57306b23ef775faf0ac01a3e9f4012a24a4140"}, - {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf258ede5bc22a45c8e726b29835b9303c285ab46fc7c3a4cc770736b5304c9f"}, - {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:55fea87029cded5df854ca7e192ec7bdb7ecd1d9a3f63d5c4eb09148acf4a7ce"}, - {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ae94bd0b2f02c28e199e9bc51485d0c5601f58780636185660f86bf80c89af94"}, - {file = "rpds_py-0.20.0-cp310-none-win32.whl", hash = "sha256:28527c685f237c05445efec62426d285e47a58fb05ba0090a4340b73ecda6dee"}, - {file = "rpds_py-0.20.0-cp310-none-win_amd64.whl", hash = "sha256:238a2d5b1cad28cdc6ed15faf93a998336eb041c4e440dd7f902528b8891b399"}, - {file = "rpds_py-0.20.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac2f4f7a98934c2ed6505aead07b979e6f999389f16b714448fb39bbaa86a489"}, - {file = "rpds_py-0.20.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:220002c1b846db9afd83371d08d239fdc865e8f8c5795bbaec20916a76db3318"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d7919548df3f25374a1f5d01fbcd38dacab338ef5f33e044744b5c36729c8db"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:758406267907b3781beee0f0edfe4a179fbd97c0be2e9b1154d7f0a1279cf8e5"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3d61339e9f84a3f0767b1995adfb171a0d00a1185192718a17af6e124728e0f5"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1259c7b3705ac0a0bd38197565a5d603218591d3f6cee6e614e380b6ba61c6f6"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c1dc0f53856b9cc9a0ccca0a7cc61d3d20a7088201c0937f3f4048c1718a209"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7e60cb630f674a31f0368ed32b2a6b4331b8350d67de53c0359992444b116dd3"}, - {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:dbe982f38565bb50cb7fb061ebf762c2f254ca3d8c20d4006878766e84266272"}, - {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:514b3293b64187172bc77c8fb0cdae26981618021053b30d8371c3a902d4d5ad"}, - {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d0a26ffe9d4dd35e4dfdd1e71f46401cff0181c75ac174711ccff0459135fa58"}, - {file = "rpds_py-0.20.0-cp311-none-win32.whl", hash = "sha256:89c19a494bf3ad08c1da49445cc5d13d8fefc265f48ee7e7556839acdacf69d0"}, - {file = "rpds_py-0.20.0-cp311-none-win_amd64.whl", hash = "sha256:c638144ce971df84650d3ed0096e2ae7af8e62ecbbb7b201c8935c370df00a2c"}, - {file = "rpds_py-0.20.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a84ab91cbe7aab97f7446652d0ed37d35b68a465aeef8fc41932a9d7eee2c1a6"}, - {file = "rpds_py-0.20.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:56e27147a5a4c2c21633ff8475d185734c0e4befd1c989b5b95a5d0db699b21b"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2580b0c34583b85efec8c5c5ec9edf2dfe817330cc882ee972ae650e7b5ef739"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b80d4a7900cf6b66bb9cee5c352b2d708e29e5a37fe9bf784fa97fc11504bf6c"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50eccbf054e62a7b2209b28dc7a22d6254860209d6753e6b78cfaeb0075d7bee"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:49a8063ea4296b3a7e81a5dfb8f7b2d73f0b1c20c2af401fb0cdf22e14711a96"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea438162a9fcbee3ecf36c23e6c68237479f89f962f82dae83dc15feeceb37e4"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:18d7585c463087bddcfa74c2ba267339f14f2515158ac4db30b1f9cbdb62c8ef"}, - {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d4c7d1a051eeb39f5c9547e82ea27cbcc28338482242e3e0b7768033cb083821"}, - {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4df1e3b3bec320790f699890d41c59d250f6beda159ea3c44c3f5bac1976940"}, - {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2cf126d33a91ee6eedc7f3197b53e87a2acdac63602c0f03a02dd69e4b138174"}, - {file = "rpds_py-0.20.0-cp312-none-win32.whl", hash = "sha256:8bc7690f7caee50b04a79bf017a8d020c1f48c2a1077ffe172abec59870f1139"}, - {file = "rpds_py-0.20.0-cp312-none-win_amd64.whl", hash = "sha256:0e13e6952ef264c40587d510ad676a988df19adea20444c2b295e536457bc585"}, - {file = "rpds_py-0.20.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:aa9a0521aeca7d4941499a73ad7d4f8ffa3d1affc50b9ea11d992cd7eff18a29"}, - {file = "rpds_py-0.20.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a1f1d51eccb7e6c32ae89243cb352389228ea62f89cd80823ea7dd1b98e0b91"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a86a9b96070674fc88b6f9f71a97d2c1d3e5165574615d1f9168ecba4cecb24"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6c8ef2ebf76df43f5750b46851ed1cdf8f109d7787ca40035fe19fbdc1acc5a7"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b74b25f024b421d5859d156750ea9a65651793d51b76a2e9238c05c9d5f203a9"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57eb94a8c16ab08fef6404301c38318e2c5a32216bf5de453e2714c964c125c8"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1940dae14e715e2e02dfd5b0f64a52e8374a517a1e531ad9412319dc3ac7879"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d20277fd62e1b992a50c43f13fbe13277a31f8c9f70d59759c88f644d66c619f"}, - {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:06db23d43f26478303e954c34c75182356ca9aa7797d22c5345b16871ab9c45c"}, - {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b2a5db5397d82fa847e4c624b0c98fe59d2d9b7cf0ce6de09e4d2e80f8f5b3f2"}, - {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5a35df9f5548fd79cb2f52d27182108c3e6641a4feb0f39067911bf2adaa3e57"}, - 
{file = "rpds_py-0.20.0-cp313-none-win32.whl", hash = "sha256:fd2d84f40633bc475ef2d5490b9c19543fbf18596dcb1b291e3a12ea5d722f7a"}, - {file = "rpds_py-0.20.0-cp313-none-win_amd64.whl", hash = "sha256:9bc2d153989e3216b0559251b0c260cfd168ec78b1fac33dd485750a228db5a2"}, - {file = "rpds_py-0.20.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:f2fbf7db2012d4876fb0d66b5b9ba6591197b0f165db8d99371d976546472a24"}, - {file = "rpds_py-0.20.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1e5f3cd7397c8f86c8cc72d5a791071431c108edd79872cdd96e00abd8497d29"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce9845054c13696f7af7f2b353e6b4f676dab1b4b215d7fe5e05c6f8bb06f965"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c3e130fd0ec56cb76eb49ef52faead8ff09d13f4527e9b0c400307ff72b408e1"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b16aa0107ecb512b568244ef461f27697164d9a68d8b35090e9b0c1c8b27752"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa7f429242aae2947246587d2964fad750b79e8c233a2367f71b554e9447949c"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af0fc424a5842a11e28956e69395fbbeab2c97c42253169d87e90aac2886d751"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b8c00a3b1e70c1d3891f0db1b05292747f0dbcfb49c43f9244d04c70fbc40eb8"}, - {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:40ce74fc86ee4645d0a225498d091d8bc61f39b709ebef8204cb8b5a464d3c0e"}, - {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:4fe84294c7019456e56d93e8ababdad5a329cd25975be749c3f5f558abb48253"}, - {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:338ca4539aad4ce70a656e5187a3a31c5204f261aef9f6ab50e50bcdffaf050a"}, - {file = "rpds_py-0.20.0-cp38-none-win32.whl", hash = "sha256:54b43a2b07db18314669092bb2de584524d1ef414588780261e31e85846c26a5"}, - {file = "rpds_py-0.20.0-cp38-none-win_amd64.whl", hash = "sha256:a1862d2d7ce1674cffa6d186d53ca95c6e17ed2b06b3f4c476173565c862d232"}, - {file = "rpds_py-0.20.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:3fde368e9140312b6e8b6c09fb9f8c8c2f00999d1823403ae90cc00480221b22"}, - {file = "rpds_py-0.20.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9824fb430c9cf9af743cf7aaf6707bf14323fb51ee74425c380f4c846ea70789"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11ef6ce74616342888b69878d45e9f779b95d4bd48b382a229fe624a409b72c5"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c52d3f2f82b763a24ef52f5d24358553e8403ce05f893b5347098014f2d9eff2"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d35cef91e59ebbeaa45214861874bc6f19eb35de96db73e467a8358d701a96c"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d72278a30111e5b5525c1dd96120d9e958464316f55adb030433ea905866f4de"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4c29cbbba378759ac5786730d1c3cb4ec6f8ababf5c42a9ce303dc4b3d08cda"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6632f2d04f15d1bd6fe0eedd3b86d9061b836ddca4c03d5cf5c7e9e6b7c14580"}, - 
{file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d0b67d87bb45ed1cd020e8fbf2307d449b68abc45402fe1a4ac9e46c3c8b192b"}, - {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ec31a99ca63bf3cd7f1a5ac9fe95c5e2d060d3c768a09bc1d16e235840861420"}, - {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:22e6c9976e38f4d8c4a63bd8a8edac5307dffd3ee7e6026d97f3cc3a2dc02a0b"}, - {file = "rpds_py-0.20.0-cp39-none-win32.whl", hash = "sha256:569b3ea770c2717b730b61998b6c54996adee3cef69fc28d444f3e7920313cf7"}, - {file = "rpds_py-0.20.0-cp39-none-win_amd64.whl", hash = "sha256:e6900ecdd50ce0facf703f7a00df12374b74bbc8ad9fe0f6559947fb20f82364"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:617c7357272c67696fd052811e352ac54ed1d9b49ab370261a80d3b6ce385045"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9426133526f69fcaba6e42146b4e12d6bc6c839b8b555097020e2b78ce908dcc"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:deb62214c42a261cb3eb04d474f7155279c1a8a8c30ac89b7dcb1721d92c3c02"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fcaeb7b57f1a1e071ebd748984359fef83ecb026325b9d4ca847c95bc7311c92"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d454b8749b4bd70dd0a79f428731ee263fa6995f83ccb8bada706e8d1d3ff89d"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d807dc2051abe041b6649681dce568f8e10668e3c1c6543ebae58f2d7e617855"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3c20f0ddeb6e29126d45f89206b8291352b8c5b44384e78a6499d68b52ae511"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b7f19250ceef892adf27f0399b9e5afad019288e9be756d6919cb58892129f51"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4f1ed4749a08379555cebf4650453f14452eaa9c43d0a95c49db50c18b7da075"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:dcedf0b42bcb4cfff4101d7771a10532415a6106062f005ab97d1d0ab5681c60"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:39ed0d010457a78f54090fafb5d108501b5aa5604cc22408fc1c0c77eac14344"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:bb273176be34a746bdac0b0d7e4e2c467323d13640b736c4c477881a3220a989"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f918a1a130a6dfe1d7fe0f105064141342e7dd1611f2e6a21cd2f5c8cb1cfb3e"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:f60012a73aa396be721558caa3a6fd49b3dd0033d1675c6d59c4502e870fcf0c"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d2b1ad682a3dfda2a4e8ad8572f3100f95fad98cb99faf37ff0ddfe9cbf9d03"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:614fdafe9f5f19c63ea02817fa4861c606a59a604a77c8cdef5aa01d28b97921"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fa518bcd7600c584bf42e6617ee8132869e877db2f76bcdc281ec6a4113a53ab"}, - {file = 
"rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0475242f447cc6cb8a9dd486d68b2ef7fbee84427124c232bff5f63b1fe11e5"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f90a4cd061914a60bd51c68bcb4357086991bd0bb93d8aa66a6da7701370708f"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:def7400461c3a3f26e49078302e1c1b38f6752342c77e3cf72ce91ca69fb1bc1"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:65794e4048ee837494aea3c21a28ad5fc080994dfba5b036cf84de37f7ad5074"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:faefcc78f53a88f3076b7f8be0a8f8d35133a3ecf7f3770895c25f8813460f08"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:5b4f105deeffa28bbcdff6c49b34e74903139afa690e35d2d9e3c2c2fba18cec"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fdfc3a892927458d98f3d55428ae46b921d1f7543b89382fdb483f5640daaec8"}, - {file = "rpds_py-0.20.0.tar.gz", hash = "sha256:d72a210824facfdaf8768cf2d7ca25a042c30320b3020de2fa04640920d4e121"}, -] - [[package]] name = "ruff" version = "0.4.10" @@ -2708,42 +2420,6 @@ files = [ {file = "ruff-0.4.10.tar.gz", hash = "sha256:3aa4f2bc388a30d346c56524f7cacca85945ba124945fe489952aadb6b5cd804"}, ] -[[package]] -name = "send2trash" -version = "1.8.3" -description = "Send file to trash natively under Mac OS X, Windows and Linux" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" -files = [ - {file = "Send2Trash-1.8.3-py3-none-any.whl", hash = "sha256:0c31227e0bd08961c7665474a3d1ef7193929fedda4233843689baa056be46c9"}, - {file = "Send2Trash-1.8.3.tar.gz", hash = "sha256:b18e7a3966d99871aefeb00cfbcfdced55ce4871194810fc71f4aa484b953abf"}, -] - -[package.extras] -nativelib = ["pyobjc-framework-Cocoa", "pywin32"] -objc = ["pyobjc-framework-Cocoa"] -win32 = ["pywin32"] - -[[package]] -name = "setuptools" -version = "75.1.0" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "setuptools-75.1.0-py3-none-any.whl", hash = "sha256:35ab7fd3bcd95e6b7fd704e4a1539513edad446c097797f2985e0e4b960772f2"}, - {file = "setuptools-75.1.0.tar.gz", hash = "sha256:d59a21b17a275fb872a9c3dae73963160ae079f1049ed956880cd7c09b120538"}, -] - -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"] -core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.collections", "jaraco.functools", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -enabler = ["pytest-enabler (>=2.2)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home 
(>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] -type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.11.*)", "pytest-mypy"] - [[package]] name = "six" version = "1.16.0" @@ -2767,16 +2443,100 @@ files = [ ] [[package]] -name = "soupsieve" -version = "2.6" -description = "A modern CSS selector implementation for Beautiful Soup." +name = "sqlalchemy" +version = "2.0.36" +description = "Database Abstraction Library" optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ - {file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"}, - {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:59b8f3adb3971929a3e660337f5dacc5942c2cdb760afcabb2614ffbda9f9f72"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:37350015056a553e442ff672c2d20e6f4b6d0b2495691fa239d8aa18bb3bc908"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8318f4776c85abc3f40ab185e388bee7a6ea99e7fa3a30686580b209eaa35c08"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c245b1fbade9c35e5bd3b64270ab49ce990369018289ecfde3f9c318411aaa07"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:69f93723edbca7342624d09f6704e7126b152eaed3cdbb634cb657a54332a3c5"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f9511d8dd4a6e9271d07d150fb2f81874a3c8c95e11ff9af3a2dfc35fe42ee44"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-win32.whl", hash = "sha256:c3f3631693003d8e585d4200730616b78fafd5a01ef8b698f6967da5c605b3fa"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-win_amd64.whl", hash = "sha256:a86bfab2ef46d63300c0f06936bd6e6c0105faa11d509083ba8f2f9d237fb5b5"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fd3a55deef00f689ce931d4d1b23fa9f04c880a48ee97af488fd215cf24e2a6c"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4f5e9cd989b45b73bd359f693b935364f7e1f79486e29015813c338450aa5a71"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ddd9db6e59c44875211bc4c7953a9f6638b937b0a88ae6d09eb46cced54eff"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2519f3a5d0517fc159afab1015e54bb81b4406c278749779be57a569d8d1bb0d"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59b1ee96617135f6e1d6f275bbe988f419c5178016f3d41d3c0abb0c819f75bb"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:39769a115f730d683b0eb7b694db9789267bcd027326cccc3125e862eb03bfd8"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-win32.whl", hash = "sha256:66bffbad8d6271bb1cc2f9a4ea4f86f80fe5e2e3e501a5ae2a3dc6a76e604e6f"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-win_amd64.whl", hash = "sha256:23623166bfefe1487d81b698c423f8678e80df8b54614c2bf4b4cfcd7c711959"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7b64e6ec3f02c35647be6b4851008b26cff592a95ecb13b6788a54ef80bbdd4"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:46331b00096a6db1fdc052d55b101dbbfc99155a548e20a0e4a8e5e4d1362855"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdf3386a801ea5aba17c6410dd1dc8d39cf454ca2565541b5ac42a84e1e28f53"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac9dfa18ff2a67b09b372d5db8743c27966abf0e5344c555d86cc7199f7ad83a"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:90812a8933df713fdf748b355527e3af257a11e415b613dd794512461eb8a686"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1bc330d9d29c7f06f003ab10e1eaced295e87940405afe1b110f2eb93a233588"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-win32.whl", hash = "sha256:79d2e78abc26d871875b419e1fd3c0bca31a1cb0043277d0d850014599626c2e"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-win_amd64.whl", hash = "sha256:b544ad1935a8541d177cb402948b94e871067656b3a0b9e91dbec136b06a2ff5"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b5cc79df7f4bc3d11e4b542596c03826063092611e481fcf1c9dfee3c94355ef"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3c01117dd36800f2ecaa238c65365b7b16497adc1522bf84906e5710ee9ba0e8"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9bc633f4ee4b4c46e7adcb3a9b5ec083bf1d9a97c1d3854b92749d935de40b9b"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e46ed38affdfc95d2c958de328d037d87801cfcbea6d421000859e9789e61c2"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b2985c0b06e989c043f1dc09d4fe89e1616aadd35392aea2844f0458a989eacf"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a121d62ebe7d26fec9155f83f8be5189ef1405f5973ea4874a26fab9f1e262c"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-win32.whl", hash = "sha256:0572f4bd6f94752167adfd7c1bed84f4b240ee6203a95e05d1e208d488d0d436"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-win_amd64.whl", hash = "sha256:8c78ac40bde930c60e0f78b3cd184c580f89456dd87fc08f9e3ee3ce8765ce88"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:be9812b766cad94a25bc63bec11f88c4ad3629a0cec1cd5d4ba48dc23860486b"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50aae840ebbd6cdd41af1c14590e5741665e5272d2fee999306673a1bb1fdb4d"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4557e1f11c5f653ebfdd924f3f9d5ebfc718283b0b9beebaa5dd6b77ec290971"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:07b441f7d03b9a66299ce7ccf3ef2900abc81c0db434f42a5694a37bd73870f2"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:28120ef39c92c2dd60f2721af9328479516844c6b550b077ca450c7d7dc68575"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-win32.whl", hash = "sha256:b81ee3d84803fd42d0b154cb6892ae57ea6b7c55d8359a02379965706c7efe6c"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-win_amd64.whl", hash = "sha256:f942a799516184c855e1a32fbc7b29d7e571b52612647866d4ec1c3242578fcb"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3d6718667da04294d7df1670d70eeddd414f313738d20a6f1d1f379e3139a545"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:72c28b84b174ce8af8504ca28ae9347d317f9dba3999e5981a3cd441f3712e24"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b11d0cfdd2b095e7b0686cf5fabeb9c67fae5b06d265d8180715b8cfa86522e3"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e32092c47011d113dc01ab3e1d3ce9f006a47223b18422c5c0d150af13a00687"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:6a440293d802d3011028e14e4226da1434b373cbaf4a4bbb63f845761a708346"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c54a1e53a0c308a8e8a7dffb59097bff7facda27c70c286f005327f21b2bd6b1"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-win32.whl", hash = "sha256:1e0d612a17581b6616ff03c8e3d5eff7452f34655c901f75d62bd86449d9750e"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-win_amd64.whl", hash = "sha256:8958b10490125124463095bbdadda5aa22ec799f91958e410438ad6c97a7b793"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dc022184d3e5cacc9579e41805a681187650e170eb2fd70e28b86192a479dcaa"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b817d41d692bf286abc181f8af476c4fbef3fd05e798777492618378448ee689"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4e46a888b54be23d03a89be510f24a7652fe6ff660787b96cd0e57a4ebcb46d"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4ae3005ed83f5967f961fd091f2f8c5329161f69ce8480aa8168b2d7fe37f06"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:03e08af7a5f9386a43919eda9de33ffda16b44eb11f3b313e6822243770e9763"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3dbb986bad3ed5ceaf090200eba750b5245150bd97d3e67343a3cfed06feecf7"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-win32.whl", hash = "sha256:9fe53b404f24789b5ea9003fc25b9a3988feddebd7e7b369c8fac27ad6f52f28"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-win_amd64.whl", hash = "sha256:af148a33ff0349f53512a049c6406923e4e02bf2f26c5fb285f143faf4f0e46a"}, + {file = "SQLAlchemy-2.0.36-py3-none-any.whl", hash = "sha256:fddbe92b4760c6f5d48162aef14824add991aeda8ddadb3c31d56eb15ca69f8e"}, + {file = "sqlalchemy-2.0.36.tar.gz", hash = "sha256:7f2767680b6d2398aea7082e45a774b2b0767b5c8d8ffb9c8b683088ea9b29c5"}, ] +[package.dependencies] +greenlet = {version = "!=0.4.17", markers = "python_version < \"3.13\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} +typing-extensions = ">=4.6.0" + +[package.extras] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] +aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet (!=0.4.17)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5,!=1.1.10)"] +mssql = ["pyodbc"] +mssql-pymssql = ["pymssql"] +mssql-pyodbc = ["pyodbc"] +mypy = ["mypy (>=0.910)"] +mysql = ["mysqlclient (>=1.4.0)"] +mysql-connector = ["mysql-connector-python"] +oracle = ["cx_oracle (>=8)"] +oracle-oracledb = ["oracledb (>=1.0.1)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] 
+postgresql-pg8000 = ["pg8000 (>=1.29.1)"] +postgresql-psycopg = ["psycopg (>=3.0.7)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] +postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] +pymysql = ["pymysql"] +sqlcipher = ["sqlcipher3_binary"] + [[package]] name = "stack-data" version = "0.6.3" @@ -2811,70 +2571,49 @@ files = [ doc = ["reno", "sphinx"] test = ["pytest", "tornado (>=4.5)", "typeguard"] -[[package]] -name = "terminado" -version = "0.18.1" -description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." -optional = false -python-versions = ">=3.8" -files = [ - {file = "terminado-0.18.1-py3-none-any.whl", hash = "sha256:a4468e1b37bb318f8a86514f65814e1afc977cf29b3992a4500d9dd305dcceb0"}, - {file = "terminado-0.18.1.tar.gz", hash = "sha256:de09f2c4b85de4765f7714688fff57d3e75bad1f909b589fde880460c753fd2e"}, -] - -[package.dependencies] -ptyprocess = {version = "*", markers = "os_name != \"nt\""} -pywinpty = {version = ">=1.1.0", markers = "os_name == \"nt\""} -tornado = ">=6.1.0" - -[package.extras] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] -test = ["pre-commit", "pytest (>=7.0)", "pytest-timeout"] -typing = ["mypy (>=1.6,<2.0)", "traitlets (>=5.11.1)"] - [[package]] name = "tiktoken" -version = "0.7.0" +version = "0.5.2" description = "tiktoken is a fast BPE tokeniser for use with OpenAI's models" optional = false python-versions = ">=3.8" files = [ - {file = "tiktoken-0.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485f3cc6aba7c6b6ce388ba634fbba656d9ee27f766216f45146beb4ac18b25f"}, - {file = "tiktoken-0.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e54be9a2cd2f6d6ffa3517b064983fb695c9a9d8aa7d574d1ef3c3f931a99225"}, - {file = "tiktoken-0.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79383a6e2c654c6040e5f8506f3750db9ddd71b550c724e673203b4f6b4b4590"}, - {file = "tiktoken-0.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d4511c52caacf3c4981d1ae2df85908bd31853f33d30b345c8b6830763f769c"}, - {file = "tiktoken-0.7.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:13c94efacdd3de9aff824a788353aa5749c0faee1fbe3816df365ea450b82311"}, - {file = "tiktoken-0.7.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8e58c7eb29d2ab35a7a8929cbeea60216a4ccdf42efa8974d8e176d50c9a3df5"}, - {file = "tiktoken-0.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:21a20c3bd1dd3e55b91c1331bf25f4af522c525e771691adbc9a69336fa7f702"}, - {file = "tiktoken-0.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:10c7674f81e6e350fcbed7c09a65bca9356eaab27fb2dac65a1e440f2bcfe30f"}, - {file = "tiktoken-0.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:084cec29713bc9d4189a937f8a35dbdfa785bd1235a34c1124fe2323821ee93f"}, - {file = "tiktoken-0.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:811229fde1652fedcca7c6dfe76724d0908775b353556d8a71ed74d866f73f7b"}, - {file = "tiktoken-0.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86b6e7dc2e7ad1b3757e8a24597415bafcfb454cebf9a33a01f2e6ba2e663992"}, - {file = "tiktoken-0.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1063c5748be36344c7e18c7913c53e2cca116764c2080177e57d62c7ad4576d1"}, - {file = "tiktoken-0.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:20295d21419bfcca092644f7e2f2138ff947a6eb8cfc732c09cc7d76988d4a89"}, - {file = "tiktoken-0.7.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:959d993749b083acc57a317cbc643fb85c014d055b2119b739487288f4e5d1cb"}, - {file = "tiktoken-0.7.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:71c55d066388c55a9c00f61d2c456a6086673ab7dec22dd739c23f77195b1908"}, - {file = "tiktoken-0.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:09ed925bccaa8043e34c519fbb2f99110bd07c6fd67714793c21ac298e449410"}, - {file = "tiktoken-0.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03c6c40ff1db0f48a7b4d2dafeae73a5607aacb472fa11f125e7baf9dce73704"}, - {file = "tiktoken-0.7.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d20b5c6af30e621b4aca094ee61777a44118f52d886dbe4f02b70dfe05c15350"}, - {file = "tiktoken-0.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d427614c3e074004efa2f2411e16c826f9df427d3c70a54725cae860f09e4bf4"}, - {file = "tiktoken-0.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8c46d7af7b8c6987fac9b9f61041b452afe92eb087d29c9ce54951280f899a97"}, - {file = "tiktoken-0.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:0bc603c30b9e371e7c4c7935aba02af5994a909fc3c0fe66e7004070858d3f8f"}, - {file = "tiktoken-0.7.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2398fecd38c921bcd68418675a6d155fad5f5e14c2e92fcf5fe566fa5485a858"}, - {file = "tiktoken-0.7.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8f5f6afb52fb8a7ea1c811e435e4188f2bef81b5e0f7a8635cc79b0eef0193d6"}, - {file = "tiktoken-0.7.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:861f9ee616766d736be4147abac500732b505bf7013cfaf019b85892637f235e"}, - {file = "tiktoken-0.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54031f95c6939f6b78122c0aa03a93273a96365103793a22e1793ee86da31685"}, - {file = "tiktoken-0.7.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:fffdcb319b614cf14f04d02a52e26b1d1ae14a570f90e9b55461a72672f7b13d"}, - {file = "tiktoken-0.7.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c72baaeaefa03ff9ba9688624143c858d1f6b755bb85d456d59e529e17234769"}, - {file = "tiktoken-0.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:131b8aeb043a8f112aad9f46011dced25d62629091e51d9dc1adbf4a1cc6aa98"}, - {file = "tiktoken-0.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cabc6dc77460df44ec5b879e68692c63551ae4fae7460dd4ff17181df75f1db7"}, - {file = "tiktoken-0.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8d57f29171255f74c0aeacd0651e29aa47dff6f070cb9f35ebc14c82278f3b25"}, - {file = "tiktoken-0.7.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ee92776fdbb3efa02a83f968c19d4997a55c8e9ce7be821ceee04a1d1ee149c"}, - {file = "tiktoken-0.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e215292e99cb41fbc96988ef62ea63bb0ce1e15f2c147a61acc319f8b4cbe5bf"}, - {file = "tiktoken-0.7.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8a81bac94769cab437dd3ab0b8a4bc4e0f9cf6835bcaa88de71f39af1791727a"}, - {file = "tiktoken-0.7.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d6d73ea93e91d5ca771256dfc9d1d29f5a554b83821a1dc0891987636e0ae226"}, - {file = "tiktoken-0.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:2bcb28ddf79ffa424f171dfeef9a4daff61a94c631ca6813f43967cb263b83b9"}, - {file = "tiktoken-0.7.0.tar.gz", hash = "sha256:1077266e949c24e0291f6c350433c6f0971365ece2b173a23bc3b9f9defef6b6"}, + {file = "tiktoken-0.5.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8c4e654282ef05ec1bd06ead22141a9a1687991cef2c6a81bdd1284301abc71d"}, + {file = 
"tiktoken-0.5.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7b3134aa24319f42c27718c6967f3c1916a38a715a0fa73d33717ba121231307"}, + {file = "tiktoken-0.5.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6092e6e77730929c8c6a51bb0d7cfdf1b72b63c4d033d6258d1f2ee81052e9e5"}, + {file = "tiktoken-0.5.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72ad8ae2a747622efae75837abba59be6c15a8f31b4ac3c6156bc56ec7a8e631"}, + {file = "tiktoken-0.5.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:51cba7c8711afa0b885445f0637f0fcc366740798c40b981f08c5f984e02c9d1"}, + {file = "tiktoken-0.5.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3d8c7d2c9313f8e92e987d585ee2ba0f7c40a0de84f4805b093b634f792124f5"}, + {file = "tiktoken-0.5.2-cp310-cp310-win_amd64.whl", hash = "sha256:692eca18c5fd8d1e0dde767f895c17686faaa102f37640e884eecb6854e7cca7"}, + {file = "tiktoken-0.5.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:138d173abbf1ec75863ad68ca289d4da30caa3245f3c8d4bfb274c4d629a2f77"}, + {file = "tiktoken-0.5.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7388fdd684690973fdc450b47dfd24d7f0cbe658f58a576169baef5ae4658607"}, + {file = "tiktoken-0.5.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a114391790113bcff670c70c24e166a841f7ea8f47ee2fe0e71e08b49d0bf2d4"}, + {file = "tiktoken-0.5.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca96f001e69f6859dd52926d950cfcc610480e920e576183497ab954e645e6ac"}, + {file = "tiktoken-0.5.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:15fed1dd88e30dfadcdd8e53a8927f04e1f6f81ad08a5ca824858a593ab476c7"}, + {file = "tiktoken-0.5.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:93f8e692db5756f7ea8cb0cfca34638316dcf0841fb8469de8ed7f6a015ba0b0"}, + {file = "tiktoken-0.5.2-cp311-cp311-win_amd64.whl", hash = "sha256:bcae1c4c92df2ffc4fe9f475bf8148dbb0ee2404743168bbeb9dcc4b79dc1fdd"}, + {file = "tiktoken-0.5.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b76a1e17d4eb4357d00f0622d9a48ffbb23401dcf36f9716d9bd9c8e79d421aa"}, + {file = "tiktoken-0.5.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:01d8b171bb5df4035580bc26d4f5339a6fd58d06f069091899d4a798ea279d3e"}, + {file = "tiktoken-0.5.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42adf7d4fb1ed8de6e0ff2e794a6a15005f056a0d83d22d1d6755a39bffd9e7f"}, + {file = "tiktoken-0.5.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c3f894dbe0adb44609f3d532b8ea10820d61fdcb288b325a458dfc60fefb7db"}, + {file = "tiktoken-0.5.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:58ccfddb4e62f0df974e8f7e34a667981d9bb553a811256e617731bf1d007d19"}, + {file = "tiktoken-0.5.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58902a8bad2de4268c2a701f1c844d22bfa3cbcc485b10e8e3e28a050179330b"}, + {file = "tiktoken-0.5.2-cp312-cp312-win_amd64.whl", hash = "sha256:5e39257826d0647fcac403d8fa0a474b30d02ec8ffc012cfaf13083e9b5e82c5"}, + {file = "tiktoken-0.5.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8bde3b0fbf09a23072d39c1ede0e0821f759b4fa254a5f00078909158e90ae1f"}, + {file = "tiktoken-0.5.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2ddee082dcf1231ccf3a591d234935e6acf3e82ee28521fe99af9630bc8d2a60"}, + {file = "tiktoken-0.5.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35c057a6a4e777b5966a7540481a75a31429fc1cb4c9da87b71c8b75b5143037"}, + {file = 
"tiktoken-0.5.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c4a049b87e28f1dc60509f8eb7790bc8d11f9a70d99b9dd18dfdd81a084ffe6"}, + {file = "tiktoken-0.5.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5bf5ce759089f4f6521ea6ed89d8f988f7b396e9f4afb503b945f5c949c6bec2"}, + {file = "tiktoken-0.5.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0c964f554af1a96884e01188f480dad3fc224c4bbcf7af75d4b74c4b74ae0125"}, + {file = "tiktoken-0.5.2-cp38-cp38-win_amd64.whl", hash = "sha256:368dd5726d2e8788e47ea04f32e20f72a2012a8a67af5b0b003d1e059f1d30a3"}, + {file = "tiktoken-0.5.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a2deef9115b8cd55536c0a02c0203512f8deb2447f41585e6d929a0b878a0dd2"}, + {file = "tiktoken-0.5.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2ed7d380195affbf886e2f8b92b14edfe13f4768ff5fc8de315adba5b773815e"}, + {file = "tiktoken-0.5.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c76fce01309c8140ffe15eb34ded2bb94789614b7d1d09e206838fc173776a18"}, + {file = "tiktoken-0.5.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60a5654d6a2e2d152637dd9a880b4482267dfc8a86ccf3ab1cec31a8c76bfae8"}, + {file = "tiktoken-0.5.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:41d4d3228e051b779245a8ddd21d4336f8975563e92375662f42d05a19bdff41"}, + {file = "tiktoken-0.5.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a5c1cdec2c92fcde8c17a50814b525ae6a88e8e5b02030dc120b76e11db93f13"}, + {file = "tiktoken-0.5.2-cp39-cp39-win_amd64.whl", hash = "sha256:84ddb36faedb448a50b246e13d1b6ee3437f60b7169b723a4b2abad75e914f3e"}, + {file = "tiktoken-0.5.2.tar.gz", hash = "sha256:f54c581f134a8ea96ce2023ab221d4d4d81ab614efa0b2fbce926387deb56c80"}, ] [package.dependencies] @@ -2884,24 +2623,6 @@ requests = ">=2.26.0" [package.extras] blobfile = ["blobfile (>=2)"] -[[package]] -name = "tinycss2" -version = "1.3.0" -description = "A tiny CSS parser" -optional = false -python-versions = ">=3.8" -files = [ - {file = "tinycss2-1.3.0-py3-none-any.whl", hash = "sha256:54a8dbdffb334d536851be0226030e9505965bb2f30f21a4a82c55fb2a80fae7"}, - {file = "tinycss2-1.3.0.tar.gz", hash = "sha256:152f9acabd296a8375fbca5b84c961ff95971fcfc32e79550c8df8e29118c54d"}, -] - -[package.dependencies] -webencodings = ">=0.4" - -[package.extras] -doc = ["sphinx", "sphinx_rtd_theme"] -test = ["pytest", "ruff"] - [[package]] name = "tomli" version = "2.0.2" @@ -2968,17 +2689,6 @@ files = [ docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<8.2)", "pytest-mock", "pytest-mypy-testing"] -[[package]] -name = "types-python-dateutil" -version = "2.9.0.20241003" -description = "Typing stubs for python-dateutil" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-python-dateutil-2.9.0.20241003.tar.gz", hash = "sha256:58cb85449b2a56d6684e41aeefb4c4280631246a0da1a719bdbe6f3fb0317446"}, - {file = "types_python_dateutil-2.9.0.20241003-py3-none-any.whl", hash = "sha256:250e1d8e80e7bbc3a6c99b907762711d1a1cdd00e978ad39cb5940f6f0a87f3d"}, -] - [[package]] name = "typing-extensions" version = "4.12.2" @@ -2991,18 +2701,19 @@ files = [ ] [[package]] -name = "uri-template" -version = "1.3.0" -description = "RFC 6570 URI Template Processor" +name = "typing-inspect" +version = "0.9.0" +description = "Runtime inspection utilities for typing module." 
optional = false -python-versions = ">=3.7" +python-versions = "*" files = [ - {file = "uri-template-1.3.0.tar.gz", hash = "sha256:0e00f8eb65e18c7de20d595a14336e9f337ead580c70934141624b6d1ffdacc7"}, - {file = "uri_template-1.3.0-py3-none-any.whl", hash = "sha256:a44a133ea12d44a0c0f06d7d42a52d71282e77e2f937d8abd5655b8d56fc1363"}, + {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"}, + {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"}, ] -[package.extras] -dev = ["flake8", "flake8-annotations", "flake8-bandit", "flake8-bugbear", "flake8-commas", "flake8-comprehensions", "flake8-continuation", "flake8-datetimez", "flake8-docstrings", "flake8-import-order", "flake8-literal", "flake8-modern-annotations", "flake8-noqa", "flake8-pyproject", "flake8-requirements", "flake8-typechecking-import", "flake8-use-fstring", "mypy", "pep8-naming", "types-PyYAML"] +[package.dependencies] +mypy-extensions = ">=0.3.0" +typing-extensions = ">=3.7.4" [[package]] name = "urllib3" @@ -3033,57 +2744,100 @@ files = [ ] [[package]] -name = "webcolors" -version = "24.8.0" -description = "A library for working with the color formats defined by HTML and CSS." -optional = false -python-versions = ">=3.8" -files = [ - {file = "webcolors-24.8.0-py3-none-any.whl", hash = "sha256:fc4c3b59358ada164552084a8ebee637c221e4059267d0f8325b3b560f6c7f0a"}, - {file = "webcolors-24.8.0.tar.gz", hash = "sha256:08b07af286a01bcd30d583a7acadf629583d1f79bfef27dd2c2c5c263817277d"}, -] - -[package.extras] -docs = ["furo", "sphinx", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-notfound-page", "sphinxext-opengraph"] -tests = ["coverage[toml]"] - -[[package]] -name = "webencodings" -version = "0.5.1" -description = "Character encoding aliases for legacy web content" +name = "yarl" +version = "1.15.4" +description = "Yet another URL library" optional = false -python-versions = "*" -files = [ - {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, - {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, -] - -[[package]] -name = "websocket-client" -version = "1.8.0" -description = "WebSocket client for Python with low level API options" -optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526"}, - {file = "websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da"}, + {file = "yarl-1.15.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:551205388d1da18a9975302c9a274ba24788f53bb9bb86187496ebf9e938916e"}, + {file = "yarl-1.15.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eee724176b5bc50ee64905f559345448119b860a30b9489bd7a073f61baf925f"}, + {file = "yarl-1.15.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:db818e33599f7b2e4c6507f2b2c24f45ff539a1b6e4e09163bb6f3cfb4616ca7"}, + {file = "yarl-1.15.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07019a9de859c5a29916defd1e8c7557de6491a10bf50c49ff5284e6aedf5313"}, + {file = "yarl-1.15.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db64a20e78969fc66665d2e5fc96cb4f4dc80f2137d8fed4b5a650ad569bb60f"}, + {file = 
"yarl-1.15.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4076bfd8f1621449b19b9826848ed51bf0f2d1d38e82647c312c0730d8778903"}, + {file = "yarl-1.15.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c23a442973dba3646811c284fce3dddd7fe5c2bd674ac73a122198e8218d6115"}, + {file = "yarl-1.15.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2bdb038b3f5c284e3919218c580dedc95f592c417a358361450b9519b22f7a8"}, + {file = "yarl-1.15.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:59db8e6888d5302b8dbca0c1026ddabe99d81d67cdc101941519e13ffc9050fe"}, + {file = "yarl-1.15.4-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:f3294ce265011547630a59c20085fcb6af8cc5fa1fa44a203251f7d86cd5d913"}, + {file = "yarl-1.15.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:4851618679ca70b863ba2e7109be5f09f8fd7715ec505bd42e5a947dcfde3a45"}, + {file = "yarl-1.15.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:dce1c56beef74d9c799a6ed94001693232a1402138292353a8ce302b64f457d9"}, + {file = "yarl-1.15.4-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:1e7468f31de61a82817f918743e5229fce774f73fad58487cdf88eef4f06d864"}, + {file = "yarl-1.15.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:527c68f48a91d953691291d3bce0209293aa5ad13ff05286ddb506791c331818"}, + {file = "yarl-1.15.4-cp310-cp310-win32.whl", hash = "sha256:c30115cecaf25fdcb67cc71c669d08425207f62d7a2f6d5416057c1460529216"}, + {file = "yarl-1.15.4-cp310-cp310-win_amd64.whl", hash = "sha256:df09c80f4bc2bc2efde309af383c3fe8fd8c51fe0519edb350b9c9e0af43ffa4"}, + {file = "yarl-1.15.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:76259901cf1ac3db65e7e6dff04775b626d0715f9b51d92b447351144c756a82"}, + {file = "yarl-1.15.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:98d8dc1e8133f86d916125deca9780d791b22645f0d62bafe1452d1cd5eac631"}, + {file = "yarl-1.15.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0d0f16c87c62b7a94b389ddf6a8c9d081265d788875c39f3a80108c4856eea7b"}, + {file = "yarl-1.15.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8de5328d91859b461899497980d4cc8269e84e2d18640f6ac643886fda9000bf"}, + {file = "yarl-1.15.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84937d00e2ea03616c40977de20189fa13a9213e5744a3c6afa0e7dd9141d69c"}, + {file = "yarl-1.15.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:691a3b498fdebef63308e8967bb598cfd326c56d628da82b799dd181bace4503"}, + {file = "yarl-1.15.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a706db0c3b7e4578ff34ed2b1d2507b08fd491346ffc64468786fdf1151d938"}, + {file = "yarl-1.15.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:adb6b5d07d17c32f9d34c9dd4a693637a72323cfcb1f8a52d57033ab2dd21e99"}, + {file = "yarl-1.15.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6e100c6c7d9e9d469009fd55cc4d7ad168d67d40758865c50da713f7ada491e5"}, + {file = "yarl-1.15.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:df6b254e55c8ac2362afaa651e3e53453aa19a095570792346245773b434176e"}, + {file = "yarl-1.15.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8721f8bedaa722c3c483cc06a1399cbfdb280eadf443aa5d324b0203cef2a75f"}, + {file = "yarl-1.15.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:1005921b30f4f39bf893946df6173567ff650307babb5ec04bbf64342a1f62c1"}, + {file = 
"yarl-1.15.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:ab79cc13307065a0b3ef087f09f0509996fc605d35d6642bb28e5d85b2648e1e"}, + {file = "yarl-1.15.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f337486742c700b102d640830aab3faf2848bed966b479a39e6783edd4ab1c6c"}, + {file = "yarl-1.15.4-cp311-cp311-win32.whl", hash = "sha256:20acf84bd1ce530065f8e957e4a5878fda4bc5f18cb02659828210e1519de54e"}, + {file = "yarl-1.15.4-cp311-cp311-win_amd64.whl", hash = "sha256:ab9ccf26cb3fa32747ba2a637a189d2d42386a2fc4afc10dbc7f85922dd23b0f"}, + {file = "yarl-1.15.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:f923e94e93a37fd990e8336e0b9bedea533e7cbed14e0c572bf9357ef2a70681"}, + {file = "yarl-1.15.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3198da7d7c34e29fc8c823e0c3ce6c7274aac35760de557c2017489c7d98fc5a"}, + {file = "yarl-1.15.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d886de2ea81f513ba2d6820451d33b767a97c37867ba688d42e164b2dbca1362"}, + {file = "yarl-1.15.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ac85e760543129a1912a82438fc8075223e35eaa2d457d61cd83c27d00d17be"}, + {file = "yarl-1.15.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e58c5d07b1f78dd4cb180c5b3b82465cd281aaeee8aafea0e5d72a4b97922cb1"}, + {file = "yarl-1.15.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9060589d0acad1fca048861fa9ee3e8ed060f67894fa885969648ab6e9e99a54"}, + {file = "yarl-1.15.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ccd6774aa7bebdf9ca608bb0839318757a71b8e0d2cf7b10c002bc8790bd343e"}, + {file = "yarl-1.15.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7694f109867ee428c21b85ae19fd31d164c691eb45cc95c561cfdeba237a12e3"}, + {file = "yarl-1.15.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:83e7154aa0d17f5c93d27ac01088fd9ab6673e7bab1acbd07cd7a865b980c045"}, + {file = "yarl-1.15.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:f16d1940c0cbc342f1d29d6212a006d172be616d2942c5c41966e8a3ce4c3be1"}, + {file = "yarl-1.15.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:7d5226c70af3ad9569ccc4ccc04ab65be79eeb22c87d7ae789c89e62ef76bbd6"}, + {file = "yarl-1.15.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:f25906e4a72d9833e81717c39a39dee7297ff5cb44957d06d177a2ab8ef2ef7f"}, + {file = "yarl-1.15.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:0e07e4b17b648c880e8e42bf1ac0a730bde114961646ae1c2ec4433f0c11ca94"}, + {file = "yarl-1.15.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6f8136bde8dfa4477c6a85c79a366581b4a505b51a52b669318fb631d3f4f638"}, + {file = "yarl-1.15.4-cp312-cp312-win32.whl", hash = "sha256:ccbeaf5b18b173b9d78e332e017b30ba8bedcf03cdce1d13490b82a3f421bc98"}, + {file = "yarl-1.15.4-cp312-cp312-win_amd64.whl", hash = "sha256:f74f6ffdc633aefecbc80282242a5395058db9d1247fa7dd2f070ef84dc82583"}, + {file = "yarl-1.15.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:4f66a0eda48844508736e47ed476d8fdd7cdbf16a4053b5d439509a25f708504"}, + {file = "yarl-1.15.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:fd2bb86f40962d53a91def15a2f7684c62e081a7b96ec74ed0259c34b15973b9"}, + {file = "yarl-1.15.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f864b412557e69a6b953d62c01a0ed0ee342666298aa7f2a29af526bfa80f6e9"}, + {file = "yarl-1.15.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:3a79c0a8bbb046add85663af85e9993b691bf20c2a109518bd35e0ce77edfe42"}, + {file = "yarl-1.15.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de479e30abd2dfd49fdad3bd6953f2d930a45380be5143c0c9f7a1215cffc8cc"}, + {file = "yarl-1.15.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:21fabe58042f3e567b4edc75b2cf44cea02f228e41ac09d73de126bf685fe883"}, + {file = "yarl-1.15.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77390496f2f32437a721c854897f889abefae0f3009daf90a2f703508d96c920"}, + {file = "yarl-1.15.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3896bf15284dd23acab1f2e7fceb350d8da6f6f2436b922f7ec6b3de685d34ca"}, + {file = "yarl-1.15.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:590e2d733a82ecf004c5c531cbef0d6be328e93adec960024eb213f10cb9503e"}, + {file = "yarl-1.15.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:1ceb677fb583971351627eac70eec6763fbc889761828da7a276681b5e39742d"}, + {file = "yarl-1.15.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:69f628d2da1489b27959f4d63fdb326781fe484944dce94abbf919e416c54abe"}, + {file = "yarl-1.15.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:35a6b69cc44bda002705d6138346bf0a0234cbb7c26c3bf192513eb946aee6f9"}, + {file = "yarl-1.15.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:49f886e8dcf591275c6e20915b516fd81647857566b0c0158c52df1e468849c9"}, + {file = "yarl-1.15.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:49190eb2ece70313742b0ea51520340288a059674da1f39eefb589d598d9453e"}, + {file = "yarl-1.15.4-cp313-cp313-win32.whl", hash = "sha256:48334a6c8afee93097eb17c0a094234dac2d88da076c8cf372e09e2a5dcc4b66"}, + {file = "yarl-1.15.4-cp313-cp313-win_amd64.whl", hash = "sha256:f68025d6ba1816428b7de615c80f61cb03d5b7061158d4ced7696657a64aa59c"}, + {file = "yarl-1.15.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8b569f4f511b59518ba6719feb5b8bf0a5d4115e6ac903c89e10a8a9ac656017"}, + {file = "yarl-1.15.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9fe17744d60fc404ac61f824118e1e15ce3c2e92eced9b8e22f3c7847acafbf2"}, + {file = "yarl-1.15.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:115346433fad2084ee3a1a925ccc0659990aa42e208ca54c278830a150a3caf3"}, + {file = "yarl-1.15.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60165b8bc260f453321004b193770a66cc1b1a5c57c07d4b8dcc96839e7ad578"}, + {file = "yarl-1.15.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65a0168691373e08d869d48b62c8bed0af0cdaef19c76e11ad73b43901bbdb5a"}, + {file = "yarl-1.15.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:787532f00543a21b8f4ec3050b4e01b8fe437797903c0156a0b03dfca5e1ba6c"}, + {file = "yarl-1.15.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f51c9d173e5fa4b12d06ddca09a41cabbdeb660471dbe55432423eec095709ab"}, + {file = "yarl-1.15.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c96eaa30030e1cfafe533f3da8983812281235b7c50ef2a6c78ceca7aea1a0b"}, + {file = "yarl-1.15.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4feab2dcb725eb5b4835207ecf3d370ff7ce930b253cba5e681646cb80d64c2c"}, + {file = "yarl-1.15.4-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:de38b0b5b86e57efb129d179854e78b65cb8e294a8c75560877869c43aa2415a"}, + {file = "yarl-1.15.4-cp39-cp39-musllinux_1_2_i686.whl", hash = 
"sha256:65e0467f90f2acf3bc83bbfeedece8f1fd84df8add1a54e9600ed7b7b5debdb0"}, + {file = "yarl-1.15.4-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:40c18f96696549e73b92dc12619f07019cbf5faefc1612608f967c144816e493"}, + {file = "yarl-1.15.4-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:46491b3e058de7b484e1c9fb20aa8441f06d6c9a18395d711c1c2a9ad6707d6a"}, + {file = "yarl-1.15.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:faa3dd7f4620ab5e5da7a0789d0aac78a9ad0376f102409d442ec5a4179e200a"}, + {file = "yarl-1.15.4-cp39-cp39-win32.whl", hash = "sha256:c33ea7c55a73be343f02361795caf52a187357ea07708fb1cae6661ee1d689c8"}, + {file = "yarl-1.15.4-cp39-cp39-win_amd64.whl", hash = "sha256:11b207061f28b4b6d980239b22ab0ecfadc47846b5a3b8e79f27fcc019d02cf9"}, + {file = "yarl-1.15.4-py3-none-any.whl", hash = "sha256:e5cc288111c450c0a54a74475591b206d3b1cb47dc71bb6200f6be8b1337184c"}, + {file = "yarl-1.15.4.tar.gz", hash = "sha256:a0c5e271058d148d730219ca4f33c5d841c6bd46e05b0da60fea7b516906ccd3"}, ] -[package.extras] -docs = ["Sphinx (>=6.0)", "myst-parser (>=2.0.0)", "sphinx-rtd-theme (>=1.1.0)"] -optional = ["python-socks", "wsaccel"] -test = ["websockets"] - -[[package]] -name = "widgetsnbextension" -version = "4.0.13" -description = "Jupyter interactive widgets for Jupyter Notebook" -optional = false -python-versions = ">=3.7" -files = [ - {file = "widgetsnbextension-4.0.13-py3-none-any.whl", hash = "sha256:74b2692e8500525cc38c2b877236ba51d34541e6385eeed5aec15a70f88a6c71"}, - {file = "widgetsnbextension-4.0.13.tar.gz", hash = "sha256:ffcb67bc9febd10234a362795f643927f4e0c05d9342c727b65d2384f8feacb6"}, -] +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" +propcache = ">=0.2.0" [[package]] name = "zipp" @@ -3107,4 +2861,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = ">=3.9.0,<4.0" -content-hash = "fb774f74c530cefc1a8bc2de380497292afc4fba3658bda0c3d148decd285872" +content-hash = "9ee978ac32589ac9d4ac2e0c4cddedf8f904655b73aa1c9e69ed076ef25db3df" diff --git a/pyproject.toml b/pyproject.toml index 2651eeaa..734ee1b1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,12 +1,10 @@ [tool.poetry] name = "zep-cloud" -version = "1.0.9" +version = "2.0.0-rc.1" description = "" readme = "README.md" authors = [] -packages = [ - { include = "zep_cloud", from = "src" } -] +packages = [{ include = "zep_cloud", from = "src" }] [tool.poetry.dependencies] python = ">=3.9.0,<4.0" @@ -19,14 +17,12 @@ mypy = "1.9.0" pytest = "^7.4.0" pytest-asyncio = "^0.23.5" python-dateutil = "^2.9.0" +langchain = "^0.1.3" +openai = "^1.9.0" +langchain-openai = "^0.0.3" black = "^24.4.2" ruff = "^0.4.8" ipykernel = "^6.29.4" -langchain-openai = "^0.1.23" -langgraph = "^0.2.16" -ipywidgets = "^8.1.5" -openai = "^1.43.0" -notebook = "^7.2.2" [tool.pytest.ini_options] testpaths = ["tests"] diff --git a/src/zep_cloud/__init__.py b/src/zep_cloud/__init__.py index 9de1d9a8..1193253b 100644 --- a/src/zep_cloud/__init__.py +++ b/src/zep_cloud/__init__.py @@ -1,32 +1,42 @@ # This file was auto-generated by Fern from our API Definition. 
from .types import ( - AddedFact, ApiError, + ApidataDocument, + ApidataDocumentCollection, + ApidataDocumentSearchResponse, + ApidataDocumentWithScore, ClassifySessionRequest, - ClassifySessionResponse, CreateDocumentRequest, - DocumentCollectionResponse, - DocumentResponse, - DocumentSearchResult, - DocumentSearchResultPage, EndSessionResponse, EndSessionsResponse, + EntityEdge, + EntityNode, + Episode, + EpisodeResponse, Fact, FactRatingExamples, FactRatingInstruction, FactResponse, FactsResponse, + GraphDataType, + GraphSearchResults, + GraphSearchScope, + Group, Memory, MemorySearchResult, - MemoryType, Message, MessageListResponse, + NewFact, Question, + Reranker, RoleType, SearchScope, SearchType, Session, + SessionClassification, + SessionFactRatingExamples, + SessionFactRatingInstruction, SessionListResponse, SessionSearchResponse, SessionSearchResult, @@ -38,41 +48,51 @@ UserListResponse, ) from .errors import BadRequestError, ConflictError, InternalServerError, NotFoundError, UnauthorizedError -from . import document, memory, user +from . import document, graph, group, memory, user from .environment import ZepEnvironment from .version import __version__ __all__ = [ - "AddedFact", "ApiError", + "ApidataDocument", + "ApidataDocumentCollection", + "ApidataDocumentSearchResponse", + "ApidataDocumentWithScore", "BadRequestError", "ClassifySessionRequest", - "ClassifySessionResponse", "ConflictError", "CreateDocumentRequest", - "DocumentCollectionResponse", - "DocumentResponse", - "DocumentSearchResult", - "DocumentSearchResultPage", "EndSessionResponse", "EndSessionsResponse", + "EntityEdge", + "EntityNode", + "Episode", + "EpisodeResponse", "Fact", "FactRatingExamples", "FactRatingInstruction", "FactResponse", "FactsResponse", + "GraphDataType", + "GraphSearchResults", + "GraphSearchScope", + "Group", "InternalServerError", "Memory", "MemorySearchResult", - "MemoryType", "Message", "MessageListResponse", + "NewFact", "NotFoundError", "Question", + "Reranker", "RoleType", "SearchScope", "SearchType", "Session", + "SessionClassification", + "SessionFactRatingExamples", + "SessionFactRatingInstruction", "SessionListResponse", "SessionSearchResponse", "SessionSearchResult", @@ -86,6 +106,8 @@ "ZepEnvironment", "__version__", "document", + "graph", + "group", "memory", "user", ] diff --git a/src/zep_cloud/base_client.py b/src/zep_cloud/base_client.py index 763c164a..52baace2 100644 --- a/src/zep_cloud/base_client.py +++ b/src/zep_cloud/base_client.py @@ -9,6 +9,8 @@ from .core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .document.client import AsyncDocumentClient, DocumentClient from .environment import ZepEnvironment +from .graph.client import AsyncGraphClient, GraphClient +from .group.client import AsyncGroupClient, GroupClient from .memory.client import AsyncMemoryClient, MemoryClient from .user.client import AsyncUserClient, UserClient @@ -75,6 +77,8 @@ def __init__( ) self.document = DocumentClient(client_wrapper=self._client_wrapper) self.memory = MemoryClient(client_wrapper=self._client_wrapper) + self.graph = GraphClient(client_wrapper=self._client_wrapper) + self.group = GroupClient(client_wrapper=self._client_wrapper) self.user = UserClient(client_wrapper=self._client_wrapper) @@ -140,6 +144,8 @@ def __init__( ) self.document = AsyncDocumentClient(client_wrapper=self._client_wrapper) self.memory = AsyncMemoryClient(client_wrapper=self._client_wrapper) + self.graph = AsyncGraphClient(client_wrapper=self._client_wrapper) + self.group = 
AsyncGroupClient(client_wrapper=self._client_wrapper) self.user = AsyncUserClient(client_wrapper=self._client_wrapper) diff --git a/src/zep_cloud/core/client_wrapper.py b/src/zep_cloud/core/client_wrapper.py index c4d1693d..2180cfb7 100644 --- a/src/zep_cloud/core/client_wrapper.py +++ b/src/zep_cloud/core/client_wrapper.py @@ -17,7 +17,7 @@ def get_headers(self) -> typing.Dict[str, str]: headers: typing.Dict[str, str] = { "X-Fern-Language": "Python", "X-Fern-SDK-Name": "zep-cloud", - "X-Fern-SDK-Version": "1.0.9", + "X-Fern-SDK-Version": "2.0.0-rc.1", } headers["Authorization"] = f"Api-Key {self.api_key}" return headers diff --git a/src/zep_cloud/document/client.py b/src/zep_cloud/document/client.py index e38604c0..215fbe0b 100644 --- a/src/zep_cloud/document/client.py +++ b/src/zep_cloud/document/client.py @@ -13,10 +13,10 @@ from ..errors.not_found_error import NotFoundError from ..errors.unauthorized_error import UnauthorizedError from ..types.api_error import ApiError as types_api_error_ApiError +from ..types.apidata_document import ApidataDocument +from ..types.apidata_document_collection import ApidataDocumentCollection +from ..types.apidata_document_search_response import ApidataDocumentSearchResponse from ..types.create_document_request import CreateDocumentRequest -from ..types.document_collection_response import DocumentCollectionResponse -from ..types.document_response import DocumentResponse -from ..types.document_search_result_page import DocumentSearchResultPage from ..types.search_type import SearchType from ..types.success_response import SuccessResponse from ..types.update_document_list_request import UpdateDocumentListRequest @@ -31,7 +31,7 @@ def __init__(self, *, client_wrapper: SyncClientWrapper): def list_collections( self, *, request_options: typing.Optional[RequestOptions] = None - ) -> typing.List[typing.List[DocumentCollectionResponse]]: + ) -> typing.List[typing.List[ApidataDocumentCollection]]: """ Returns a list of all DocumentCollections. @@ -42,7 +42,7 @@ def list_collections( Returns ------- - typing.List[typing.List[DocumentCollectionResponse]] + typing.List[typing.List[ApidataDocumentCollection]] OK Examples @@ -58,7 +58,7 @@ def list_collections( "collections", method="GET", request_options=request_options ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[typing.List[DocumentCollectionResponse]], _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(typing.List[typing.List[ApidataDocumentCollection]], _response.json()) # type: ignore if _response.status_code == 401: raise UnauthorizedError( pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json()) # type: ignore @@ -75,7 +75,7 @@ def list_collections( def get_collection( self, collection_name: str, *, request_options: typing.Optional[RequestOptions] = None - ) -> DocumentCollectionResponse: + ) -> ApidataDocumentCollection: """ Returns a DocumentCollection if it exists. 
@@ -89,7 +89,7 @@ def get_collection( Returns ------- - DocumentCollectionResponse + ApidataDocumentCollection OK Examples @@ -107,7 +107,7 @@ def get_collection( f"collections/{jsonable_encoder(collection_name)}", method="GET", request_options=request_options ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(DocumentCollectionResponse, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(ApidataDocumentCollection, _response.json()) # type: ignore if _response.status_code == 400: raise BadRequestError(pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json())) # type: ignore if _response.status_code == 401: @@ -317,7 +317,7 @@ def add_documents( *, request: typing.Sequence[CreateDocumentRequest], request_options: typing.Optional[RequestOptions] = None, - ) -> typing.List[str]: + ) -> typing.List[typing.List[str]]: """ Creates Documents in a specified DocumentCollection and returns their UUIDs. @@ -333,7 +333,7 @@ def add_documents( Returns ------- - typing.List[str] + typing.List[typing.List[str]] OK Examples @@ -361,7 +361,7 @@ def add_documents( omit=OMIT, ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[str], _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(typing.List[typing.List[str]], _response.json()) # type: ignore if _response.status_code == 400: raise BadRequestError(pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json())) # type: ignore if _response.status_code == 401: @@ -447,7 +447,7 @@ def batch_get_documents( document_ids: typing.Optional[typing.Sequence[str]] = OMIT, uuids: typing.Optional[typing.Sequence[str]] = OMIT, request_options: typing.Optional[RequestOptions] = None, - ) -> typing.List[DocumentResponse]: + ) -> typing.List[typing.List[ApidataDocument]]: """ Returns Documents from a DocumentCollection specified by UUID or ID. @@ -465,7 +465,7 @@ def batch_get_documents( Returns ------- - typing.List[DocumentResponse] + typing.List[typing.List[ApidataDocument]] OK Examples @@ -487,7 +487,7 @@ def batch_get_documents( omit=OMIT, ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[DocumentResponse], _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(typing.List[typing.List[ApidataDocument]], _response.json()) # type: ignore if _response.status_code == 400: raise BadRequestError(pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json())) # type: ignore if _response.status_code == 401: @@ -571,9 +571,9 @@ def batch_update_documents( raise core_api_error_ApiError(status_code=_response.status_code, body=_response.text) raise core_api_error_ApiError(status_code=_response.status_code, body=_response_json) - def gets_a_document_from_a_document_collection_by_uuid( + def gets_a_document_from_a_document_collection_by_uuid_cloud_only( self, collection_name: str, document_uuid: str, *, request_options: typing.Optional[RequestOptions] = None - ) -> DocumentResponse: + ) -> ApidataDocument: """ Returns specified Document from a DocumentCollection. 
@@ -590,7 +590,7 @@ def gets_a_document_from_a_document_collection_by_uuid( Returns ------- - DocumentResponse + ApidataDocument OK Examples @@ -600,7 +600,7 @@ def gets_a_document_from_a_document_collection_by_uuid( client = Zep( api_key="YOUR_API_KEY", ) - client.document.gets_a_document_from_a_document_collection_by_uuid( + client.document.gets_a_document_from_a_document_collection_by_uuid_cloud_only( collection_name="collectionName", document_uuid="documentUUID", ) @@ -611,7 +611,7 @@ def gets_a_document_from_a_document_collection_by_uuid( request_options=request_options, ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(DocumentResponse, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(ApidataDocument, _response.json()) # type: ignore if _response.status_code == 400: raise BadRequestError(pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json())) # type: ignore if _response.status_code == 401: @@ -687,7 +687,7 @@ def delete_document( raise core_api_error_ApiError(status_code=_response.status_code, body=_response.text) raise core_api_error_ApiError(status_code=_response.status_code, body=_response_json) - def updates_a_document( + def updates_a_document_cloud_only( self, collection_name: str, document_uuid: str, @@ -726,7 +726,7 @@ def updates_a_document( client = Zep( api_key="YOUR_API_KEY", ) - client.document.updates_a_document( + client.document.updates_a_document_cloud_only( collection_name="collectionName", document_uuid="documentUUID", ) @@ -769,7 +769,7 @@ def search( search_type: typing.Optional[SearchType] = OMIT, text: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, - ) -> DocumentSearchResultPage: + ) -> ApidataDocumentSearchResponse: """ Searches over documents in a collection based on provided search criteria. One of text or metadata must be provided. Returns an empty list if no documents are found. @@ -800,7 +800,7 @@ def search( Returns ------- - DocumentSearchResultPage + ApidataDocumentSearchResponse OK Examples @@ -829,7 +829,7 @@ def search( omit=OMIT, ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(DocumentSearchResultPage, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(ApidataDocumentSearchResponse, _response.json()) # type: ignore if _response.status_code == 400: raise BadRequestError(pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json())) # type: ignore if _response.status_code == 401: @@ -853,7 +853,7 @@ def __init__(self, *, client_wrapper: AsyncClientWrapper): async def list_collections( self, *, request_options: typing.Optional[RequestOptions] = None - ) -> typing.List[typing.List[DocumentCollectionResponse]]: + ) -> typing.List[typing.List[ApidataDocumentCollection]]: """ Returns a list of all DocumentCollections. 
@@ -864,7 +864,7 @@ async def list_collections( Returns ------- - typing.List[typing.List[DocumentCollectionResponse]] + typing.List[typing.List[ApidataDocumentCollection]] OK Examples @@ -880,7 +880,7 @@ async def list_collections( "collections", method="GET", request_options=request_options ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[typing.List[DocumentCollectionResponse]], _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(typing.List[typing.List[ApidataDocumentCollection]], _response.json()) # type: ignore if _response.status_code == 401: raise UnauthorizedError( pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json()) # type: ignore @@ -897,7 +897,7 @@ async def list_collections( async def get_collection( self, collection_name: str, *, request_options: typing.Optional[RequestOptions] = None - ) -> DocumentCollectionResponse: + ) -> ApidataDocumentCollection: """ Returns a DocumentCollection if it exists. @@ -911,7 +911,7 @@ async def get_collection( Returns ------- - DocumentCollectionResponse + ApidataDocumentCollection OK Examples @@ -929,7 +929,7 @@ async def get_collection( f"collections/{jsonable_encoder(collection_name)}", method="GET", request_options=request_options ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(DocumentCollectionResponse, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(ApidataDocumentCollection, _response.json()) # type: ignore if _response.status_code == 400: raise BadRequestError(pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json())) # type: ignore if _response.status_code == 401: @@ -1139,7 +1139,7 @@ async def add_documents( *, request: typing.Sequence[CreateDocumentRequest], request_options: typing.Optional[RequestOptions] = None, - ) -> typing.List[str]: + ) -> typing.List[typing.List[str]]: """ Creates Documents in a specified DocumentCollection and returns their UUIDs. @@ -1155,7 +1155,7 @@ async def add_documents( Returns ------- - typing.List[str] + typing.List[typing.List[str]] OK Examples @@ -1183,7 +1183,7 @@ async def add_documents( omit=OMIT, ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[str], _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(typing.List[typing.List[str]], _response.json()) # type: ignore if _response.status_code == 400: raise BadRequestError(pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json())) # type: ignore if _response.status_code == 401: @@ -1269,7 +1269,7 @@ async def batch_get_documents( document_ids: typing.Optional[typing.Sequence[str]] = OMIT, uuids: typing.Optional[typing.Sequence[str]] = OMIT, request_options: typing.Optional[RequestOptions] = None, - ) -> typing.List[DocumentResponse]: + ) -> typing.List[typing.List[ApidataDocument]]: """ Returns Documents from a DocumentCollection specified by UUID or ID. 
@@ -1287,7 +1287,7 @@ async def batch_get_documents( Returns ------- - typing.List[DocumentResponse] + typing.List[typing.List[ApidataDocument]] OK Examples @@ -1309,7 +1309,7 @@ async def batch_get_documents( omit=OMIT, ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[DocumentResponse], _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(typing.List[typing.List[ApidataDocument]], _response.json()) # type: ignore if _response.status_code == 400: raise BadRequestError(pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json())) # type: ignore if _response.status_code == 401: @@ -1393,9 +1393,9 @@ async def batch_update_documents( raise core_api_error_ApiError(status_code=_response.status_code, body=_response.text) raise core_api_error_ApiError(status_code=_response.status_code, body=_response_json) - async def gets_a_document_from_a_document_collection_by_uuid( + async def gets_a_document_from_a_document_collection_by_uuid_cloud_only( self, collection_name: str, document_uuid: str, *, request_options: typing.Optional[RequestOptions] = None - ) -> DocumentResponse: + ) -> ApidataDocument: """ Returns specified Document from a DocumentCollection. @@ -1412,7 +1412,7 @@ async def gets_a_document_from_a_document_collection_by_uuid( Returns ------- - DocumentResponse + ApidataDocument OK Examples @@ -1422,7 +1422,7 @@ async def gets_a_document_from_a_document_collection_by_uuid( client = AsyncZep( api_key="YOUR_API_KEY", ) - await client.document.gets_a_document_from_a_document_collection_by_uuid( + await client.document.gets_a_document_from_a_document_collection_by_uuid_cloud_only( collection_name="collectionName", document_uuid="documentUUID", ) @@ -1433,7 +1433,7 @@ async def gets_a_document_from_a_document_collection_by_uuid( request_options=request_options, ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(DocumentResponse, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(ApidataDocument, _response.json()) # type: ignore if _response.status_code == 400: raise BadRequestError(pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json())) # type: ignore if _response.status_code == 401: @@ -1509,7 +1509,7 @@ async def delete_document( raise core_api_error_ApiError(status_code=_response.status_code, body=_response.text) raise core_api_error_ApiError(status_code=_response.status_code, body=_response_json) - async def updates_a_document( + async def updates_a_document_cloud_only( self, collection_name: str, document_uuid: str, @@ -1548,7 +1548,7 @@ async def updates_a_document( client = AsyncZep( api_key="YOUR_API_KEY", ) - await client.document.updates_a_document( + await client.document.updates_a_document_cloud_only( collection_name="collectionName", document_uuid="documentUUID", ) @@ -1591,7 +1591,7 @@ async def search( search_type: typing.Optional[SearchType] = OMIT, text: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, - ) -> DocumentSearchResultPage: + ) -> ApidataDocumentSearchResponse: """ Searches over documents in a collection based on provided search criteria. One of text or metadata must be provided. Returns an empty list if no documents are found. 
@@ -1622,7 +1622,7 @@ async def search( Returns ------- - DocumentSearchResultPage + ApidataDocumentSearchResponse OK Examples @@ -1651,7 +1651,7 @@ async def search( omit=OMIT, ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(DocumentSearchResultPage, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(ApidataDocumentSearchResponse, _response.json()) # type: ignore if _response.status_code == 400: raise BadRequestError(pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json())) # type: ignore if _response.status_code == 401: diff --git a/src/zep_cloud/graph/__init__.py b/src/zep_cloud/graph/__init__.py new file mode 100644 index 00000000..12a38702 --- /dev/null +++ b/src/zep_cloud/graph/__init__.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. + +from . import edge, episode, node + +__all__ = ["edge", "episode", "node"] diff --git a/src/zep_cloud/graph/client.py b/src/zep_cloud/graph/client.py new file mode 100644 index 00000000..e72ea5b6 --- /dev/null +++ b/src/zep_cloud/graph/client.py @@ -0,0 +1,351 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError as core_api_error_ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.pydantic_utilities import pydantic_v1 +from ..core.request_options import RequestOptions +from ..errors.bad_request_error import BadRequestError +from ..errors.internal_server_error import InternalServerError +from ..types.api_error import ApiError as types_api_error_ApiError +from ..types.graph_data_type import GraphDataType +from ..types.graph_search_results import GraphSearchResults +from ..types.graph_search_scope import GraphSearchScope +from ..types.reranker import Reranker +from ..types.success_response import SuccessResponse +from .edge.client import AsyncEdgeClient, EdgeClient +from .episode.client import AsyncEpisodeClient, EpisodeClient +from .node.client import AsyncNodeClient, NodeClient + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class GraphClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + self.edge = EdgeClient(client_wrapper=self._client_wrapper) + self.episode = EpisodeClient(client_wrapper=self._client_wrapper) + self.node = NodeClient(client_wrapper=self._client_wrapper) + + def add( + self, + *, + data: typing.Optional[str] = OMIT, + group_id: typing.Optional[str] = OMIT, + type: typing.Optional[GraphDataType] = OMIT, + user_id: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None + ) -> SuccessResponse: + """ + Add data to the graph + + Parameters + ---------- + data : typing.Optional[str] + + group_id : typing.Optional[str] + + type : typing.Optional[GraphDataType] + + user_id : typing.Optional[str] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + SuccessResponse + Data added + + Examples + -------- + from zep_cloud.client import Zep + + client = Zep( + api_key="YOUR_API_KEY", + ) + client.graph.add() + """ + _response = self._client_wrapper.httpx_client.request( + "graph", + method="POST", + json={"data": data, "group_id": group_id, "type": type, "user_id": user_id}, + request_options=request_options, + omit=OMIT, + ) + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(SuccessResponse, _response.json()) # type: ignore + if _response.status_code == 400: + raise BadRequestError(pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json())) # type: ignore + if _response.status_code == 500: + raise InternalServerError( + pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json()) # type: ignore + ) + try: + _response_json = _response.json() + except JSONDecodeError: + raise core_api_error_ApiError(status_code=_response.status_code, body=_response.text) + raise core_api_error_ApiError(status_code=_response.status_code, body=_response_json) + + def search( + self, + *, + query: str, + center_node_uuid: typing.Optional[str] = OMIT, + group_id: typing.Optional[str] = OMIT, + limit: typing.Optional[int] = OMIT, + min_score: typing.Optional[float] = OMIT, + mmr_lambda: typing.Optional[float] = OMIT, + reranker: typing.Optional[Reranker] = OMIT, + scope: typing.Optional[GraphSearchScope] = OMIT, + user_id: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None + ) -> GraphSearchResults: + """ + Perform a graph search query + + Parameters + ---------- + query : str + The string to search for (required) + + center_node_uuid : typing.Optional[str] + Node to rerank around for node distance reranking + + group_id : typing.Optional[str] + one of user_id or group_id must be provided + + limit : typing.Optional[int] + The maximum number of facts to retrieve + + min_score : typing.Optional[float] + minimum similarity score for a result to be returned + + mmr_lambda : typing.Optional[float] + weighting for maximal marginal relevance + + reranker : typing.Optional[Reranker] + Defaults to RRF + + scope : typing.Optional[GraphSearchScope] + Defaults to Edges. Nodes and Communities will be added in the future. + + user_id : typing.Optional[str] + one of user_id or group_id must be provided + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + GraphSearchResults + Graph search results + + Examples + -------- + from zep_cloud.client import Zep + + client = Zep( + api_key="YOUR_API_KEY", + ) + client.graph.search( + query="query", + ) + """ + _response = self._client_wrapper.httpx_client.request( + "graph/search", + method="POST", + json={ + "center_node_uuid": center_node_uuid, + "group_id": group_id, + "limit": limit, + "min_score": min_score, + "mmr_lambda": mmr_lambda, + "query": query, + "reranker": reranker, + "scope": scope, + "user_id": user_id, + }, + request_options=request_options, + omit=OMIT, + ) + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(GraphSearchResults, _response.json()) # type: ignore + if _response.status_code == 400: + raise BadRequestError(pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json())) # type: ignore + if _response.status_code == 500: + raise InternalServerError( + pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json()) # type: ignore + ) + try: + _response_json = _response.json() + except JSONDecodeError: + raise core_api_error_ApiError(status_code=_response.status_code, body=_response.text) + raise core_api_error_ApiError(status_code=_response.status_code, body=_response_json) + + +class AsyncGraphClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + self.edge = AsyncEdgeClient(client_wrapper=self._client_wrapper) + self.episode = AsyncEpisodeClient(client_wrapper=self._client_wrapper) + self.node = AsyncNodeClient(client_wrapper=self._client_wrapper) + + async def add( + self, + *, + data: typing.Optional[str] = OMIT, + group_id: typing.Optional[str] = OMIT, + type: typing.Optional[GraphDataType] = OMIT, + user_id: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None + ) -> SuccessResponse: + """ + Add data to the graph + + Parameters + ---------- + data : typing.Optional[str] + + group_id : typing.Optional[str] + + type : typing.Optional[GraphDataType] + + user_id : typing.Optional[str] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + SuccessResponse + Data added + + Examples + -------- + from zep_cloud.client import AsyncZep + + client = AsyncZep( + api_key="YOUR_API_KEY", + ) + await client.graph.add() + """ + _response = await self._client_wrapper.httpx_client.request( + "graph", + method="POST", + json={"data": data, "group_id": group_id, "type": type, "user_id": user_id}, + request_options=request_options, + omit=OMIT, + ) + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(SuccessResponse, _response.json()) # type: ignore + if _response.status_code == 400: + raise BadRequestError(pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json())) # type: ignore + if _response.status_code == 500: + raise InternalServerError( + pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json()) # type: ignore + ) + try: + _response_json = _response.json() + except JSONDecodeError: + raise core_api_error_ApiError(status_code=_response.status_code, body=_response.text) + raise core_api_error_ApiError(status_code=_response.status_code, body=_response_json) + + async def search( + self, + *, + query: str, + center_node_uuid: typing.Optional[str] = OMIT, + group_id: typing.Optional[str] = OMIT, + limit: typing.Optional[int] = OMIT, + min_score: typing.Optional[float] = OMIT, + mmr_lambda: typing.Optional[float] = OMIT, + reranker: typing.Optional[Reranker] = OMIT, + scope: typing.Optional[GraphSearchScope] = OMIT, + user_id: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None + ) -> GraphSearchResults: + """ + Perform a graph search query + + Parameters + ---------- + query : str + The string to search for (required) + + center_node_uuid : typing.Optional[str] + Node to rerank around for node distance reranking + + group_id : typing.Optional[str] + one of user_id or group_id must be provided + + limit : typing.Optional[int] + The maximum number of facts to retrieve + + min_score : typing.Optional[float] + minimum similarity score for a result to be returned + + mmr_lambda : typing.Optional[float] + weighting for maximal marginal relevance + + reranker : typing.Optional[Reranker] + Defaults to RRF + + scope : typing.Optional[GraphSearchScope] + Defaults to Edges. Nodes and Communities will be added in the future. + + user_id : typing.Optional[str] + one of user_id or group_id must be provided + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + GraphSearchResults + Graph search results + + Examples + -------- + from zep_cloud.client import AsyncZep + + client = AsyncZep( + api_key="YOUR_API_KEY", + ) + await client.graph.search( + query="query", + ) + """ + _response = await self._client_wrapper.httpx_client.request( + "graph/search", + method="POST", + json={ + "center_node_uuid": center_node_uuid, + "group_id": group_id, + "limit": limit, + "min_score": min_score, + "mmr_lambda": mmr_lambda, + "query": query, + "reranker": reranker, + "scope": scope, + "user_id": user_id, + }, + request_options=request_options, + omit=OMIT, + ) + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(GraphSearchResults, _response.json()) # type: ignore + if _response.status_code == 400: + raise BadRequestError(pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json())) # type: ignore + if _response.status_code == 500: + raise InternalServerError( + pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json()) # type: ignore + ) + try: + _response_json = _response.json() + except JSONDecodeError: + raise core_api_error_ApiError(status_code=_response.status_code, body=_response.text) + raise core_api_error_ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/zep_cloud/graph/edge/__init__.py b/src/zep_cloud/graph/edge/__init__.py new file mode 100644 index 00000000..f3ea2659 --- /dev/null +++ b/src/zep_cloud/graph/edge/__init__.py @@ -0,0 +1,2 @@ +# This file was auto-generated by Fern from our API Definition. + diff --git a/src/zep_cloud/graph/edge/client.py b/src/zep_cloud/graph/edge/client.py new file mode 100644 index 00000000..20ecb0bc --- /dev/null +++ b/src/zep_cloud/graph/edge/client.py @@ -0,0 +1,398 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError as core_api_error_ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import pydantic_v1 +from ...core.request_options import RequestOptions +from ...errors.bad_request_error import BadRequestError +from ...errors.internal_server_error import InternalServerError +from ...errors.not_found_error import NotFoundError +from ...types.api_error import ApiError as types_api_error_ApiError +from ...types.entity_edge import EntityEdge +from ...types.success_response import SuccessResponse + + +class EdgeClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def get_by_group_id( + self, group_id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> typing.List[EntityEdge]: + """ + Get all edges for a group + + Parameters + ---------- + group_id : str + Group ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
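A minimal usage sketch of the new graph endpoints defined in graph/client.py above, assuming a valid Cloud API key and an existing user graph; the user ID, the ingested text, the "text" data type value, and the result attributes (edges, fact) are illustrative assumptions, not taken verbatim from this patch.

from zep_cloud.client import Zep

client = Zep(api_key="YOUR_API_KEY")

# Add a piece of unstructured data to a user's graph.
# "text" is an assumed GraphDataType value; one of user_id or group_id must be provided.
client.graph.add(
    data="Paul adopted a golden retriever named Max in 2023.",
    type="text",
    user_id="user_123",
)

# Search the same user's graph; the reranker defaults to RRF and the scope to edges.
results = client.graph.search(
    query="What pets does Paul have?",
    user_id="user_123",
    limit=5,
)
# The edges/fact attributes are assumed from the new GraphSearchResults and EntityEdge types.
for edge in results.edges or []:
    print(edge.fact)

The AsyncGraphClient shown above mirrors the same methods on AsyncZep, awaited.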
+ + Returns + ------- + typing.List[EntityEdge] + Edges + + Examples + -------- + from zep_cloud.client import Zep + + client = Zep( + api_key="YOUR_API_KEY", + ) + client.graph.edge.get_by_group_id( + group_id="group_id", + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"graph/edge/group/{jsonable_encoder(group_id)}", method="GET", request_options=request_options + ) + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[EntityEdge], _response.json()) # type: ignore + if _response.status_code == 400: + raise BadRequestError(pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json())) # type: ignore + if _response.status_code == 500: + raise InternalServerError( + pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json()) # type: ignore + ) + try: + _response_json = _response.json() + except JSONDecodeError: + raise core_api_error_ApiError(status_code=_response.status_code, body=_response.text) + raise core_api_error_ApiError(status_code=_response.status_code, body=_response_json) + + def get_by_user_id( + self, user_id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> typing.List[EntityEdge]: + """ + Get all edges for a user + + Parameters + ---------- + user_id : str + User ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + typing.List[EntityEdge] + Edges + + Examples + -------- + from zep_cloud.client import Zep + + client = Zep( + api_key="YOUR_API_KEY", + ) + client.graph.edge.get_by_user_id( + user_id="user_id", + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"graph/edge/user/{jsonable_encoder(user_id)}", method="GET", request_options=request_options + ) + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[EntityEdge], _response.json()) # type: ignore + if _response.status_code == 400: + raise BadRequestError(pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json())) # type: ignore + if _response.status_code == 500: + raise InternalServerError( + pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json()) # type: ignore + ) + try: + _response_json = _response.json() + except JSONDecodeError: + raise core_api_error_ApiError(status_code=_response.status_code, body=_response.text) + raise core_api_error_ApiError(status_code=_response.status_code, body=_response_json) + + def get(self, uuid_: str, *, request_options: typing.Optional[RequestOptions] = None) -> EntityEdge: + """ + Get a specific edge by its UUID + + Parameters + ---------- + uuid_ : str + Edge UUID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + EntityEdge + Edge + + Examples + -------- + from zep_cloud.client import Zep + + client = Zep( + api_key="YOUR_API_KEY", + ) + client.graph.edge.get( + uuid_="uuid", + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"graph/edge/{jsonable_encoder(uuid_)}", method="GET", request_options=request_options + ) + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(EntityEdge, _response.json()) # type: ignore + if _response.status_code == 400: + raise BadRequestError(pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json())) # type: ignore + if _response.status_code == 404: + raise NotFoundError(pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json())) # type: ignore + if _response.status_code == 500: + raise InternalServerError( + pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json()) # type: ignore + ) + try: + _response_json = _response.json() + except JSONDecodeError: + raise core_api_error_ApiError(status_code=_response.status_code, body=_response.text) + raise core_api_error_ApiError(status_code=_response.status_code, body=_response_json) + + def delete(self, uuid_: str, *, request_options: typing.Optional[RequestOptions] = None) -> SuccessResponse: + """ + Delete an edge by UUID + + Parameters + ---------- + uuid_ : str + Edge UUID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + SuccessResponse + Edge deleted + + Examples + -------- + from zep_cloud.client import Zep + + client = Zep( + api_key="YOUR_API_KEY", + ) + client.graph.edge.delete( + uuid_="uuid", + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"graph/edge/{jsonable_encoder(uuid_)}", method="DELETE", request_options=request_options + ) + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(SuccessResponse, _response.json()) # type: ignore + if _response.status_code == 400: + raise BadRequestError(pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json())) # type: ignore + if _response.status_code == 500: + raise InternalServerError( + pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json()) # type: ignore + ) + try: + _response_json = _response.json() + except JSONDecodeError: + raise core_api_error_ApiError(status_code=_response.status_code, body=_response.text) + raise core_api_error_ApiError(status_code=_response.status_code, body=_response_json) + + +class AsyncEdgeClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def get_by_group_id( + self, group_id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> typing.List[EntityEdge]: + """ + Get all edges for a group + + Parameters + ---------- + group_id : str + Group ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + typing.List[EntityEdge] + Edges + + Examples + -------- + from zep_cloud.client import AsyncZep + + client = AsyncZep( + api_key="YOUR_API_KEY", + ) + await client.graph.edge.get_by_group_id( + group_id="group_id", + ) + """ + _response = await self._client_wrapper.httpx_client.request( + f"graph/edge/group/{jsonable_encoder(group_id)}", method="GET", request_options=request_options + ) + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[EntityEdge], _response.json()) # type: ignore + if _response.status_code == 400: + raise BadRequestError(pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json())) # type: ignore + if _response.status_code == 500: + raise InternalServerError( + pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json()) # type: ignore + ) + try: + _response_json = _response.json() + except JSONDecodeError: + raise core_api_error_ApiError(status_code=_response.status_code, body=_response.text) + raise core_api_error_ApiError(status_code=_response.status_code, body=_response_json) + + async def get_by_user_id( + self, user_id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> typing.List[EntityEdge]: + """ + Get all edges for a user + + Parameters + ---------- + user_id : str + User ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + typing.List[EntityEdge] + Edges + + Examples + -------- + from zep_cloud.client import AsyncZep + + client = AsyncZep( + api_key="YOUR_API_KEY", + ) + await client.graph.edge.get_by_user_id( + user_id="user_id", + ) + """ + _response = await self._client_wrapper.httpx_client.request( + f"graph/edge/user/{jsonable_encoder(user_id)}", method="GET", request_options=request_options + ) + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[EntityEdge], _response.json()) # type: ignore + if _response.status_code == 400: + raise BadRequestError(pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json())) # type: ignore + if _response.status_code == 500: + raise InternalServerError( + pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json()) # type: ignore + ) + try: + _response_json = _response.json() + except JSONDecodeError: + raise core_api_error_ApiError(status_code=_response.status_code, body=_response.text) + raise core_api_error_ApiError(status_code=_response.status_code, body=_response_json) + + async def get(self, uuid_: str, *, request_options: typing.Optional[RequestOptions] = None) -> EntityEdge: + """ + Get a specific edge by its UUID + + Parameters + ---------- + uuid_ : str + Edge UUID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + EntityEdge + Edge + + Examples + -------- + from zep_cloud.client import AsyncZep + + client = AsyncZep( + api_key="YOUR_API_KEY", + ) + await client.graph.edge.get( + uuid_="uuid", + ) + """ + _response = await self._client_wrapper.httpx_client.request( + f"graph/edge/{jsonable_encoder(uuid_)}", method="GET", request_options=request_options + ) + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(EntityEdge, _response.json()) # type: ignore + if _response.status_code == 400: + raise BadRequestError(pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json())) # type: ignore + if _response.status_code == 404: + raise NotFoundError(pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json())) # type: ignore + if _response.status_code == 500: + raise InternalServerError( + pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json()) # type: ignore + ) + try: + _response_json = _response.json() + except JSONDecodeError: + raise core_api_error_ApiError(status_code=_response.status_code, body=_response.text) + raise core_api_error_ApiError(status_code=_response.status_code, body=_response_json) + + async def delete(self, uuid_: str, *, request_options: typing.Optional[RequestOptions] = None) -> SuccessResponse: + """ + Delete an edge by UUID + + Parameters + ---------- + uuid_ : str + Edge UUID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + SuccessResponse + Edge deleted + + Examples + -------- + from zep_cloud.client import AsyncZep + + client = AsyncZep( + api_key="YOUR_API_KEY", + ) + await client.graph.edge.delete( + uuid_="uuid", + ) + """ + _response = await self._client_wrapper.httpx_client.request( + f"graph/edge/{jsonable_encoder(uuid_)}", method="DELETE", request_options=request_options + ) + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(SuccessResponse, _response.json()) # type: ignore + if _response.status_code == 400: + raise BadRequestError(pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json())) # type: ignore + if _response.status_code == 500: + raise InternalServerError( + pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json()) # type: ignore + ) + try: + _response_json = _response.json() + except JSONDecodeError: + raise core_api_error_ApiError(status_code=_response.status_code, body=_response.text) + raise core_api_error_ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/zep_cloud/graph/episode/__init__.py b/src/zep_cloud/graph/episode/__init__.py new file mode 100644 index 00000000..f3ea2659 --- /dev/null +++ b/src/zep_cloud/graph/episode/__init__.py @@ -0,0 +1,2 @@ +# This file was auto-generated by Fern from our API Definition. + diff --git a/src/zep_cloud/graph/episode/client.py b/src/zep_cloud/graph/episode/client.py new file mode 100644 index 00000000..e45eed42 --- /dev/null +++ b/src/zep_cloud/graph/episode/client.py @@ -0,0 +1,343 @@ +# This file was auto-generated by Fern from our API Definition. 
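A short sketch of the edge sub-client generated above, again assuming an existing user graph; the user ID and the literal edge UUID are placeholders, matching the style of the generated docstring examples.

from zep_cloud.client import Zep

client = Zep(api_key="YOUR_API_KEY")

# List all entity edges in a user's graph; each item is an EntityEdge.
edges = client.graph.edge.get_by_user_id(user_id="user_123")

# Fetch a single edge by UUID, then delete it (placeholder UUID shown).
edge = client.graph.edge.get(uuid_="edge-uuid")
client.graph.edge.delete(uuid_="edge-uuid")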
+ +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError as core_api_error_ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import pydantic_v1 +from ...core.request_options import RequestOptions +from ...errors.bad_request_error import BadRequestError +from ...errors.internal_server_error import InternalServerError +from ...types.api_error import ApiError as types_api_error_ApiError +from ...types.episode import Episode +from ...types.episode_response import EpisodeResponse + + +class EpisodeClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def get_by_group_id( + self, + group_id: str, + *, + lastn: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> EpisodeResponse: + """ + Get episodes by Group ID + + Parameters + ---------- + group_id : str + Group ID + + lastn : typing.Optional[int] + The number of most recent episodes to retrieve. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + EpisodeResponse + Episodes + + Examples + -------- + from zep_cloud.client import Zep + + client = Zep( + api_key="YOUR_API_KEY", + ) + client.graph.episode.get_by_group_id( + group_id="group_id", + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"graph/episodes/group/{jsonable_encoder(group_id)}", + method="GET", + params={"lastn": lastn}, + request_options=request_options, + ) + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(EpisodeResponse, _response.json()) # type: ignore + if _response.status_code == 400: + raise BadRequestError(pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json())) # type: ignore + if _response.status_code == 500: + raise InternalServerError( + pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json()) # type: ignore + ) + try: + _response_json = _response.json() + except JSONDecodeError: + raise core_api_error_ApiError(status_code=_response.status_code, body=_response.text) + raise core_api_error_ApiError(status_code=_response.status_code, body=_response_json) + + def get_by_user_id( + self, + user_id: str, + *, + lastn: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> EpisodeResponse: + """ + Get episodes by User ID + + Parameters + ---------- + user_id : str + User ID + + lastn : typing.Optional[int] + The number of most recent episodes entries to retrieve. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + EpisodeResponse + Episodes + + Examples + -------- + from zep_cloud.client import Zep + + client = Zep( + api_key="YOUR_API_KEY", + ) + client.graph.episode.get_by_user_id( + user_id="user_id", + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"graph/episodes/user/{jsonable_encoder(user_id)}", + method="GET", + params={"lastn": lastn}, + request_options=request_options, + ) + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(EpisodeResponse, _response.json()) # type: ignore + if _response.status_code == 400: + raise BadRequestError(pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json())) # type: ignore + if _response.status_code == 500: + raise InternalServerError( + pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json()) # type: ignore + ) + try: + _response_json = _response.json() + except JSONDecodeError: + raise core_api_error_ApiError(status_code=_response.status_code, body=_response.text) + raise core_api_error_ApiError(status_code=_response.status_code, body=_response_json) + + def get(self, uuid_: str, *, request_options: typing.Optional[RequestOptions] = None) -> Episode: + """ + Get episode by UUID + + Parameters + ---------- + uuid_ : str + Episode UUID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + Episode + Episode + + Examples + -------- + from zep_cloud.client import Zep + + client = Zep( + api_key="YOUR_API_KEY", + ) + client.graph.episode.get( + uuid_="uuid", + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"graph/episodes/{jsonable_encoder(uuid_)}", method="GET", request_options=request_options + ) + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(Episode, _response.json()) # type: ignore + if _response.status_code == 400: + raise BadRequestError(pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json())) # type: ignore + if _response.status_code == 500: + raise InternalServerError( + pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json()) # type: ignore + ) + try: + _response_json = _response.json() + except JSONDecodeError: + raise core_api_error_ApiError(status_code=_response.status_code, body=_response.text) + raise core_api_error_ApiError(status_code=_response.status_code, body=_response_json) + + +class AsyncEpisodeClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def get_by_group_id( + self, + group_id: str, + *, + lastn: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> EpisodeResponse: + """ + Get episodes by Group ID + + Parameters + ---------- + group_id : str + Group ID + + lastn : typing.Optional[int] + The number of most recent episodes to retrieve. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + EpisodeResponse + Episodes + + Examples + -------- + from zep_cloud.client import AsyncZep + + client = AsyncZep( + api_key="YOUR_API_KEY", + ) + await client.graph.episode.get_by_group_id( + group_id="group_id", + ) + """ + _response = await self._client_wrapper.httpx_client.request( + f"graph/episodes/group/{jsonable_encoder(group_id)}", + method="GET", + params={"lastn": lastn}, + request_options=request_options, + ) + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(EpisodeResponse, _response.json()) # type: ignore + if _response.status_code == 400: + raise BadRequestError(pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json())) # type: ignore + if _response.status_code == 500: + raise InternalServerError( + pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json()) # type: ignore + ) + try: + _response_json = _response.json() + except JSONDecodeError: + raise core_api_error_ApiError(status_code=_response.status_code, body=_response.text) + raise core_api_error_ApiError(status_code=_response.status_code, body=_response_json) + + async def get_by_user_id( + self, + user_id: str, + *, + lastn: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> EpisodeResponse: + """ + Get episodes by User ID + + Parameters + ---------- + user_id : str + User ID + + lastn : typing.Optional[int] + The number of most recent episodes entries to retrieve. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + EpisodeResponse + Episodes + + Examples + -------- + from zep_cloud.client import AsyncZep + + client = AsyncZep( + api_key="YOUR_API_KEY", + ) + await client.graph.episode.get_by_user_id( + user_id="user_id", + ) + """ + _response = await self._client_wrapper.httpx_client.request( + f"graph/episodes/user/{jsonable_encoder(user_id)}", + method="GET", + params={"lastn": lastn}, + request_options=request_options, + ) + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(EpisodeResponse, _response.json()) # type: ignore + if _response.status_code == 400: + raise BadRequestError(pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json())) # type: ignore + if _response.status_code == 500: + raise InternalServerError( + pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json()) # type: ignore + ) + try: + _response_json = _response.json() + except JSONDecodeError: + raise core_api_error_ApiError(status_code=_response.status_code, body=_response.text) + raise core_api_error_ApiError(status_code=_response.status_code, body=_response_json) + + async def get(self, uuid_: str, *, request_options: typing.Optional[RequestOptions] = None) -> Episode: + """ + Get episode by UUID + + Parameters + ---------- + uuid_ : str + Episode UUID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + Episode + Episode + + Examples + -------- + from zep_cloud.client import AsyncZep + + client = AsyncZep( + api_key="YOUR_API_KEY", + ) + await client.graph.episode.get( + uuid_="uuid", + ) + """ + _response = await self._client_wrapper.httpx_client.request( + f"graph/episodes/{jsonable_encoder(uuid_)}", method="GET", request_options=request_options + ) + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(Episode, _response.json()) # type: ignore + if _response.status_code == 400: + raise BadRequestError(pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json())) # type: ignore + if _response.status_code == 500: + raise InternalServerError( + pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json()) # type: ignore + ) + try: + _response_json = _response.json() + except JSONDecodeError: + raise core_api_error_ApiError(status_code=_response.status_code, body=_response.text) + raise core_api_error_ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/zep_cloud/graph/node/__init__.py b/src/zep_cloud/graph/node/__init__.py new file mode 100644 index 00000000..f3ea2659 --- /dev/null +++ b/src/zep_cloud/graph/node/__init__.py @@ -0,0 +1,2 @@ +# This file was auto-generated by Fern from our API Definition. + diff --git a/src/zep_cloud/graph/node/client.py b/src/zep_cloud/graph/node/client.py new file mode 100644 index 00000000..99069dad --- /dev/null +++ b/src/zep_cloud/graph/node/client.py @@ -0,0 +1,307 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError as core_api_error_ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import pydantic_v1 +from ...core.request_options import RequestOptions +from ...errors.bad_request_error import BadRequestError +from ...errors.internal_server_error import InternalServerError +from ...errors.not_found_error import NotFoundError +from ...types.api_error import ApiError as types_api_error_ApiError +from ...types.entity_node import EntityNode + + +class NodeClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def get_by_group_id( + self, group_id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> typing.List[EntityNode]: + """ + Get all nodes for a group + + Parameters + ---------- + group_id : str + Group ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + typing.List[EntityNode] + Nodes + + Examples + -------- + from zep_cloud.client import Zep + + client = Zep( + api_key="YOUR_API_KEY", + ) + client.graph.node.get_by_group_id( + group_id="group_id", + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"graph/node/group/{jsonable_encoder(group_id)}", method="GET", request_options=request_options + ) + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[EntityNode], _response.json()) # type: ignore + if _response.status_code == 400: + raise BadRequestError(pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json())) # type: ignore + if _response.status_code == 500: + raise InternalServerError( + pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json()) # type: ignore + ) + try: + _response_json = _response.json() + except JSONDecodeError: + raise core_api_error_ApiError(status_code=_response.status_code, body=_response.text) + raise core_api_error_ApiError(status_code=_response.status_code, body=_response_json) + + def get_by_user_id( + self, user_id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> typing.List[EntityNode]: + """ + Get all nodes for a user + + Parameters + ---------- + user_id : str + User ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + typing.List[EntityNode] + Nodes + + Examples + -------- + from zep_cloud.client import Zep + + client = Zep( + api_key="YOUR_API_KEY", + ) + client.graph.node.get_by_user_id( + user_id="user_id", + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"graph/node/user/{jsonable_encoder(user_id)}", method="GET", request_options=request_options + ) + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[EntityNode], _response.json()) # type: ignore + if _response.status_code == 400: + raise BadRequestError(pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json())) # type: ignore + if _response.status_code == 500: + raise InternalServerError( + pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json()) # type: ignore + ) + try: + _response_json = _response.json() + except JSONDecodeError: + raise core_api_error_ApiError(status_code=_response.status_code, body=_response.text) + raise core_api_error_ApiError(status_code=_response.status_code, body=_response_json) + + def get(self, uuid_: str, *, request_options: typing.Optional[RequestOptions] = None) -> EntityNode: + """ + Get a specific node by its UUID + + Parameters + ---------- + uuid_ : str + Node UUID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + EntityNode + Node + + Examples + -------- + from zep_cloud.client import Zep + + client = Zep( + api_key="YOUR_API_KEY", + ) + client.graph.node.get( + uuid_="uuid", + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"graph/node/{jsonable_encoder(uuid_)}", method="GET", request_options=request_options + ) + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(EntityNode, _response.json()) # type: ignore + if _response.status_code == 400: + raise BadRequestError(pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json())) # type: ignore + if _response.status_code == 404: + raise NotFoundError(pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json())) # type: ignore + if _response.status_code == 500: + raise InternalServerError( + pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json()) # type: ignore + ) + try: + _response_json = _response.json() + except JSONDecodeError: + raise core_api_error_ApiError(status_code=_response.status_code, body=_response.text) + raise core_api_error_ApiError(status_code=_response.status_code, body=_response_json) + + +class AsyncNodeClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def get_by_group_id( + self, group_id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> typing.List[EntityNode]: + """ + Get all nodes for a group + + Parameters + ---------- + group_id : str + Group ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + typing.List[EntityNode] + Nodes + + Examples + -------- + from zep_cloud.client import AsyncZep + + client = AsyncZep( + api_key="YOUR_API_KEY", + ) + await client.graph.node.get_by_group_id( + group_id="group_id", + ) + """ + _response = await self._client_wrapper.httpx_client.request( + f"graph/node/group/{jsonable_encoder(group_id)}", method="GET", request_options=request_options + ) + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[EntityNode], _response.json()) # type: ignore + if _response.status_code == 400: + raise BadRequestError(pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json())) # type: ignore + if _response.status_code == 500: + raise InternalServerError( + pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json()) # type: ignore + ) + try: + _response_json = _response.json() + except JSONDecodeError: + raise core_api_error_ApiError(status_code=_response.status_code, body=_response.text) + raise core_api_error_ApiError(status_code=_response.status_code, body=_response_json) + + async def get_by_user_id( + self, user_id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> typing.List[EntityNode]: + """ + Get all nodes for a user + + Parameters + ---------- + user_id : str + User ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + typing.List[EntityNode] + Nodes + + Examples + -------- + from zep_cloud.client import AsyncZep + + client = AsyncZep( + api_key="YOUR_API_KEY", + ) + await client.graph.node.get_by_user_id( + user_id="user_id", + ) + """ + _response = await self._client_wrapper.httpx_client.request( + f"graph/node/user/{jsonable_encoder(user_id)}", method="GET", request_options=request_options + ) + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(typing.List[EntityNode], _response.json()) # type: ignore + if _response.status_code == 400: + raise BadRequestError(pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json())) # type: ignore + if _response.status_code == 500: + raise InternalServerError( + pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json()) # type: ignore + ) + try: + _response_json = _response.json() + except JSONDecodeError: + raise core_api_error_ApiError(status_code=_response.status_code, body=_response.text) + raise core_api_error_ApiError(status_code=_response.status_code, body=_response_json) + + async def get(self, uuid_: str, *, request_options: typing.Optional[RequestOptions] = None) -> EntityNode: + """ + Get a specific node by its UUID + + Parameters + ---------- + uuid_ : str + Node UUID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + EntityNode + Node + + Examples + -------- + from zep_cloud.client import AsyncZep + + client = AsyncZep( + api_key="YOUR_API_KEY", + ) + await client.graph.node.get( + uuid_="uuid", + ) + """ + _response = await self._client_wrapper.httpx_client.request( + f"graph/node/{jsonable_encoder(uuid_)}", method="GET", request_options=request_options + ) + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(EntityNode, _response.json()) # type: ignore + if _response.status_code == 400: + raise BadRequestError(pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json())) # type: ignore + if _response.status_code == 404: + raise NotFoundError(pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json())) # type: ignore + if _response.status_code == 500: + raise InternalServerError( + pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json()) # type: ignore + ) + try: + _response_json = _response.json() + except JSONDecodeError: + raise core_api_error_ApiError(status_code=_response.status_code, body=_response.text) + raise core_api_error_ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/zep_cloud/group/__init__.py b/src/zep_cloud/group/__init__.py new file mode 100644 index 00000000..f3ea2659 --- /dev/null +++ b/src/zep_cloud/group/__init__.py @@ -0,0 +1,2 @@ +# This file was auto-generated by Fern from our API Definition. + diff --git a/src/zep_cloud/group/client.py b/src/zep_cloud/group/client.py new file mode 100644 index 00000000..8d4f190f --- /dev/null +++ b/src/zep_cloud/group/client.py @@ -0,0 +1,239 @@ +# This file was auto-generated by Fern from our API Definition. 
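The episode and node clients added above expose read-only lookups keyed by group ID, user ID, or UUID, mirroring each other's shape. A minimal sketch of combining them against the generated client follows; the user ID and UUID values are placeholders, and the NotFoundError handling assumes the exception classes the generated modules already import from zep_cloud.errors.

from zep_cloud.client import Zep
from zep_cloud.errors import NotFoundError

client = Zep(api_key="YOUR_API_KEY")

# Fetch the ten most recent graph episodes recorded for a user (placeholder ID).
episodes = client.graph.episode.get_by_user_id(user_id="some_user_id", lastn=10)

# List every entity node currently attached to the same user's graph.
nodes = client.graph.node.get_by_user_id(user_id="some_user_id")

# Look up a single node by UUID; a missing node surfaces as NotFoundError (404).
try:
    node = client.graph.node.get(uuid_="node-uuid")  # placeholder UUID
except NotFoundError:
    node = None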
+ +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError as core_api_error_ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.jsonable_encoder import jsonable_encoder +from ..core.pydantic_utilities import pydantic_v1 +from ..core.request_options import RequestOptions +from ..errors.bad_request_error import BadRequestError +from ..errors.internal_server_error import InternalServerError +from ..errors.not_found_error import NotFoundError +from ..types.api_error import ApiError as types_api_error_ApiError +from ..types.group import Group +from ..types.success_response import SuccessResponse + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class GroupClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def add( + self, + *, + description: typing.Optional[str] = OMIT, + group_id: typing.Optional[str] = OMIT, + name: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> Group: + """ + Create a new user group + + Parameters + ---------- + description : typing.Optional[str] + + group_id : typing.Optional[str] + + name : typing.Optional[str] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + Group + The added group + + Examples + -------- + from zep_cloud.client import Zep + + client = Zep( + api_key="YOUR_API_KEY", + ) + client.group.add() + """ + _response = self._client_wrapper.httpx_client.request( + "groups", + method="POST", + json={"description": description, "group_id": group_id, "name": name}, + request_options=request_options, + omit=OMIT, + ) + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(Group, _response.json()) # type: ignore + if _response.status_code == 400: + raise BadRequestError(pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json())) # type: ignore + if _response.status_code == 500: + raise InternalServerError( + pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json()) # type: ignore + ) + try: + _response_json = _response.json() + except JSONDecodeError: + raise core_api_error_ApiError(status_code=_response.status_code, body=_response.text) + raise core_api_error_ApiError(status_code=_response.status_code, body=_response_json) + + def delete(self, group_id: str, *, request_options: typing.Optional[RequestOptions] = None) -> SuccessResponse: + """ + Delete group + + Parameters + ---------- + group_id : str + Group ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + SuccessResponse + Deleted + + Examples + -------- + from zep_cloud.client import Zep + + client = Zep( + api_key="YOUR_API_KEY", + ) + client.group.delete( + group_id="groupId", + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"groups/{jsonable_encoder(group_id)}", method="DELETE", request_options=request_options + ) + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(SuccessResponse, _response.json()) # type: ignore + if _response.status_code == 400: + raise BadRequestError(pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json())) # type: ignore + if _response.status_code == 404: + raise NotFoundError(pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json())) # type: ignore + if _response.status_code == 500: + raise InternalServerError( + pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json()) # type: ignore + ) + try: + _response_json = _response.json() + except JSONDecodeError: + raise core_api_error_ApiError(status_code=_response.status_code, body=_response.text) + raise core_api_error_ApiError(status_code=_response.status_code, body=_response_json) + + +class AsyncGroupClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def add( + self, + *, + description: typing.Optional[str] = OMIT, + group_id: typing.Optional[str] = OMIT, + name: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> Group: + """ + Create a new user group + + Parameters + ---------- + description : typing.Optional[str] + + group_id : typing.Optional[str] + + name : typing.Optional[str] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + Group + The added group + + Examples + -------- + from zep_cloud.client import AsyncZep + + client = AsyncZep( + api_key="YOUR_API_KEY", + ) + await client.group.add() + """ + _response = await self._client_wrapper.httpx_client.request( + "groups", + method="POST", + json={"description": description, "group_id": group_id, "name": name}, + request_options=request_options, + omit=OMIT, + ) + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(Group, _response.json()) # type: ignore + if _response.status_code == 400: + raise BadRequestError(pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json())) # type: ignore + if _response.status_code == 500: + raise InternalServerError( + pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json()) # type: ignore + ) + try: + _response_json = _response.json() + except JSONDecodeError: + raise core_api_error_ApiError(status_code=_response.status_code, body=_response.text) + raise core_api_error_ApiError(status_code=_response.status_code, body=_response_json) + + async def delete( + self, group_id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> SuccessResponse: + """ + Delete group + + Parameters + ---------- + group_id : str + Group ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + SuccessResponse + Deleted + + Examples + -------- + from zep_cloud.client import AsyncZep + + client = AsyncZep( + api_key="YOUR_API_KEY", + ) + await client.group.delete( + group_id="groupId", + ) + """ + _response = await self._client_wrapper.httpx_client.request( + f"groups/{jsonable_encoder(group_id)}", method="DELETE", request_options=request_options + ) + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(SuccessResponse, _response.json()) # type: ignore + if _response.status_code == 400: + raise BadRequestError(pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json())) # type: ignore + if _response.status_code == 404: + raise NotFoundError(pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json())) # type: ignore + if _response.status_code == 500: + raise InternalServerError( + pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json()) # type: ignore + ) + try: + _response_json = _response.json() + except JSONDecodeError: + raise core_api_error_ApiError(status_code=_response.status_code, body=_response.text) + raise core_api_error_ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/zep_cloud/langchain/__init__.py b/src/zep_cloud/langchain/__init__.py deleted file mode 100644 index 02a13df3..00000000 --- a/src/zep_cloud/langchain/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -from zep_cloud.langchain.history import ZepChatMessageHistory -from zep_cloud.langchain.vectorstore import ZepVectorStore - -__all__ = [ - "ZepChatMessageHistory", - "ZepVectorStore", -] \ No newline at end of file diff --git a/src/zep_cloud/langchain/helpers.py b/src/zep_cloud/langchain/helpers.py deleted file mode 100644 index 789d8a15..00000000 --- a/src/zep_cloud/langchain/helpers.py +++ /dev/null @@ -1,16 +0,0 @@ -from zep_cloud.types.role_type import RoleType - - -def get_zep_message_role_type(role) -> RoleType: - if role == "human": - return "user" - elif role == "ai": - return "assistant" - elif role == "system": - return "system" - elif role == "function": - return "function" - elif role == "tool": - return "tool" - else: - return "system" \ No newline at end of file diff --git a/src/zep_cloud/langchain/history.py b/src/zep_cloud/langchain/history.py deleted file mode 100644 index e24dce60..00000000 --- a/src/zep_cloud/langchain/history.py +++ /dev/null @@ -1,201 +0,0 @@ -from __future__ import annotations - -import logging -from typing import Any, Dict, List, Optional, Union -from zep_cloud.client import Zep -from zep_cloud.environment import ZepEnvironment -from zep_cloud.types import Memory, Message -from zep_cloud.errors import NotFoundError -from zep_cloud.langchain.helpers import get_zep_message_role_type - -try: - from langchain_core.chat_history import BaseChatMessageHistory - from langchain_core.messages import ( - AIMessage, - BaseMessage, - HumanMessage, - SystemMessage, - ) -except ImportError: - raise ImportError( - "Could not import langchain-core package. " - "Please install it with `pip install langchain-core`." - ) - -logger = logging.getLogger(__name__) - - -class ZepChatMessageHistory(BaseChatMessageHistory): - """ - LangChain Chat message history that uses Zep as a backend. - - Attributes - ---------- - session_id : str - The unique identifier of the session. - zep_client : Zep - The Zep client used for making API requests. - Pass in this rather than the API key and URL. - api_url : str - The Zep API service URL. Not required if using Zep Cloud. - api_key : str - The Zep API key. 
Not required if using Zep Open Source. - memory_type : str - The type of memory to use. Can be "perpetual", "summary_retrieval", - or "message_window". Defaults to "perpetual". - summary_instruction : Optional[str] - Additional instructions for generating dialog summaries. - """ - - def __init__( - self, - session_id: str, - zep_client: Optional[Zep] = None, - api_url: Optional[str] = str(ZepEnvironment.DEFAULT), - api_key: Optional[str] = None, - memory_type: Optional[str] = None, - ai_prefix: Optional[str] = None, - human_prefix: Optional[str] = None, - summary_instruction: Optional[str] = None, - ) -> None: - if zep_client is None: - self._client = Zep(base_url=api_url, api_key=api_key) - else: - self._client = zep_client - - self.session_id = session_id - self.memory_type = memory_type or "perpetual" - - self.ai_prefix = ai_prefix or "ai" - self.human_prefix = human_prefix or "human" - self.summary_instruction = summary_instruction - - @property - def messages(self) -> List[BaseMessage]: # type: ignore - """Retrieve messages from Zep memory""" - - zep_memory: Optional[Memory] = self._get_memory() - if not zep_memory: - return [] - - messages: List[BaseMessage] = [] - # Extract facts and summary, if present, and messages - if zep_memory.facts: - messages.append(SystemMessage(content="\n".join(zep_memory.facts))) - - if zep_memory.summary and zep_memory.summary.content: - if len(zep_memory.summary.content) > 0: - messages.append(SystemMessage(content=zep_memory.summary.content)) - - if zep_memory.messages: - for msg in zep_memory.messages: - metadata = { - "uuid": msg.uuid_, - "created_at": msg.created_at, - "token_count": msg.token_count, - "metadata": msg.metadata, - } - message_class = AIMessage if msg.role == "ai" else HumanMessage - messages.append( - message_class(content=msg.content, additional_kwargs=metadata) # type: ignore # deprecated code - ) - - return messages - - @property - def zep_messages(self) -> Union[List[Message], None]: - """Retrieve summary from Zep memory""" - zep_memory: Optional[Memory] = self._get_memory() - if not zep_memory: - return [] - - return zep_memory.messages - - @property - def zep_summary(self) -> Optional[str]: - """Retrieve summary from Zep memory""" - zep_memory: Optional[Memory] = self._get_memory() - if not zep_memory or not zep_memory.summary: - return None - - return zep_memory.summary.content - - def _get_memory(self) -> Optional[Memory]: - """Retrieve memory from Zep""" - try: - zep_memory: Memory = self._client.memory.get( - session_id=self.session_id, memory_type=self.memory_type - ) - except NotFoundError: - logger.warning( - f"Session {self.session_id} not found in Zep. Returning None" - ) - return None - return zep_memory - - def add_user_message( # type: ignore - self, message: str, metadata: Optional[Dict[str, Any]] = None - ) -> None: - """Convenience method for adding a human message string to the store. - - Args: - message: The string contents of a human message. - metadata: Optional metadata to attach to the message. - """ - from langchain_core.messages import HumanMessage - - self.add_message(HumanMessage(content=message), metadata=metadata) - - def add_ai_message( # type: ignore - self, message: str, metadata: Optional[Dict[str, Any]] = None - ) -> None: - """Convenience method for adding an AI message string to the store. - - Args: - message: The string contents of an AI message. - metadata: Optional metadata to attach to the message. 
- """ - from langchain_core.messages import AIMessage - - self.add_message(AIMessage(content=message), metadata=metadata) - - def add_message( - self, message: BaseMessage, metadata: Optional[Dict[str, Any]] = None - ) -> None: - """Append the message to the Zep memory history""" - - if message.content is None: - raise ValueError("Message content cannot be None") - - if isinstance(message.content, list): - raise ValueError("Message content cannot be a list") - - if message.type == "ai": - message.name = self.ai_prefix - elif message.type == "human": - message.name = self.human_prefix - - zep_message = Message( - content=message.content, - # If name is not set, use type as role - role=message.name or message.type, - role_type=get_zep_message_role_type(message.type), - metadata=metadata, - ) - - self._client.memory.add( - session_id=self.session_id, - messages=[zep_message], - summary_instruction=self.summary_instruction, - ) - - def clear(self) -> None: - """Clear session memory from Zep. Note that Zep is long-term storage for memory - and this is not advised unless you have specific data retention requirements. - """ - try: - self._client.memory.delete(self.session_id) - except NotFoundError: - logger.warning( - f"Session {self.session_id} not found in Zep. Skipping delete." - ) diff --git a/src/zep_cloud/langchain/vectorstore.py b/src/zep_cloud/langchain/vectorstore.py deleted file mode 100644 index cd903ef9..00000000 --- a/src/zep_cloud/langchain/vectorstore.py +++ /dev/null @@ -1,553 +0,0 @@ -from __future__ import annotations - -import logging -from typing import Any, Dict, Iterable, List, Optional, Tuple -from zep_cloud.types import DocumentCollectionResponse, CreateDocumentRequest -from zep_cloud.errors import NotFoundError -from zep_cloud.client import Zep, AsyncZep -from zep_cloud.environment import ZepEnvironment - -try: - from langchain_core.documents import Document - from langchain_core.vectorstores import VectorStore -except ImportError: - raise ImportError( - "Could not import langchain-core package. " - "Please install it with `pip install langchain-core`." - ) - - -logger = logging.getLogger() - - -class ZepVectorStore(VectorStore): - """`Zep` VectorStore. - - Provides methods for adding texts or documents to a Zep Collection, - searching for similar documents, and deleting documents. - - Search scores are calculated using cosine similarity normalized to [0, 1]. - - Args: - collection_name (str): The name of the collection in the Zep store. - description (Optional[str]): The description of the collection. - metadata (Optional[Dict[str, Any]]): The metadata to associate with the - collection. - api_url (Optional[str]): The URL of the Zep API. Defaults to "https://api.getzep.com". - Not required if passing in a ZepClient. - api_key (str): The API key for the Zep API. - """ - - def __init__( - self, - collection_name: str, - description: Optional[str] = None, - metadata: Optional[Dict[str, Any]] = None, - - api_url: Optional[str] = str(ZepEnvironment.DEFAULT), - api_key: Optional[str] = None, - ) -> None: - super().__init__() - if not collection_name: - raise ValueError( - "collection_name must be specified when using ZepVectorStore." 
- ) - self._client = Zep(api_key=api_key, base_url=api_url) - self._async_client = AsyncZep(api_key=api_key, base_url=api_url) - - self.collection_name = collection_name - self.c_description = description - self.c_metadata = metadata - - self._collection = self._load_collection() - - def _load_collection(self) -> DocumentCollectionResponse: - """ - Load the collection from the Zep backend. - """ - - try: - collection = self._client.document.get_collection(collection_name=self.collection_name) - print("Collection found") - except NotFoundError: - print("Collection not found") - logger.info( - f"Collection {self.collection_name} not found. Creating new collection." - ) - collection = self._create_collection() - - return collection - - def _create_collection(self) -> DocumentCollectionResponse: - """ - Create a new collection in the Zep backend. - """ - self._client.document.add_collection( - collection_name=self.collection_name, - description=self.c_description, - metadata=self.c_metadata, - ) - collection = self._client.document.get_collection(collection_name=self.collection_name) - return collection - - def _generate_documents_to_add( - self, - texts: Iterable[str], - metadatas: Optional[List[Dict[Any, Any]]] = None, # langchain spelling - document_ids: Optional[List[str]] = None, - ) -> List[CreateDocumentRequest]: - documents: List[CreateDocumentRequest] = [] - for i, d in enumerate(texts): - documents.append( - CreateDocumentRequest( - content=d, - metadata=metadatas[i] if metadatas else None, - document_id=document_ids[i] if document_ids else None, - ) - ) - return documents - - def add_texts( - self, - texts: Iterable[str], - metadatas: Optional[List[Dict[str, Any]]] = None, # langchain spelling - document_ids: Optional[List[str]] = None, - **kwargs: Any, - ) -> List[str]: - """Run more texts through the embeddings and add to the vectorstore. - - Args: - texts: Iterable of strings to add to the vectorstore. - metadatas: Optional list of metadatas associated with the texts. - document_ids: Optional list of document ids associated with the texts. - kwargs: vectorstore specific parameters - - Returns: - List of ids from adding the texts into the vectorstore. - """ - - documents = self._generate_documents_to_add(texts, metadatas, document_ids) - uuids = self._client.document.add_documents(collection_name=self.collection_name, request=documents) - - return uuids - - async def aadd_texts( - self, - texts: Iterable[str], - metadatas: Optional[List[Dict[str, Any]]] = None, # langchain spelling - document_ids: Optional[List[str]] = None, - **kwargs: Any, - ) -> List[str]: - documents = self._generate_documents_to_add(texts, metadatas, document_ids) - uuids = await self._async_client.document.add_documents(collection_name=self.collection_name, request=documents) - - return uuids - - def search( - self, - query: str, - search_type: str, - metadata_filter: Optional[Dict[str, Any]] = None, - k: int = 3, - **kwargs: Any, - ) -> List[Document]: - """Return docs most similar to query using specified search type.""" - if search_type == "similarity": - return self.similarity_search( - query, k=k, metadata=metadata_filter, **kwargs - ) - elif search_type == "mmr": - return self.max_marginal_relevance_search( - query, k=k, metadata_filter=metadata_filter, **kwargs - ) - else: - raise ValueError( - f"search_type of {search_type} not allowed. Expected " - "search_type to be 'similarity' or 'mmr'." 
- ) - - async def asearch( - self, - query: str, - search_type: str, - metadata_filter: Optional[Dict[str, Any]] = None, - k: int = 3, - **kwargs: Any, - ) -> List[Document]: - """Return docs most similar to query using specified search type.""" - if search_type == "similarity": - return await self.asimilarity_search( - query, k=k, metadata=metadata_filter, **kwargs - ) - elif search_type == "mmr": - return await self.amax_marginal_relevance_search( - query, k=k, metadata_filter=metadata_filter, **kwargs - ) - else: - raise ValueError( - f"search_type of {search_type} not allowed. Expected " - "search_type to be 'similarity' or 'mmr'." - ) - - def similarity_search( - self, - query: str, - k: int = 4, - metadata: Optional[Dict[str, Any]] = None, - **kwargs: Any, - ) -> List[Document]: - """Return docs most similar to query.""" - - results = self._similarity_search_with_relevance_scores( - query, k=k, metadata_filter=metadata, **kwargs - ) - return [doc for doc, _ in results] - - def similarity_search_with_score( - self, - query: str, - k: int = 4, - metadata: Optional[Dict[str, Any]] = None, - **kwargs: Any, - ) -> List[Tuple[Document, float]]: - """Run similarity search with distance.""" - - return self._similarity_search_with_relevance_scores( - query, k=k, metadata_filter=metadata, **kwargs - ) - - def _similarity_search_with_relevance_scores( - self, - query: str, - k: int = 4, - metadata_filter: Optional[Dict[str, Any]] = None, - **kwargs: Any, - ) -> List[Tuple[Document, float]]: - """ - Default similarity search with relevance scores. - Return docs and relevance scores in the range [0, 1]. - - 0 is dissimilar, 1 is most similar. - - Args: - query: input text - k: Number of Documents to return. Defaults to 4. - metadata_filter: Optional, metadata filter - **kwargs: kwargs to be passed to similarity search. 
Should include: - score_threshold: Optional, a floating point value between 0 to 1 and - filter the resulting set of retrieved docs - - Returns: - List of Tuples of (doc, similarity_score) - """ - - results = self._client.document.search( - collection_name=self.collection_name, - text=query, - limit=k, - metadata=metadata_filter, - **kwargs - ) - - if not results.results: - return [] - - return [ - ( - Document( - page_content=str(doc.content), - metadata=doc.metadata or {}, - ), - doc.score or 0.0, - ) - for doc in results.results - ] - - async def asimilarity_search_with_relevance_scores( - self, - query: str, - k: int = 4, - metadata_filter: Optional[Dict[str, Any]] = None, - **kwargs: Any, - ) -> List[Tuple[Document, float]]: - """Return docs most similar to query.""" - - if not self._collection: - raise ValueError( - "collection should be an instance of a Zep DocumentCollection" - ) - - results = await self._async_client.document.search( - collection_name=self.collection_name, - text=query, - limit=k, - metadata=metadata_filter, - **kwargs - ) - - if not results.results: - return [] - - return [ - ( - Document( - page_content=str(doc.content), - metadata=doc.metadata or {}, - ), - doc.score or 0.0, - ) - for doc in results.results - ] - - async def asimilarity_search( - self, - query: str, - k: int = 4, - metadata: Optional[Dict[str, Any]] = None, - **kwargs: Any, - ) -> List[Document]: - """Return docs most similar to query.""" - - results = await self.asimilarity_search_with_relevance_scores( - query, k, metadata_filter=metadata, **kwargs - ) - - return [doc for doc, _ in results] - - def max_marginal_relevance_search( # type: ignore # ignore inconsistent override - self, - query: str, - k: int = 4, - fetch_k: int = 20, - lambda_mult: float = 0.5, - metadata_filter: Optional[Dict[str, Any]] = None, - ) -> List[Document]: - """Return docs selected using the maximal marginal relevance reranking. - - Maximal marginal relevance optimizes for similarity to query AND diversity - among selected documents. - - Args: - query: Text to look up documents similar to. - k: Number of Documents to return. Defaults to 4. - fetch_k: (Unsupported) Number of Documents to fetch to pass to MMR - algorithm. - lambda_mult: Number between 0 and 1 that determines the degree - of diversity among the results with 0 corresponding - to maximum diversity and 1 to minimum diversity. - Defaults to 0.5. - metadata_filter: Optional, metadata to filter the resulting set of retrieved - docs - Returns: - List of Documents selected by maximal marginal relevance. - - NOTE: Zep automatically tunes the number of results returned by the search prior - to reranking based on `k`. `fetch_k` is ignored. - """ - - results = self._client.document.search( - collection_name=self.collection_name, - text=query, - limit=k, - search_type="mmr", - metadata=metadata_filter, - mmr_lambda=lambda_mult, - ) - - if not results.results: - return [] - - return [ - Document(page_content=str(d.content), metadata=d.metadata or {}) for d in results.results - ] - - async def amax_marginal_relevance_search( - self, - query: str, - k: int = 4, - fetch_k: int = 20, - lambda_mult: float = 0.5, - metadata_filter: Optional[Dict[str, Any]] = None, - **kwargs: Any, - ) -> List[Document]: - """Return docs selected using the maximal marginal relevance reranking. - - Maximal marginal relevance optimizes for similarity to query AND diversity - among selected documents. - - Args: - query: Text to look up documents similar to. - k: Number of Documents to return. 
Defaults to 4. - fetch_k: (Unsupported) Number of Documents to fetch to pass to MMR - algorithm. - lambda_mult: Number between 0 and 1 that determines the degree - of diversity among the results with 0 corresponding - to maximum diversity and 1 to minimum diversity. - Defaults to 0.5. - metadata_filter: Optional, metadata to filter the resulting set of retrieved - docs - Returns: - List of Documents selected by maximal marginal relevance. - - NOTE: Zep automatically tunes the number of results returned by the - search prior to reranking based on `k`. `fetch_k` is ignored. - """ - - if not self._collection: - raise ValueError( - "collection should be an instance of a Zep DocumentCollection" - ) - results = await self._async_client.document.search( - collection_name=self.collection_name, - text=query, - limit=k, - search_type="mmr", - metadata=metadata_filter, - mmr_lambda=lambda_mult, - **kwargs - ) - - if not results.results: - return [] - - return [ - Document(page_content=str(d.content), metadata=d.metadata or {}) for d in results.results - ] - - @classmethod - def from_texts( # type: ignore # ignore inconsistent override - cls, - texts: List[str], - collection_name: str, - metadatas: Optional[List[dict]] = None, - description: Optional[str] = None, - metadata: Optional[Dict[str, Any]] = None, - api_url: Optional[str] = None, - api_key: Optional[str] = None, - **kwargs: Any, - ) -> ZepVectorStore: - """ - Class method that returns a ZepVectorStore instance initialized from texts. - - If the collection does not exist, it will be created. - - Args: - texts (List[str]): The list of texts to add to the vectorstore. - collection_name (str): The name of the collection in the Zep store. - metadatas (Optional[List[Dict[str, Any]]]): Optional list of metadata - associated with the texts. - description (Optional[str]): The description of the collection. - metadata (Optional[Dict[str, Any]]): The metadata to associate with the - collection. - zep_client (Optional[ZepClient]): The Zep client to use. - api_url (Optional[str]): The URL of the Zep API. Defaults to - "https://api.getzep.com". Not required if passing in a ZepClient. - api_key (Optional[str]): The API key for the Zep API. Not required if - passing in a ZepClient. - **kwargs: Additional parameters specific to the vectorstore. - - Returns: - ZepVectorStore: An instance of ZepVectorStore. - """ - vecstore = cls( - collection_name, - description=description, - metadata=metadata, - api_url=api_url, - api_key=api_key, - ) - vecstore.add_texts(texts, metadatas) - - return vecstore - - @classmethod - async def afrom_texts( # type: ignore # ignore inconsistent override - cls, - texts: List[str], - collection_name: str, - metadatas: Optional[List[dict]] = None, - description: Optional[str] = None, - metadata: Optional[Dict[str, Any]] = None, - api_url: Optional[str] = str(ZepEnvironment.DEFAULT), - api_key: Optional[str] = None, - **kwargs: Any, - ) -> ZepVectorStore: - """ - Class method that asynchronously returns a ZepVectorStore instance - initialized from texts. - - If the collection does not exist, it will be created. - - Args: - texts (List[str]): The list of texts to add to the vectorstore. - collection_name (str): The name of the collection in the Zep store. - metadatas (Optional[List[Dict[str, Any]]]): Optional list of metadata - associated with the texts. - description (Optional[str]): The description of the collection. - metadata (Optional[Dict[str, Any]]): The metadata to associate with the - collection. 
- zep_client (Optional[Zep]): The Zep client to use. - zep_async_client (Optional[AsyncZep]): The Zep async client to use. - api_url (Optional[str]): The URL of the Zep API. Defaults to - "https://api.getzep.com". Not required if passing in a ZepClient. - api_key (Optional[str]): The API key for the Zep API. Not required if - passing in a ZepClient. - **kwargs: Additional parameters specific to the vectorstore. - - Returns: - ZepVectorStore: An instance of ZepVectorStore. - """ - vecstore = cls( - collection_name, - description=description, - metadata=metadata, - api_url=api_url, - api_key=api_key, - ) - await vecstore.aadd_texts(texts, metadatas) - return vecstore - - @classmethod - def from_documents( # type: ignore # ignore inconsistent override - cls, - documents: List[Document], - **kwargs: Any, - ) -> ZepVectorStore: - """Return VectorStore initialized from documents.""" - texts = [d.page_content for d in documents] - metadatas = [d.metadata for d in documents] - return cls.from_texts(texts, metadatas=metadatas, **kwargs) - - @classmethod - async def afrom_documents( # type: ignore # ignore inconsistent override - cls, - documents: List[Document], - **kwargs: Any, - ) -> ZepVectorStore: - """Asynchronously return VectorStore initialized from documents.""" - texts = [d.page_content for d in documents] - metadatas = [d.metadata for d in documents] - return await cls.afrom_texts(texts, metadatas=metadatas, **kwargs) - - def delete(self, ids: Optional[List[str]] = None, **kwargs: Any) -> None: - """Delete by Zep vector UUIDs. - - Parameters - ---------- - ids : Optional[List[str]] - The UUIDs of the vectors to delete. - - Raises - ------ - ValueError - If no UUIDs are provided. - """ - - if ids is None or len(ids) == 0: - raise ValueError("No uuids provided to delete.") - - if self._collection is None: - raise ValueError("No collection name provided.") - - for u in ids: - self._client.document.delete_document(collection_name=self.collection_name, document_uuid=u) \ No newline at end of file diff --git a/src/zep_cloud/memory/client.py b/src/zep_cloud/memory/client.py index d5b43dea..24ac8d9e 100644 --- a/src/zep_cloud/memory/client.py +++ b/src/zep_cloud/memory/client.py @@ -12,10 +12,8 @@ from ..errors.conflict_error import ConflictError from ..errors.internal_server_error import InternalServerError from ..errors.not_found_error import NotFoundError -from ..types.added_fact import AddedFact from ..types.api_error import ApiError as types_api_error_ApiError from ..types.classify_session_request import ClassifySessionRequest -from ..types.classify_session_response import ClassifySessionResponse from ..types.end_session_response import EndSessionResponse from ..types.end_sessions_response import EndSessionsResponse from ..types.fact_rating_instruction import FactRatingInstruction @@ -23,13 +21,14 @@ from ..types.facts_response import FactsResponse from ..types.memory import Memory from ..types.memory_search_result import MemorySearchResult -from ..types.memory_type import MemoryType from ..types.message import Message from ..types.message_list_response import MessageListResponse +from ..types.new_fact import NewFact from ..types.question import Question from ..types.search_scope import SearchScope from ..types.search_type import SearchType from ..types.session import Session +from ..types.session_classification import SessionClassification from ..types.session_list_response import SessionListResponse from ..types.session_search_response import SessionSearchResponse from 
..types.success_response import SuccessResponse @@ -137,9 +136,9 @@ def add_session( self, *, session_id: str, + user_id: str, fact_rating_instruction: typing.Optional[FactRatingInstruction] = OMIT, metadata: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, - user_id: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> Session: """ @@ -150,15 +149,15 @@ def add_session( session_id : str The unique identifier of the session. + user_id : str + The unique identifier of the user associated with the session + fact_rating_instruction : typing.Optional[FactRatingInstruction] Optional instruction to use for fact rating. metadata : typing.Optional[typing.Dict[str, typing.Any]] The metadata associated with the session. - user_id : typing.Optional[str] - The unique identifier of the user associated with the session - request_options : typing.Optional[RequestOptions] Request-specific configuration. @@ -176,6 +175,7 @@ def add_session( ) client.memory.add_session( session_id="session_id", + user_id="user_id", ) """ _response = self._client_wrapper.httpx_client.request( @@ -275,7 +275,7 @@ def end_sessions( request_options: typing.Optional[RequestOptions] = None, ) -> EndSessionsResponse: """ - End multiple sessions by their IDs + End multiple sessions by their IDs. Parameters ---------- @@ -328,6 +328,7 @@ def end_sessions( def search_sessions( self, *, + text: str, limit: typing.Optional[int] = None, min_fact_rating: typing.Optional[float] = OMIT, min_score: typing.Optional[float] = OMIT, @@ -336,7 +337,6 @@ def search_sessions( search_scope: typing.Optional[SearchScope] = OMIT, search_type: typing.Optional[SearchType] = OMIT, session_ids: typing.Optional[typing.Sequence[str]] = OMIT, - text: typing.Optional[str] = OMIT, user_id: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> SessionSearchResponse: @@ -345,28 +345,35 @@ def search_sessions( Parameters ---------- + text : str + The search text. + limit : typing.Optional[int] The maximum number of search results to return. Defaults to None (no limit). min_fact_rating : typing.Optional[float] + The minimum fact rating to filter on. Only supported on cloud. Will be ignored on Community Edition. min_score : typing.Optional[float] + The minimum score for search results. Only supported on cloud. Will be ignored on Community Edition. mmr_lambda : typing.Optional[float] + The lambda parameter for the MMR Reranking Algorithm. Only supported on cloud. Will be ignored on Community Edition. record_filter : typing.Optional[typing.Dict[str, typing.Any]] - filter on the metadata + Record filter on the metadata. Only supported on cloud. Will be ignored on Community Edition. search_scope : typing.Optional[SearchScope] + Search scope. Only supported on cloud. On Community Edition the search scope is always "facts". search_type : typing.Optional[SearchType] + Search type. Only supported on cloud. Will be ignored on Community Edition. session_ids : typing.Optional[typing.Sequence[str]] the session ids to search - text : typing.Optional[str] - user_id : typing.Optional[str] + User ID used to determine which sessions to search. Required on Community Edition. request_options : typing.Optional[RequestOptions] Request-specific configuration. 
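With this change search_sessions takes text as a required argument, while the remaining knobs stay optional and, per the docstrings above, several are cloud-only. A hedged sketch of a call against the updated signature; the query text, threshold values, and user ID are illustrative only.

from zep_cloud.client import Zep

client = Zep(api_key="YOUR_API_KEY")

results = client.memory.search_sessions(
    text="What has the user said about their travel plans?",  # now required
    search_scope="facts",     # cloud-only; Community Edition always searches facts
    min_fact_rating=0.5,      # cloud-only; ignored on Community Edition
    limit=10,
    user_id="some_user_id",   # required on Community Edition to scope the search
)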
@@ -383,7 +390,9 @@ def search_sessions( client = Zep( api_key="YOUR_API_KEY", ) - client.memory.search_sessions() + client.memory.search_sessions( + text="text", + ) """ _response = self._client_wrapper.httpx_client.request( "sessions/search", @@ -500,7 +509,7 @@ def update_session( ) client.memory.update_session( session_id="sessionId", - metadata={}, + metadata={"key": "value"}, ) """ _response = self._client_wrapper.httpx_client.request( @@ -538,9 +547,9 @@ def classify_session( last_n: typing.Optional[int] = OMIT, persist: typing.Optional[bool] = OMIT, request_options: typing.Optional[RequestOptions] = None, - ) -> ClassifySessionResponse: + ) -> SessionClassification: """ - classify a session by session id + classify a session by session id. Parameters ---------- @@ -567,7 +576,7 @@ def classify_session( Returns ------- - ClassifySessionResponse + SessionClassification A response object containing the name and classification result. Examples @@ -591,7 +600,7 @@ def classify_session( omit=OMIT, ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ClassifySessionResponse, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(SessionClassification, _response.json()) # type: ignore if _response.status_code == 404: raise NotFoundError(pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json())) # type: ignore if _response.status_code == 500: @@ -613,7 +622,7 @@ def end_session( request_options: typing.Optional[RequestOptions] = None, ) -> EndSessionResponse: """ - End a session by ID + End a session by ID. Parameters ---------- @@ -762,7 +771,7 @@ def get_session_facts( Session ID min_rating : typing.Optional[float] - Minimum rating by which to filter facts + Minimum rating by which to filter facts (Zep Cloud only) request_options : typing.Optional[RequestOptions] Request-specific configuration. @@ -807,7 +816,7 @@ def add_session_facts( self, session_id: str, *, - facts: typing.Optional[typing.Sequence[AddedFact]] = OMIT, + facts: typing.Optional[typing.Sequence[NewFact]] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> SuccessResponse: """ @@ -818,7 +827,7 @@ def add_session_facts( session_id : str Session ID - facts : typing.Optional[typing.Sequence[AddedFact]] + facts : typing.Optional[typing.Sequence[NewFact]] request_options : typing.Optional[RequestOptions] Request-specific configuration. @@ -864,22 +873,18 @@ def get( self, session_id: str, *, - memory_type: typing.Optional[MemoryType] = None, lastn: typing.Optional[int] = None, min_rating: typing.Optional[float] = None, request_options: typing.Optional[RequestOptions] = None, ) -> Memory: """ - Returns a memory (latest summary, list of messages and facts for models.MemoryTypePerpetual) for a given session + Returns a memory (latest summary, list of messages and facts) for a given session Parameters ---------- session_id : str The ID of the session for which to retrieve memory. - memory_type : typing.Optional[MemoryType] - The type of memory to retrieve: perpetual, summary_retriever, or message_window. Defaults to perpetual. - lastn : typing.Optional[int] The number of most recent memory entries to retrieve. 
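With the memory_type parameter dropped, memory.get is now driven only by lastn and min_rating. A minimal sketch under that assumption (the session ID and values are placeholders):

from zep_cloud.client import Zep

client = Zep(api_key="YOUR_API_KEY")

memory = client.memory.get(
    session_id="some_session_id",
    lastn=6,          # only the six most recent memory entries
    min_rating=0.3,   # Zep Cloud only: filter out facts rated below this value
)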
@@ -908,7 +913,7 @@ def get( _response = self._client_wrapper.httpx_client.request( f"sessions/{jsonable_encoder(session_id)}/memory", method="GET", - params={"memoryType": memory_type, "lastn": lastn, "minRating": min_rating}, + params={"lastn": lastn, "minRating": min_rating}, request_options=request_options, ) if 200 <= _response.status_code < 300: @@ -946,10 +951,10 @@ def add( A list of message objects, where each message contains a role and content. fact_instruction : typing.Optional[str] - Additional instruction for generating the facts. + Additional instruction for generating the facts. Zep Cloud Only, will be ignored on Community Edition. summary_instruction : typing.Optional[str] - Additional instruction for generating the summary. + Additional instruction for generating the summary. Zep Cloud Only, will be ignored on Community Edition. request_options : typing.Optional[RequestOptions] Request-specific configuration. @@ -969,7 +974,12 @@ def add( ) client.memory.add( session_id="sessionId", - messages=[Message()], + messages=[ + Message( + content="content", + role_type="norole", + ) + ], ) """ _response = self._client_wrapper.httpx_client.request( @@ -1194,7 +1204,7 @@ def update_message_metadata( client.memory.update_message_metadata( session_id="sessionId", message_uuid="messageUUID", - metadata={}, + metadata={"key": "value"}, ) """ _response = self._client_wrapper.httpx_client.request( @@ -1233,7 +1243,7 @@ def search( request_options: typing.Optional[RequestOptions] = None, ) -> typing.List[MemorySearchResult]: """ - Search memory for the specified session. + Search memory for the specified session. Deprecated, please use search_sessions method instead Parameters ---------- @@ -1512,9 +1522,9 @@ async def add_session( self, *, session_id: str, + user_id: str, fact_rating_instruction: typing.Optional[FactRatingInstruction] = OMIT, metadata: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, - user_id: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> Session: """ @@ -1525,15 +1535,15 @@ async def add_session( session_id : str The unique identifier of the session. + user_id : str + The unique identifier of the user associated with the session + fact_rating_instruction : typing.Optional[FactRatingInstruction] Optional instruction to use for fact rating. metadata : typing.Optional[typing.Dict[str, typing.Any]] The metadata associated with the session. - user_id : typing.Optional[str] - The unique identifier of the user associated with the session - request_options : typing.Optional[RequestOptions] Request-specific configuration. @@ -1551,6 +1561,7 @@ async def add_session( ) await client.memory.add_session( session_id="session_id", + user_id="user_id", ) """ _response = await self._client_wrapper.httpx_client.request( @@ -1650,7 +1661,7 @@ async def end_sessions( request_options: typing.Optional[RequestOptions] = None, ) -> EndSessionsResponse: """ - End multiple sessions by their IDs + End multiple sessions by their IDs. 
Parameters ---------- @@ -1703,6 +1714,7 @@ async def end_sessions( async def search_sessions( self, *, + text: str, limit: typing.Optional[int] = None, min_fact_rating: typing.Optional[float] = OMIT, min_score: typing.Optional[float] = OMIT, @@ -1711,7 +1723,6 @@ async def search_sessions( search_scope: typing.Optional[SearchScope] = OMIT, search_type: typing.Optional[SearchType] = OMIT, session_ids: typing.Optional[typing.Sequence[str]] = OMIT, - text: typing.Optional[str] = OMIT, user_id: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> SessionSearchResponse: @@ -1720,28 +1731,35 @@ async def search_sessions( Parameters ---------- + text : str + The search text. + limit : typing.Optional[int] The maximum number of search results to return. Defaults to None (no limit). min_fact_rating : typing.Optional[float] + The minimum fact rating to filter on. Only supported on cloud. Will be ignored on Community Edition. min_score : typing.Optional[float] + The minimum score for search results. Only supported on cloud. Will be ignored on Community Edition. mmr_lambda : typing.Optional[float] + The lambda parameter for the MMR Reranking Algorithm. Only supported on cloud. Will be ignored on Community Edition. record_filter : typing.Optional[typing.Dict[str, typing.Any]] - filter on the metadata + Record filter on the metadata. Only supported on cloud. Will be ignored on Community Edition. search_scope : typing.Optional[SearchScope] + Search scope. Only supported on cloud. On Community Edition the search scope is always "facts". search_type : typing.Optional[SearchType] + Search type. Only supported on cloud. Will be ignored on Community Edition. session_ids : typing.Optional[typing.Sequence[str]] the session ids to search - text : typing.Optional[str] - user_id : typing.Optional[str] + User ID used to determine which sessions to search. Required on Community Edition. request_options : typing.Optional[RequestOptions] Request-specific configuration. @@ -1758,7 +1776,9 @@ async def search_sessions( client = AsyncZep( api_key="YOUR_API_KEY", ) - await client.memory.search_sessions() + await client.memory.search_sessions( + text="text", + ) """ _response = await self._client_wrapper.httpx_client.request( "sessions/search", @@ -1875,7 +1895,7 @@ async def update_session( ) await client.memory.update_session( session_id="sessionId", - metadata={}, + metadata={"key": "value"}, ) """ _response = await self._client_wrapper.httpx_client.request( @@ -1913,9 +1933,9 @@ async def classify_session( last_n: typing.Optional[int] = OMIT, persist: typing.Optional[bool] = OMIT, request_options: typing.Optional[RequestOptions] = None, - ) -> ClassifySessionResponse: + ) -> SessionClassification: """ - classify a session by session id + classify a session by session id. Parameters ---------- @@ -1942,7 +1962,7 @@ async def classify_session( Returns ------- - ClassifySessionResponse + SessionClassification A response object containing the name and classification result. 
Examples @@ -1966,7 +1986,7 @@ async def classify_session( omit=OMIT, ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ClassifySessionResponse, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(SessionClassification, _response.json()) # type: ignore if _response.status_code == 404: raise NotFoundError(pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json())) # type: ignore if _response.status_code == 500: @@ -1988,7 +2008,7 @@ async def end_session( request_options: typing.Optional[RequestOptions] = None, ) -> EndSessionResponse: """ - End a session by ID + End a session by ID. Parameters ---------- @@ -2137,7 +2157,7 @@ async def get_session_facts( Session ID min_rating : typing.Optional[float] - Minimum rating by which to filter facts + Minimum rating by which to filter facts (Zep Cloud only) request_options : typing.Optional[RequestOptions] Request-specific configuration. @@ -2182,7 +2202,7 @@ async def add_session_facts( self, session_id: str, *, - facts: typing.Optional[typing.Sequence[AddedFact]] = OMIT, + facts: typing.Optional[typing.Sequence[NewFact]] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> SuccessResponse: """ @@ -2193,7 +2213,7 @@ async def add_session_facts( session_id : str Session ID - facts : typing.Optional[typing.Sequence[AddedFact]] + facts : typing.Optional[typing.Sequence[NewFact]] request_options : typing.Optional[RequestOptions] Request-specific configuration. @@ -2239,22 +2259,18 @@ async def get( self, session_id: str, *, - memory_type: typing.Optional[MemoryType] = None, lastn: typing.Optional[int] = None, min_rating: typing.Optional[float] = None, request_options: typing.Optional[RequestOptions] = None, ) -> Memory: """ - Returns a memory (latest summary, list of messages and facts for models.MemoryTypePerpetual) for a given session + Returns a memory (latest summary, list of messages and facts) for a given session Parameters ---------- session_id : str The ID of the session for which to retrieve memory. - memory_type : typing.Optional[MemoryType] - The type of memory to retrieve: perpetual, summary_retriever, or message_window. Defaults to perpetual. - lastn : typing.Optional[int] The number of most recent memory entries to retrieve. @@ -2283,7 +2299,7 @@ async def get( _response = await self._client_wrapper.httpx_client.request( f"sessions/{jsonable_encoder(session_id)}/memory", method="GET", - params={"memoryType": memory_type, "lastn": lastn, "minRating": min_rating}, + params={"lastn": lastn, "minRating": min_rating}, request_options=request_options, ) if 200 <= _response.status_code < 300: @@ -2321,10 +2337,10 @@ async def add( A list of message objects, where each message contains a role and content. fact_instruction : typing.Optional[str] - Additional instruction for generating the facts. + Additional instruction for generating the facts. Zep Cloud Only, will be ignored on Community Edition. summary_instruction : typing.Optional[str] - Additional instruction for generating the summary. + Additional instruction for generating the summary. Zep Cloud Only, will be ignored on Community Edition. request_options : typing.Optional[RequestOptions] Request-specific configuration. 
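The updated examples construct Message with explicit content and role_type rather than an empty Message(). A short sketch of adding memory with the async client under the new shape; the role, content, and instruction strings are placeholders.

from zep_cloud.client import AsyncZep
from zep_cloud.types import Message

client = AsyncZep(api_key="YOUR_API_KEY")

async def record_turn() -> None:
    await client.memory.add(
        session_id="some_session_id",
        messages=[
            Message(
                role="assistant",
                role_type="assistant",
                content="Happy to help with that.",
            ),
        ],
        # Zep Cloud only; ignored on Community Edition.
        summary_instruction="Summarize this exchange in one sentence.",
    )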
@@ -2344,7 +2360,12 @@ async def add( ) await client.memory.add( session_id="sessionId", - messages=[Message()], + messages=[ + Message( + content="content", + role_type="norole", + ) + ], ) """ _response = await self._client_wrapper.httpx_client.request( @@ -2571,7 +2592,7 @@ async def update_message_metadata( await client.memory.update_message_metadata( session_id="sessionId", message_uuid="messageUUID", - metadata={}, + metadata={"key": "value"}, ) """ _response = await self._client_wrapper.httpx_client.request( @@ -2610,7 +2631,7 @@ async def search( request_options: typing.Optional[RequestOptions] = None, ) -> typing.List[MemorySearchResult]: """ - Search memory for the specified session. + Search memory for the specified session. Deprecated, please use search_sessions method instead Parameters ---------- diff --git a/src/zep_cloud/types/__init__.py b/src/zep_cloud/types/__init__.py index ead4cc4e..5a20091c 100644 --- a/src/zep_cloud/types/__init__.py +++ b/src/zep_cloud/types/__init__.py @@ -1,31 +1,41 @@ # This file was auto-generated by Fern from our API Definition. -from .added_fact import AddedFact from .api_error import ApiError +from .apidata_document import ApidataDocument +from .apidata_document_collection import ApidataDocumentCollection +from .apidata_document_search_response import ApidataDocumentSearchResponse +from .apidata_document_with_score import ApidataDocumentWithScore from .classify_session_request import ClassifySessionRequest -from .classify_session_response import ClassifySessionResponse from .create_document_request import CreateDocumentRequest -from .document_collection_response import DocumentCollectionResponse -from .document_response import DocumentResponse -from .document_search_result import DocumentSearchResult -from .document_search_result_page import DocumentSearchResultPage from .end_session_response import EndSessionResponse from .end_sessions_response import EndSessionsResponse +from .entity_edge import EntityEdge +from .entity_node import EntityNode +from .episode import Episode +from .episode_response import EpisodeResponse from .fact import Fact from .fact_rating_examples import FactRatingExamples from .fact_rating_instruction import FactRatingInstruction from .fact_response import FactResponse from .facts_response import FactsResponse +from .graph_data_type import GraphDataType +from .graph_search_results import GraphSearchResults +from .graph_search_scope import GraphSearchScope +from .group import Group from .memory import Memory from .memory_search_result import MemorySearchResult -from .memory_type import MemoryType from .message import Message from .message_list_response import MessageListResponse +from .new_fact import NewFact from .question import Question +from .reranker import Reranker from .role_type import RoleType from .search_scope import SearchScope from .search_type import SearchType from .session import Session +from .session_classification import SessionClassification +from .session_fact_rating_examples import SessionFactRatingExamples +from .session_fact_rating_instruction import SessionFactRatingInstruction from .session_list_response import SessionListResponse from .session_search_response import SessionSearchResponse from .session_search_result import SessionSearchResult @@ -37,32 +47,42 @@ from .user_list_response import UserListResponse __all__ = [ - "AddedFact", "ApiError", + "ApidataDocument", + "ApidataDocumentCollection", + "ApidataDocumentSearchResponse", + "ApidataDocumentWithScore", "ClassifySessionRequest", - 
"ClassifySessionResponse", "CreateDocumentRequest", - "DocumentCollectionResponse", - "DocumentResponse", - "DocumentSearchResult", - "DocumentSearchResultPage", "EndSessionResponse", "EndSessionsResponse", + "EntityEdge", + "EntityNode", + "Episode", + "EpisodeResponse", "Fact", "FactRatingExamples", "FactRatingInstruction", "FactResponse", "FactsResponse", + "GraphDataType", + "GraphSearchResults", + "GraphSearchScope", + "Group", "Memory", "MemorySearchResult", - "MemoryType", "Message", "MessageListResponse", + "NewFact", "Question", + "Reranker", "RoleType", "SearchScope", "SearchType", "Session", + "SessionClassification", + "SessionFactRatingExamples", + "SessionFactRatingInstruction", "SessionListResponse", "SessionSearchResponse", "SessionSearchResult", diff --git a/src/zep_cloud/types/document_response.py b/src/zep_cloud/types/apidata_document.py similarity index 97% rename from src/zep_cloud/types/document_response.py rename to src/zep_cloud/types/apidata_document.py index cef59dda..bded660f 100644 --- a/src/zep_cloud/types/document_response.py +++ b/src/zep_cloud/types/apidata_document.py @@ -7,7 +7,7 @@ from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -class DocumentResponse(pydantic_v1.BaseModel): +class ApidataDocument(pydantic_v1.BaseModel): content: typing.Optional[str] = None created_at: typing.Optional[str] = None document_id: typing.Optional[str] = None diff --git a/src/zep_cloud/types/document_collection_response.py b/src/zep_cloud/types/apidata_document_collection.py similarity index 83% rename from src/zep_cloud/types/document_collection_response.py rename to src/zep_cloud/types/apidata_document_collection.py index 1f5e7775..ed61656b 100644 --- a/src/zep_cloud/types/document_collection_response.py +++ b/src/zep_cloud/types/apidata_document_collection.py @@ -7,19 +7,11 @@ from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -class DocumentCollectionResponse(pydantic_v1.BaseModel): +class ApidataDocumentCollection(pydantic_v1.BaseModel): created_at: typing.Optional[str] = None description: typing.Optional[str] = None - document_count: typing.Optional[int] = pydantic_v1.Field(default=None) - """ - Number of documents in the collection - """ - - document_embedded_count: typing.Optional[int] = pydantic_v1.Field(default=None) - """ - Number of documents with embeddings - """ - + document_count: typing.Optional[int] = None + document_embedded_count: typing.Optional[int] = None embedding_dimensions: typing.Optional[int] = None embedding_model_name: typing.Optional[str] = None is_auto_embedded: typing.Optional[bool] = None diff --git a/src/zep_cloud/types/document_search_result_page.py b/src/zep_cloud/types/apidata_document_search_response.py similarity index 86% rename from src/zep_cloud/types/document_search_result_page.py rename to src/zep_cloud/types/apidata_document_search_response.py index 3678ea1c..4a81051f 100644 --- a/src/zep_cloud/types/document_search_result_page.py +++ b/src/zep_cloud/types/apidata_document_search_response.py @@ -5,14 +5,14 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -from .document_search_result import DocumentSearchResult +from .apidata_document_with_score import ApidataDocumentWithScore -class DocumentSearchResultPage(pydantic_v1.BaseModel): +class ApidataDocumentSearchResponse(pydantic_v1.BaseModel): current_page: typing.Optional[int] = None query_vector: typing.Optional[typing.List[float]] = None 
result_count: typing.Optional[int] = None - results: typing.Optional[typing.List[DocumentSearchResult]] = None + results: typing.Optional[typing.List[ApidataDocumentWithScore]] = None total_pages: typing.Optional[int] = None def json(self, **kwargs: typing.Any) -> str: diff --git a/src/zep_cloud/types/document_search_result.py b/src/zep_cloud/types/apidata_document_with_score.py similarity index 96% rename from src/zep_cloud/types/document_search_result.py rename to src/zep_cloud/types/apidata_document_with_score.py index 87735823..33691f40 100644 --- a/src/zep_cloud/types/document_search_result.py +++ b/src/zep_cloud/types/apidata_document_with_score.py @@ -7,7 +7,7 @@ from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -class DocumentSearchResult(pydantic_v1.BaseModel): +class ApidataDocumentWithScore(pydantic_v1.BaseModel): content: typing.Optional[str] = None created_at: typing.Optional[str] = None document_id: typing.Optional[str] = None diff --git a/src/zep_cloud/types/end_session_response.py b/src/zep_cloud/types/end_session_response.py index aa564f06..375609f0 100644 --- a/src/zep_cloud/types/end_session_response.py +++ b/src/zep_cloud/types/end_session_response.py @@ -5,12 +5,12 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -from .classify_session_response import ClassifySessionResponse from .session import Session +from .session_classification import SessionClassification class EndSessionResponse(pydantic_v1.BaseModel): - classification: typing.Optional[ClassifySessionResponse] = None + classification: typing.Optional[SessionClassification] = None session: typing.Optional[Session] = None def json(self, **kwargs: typing.Any) -> str: diff --git a/src/zep_cloud/types/entity_edge.py b/src/zep_cloud/types/entity_edge.py new file mode 100644 index 00000000..d12e28af --- /dev/null +++ b/src/zep_cloud/types/entity_edge.py @@ -0,0 +1,79 @@ +# This file was auto-generated by Fern from our API Definition. 
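The document models above are renames only; their fields are unchanged apart from the dropped field docstrings. A small parsing sketch with the new names, assuming an illustrative raw payload and the pydantic v1 style API the SDK's models expose:

    from zep_cloud.types import ApidataDocumentSearchResponse

    raw = {
        "result_count": 1,
        "total_pages": 1,
        "results": [{"content": "Berlin travel notes", "document_id": "doc_1"}],
    }
    page = ApidataDocumentSearchResponse.parse_obj(raw)
    for doc in page.results or []:
        print(doc.content, doc.document_id)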
+ +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 + + +class EntityEdge(pydantic_v1.BaseModel): + created_at: str = pydantic_v1.Field() + """ + Creation time of the edge + """ + + episodes: typing.Optional[typing.List[str]] = pydantic_v1.Field(default=None) + """ + List of episode ids that reference these entity edges + """ + + expired_at: typing.Optional[str] = pydantic_v1.Field(default=None) + """ + Datetime of when the node was invalidated + """ + + fact: str = pydantic_v1.Field() + """ + Fact representing the edge and nodes that it connects + """ + + invalid_at: typing.Optional[str] = pydantic_v1.Field(default=None) + """ + Datetime of when the fact stopped being true + """ + + name: str = pydantic_v1.Field() + """ + Name of the edge, relation name + """ + + source_node_uuid: str = pydantic_v1.Field() + """ + UUID of the source node + """ + + target_node_uuid: str = pydantic_v1.Field() + """ + UUID of the target node + """ + + uuid_: str = pydantic_v1.Field(alias="uuid") + """ + UUID of the edge + """ + + valid_at: typing.Optional[str] = pydantic_v1.Field(default=None) + """ + Datetime of when the fact became true + """ + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/zep_cloud/types/entity_node.py b/src/zep_cloud/types/entity_node.py new file mode 100644 index 00000000..575b52ed --- /dev/null +++ b/src/zep_cloud/types/entity_node.py @@ -0,0 +1,54 @@ +# This file was auto-generated by Fern from our API Definition. 
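To make the new graph edge model concrete, here is a minimal construction sketch with illustrative values; note that `uuid` is exposed on the model as `uuid_` via an alias:

    from zep_cloud.types import EntityEdge

    edge = EntityEdge(
        uuid="edge-uuid",                 # populated via the "uuid" alias
        name="WORKS_AT",
        fact="Alice works at Acme Corp",
        source_node_uuid="node-alice",
        target_node_uuid="node-acme",
        created_at="2024-10-17T00:00:00Z",
        valid_at="2024-10-17T00:00:00Z",
    )
    print(edge.uuid_, edge.fact)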
+ +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 + + +class EntityNode(pydantic_v1.BaseModel): + created_at: str = pydantic_v1.Field() + """ + Creation time of the node + """ + + labels: typing.Optional[typing.List[str]] = pydantic_v1.Field(default=None) + """ + Labels associated with the node + """ + + name: str = pydantic_v1.Field() + """ + Name of the node + """ + + summary: str = pydantic_v1.Field() + """ + Regional summary of surrounding edges + """ + + uuid_: str = pydantic_v1.Field(alias="uuid") + """ + UUID of the node + """ + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/zep_cloud/types/episode.py b/src/zep_cloud/types/episode.py new file mode 100644 index 00000000..138b5e0a --- /dev/null +++ b/src/zep_cloud/types/episode.py @@ -0,0 +1,37 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .graph_data_type import GraphDataType + + +class Episode(pydantic_v1.BaseModel): + content: typing.Optional[str] = None + created_at: typing.Optional[str] = None + name: typing.Optional[str] = None + source: typing.Optional[GraphDataType] = None + source_description: typing.Optional[str] = None + uuid_: typing.Optional[str] = pydantic_v1.Field(alias="uuid", default=None) + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/zep_cloud/types/episode_response.py b/src/zep_cloud/types/episode_response.py new file mode 100644 index 00000000..bf66cb2e --- /dev/null +++ b/src/zep_cloud/types/episode_response.py @@ -0,0 +1,30 @@ +# This file was auto-generated by Fern from our API Definition. 
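A similar sketch for the episode model above, assuming an illustrative payload; `source` takes one of the GraphDataType literals defined below ("text", "json" or "message"):

    from zep_cloud.types import Episode

    episode = Episode.parse_obj(
        {
            "uuid": "episode-uuid",
            "name": "chat-turn-1",
            "source": "message",
            "content": "user: I moved to Berlin last month",
        }
    )
    print(episode.source, episode.uuid_)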
+ +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .episode import Episode + + +class EpisodeResponse(pydantic_v1.BaseModel): + episodes: typing.Optional[typing.List[Episode]] = None + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) + + class Config: + frozen = True + smart_union = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/zep_cloud/types/fact.py b/src/zep_cloud/types/fact.py index 3664eb06..bf20ddfa 100644 --- a/src/zep_cloud/types/fact.py +++ b/src/zep_cloud/types/fact.py @@ -8,10 +8,16 @@ class Fact(pydantic_v1.BaseModel): - created_at: typing.Optional[str] = None - fact: typing.Optional[str] = None + created_at: str + expired_at: typing.Optional[str] = None + fact: str + invalid_at: typing.Optional[str] = None + name: typing.Optional[str] = None rating: typing.Optional[float] = None - uuid_: typing.Optional[str] = pydantic_v1.Field(alias="uuid", default=None) + source_node_name: typing.Optional[str] = None + target_node_name: typing.Optional[str] = None + uuid_: str = pydantic_v1.Field(alias="uuid") + valid_at: typing.Optional[str] = None def json(self, **kwargs: typing.Any) -> str: kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} diff --git a/src/zep_cloud/types/graph_data_type.py b/src/zep_cloud/types/graph_data_type.py new file mode 100644 index 00000000..3d163c00 --- /dev/null +++ b/src/zep_cloud/types/graph_data_type.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +GraphDataType = typing.Union[typing.Literal["text", "json", "message"], typing.Any] diff --git a/src/zep_cloud/types/graph_search_results.py b/src/zep_cloud/types/graph_search_results.py new file mode 100644 index 00000000..feed264c --- /dev/null +++ b/src/zep_cloud/types/graph_search_results.py @@ -0,0 +1,32 @@ +# This file was auto-generated by Fern from our API Definition. 
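The `Fact` model now carries temporal metadata (`valid_at`, `invalid_at`, `expired_at`) plus the connected node names, and `created_at`, `fact` and `uuid` become required. A sketch of reading these fields off the relevant facts returned with memory, assuming a hypothetical session ID:

    import asyncio

    from zep_cloud.client import AsyncZep

    async def main() -> None:
        client = AsyncZep(api_key="YOUR_API_KEY")
        memory = await client.memory.get(session_id="sessionId")
        for fact in memory.relevant_facts or []:
            # valid_at / invalid_at bound the period during which the fact held true.
            print(fact.fact, fact.valid_at, fact.invalid_at, fact.rating)

    asyncio.run(main())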
+ +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .entity_edge import EntityEdge +from .entity_node import EntityNode + + +class GraphSearchResults(pydantic_v1.BaseModel): + edges: typing.Optional[typing.List[EntityEdge]] = None + nodes: typing.Optional[typing.List[EntityNode]] = None + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) + + class Config: + frozen = True + smart_union = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/zep_cloud/types/graph_search_scope.py b/src/zep_cloud/types/graph_search_scope.py new file mode 100644 index 00000000..b57a0216 --- /dev/null +++ b/src/zep_cloud/types/graph_search_scope.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +GraphSearchScope = typing.Union[typing.Literal["edges", "nodes"], typing.Any] diff --git a/src/zep_cloud/types/group.py b/src/zep_cloud/types/group.py new file mode 100644 index 00000000..0eb70652 --- /dev/null +++ b/src/zep_cloud/types/group.py @@ -0,0 +1,37 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 + + +class Group(pydantic_v1.BaseModel): + created_at: typing.Optional[str] = None + description: typing.Optional[str] = None + external_id: typing.Optional[str] = None + id: typing.Optional[int] = None + name: typing.Optional[str] = None + project_uuid: typing.Optional[str] = None + uuid_: typing.Optional[str] = pydantic_v1.Field(alias="uuid", default=None) + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/zep_cloud/types/memory.py b/src/zep_cloud/types/memory.py index 4dfc249a..a65294cb 100644 --- a/src/zep_cloud/types/memory.py +++ b/src/zep_cloud/types/memory.py @@ -13,13 +13,13 @@ class Memory(pydantic_v1.BaseModel): facts: typing.Optional[typing.List[str]] = pydantic_v1.Field(default=None) """ - Most recent list of facts derived from the session. 
Included only with perpetual memory type. + Most recent list of facts derived from the session. (cloud only) Deprecated: Facts will be deprecated in future releases and relevant_facts should be used instead. """ messages: typing.Optional[typing.List[Message]] = pydantic_v1.Field(default=None) """ - A list of message objects, where each message contains a role and content. + A list of message objects, where each message contains a role and content. Only last_n messages will be returned """ metadata: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field(default=None) @@ -27,15 +27,14 @@ class Memory(pydantic_v1.BaseModel): A dictionary containing metadata associated with the memory. """ - relevant_facts: typing.Optional[typing.List[Fact]] = None - relevant_summaries: typing.Optional[typing.List[Summary]] = pydantic_v1.Field(default=None) + relevant_facts: typing.Optional[typing.List[Fact]] = pydantic_v1.Field(default=None) """ - Summary list result from Summary Retriever Memory Type. + Most relevant facts to the recent messages in the session. """ summary: typing.Optional[Summary] = pydantic_v1.Field(default=None) """ - A Summary object. + The most recent summary before last nth message. (cloud only) """ def json(self, **kwargs: typing.Any) -> str: diff --git a/src/zep_cloud/types/memory_type.py b/src/zep_cloud/types/memory_type.py deleted file mode 100644 index 42deafea..00000000 --- a/src/zep_cloud/types/memory_type.py +++ /dev/null @@ -1,5 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -MemoryType = typing.Union[typing.Literal["perpetual", "summary_retriever", "message_window"], typing.Any] diff --git a/src/zep_cloud/types/message.py b/src/zep_cloud/types/message.py index 8148f0b6..8d75d75e 100644 --- a/src/zep_cloud/types/message.py +++ b/src/zep_cloud/types/message.py @@ -9,7 +9,7 @@ class Message(pydantic_v1.BaseModel): - content: typing.Optional[str] = pydantic_v1.Field(default=None) + content: str = pydantic_v1.Field() """ The content of the message. """ @@ -29,7 +29,7 @@ class Message(pydantic_v1.BaseModel): The role of the sender of the message (e.g., "user", "assistant"). """ - role_type: typing.Optional[RoleType] = pydantic_v1.Field(default=None) + role_type: RoleType = pydantic_v1.Field() """ The type of the role (e.g., "user", "system"). """ diff --git a/src/zep_cloud/types/added_fact.py b/src/zep_cloud/types/new_fact.py similarity index 96% rename from src/zep_cloud/types/added_fact.py rename to src/zep_cloud/types/new_fact.py index 469ca4d4..11daee05 100644 --- a/src/zep_cloud/types/added_fact.py +++ b/src/zep_cloud/types/new_fact.py @@ -7,7 +7,7 @@ from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -class AddedFact(pydantic_v1.BaseModel): +class NewFact(pydantic_v1.BaseModel): fact: typing.Optional[str] = None def json(self, **kwargs: typing.Any) -> str: diff --git a/src/zep_cloud/types/reranker.py b/src/zep_cloud/types/reranker.py new file mode 100644 index 00000000..470cbc81 --- /dev/null +++ b/src/zep_cloud/types/reranker.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. 
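Because `content` and `role_type` are now required on `Message`, callers have to construct messages explicitly. A minimal sketch with illustrative values, assuming "user" and "assistant" are among the existing RoleType literals:

    import asyncio

    from zep_cloud.client import AsyncZep
    from zep_cloud.types import Message

    async def main() -> None:
        client = AsyncZep(api_key="YOUR_API_KEY")
        await client.memory.add(
            session_id="sessionId",
            messages=[
                Message(role="Jane", role_type="user", content="I'm planning a trip to Bali."),
                Message(role_type="assistant", content="Great, would you like hotel suggestions?"),
            ],
        )

    asyncio.run(main())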
+ +import typing + +Reranker = typing.Union[typing.Literal["rrf", "mmr", "node_distance", "episode_mentions"], typing.Any] diff --git a/src/zep_cloud/types/session.py b/src/zep_cloud/types/session.py index 34b442fa..5ce00a49 100644 --- a/src/zep_cloud/types/session.py +++ b/src/zep_cloud/types/session.py @@ -5,7 +5,7 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -from .fact_rating_instruction import FactRatingInstruction +from .session_fact_rating_instruction import SessionFactRatingInstruction class Session(pydantic_v1.BaseModel): @@ -13,8 +13,7 @@ class Session(pydantic_v1.BaseModel): created_at: typing.Optional[str] = None deleted_at: typing.Optional[str] = None ended_at: typing.Optional[str] = None - fact_rating_instruction: typing.Optional[FactRatingInstruction] = None - fact_version_uuid: typing.Optional[str] = None + fact_rating_instruction: typing.Optional[SessionFactRatingInstruction] = None facts: typing.Optional[typing.List[str]] = None id: typing.Optional[int] = None metadata: typing.Optional[typing.Dict[str, typing.Any]] = None diff --git a/src/zep_cloud/types/classify_session_response.py b/src/zep_cloud/types/session_classification.py similarity index 92% rename from src/zep_cloud/types/classify_session_response.py rename to src/zep_cloud/types/session_classification.py index 81bc3917..c21b33e5 100644 --- a/src/zep_cloud/types/classify_session_response.py +++ b/src/zep_cloud/types/session_classification.py @@ -7,9 +7,9 @@ from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -class ClassifySessionResponse(pydantic_v1.BaseModel): +class SessionClassification(pydantic_v1.BaseModel): class_: typing.Optional[str] = pydantic_v1.Field(alias="class", default=None) - name: typing.Optional[str] = None + label: typing.Optional[str] = None def json(self, **kwargs: typing.Any) -> str: kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} diff --git a/src/zep_cloud/types/session_fact_rating_examples.py b/src/zep_cloud/types/session_fact_rating_examples.py new file mode 100644 index 00000000..8c3aedb3 --- /dev/null +++ b/src/zep_cloud/types/session_fact_rating_examples.py @@ -0,0 +1,31 @@ +# This file was auto-generated by Fern from our API Definition. 
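One behavioural note on the rename above: the classification result now reports the chosen class on `class_` and the classifier name on `label` (previously `name`). A sketch of reading it back, assuming classify_session keeps its existing `name` and `classes` arguments (those argument names are an assumption, they are not shown in this diff):

    import asyncio

    from zep_cloud.client import AsyncZep

    async def main() -> None:
        client = AsyncZep(api_key="YOUR_API_KEY")
        classification = await client.memory.classify_session(
            session_id="sessionId",
            name="intent",                           # assumed classifier name argument
            classes=["travel", "support", "other"],  # assumed candidate classes
        )
        print(classification.label, classification.class_)

    asyncio.run(main())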
+ +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 + + +class SessionFactRatingExamples(pydantic_v1.BaseModel): + high: typing.Optional[str] = None + low: typing.Optional[str] = None + medium: typing.Optional[str] = None + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) + + class Config: + frozen = True + smart_union = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/zep_cloud/types/session_fact_rating_instruction.py b/src/zep_cloud/types/session_fact_rating_instruction.py new file mode 100644 index 00000000..f9dc2c85 --- /dev/null +++ b/src/zep_cloud/types/session_fact_rating_instruction.py @@ -0,0 +1,45 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .session_fact_rating_examples import SessionFactRatingExamples + + +class SessionFactRatingInstruction(pydantic_v1.BaseModel): + examples: typing.Optional[SessionFactRatingExamples] = pydantic_v1.Field(default=None) + """ + Examples is a list of examples that demonstrate how facts might be rated based on your instruction. You should provide + an example of a highly rated example, a low rated example, and a medium (or in between example). For example, if you are rating + based on relevance to a trip planning application, your examples might be: + High: "Joe's dream vacation is Bali" + Medium: "Joe has a fear of flying", + Low: "Joe's favorite food is Japanese", + """ + + instruction: typing.Optional[str] = pydantic_v1.Field(default=None) + """ + A string describing how to rate facts as they apply to your application. A trip planning application may + use something like "relevancy to planning a trip, the user's preferences when traveling, + or the user's travel history." 
+ """ + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) + + class Config: + frozen = True + smart_union = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/zep_cloud/user/client.py b/src/zep_cloud/user/client.py index 3c8c2bc7..59c8c258 100644 --- a/src/zep_cloud/user/client.py +++ b/src/zep_cloud/user/client.py @@ -12,6 +12,7 @@ from ..errors.internal_server_error import InternalServerError from ..errors.not_found_error import NotFoundError from ..types.api_error import ApiError as types_api_error_ApiError +from ..types.facts_response import FactsResponse from ..types.session import Session from ..types.success_response import SuccessResponse from ..types.user import User @@ -316,6 +317,51 @@ def update( raise core_api_error_ApiError(status_code=_response.status_code, body=_response.text) raise core_api_error_ApiError(status_code=_response.status_code, body=_response_json) + def get_facts(self, user_id: str, *, request_options: typing.Optional[RequestOptions] = None) -> FactsResponse: + """ + Get user facts. + + Parameters + ---------- + user_id : str + The user_id of the user to get. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + FactsResponse + The user facts. + + Examples + -------- + from zep_cloud.client import Zep + + client = Zep( + api_key="YOUR_API_KEY", + ) + client.user.get_facts( + user_id="userId", + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"users/{jsonable_encoder(user_id)}/facts", method="GET", request_options=request_options + ) + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(FactsResponse, _response.json()) # type: ignore + if _response.status_code == 404: + raise NotFoundError(pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json())) # type: ignore + if _response.status_code == 500: + raise InternalServerError( + pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json()) # type: ignore + ) + try: + _response_json = _response.json() + except JSONDecodeError: + raise core_api_error_ApiError(status_code=_response.status_code, body=_response.text) + raise core_api_error_ApiError(status_code=_response.status_code, body=_response_json) + def get_sessions( self, user_id: str, *, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[Session]: @@ -657,6 +703,53 @@ async def update( raise core_api_error_ApiError(status_code=_response.status_code, body=_response.text) raise core_api_error_ApiError(status_code=_response.status_code, body=_response_json) + async def get_facts( + self, user_id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> FactsResponse: + """ + Get user facts. + + Parameters + ---------- + user_id : str + The user_id of the user to get. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + FactsResponse + The user facts. 
+ + Examples + -------- + from zep_cloud.client import AsyncZep + + client = AsyncZep( + api_key="YOUR_API_KEY", + ) + await client.user.get_facts( + user_id="userId", + ) + """ + _response = await self._client_wrapper.httpx_client.request( + f"users/{jsonable_encoder(user_id)}/facts", method="GET", request_options=request_options + ) + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(FactsResponse, _response.json()) # type: ignore + if _response.status_code == 404: + raise NotFoundError(pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json())) # type: ignore + if _response.status_code == 500: + raise InternalServerError( + pydantic_v1.parse_obj_as(types_api_error_ApiError, _response.json()) # type: ignore + ) + try: + _response_json = _response.json() + except JSONDecodeError: + raise core_api_error_ApiError(status_code=_response.status_code, body=_response.text) + raise core_api_error_ApiError(status_code=_response.status_code, body=_response_json) + async def get_sessions( self, user_id: str, *, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[Session]:
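Finally, a minimal sketch of the new user facts endpoint, assuming FactsResponse exposes a `facts` list as the other fact responses in the SDK do:

    import asyncio

    from zep_cloud.client import AsyncZep

    async def main() -> None:
        client = AsyncZep(api_key="YOUR_API_KEY")
        facts = await client.user.get_facts(user_id="userId")
        for fact in facts.facts or []:
            print(fact.fact, fact.rating)

    asyncio.run(main())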